Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-08-18 13:50:51 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2023-08-18 13:50:51 +0300
commitdb384e6b19af03b4c3c82a5760d83a3fd79f7982 (patch)
tree34beaef37df5f47ccbcf5729d7583aae093cffa0 /spec
parent54fd7b1bad233e3944434da91d257fa7f63c3996 (diff)
Add latest changes from gitlab-org/gitlab@16-3-stable-eev16.3.0-rc42
Diffstat (limited to 'spec')
-rw-r--r--spec/channels/noteable/notes_channel_spec.rb40
-rw-r--r--spec/commands/sidekiq_cluster/cli_spec.rb4
-rw-r--r--spec/components/projects/ml/models_index_component_spec.rb41
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb3
-rw-r--r--spec/controllers/admin/labels_controller_spec.rb57
-rw-r--r--spec/controllers/autocomplete_controller_spec.rb35
-rw-r--r--spec/controllers/concerns/kas_cookie_spec.rb14
-rw-r--r--spec/controllers/concerns/onboarding/status_spec.rb10
-rw-r--r--spec/controllers/concerns/product_analytics_tracking_spec.rb352
-rw-r--r--spec/controllers/graphql_controller_spec.rb40
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb3
-rw-r--r--spec/controllers/groups/dependency_proxy_auth_controller_spec.rb101
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb2
-rw-r--r--spec/controllers/groups/labels_controller_spec.rb7
-rw-r--r--spec/controllers/groups_controller_spec.rb38
-rw-r--r--spec/controllers/import/manifest_controller_spec.rb2
-rw-r--r--spec/controllers/jira_connect/app_descriptor_controller_spec.rb17
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb37
-rw-r--r--spec/controllers/profiles/preferences_controller_spec.rb29
-rw-r--r--spec/controllers/projects/autocomplete_sources_controller_spec.rb15
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb3
-rw-r--r--spec/controllers/projects/compare_controller_spec.rb13
-rw-r--r--spec/controllers/projects/discussions_controller_spec.rb109
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb16
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb22
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb46
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb14
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb17
-rw-r--r--spec/controllers/projects/pages_controller_spec.rb47
-rw-r--r--spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb287
-rw-r--r--spec/controllers/projects/pipeline_schedules_controller_spec.rb51
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb8
-rw-r--r--spec/controllers/projects/settings/integrations_controller_spec.rb20
-rw-r--r--spec/controllers/projects/tree_controller_spec.rb21
-rw-r--r--spec/controllers/projects_controller_spec.rb5
-rw-r--r--spec/controllers/search_controller_spec.rb6
-rw-r--r--spec/controllers/sessions_controller_spec.rb20
-rw-r--r--spec/controllers/snippets/notes_controller_spec.rb4
-rw-r--r--spec/db/schema_spec.rb17
-rw-r--r--spec/factories/ci/bridge.rb8
-rw-r--r--spec/factories/ci/builds.rb134
-rw-r--r--spec/factories/ci/catalog/resources/components.rb10
-rw-r--r--spec/factories/ci/catalog/resources/versions.rb9
-rw-r--r--spec/factories/ci/deployable.rb141
-rw-r--r--spec/factories/ci/job_artifacts.rb10
-rw-r--r--spec/factories/ci/reports/sbom/components.rb2
-rw-r--r--spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb12
-rw-r--r--spec/factories/groups.rb10
-rw-r--r--spec/factories/integrations.rb8
-rw-r--r--spec/factories/issues.rb8
-rw-r--r--spec/factories/keys.rb10
-rw-r--r--spec/factories/labels.rb2
-rw-r--r--spec/factories/metrics/dashboard/annotations.rb6
-rw-r--r--spec/factories/ml/model_versions.rb4
-rw-r--r--spec/factories/ml/models.rb12
-rw-r--r--spec/factories/namespace_package_settings.rb3
-rw-r--r--spec/factories/namespaces.rb4
-rw-r--r--spec/factories/project_group_links.rb6
-rw-r--r--spec/factories/projects.rb9
-rw-r--r--spec/factories/service_desk/custom_email_verification.rb4
-rw-r--r--spec/factories/system/broadcast_messages.rb (renamed from spec/factories/broadcast_messages.rb)2
-rw-r--r--spec/factories/todos.rb4
-rw-r--r--spec/factories/users.rb5
-rw-r--r--spec/factories/work_items.rb13
-rw-r--r--spec/factories/work_items/related_work_item_links.rb8
-rw-r--r--spec/fast_spec_helper.rb2
-rw-r--r--spec/features/abuse_report_spec.rb121
-rw-r--r--spec/features/admin/admin_abuse_reports_spec.rb214
-rw-r--r--spec/features/admin/admin_labels_spec.rb6
-rw-r--r--spec/features/admin/admin_runners_spec.rb16
-rw-r--r--spec/features/admin/admin_users_impersonation_tokens_spec.rb1
-rw-r--r--spec/features/admin/broadcast_messages_spec.rb14
-rw-r--r--spec/features/admin_variables_spec.rb11
-rw-r--r--spec/features/broadcast_messages_spec.rb4
-rw-r--r--spec/features/dashboard/projects_spec.rb4
-rw-r--r--spec/features/dashboard/todos/todos_filtering_spec.rb2
-rw-r--r--spec/features/group_variables_spec.rb12
-rw-r--r--spec/features/groups/container_registry_spec.rb1
-rw-r--r--spec/features/groups/labels/edit_spec.rb2
-rw-r--r--spec/features/groups/settings/group_badges_spec.rb41
-rw-r--r--spec/features/incidents/incident_details_spec.rb2
-rw-r--r--spec/features/incidents/user_views_incident_spec.rb31
-rw-r--r--spec/features/issues/filtered_search/dropdown_assignee_spec.rb34
-rw-r--r--spec/features/issues/incident_issue_spec.rb4
-rw-r--r--spec/features/issues/note_polling_spec.rb20
-rw-r--r--spec/features/issues/service_desk_spec.rb38
-rw-r--r--spec/features/issues/user_creates_branch_and_merge_request_spec.rb19
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb6
-rw-r--r--spec/features/issues/user_toggles_subscription_spec.rb6
-rw-r--r--spec/features/issues/user_views_issue_spec.rb38
-rw-r--r--spec/features/markdown/gitlab_flavored_markdown_spec.rb35
-rw-r--r--spec/features/merge_request/creating_mr_for_projects_with_different_visibility_spec.rb100
-rw-r--r--spec/features/merge_request/user_edits_mr_spec.rb5
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb24
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb9
-rw-r--r--spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb4
-rw-r--r--spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb45
-rw-r--r--spec/features/nav/top_nav_tooltip_spec.rb2
-rw-r--r--spec/features/oauth_provider_authorize_spec.rb12
-rw-r--r--spec/features/participants_autocomplete_spec.rb3
-rw-r--r--spec/features/profile_spec.rb11
-rw-r--r--spec/features/profiles/active_sessions_spec.rb4
-rw-r--r--spec/features/profiles/gpg_keys_spec.rb12
-rw-r--r--spec/features/profiles/keys_spec.rb25
-rw-r--r--spec/features/profiles/list_users_comment_template_spec.rb2
-rw-r--r--spec/features/profiles/oauth_applications_spec.rb28
-rw-r--r--spec/features/profiles/personal_access_tokens_spec.rb5
-rw-r--r--spec/features/profiles/user_changes_notified_of_own_activity_spec.rb2
-rw-r--r--spec/features/profiles/user_creates_comment_template_spec.rb3
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb2
-rw-r--r--spec/features/profiles/user_manages_emails_spec.rb6
-rw-r--r--spec/features/profiles/user_visits_notifications_tab_spec.rb2
-rw-r--r--spec/features/project_variables_spec.rb12
-rw-r--r--spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb2
-rw-r--r--spec/features/projects/ci/editor_spec.rb68
-rw-r--r--spec/features/projects/commit/mini_pipeline_graph_spec.rb29
-rw-r--r--spec/features/projects/container_registry_spec.rb4
-rw-r--r--spec/features/projects/files/dockerfile_dropdown_spec.rb14
-rw-r--r--spec/features/projects/files/gitignore_dropdown_spec.rb14
-rw-r--r--spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb18
-rw-r--r--spec/features/projects/files/project_owner_creates_license_file_spec.rb7
-rw-r--r--spec/features/projects/files/template_selector_menu_spec.rb2
-rw-r--r--spec/features/projects/files/undo_template_spec.rb8
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb2
-rw-r--r--spec/features/projects/fork_spec.rb113
-rw-r--r--spec/features/projects/labels/user_edits_labels_spec.rb2
-rw-r--r--spec/features/projects/labels/user_removes_labels_spec.rb2
-rw-r--r--spec/features/projects/members/import_project_members_spec.rb79
-rw-r--r--spec/features/projects/members/user_manages_project_members_spec.rb (renamed from spec/features/projects/settings/user_manages_project_members_spec.rb)23
-rw-r--r--spec/features/projects/new_project_spec.rb23
-rw-r--r--spec/features/projects/pages/user_adds_domain_spec.rb7
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb4
-rw-r--r--spec/features/projects/settings/access_tokens_spec.rb3
-rw-r--r--spec/features/projects/settings/packages_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/pipelines_settings_spec.rb40
-rw-r--r--spec/features/projects/settings/project_badges_spec.rb41
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb1
-rw-r--r--spec/features/projects/settings/secure_files_spec.rb2
-rw-r--r--spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb11
-rw-r--r--spec/features/projects/settings/user_renames_a_project_spec.rb2
-rw-r--r--spec/features/projects/settings/user_searches_in_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/visibility_settings_spec.rb6
-rw-r--r--spec/features/projects/show/user_manages_notifications_spec.rb2
-rw-r--r--spec/features/projects/user_changes_project_visibility_spec.rb4
-rw-r--r--spec/features/projects/work_items/work_item_spec.rb2
-rw-r--r--spec/features/protected_branches_spec.rb16
-rw-r--r--spec/features/protected_tags_spec.rb13
-rw-r--r--spec/features/triggers_spec.rb7
-rw-r--r--spec/features/uploads/user_uploads_avatar_to_profile_spec.rb69
-rw-r--r--spec/features/users/email_verification_on_login_spec.rb113
-rw-r--r--spec/features/users/google_syndication_csp_spec.rb54
-rw-r--r--spec/features/users/rss_spec.rb57
-rw-r--r--spec/features/users/signup_spec.rb4
-rw-r--r--spec/finders/abuse_reports_finder_spec.rb225
-rw-r--r--spec/finders/admin/abuse_report_labels_finder_spec.rb40
-rw-r--r--spec/finders/autocomplete/group_users_finder_spec.rb88
-rw-r--r--spec/finders/autocomplete/users_finder_spec.rb19
-rw-r--r--spec/finders/deployments_finder_spec.rb33
-rw-r--r--spec/finders/group_members_finder_spec.rb84
-rw-r--r--spec/finders/group_projects_finder_spec.rb20
-rw-r--r--spec/finders/labels_finder_spec.rb22
-rw-r--r--spec/finders/metrics/dashboards/annotations_finder_spec.rb108
-rw-r--r--spec/finders/metrics/users_starred_dashboards_finder_spec.rb56
-rw-r--r--spec/finders/packages/go/version_finder_spec.rb2
-rw-r--r--spec/finders/packages/nuget/package_finder_spec.rb66
-rw-r--r--spec/finders/packages/pipelines_finder_spec.rb24
-rw-r--r--spec/finders/projects/ml/model_finder_spec.rb35
-rw-r--r--spec/finders/repositories/tree_finder_spec.rb4
-rw-r--r--spec/finders/snippets_finder_spec.rb22
-rw-r--r--spec/finders/work_items/namespace_work_items_finder_spec.rb128
-rw-r--r--spec/fixtures/api/schemas/entities/discussion.json3
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json4
-rw-r--r--spec/fixtures/gl-annotations.json.gzbin0 -> 124 bytes
-rw-r--r--spec/fixtures/lib/backup/design_repo.refs2
-rw-r--r--spec/fixtures/lib/backup/personal_snippet_repo.refs2
-rw-r--r--spec/fixtures/lib/backup/project_repo.refs2
-rw-r--r--spec/fixtures/lib/backup/project_snippet_repo.refs2
-rw-r--r--spec/fixtures/lib/backup/wiki_repo.refs2
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml12
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml1
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml24
-rw-r--r--spec/fixtures/migrations/db/migrate/database_migration.txt11
-rw-r--r--spec/fixtures/migrations/db/migrate/database_migration_two.txt11
-rw-r--r--spec/fixtures/migrations/db/post_migrate/database_clash_migration.txt11
-rw-r--r--spec/fixtures/migrations/db/post_migrate/database_clash_migration_two.txt11
-rw-r--r--spec/fixtures/migrations/elasticsearch/elasticsearch_clash_migration.txt15
-rw-r--r--spec/fixtures/migrations/elasticsearch/elasticsearch_clash_migration_two.txt11
-rw-r--r--spec/fixtures/migrations/elasticsearch/elasticsearch_migration.txt22
-rw-r--r--spec/frontend/__helpers__/shared_test_setup.js2
-rw-r--r--spec/frontend/__mocks__/jed/index.js17
-rw-r--r--spec/frontend/access_tokens/components/access_token_table_app_spec.js18
-rw-r--r--spec/frontend/access_tokens/components/new_access_token_app_spec.js12
-rw-r--r--spec/frontend/access_tokens/components/token_spec.js1
-rw-r--r--spec/frontend/access_tokens/index_spec.js4
-rw-r--r--spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js1
-rw-r--r--spec/frontend/admin/abuse_report/components/report_actions_spec.js31
-rw-r--r--spec/frontend/admin/abuse_report/mock_data.js1
-rw-r--r--spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js15
-rw-r--r--spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js91
-rw-r--r--spec/frontend/admin/abuse_reports/mock_data.js2
-rw-r--r--spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js2
-rw-r--r--spec/frontend/admin/applications/components/delete_application_spec.js4
-rw-r--r--spec/frontend/admin/broadcast_messages/components/message_form_spec.js132
-rw-r--r--spec/frontend/admin/broadcast_messages/components/messages_table_spec.js4
-rw-r--r--spec/frontend/admin/deploy_keys/components/table_spec.js20
-rw-r--r--spec/frontend/admin/statistics_panel/components/app_spec.js1
-rw-r--r--spec/frontend/alerts_settings/components/alerts_integrations_list_spec.js6
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js429
-rw-r--r--spec/frontend/alerts_settings/components/mocks/apollo_mock.js54
-rw-r--r--spec/frontend/analytics/cycle_analytics/components/base_spec.js1
-rw-r--r--spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js1
-rw-r--r--spec/frontend/api_spec.js73
-rw-r--r--spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap76
-rw-r--r--spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js8
-rw-r--r--spec/frontend/badges/components/badge_form_spec.js15
-rw-r--r--spec/frontend/badges/components/badge_list_row_spec.js119
-rw-r--r--spec/frontend/badges/components/badge_list_spec.js95
-rw-r--r--spec/frontend/badges/components/badge_settings_spec.js41
-rw-r--r--spec/frontend/batch_comments/components/diff_file_drafts_spec.js1
-rw-r--r--spec/frontend/batch_comments/components/preview_dropdown_spec.js1
-rw-r--r--spec/frontend/batch_comments/components/publish_dropdown_spec.js1
-rw-r--r--spec/frontend/batch_comments/components/submit_dropdown_spec.js8
-rw-r--r--spec/frontend/batch_comments/create_batch_comments_store.js1
-rw-r--r--spec/frontend/behaviors/toasts_spec.js59
-rw-r--r--spec/frontend/blob/file_template_mediator_spec.js6
-rw-r--r--spec/frontend/blob/file_template_selector_spec.js64
-rw-r--r--spec/frontend/blob/filepath_form/components/filepath_form_spec.js70
-rw-r--r--spec/frontend/blob/filepath_form/components/mock_data.js57
-rw-r--r--spec/frontend/blob/filepath_form/components/template_selector_spec.js167
-rw-r--r--spec/frontend/blob_edit/blob_bundle_spec.js5
-rw-r--r--spec/frontend/blob_edit/edit_blob_spec.js1
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js1
-rw-r--r--spec/frontend/boards/board_list_helper.js1
-rw-r--r--spec/frontend/boards/cache_updates_spec.js37
-rw-r--r--spec/frontend/boards/components/board_add_new_column_form_spec.js1
-rw-r--r--spec/frontend/boards/components/board_add_new_column_spec.js66
-rw-r--r--spec/frontend/boards/components/board_add_new_column_trigger_spec.js1
-rw-r--r--spec/frontend/boards/components/board_app_spec.js26
-rw-r--r--spec/frontend/boards/components/board_card_move_to_position_spec.js1
-rw-r--r--spec/frontend/boards/components/board_card_spec.js1
-rw-r--r--spec/frontend/boards/components/board_content_sidebar_spec.js1
-rw-r--r--spec/frontend/boards/components/board_content_spec.js65
-rw-r--r--spec/frontend/boards/components/board_filtered_search_spec.js1
-rw-r--r--spec/frontend/boards/components/board_form_spec.js1
-rw-r--r--spec/frontend/boards/components/board_list_header_spec.js57
-rw-r--r--spec/frontend/boards/components/board_new_issue_spec.js1
-rw-r--r--spec/frontend/boards/components/board_settings_sidebar_spec.js34
-rw-r--r--spec/frontend/boards/components/board_top_bar_spec.js41
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js38
-rw-r--r--spec/frontend/boards/components/config_toggle_spec.js21
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js67
-rw-r--r--spec/frontend/boards/mock_data.js22
-rw-r--r--spec/frontend/boards/stores/actions_spec.js1
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js27
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js69
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js27
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js69
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js25
-rw-r--r--spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js124
-rw-r--r--spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js10
-rw-r--r--spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js19
-rw-r--r--spec/frontend/ci/pipeline_editor/mock_data.js13
-rw-r--r--spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js170
-rw-r--r--spec/frontend/ci/pipeline_new/mock_data.js8
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js61
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js15
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/table/pipeline_schedules_table_spec.js4
-rw-r--r--spec/frontend/ci/pipeline_schedules/mock_data.js13
-rw-r--r--spec/frontend/ci/runner/admin_runners/provide_spec.js34
-rw-r--r--spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js5
-rw-r--r--spec/frontend/ci/runner/components/registration/registration_token_spec.js11
-rw-r--r--spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js13
-rw-r--r--spec/frontend/ci/runner/mock_data.js9
-rw-r--r--spec/frontend/ci/runner/runner_search_utils_spec.js3
-rw-r--r--spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js24
-rw-r--r--spec/frontend/clusters/forms/components/integration_form_spec.js1
-rw-r--r--spec/frontend/clusters_list/components/agent_token_spec.js3
-rw-r--r--spec/frontend/clusters_list/components/clusters_view_all_spec.js1
-rw-r--r--spec/frontend/clusters_list/components/install_agent_modal_spec.js114
-rw-r--r--spec/frontend/code_navigation/components/app_spec.js1
-rw-r--r--spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap9
-rw-r--r--spec/frontend/comment_templates/components/form_spec.js12
-rw-r--r--spec/frontend/comment_templates/components/list_spec.js6
-rw-r--r--spec/frontend/comment_templates/pages/index_spec.js9
-rw-r--r--spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js40
-rw-r--r--spec/frontend/content_editor/extensions/copy_paste_spec.js34
-rw-r--r--spec/frontend/content_editor/markdown_snapshot_spec.js2
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_closed_spec.js63
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_commented_spec.js103
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js70
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_reopened_spec.js60
-rw-r--r--spec/frontend/contribution_events/components/contribution_events_spec.js32
-rw-r--r--spec/frontend/contribution_events/components/target_link_spec.js17
-rw-r--r--spec/frontend/contribution_events/utils.js115
-rw-r--r--spec/frontend/contribution_events/utils_spec.js24
-rw-r--r--spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap220
-rw-r--r--spec/frontend/custom_emoji/components/delete_item_spec.js89
-rw-r--r--spec/frontend/custom_emoji/components/form_spec.js116
-rw-r--r--spec/frontend/custom_emoji/components/list_spec.js79
-rw-r--r--spec/frontend/custom_emoji/mock_data.js27
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js1
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js1
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js5
-rw-r--r--spec/frontend/deploy_keys/components/keys_panel_spec.js6
-rw-r--r--spec/frontend/deploy_tokens/components/new_deploy_token_spec.js313
-rw-r--r--spec/frontend/design_management/components/design_description/description_form_spec.js7
-rw-r--r--spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap6
-rw-r--r--spec/frontend/design_management/pages/design/index_spec.js8
-rw-r--r--spec/frontend/diffs/components/app_spec.js44
-rw-r--r--spec/frontend/diffs/components/collapsed_files_warning_spec.js1
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_code_quality_spec.js61
-rw-r--r--spec/frontend/diffs/components/diff_content_spec.js28
-rw-r--r--spec/frontend/diffs/components/diff_discussion_reply_spec.js1
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js1
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js30
-rw-r--r--spec/frontend/diffs/components/diff_inline_findings_item_spec.js (renamed from spec/frontend/diffs/components/diff_code_quality_item_spec.js)8
-rw-r--r--spec/frontend/diffs/components/diff_inline_findings_spec.js12
-rw-r--r--spec/frontend/diffs/components/diff_line_note_form_spec.js13
-rw-r--r--spec/frontend/diffs/components/diff_line_spec.js33
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js1
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js55
-rw-r--r--spec/frontend/diffs/components/diffs_file_tree_spec.js116
-rw-r--r--spec/frontend/diffs/components/inline_findings_spec.js33
-rw-r--r--spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap2
-rw-r--r--spec/frontend/diffs/components/tree_list_spec.js1
-rw-r--r--spec/frontend/diffs/create_diffs_store.js1
-rw-r--r--spec/frontend/diffs/mock_data/inline_findings.js (renamed from spec/frontend/diffs/mock_data/diff_code_quality.js)1
-rw-r--r--spec/frontend/diffs/store/actions_spec.js200
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js30
-rw-r--r--spec/frontend/diffs/store/utils_spec.js4
-rw-r--r--spec/frontend/diffs/utils/tree_worker_utils_spec.js8
-rw-r--r--spec/frontend/editor/schema/ci/ci_schema_spec.js8
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/numeric.yml6
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml30
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml27
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml36
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/needs_parallel_matrix.yml32
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml17
-rw-r--r--spec/frontend/environments/edit_environment_spec.js19
-rw-r--r--spec/frontend/environments/environment_flux_resource_selector_spec.js178
-rw-r--r--spec/frontend/environments/environment_folder_spec.js17
-rw-r--r--spec/frontend/environments/environment_form_spec.js234
-rw-r--r--spec/frontend/environments/graphql/mock_data.js9
-rw-r--r--spec/frontend/environments/graphql/resolvers/base_spec.js (renamed from spec/frontend/environments/graphql/resolvers_spec.js)223
-rw-r--r--spec/frontend/environments/graphql/resolvers/flux_spec.js140
-rw-r--r--spec/frontend/environments/graphql/resolvers/kubernetes_spec.js238
-rw-r--r--spec/frontend/environments/kubernetes_overview_spec.js17
-rw-r--r--spec/frontend/environments/kubernetes_status_bar_spec.js274
-rw-r--r--spec/frontend/environments/new_environment_item_spec.js35
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js1
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js1
-rw-r--r--spec/frontend/error_tracking_settings/components/app_spec.js1
-rw-r--r--spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js1
-rw-r--r--spec/frontend/error_tracking_settings/components/project_dropdown_spec.js1
-rw-r--r--spec/frontend/feature_flags/components/edit_feature_flag_spec.js1
-rw-r--r--spec/frontend/feature_flags/components/feature_flags_spec.js3
-rw-r--r--spec/frontend/feature_flags/components/new_feature_flag_spec.js1
-rw-r--r--spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js1
-rw-r--r--spec/frontend/feature_flags/components/strategy_spec.js1
-rw-r--r--spec/frontend/fixtures/time_estimates.rb61
-rw-r--r--spec/frontend/fixtures/users.rb11
-rw-r--r--spec/frontend/forks/component/forks_button_spec.js74
-rw-r--r--spec/frontend/frequent_items/components/app_spec.js1
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_item_spec.js1
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_spec.js1
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_search_input_spec.js1
-rw-r--r--spec/frontend/group_settings/components/shared_runners_form_spec.js51
-rw-r--r--spec/frontend/groups/service/archived_projects_service_spec.js39
-rw-r--r--spec/frontend/header_search/components/app_spec.js1
-rw-r--r--spec/frontend/header_search/components/header_search_autocomplete_items_spec.js1
-rw-r--r--spec/frontend/header_search/components/header_search_default_items_spec.js1
-rw-r--r--spec/frontend/header_search/components/header_search_scoped_items_spec.js1
-rw-r--r--spec/frontend/ide/components/branches/search_list_spec.js1
-rw-r--r--spec/frontend/ide/components/commit_sidebar/editor_header_spec.js1
-rw-r--r--spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js1
-rw-r--r--spec/frontend/ide/components/error_message_spec.js1
-rw-r--r--spec/frontend/ide/components/file_row_extra_spec.js1
-rw-r--r--spec/frontend/ide/components/file_templates/dropdown_spec.js1
-rw-r--r--spec/frontend/ide/components/ide_file_row_spec.js1
-rw-r--r--spec/frontend/ide/components/ide_review_spec.js13
-rw-r--r--spec/frontend/ide/components/ide_side_bar_spec.js1
-rw-r--r--spec/frontend/ide/components/ide_spec.js1
-rw-r--r--spec/frontend/ide/components/ide_status_list_spec.js1
-rw-r--r--spec/frontend/ide/components/ide_tree_spec.js1
-rw-r--r--spec/frontend/ide/components/jobs/list_spec.js1
-rw-r--r--spec/frontend/ide/components/merge_requests/item_spec.js1
-rw-r--r--spec/frontend/ide/components/merge_requests/list_spec.js1
-rw-r--r--spec/frontend/ide/components/new_dropdown/index_spec.js1
-rw-r--r--spec/frontend/ide/components/panes/collapsible_sidebar_spec.js1
-rw-r--r--spec/frontend/ide/components/panes/right_spec.js1
-rw-r--r--spec/frontend/ide/components/pipelines/list_spec.js1
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js1
-rw-r--r--spec/frontend/ide/components/repo_tab_spec.js1
-rw-r--r--spec/frontend/ide/components/repo_tabs_spec.js1
-rw-r--r--spec/frontend/ide/components/resizable_panel_spec.js1
-rw-r--r--spec/frontend/ide/components/terminal/session_spec.js1
-rw-r--r--spec/frontend/ide/components/terminal/terminal_spec.js1
-rw-r--r--spec/frontend/ide/components/terminal/view_spec.js1
-rw-r--r--spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js1
-rw-r--r--spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js1
-rw-r--r--spec/frontend/ide/stores/modules/editor/setup_spec.js1
-rw-r--r--spec/frontend/ide/stores/plugins/terminal_spec.js1
-rw-r--r--spec/frontend/ide/utils_spec.js11
-rw-r--r--spec/frontend/import_entities/components/import_status_spec.js19
-rw-r--r--spec/frontend/import_entities/components/import_target_dropdown_spec.js99
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js48
-rw-r--r--spec/frontend/import_entities/import_projects/components/github_status_table_spec.js1
-rw-r--r--spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js1
-rw-r--r--spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js31
-rw-r--r--spec/frontend/import_entities/mock_data.js34
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap11
-rw-r--r--spec/frontend/integrations/edit/components/sections/google_play_spec.js2
-rw-r--r--spec/frontend/integrations/index/components/integrations_list_spec.js1
-rw-r--r--spec/frontend/integrations/index/components/integrations_table_spec.js20
-rw-r--r--spec/frontend/invite_members/components/group_select_spec.js46
-rw-r--r--spec/frontend/invite_members/components/import_project_members_modal_spec.js95
-rw-r--r--spec/frontend/invite_members/components/user_limit_notification_spec.js18
-rw-r--r--spec/frontend/invite_members/mock_data/api_responses.js24
-rw-r--r--spec/frontend/issuable/issuable_form_spec.js42
-rw-r--r--spec/frontend/issues/create_merge_request_dropdown_spec.js25
-rw-r--r--spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js5
-rw-r--r--spec/frontend/issues/issue_spec.js34
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js247
-rw-r--r--spec/frontend/issues/show/components/app_spec.js70
-rw-r--r--spec/frontend/issues/show/components/fields/description_spec.js7
-rw-r--r--spec/frontend/issues/show/components/header_actions_spec.js1
-rw-r--r--spec/frontend/issues/show/components/issue_header_spec.js129
-rw-r--r--spec/frontend/issues/show/components/sentry_error_stack_trace_spec.js1
-rw-r--r--spec/frontend/jira_connect/subscriptions/api_spec.js4
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js65
-rw-r--r--spec/frontend/jobs/components/job/artifacts_block_spec.js36
-rw-r--r--spec/frontend/jobs/components/job/job_app_spec.js1
-rw-r--r--spec/frontend/jobs/components/job/job_log_controllers_spec.js1
-rw-r--r--spec/frontend/jobs/components/job/sidebar_detail_row_spec.js3
-rw-r--r--spec/frontend/jobs/components/log/line_header_spec.js29
-rw-r--r--spec/frontend/jobs/components/log/line_spec.js47
-rw-r--r--spec/frontend/jobs/components/log/log_spec.js1
-rw-r--r--spec/frontend/lib/print_markdown_dom_spec.js102
-rw-r--r--spec/frontend/lib/utils/error_util_spec.js194
-rw-r--r--spec/frontend/lib/utils/file_utility_spec.js13
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js30
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js25
-rw-r--r--spec/frontend/lib/utils/vuex_module_mappers_spec.js1
-rw-r--r--spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js1
-rw-r--r--spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js1
-rw-r--r--spec/frontend/members/components/action_buttons/remove_member_button_spec.js1
-rw-r--r--spec/frontend/members/components/action_buttons/resend_invite_button_spec.js1
-rw-r--r--spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js1
-rw-r--r--spec/frontend/members/components/app_spec.js1
-rw-r--r--spec/frontend/members/components/filter_sort/filter_sort_container_spec.js1
-rw-r--r--spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js1
-rw-r--r--spec/frontend/members/components/filter_sort/sort_dropdown_spec.js1
-rw-r--r--spec/frontend/members/components/members_tabs_spec.js1
-rw-r--r--spec/frontend/members/components/modals/leave_modal_spec.js1
-rw-r--r--spec/frontend/members/components/modals/remove_group_link_modal_spec.js1
-rw-r--r--spec/frontend/members/components/modals/remove_member_modal_spec.js1
-rw-r--r--spec/frontend/members/components/table/expiration_datepicker_spec.js1
-rw-r--r--spec/frontend/members/components/table/members_table_cell_spec.js1
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js1
-rw-r--r--spec/frontend/members/components/table/role_dropdown_spec.js1
-rw-r--r--spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js1
-rw-r--r--spec/frontend/milestones/components/milestone_combobox_spec.js1
-rw-r--r--spec/frontend/milestones/index_spec.js1
-rw-r--r--spec/frontend/nav/components/top_nav_dropdown_menu_spec.js3
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js8
-rw-r--r--spec/frontend/notes/components/discussion_counter_spec.js1
-rw-r--r--spec/frontend/notes/components/discussion_filter_spec.js1
-rw-r--r--spec/frontend/notes/components/mr_discussion_filter_spec.js1
-rw-r--r--spec/frontend/notes/components/multiline_comment_form_spec.js1
-rw-r--r--spec/frontend/notes/components/note_awards_list_spec.js1
-rw-r--r--spec/frontend/notes/components/note_body_spec.js1
-rw-r--r--spec/frontend/notes/components/note_form_spec.js62
-rw-r--r--spec/frontend/notes/components/note_header_spec.js6
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js1
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js1
-rw-r--r--spec/frontend/notes/components/timeline_toggle_spec.js1
-rw-r--r--spec/frontend/notes/mixins/discussion_navigation_spec.js1
-rw-r--r--spec/frontend/notes/mock_data.js4
-rw-r--r--spec/frontend/notes/stores/actions_spec.js63
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js27
-rw-r--r--spec/frontend/notifications/components/notifications_dropdown_spec.js2
-rw-r--r--spec/frontend/oauth_application/components/oauth_secret_spec.js4
-rw-r--r--spec/frontend/observability/client_spec.js163
-rw-r--r--spec/frontend/organizations/groups_and_projects/components/app_spec.js205
-rw-r--r--spec/frontend/organizations/groups_and_projects/components/groups_page_spec.js88
-rw-r--r--spec/frontend/organizations/groups_and_projects/components/mock_data.js98
-rw-r--r--spec/frontend/organizations/groups_and_projects/components/projects_page_spec.js88
-rw-r--r--spec/frontend/organizations/groups_and_projects/mock_data.js252
-rw-r--r--spec/frontend/organizations/groups_and_projects/utils_spec.js35
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js93
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js112
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/group_empty_state_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_spec.js45
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/project_empty_state_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js2
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js19
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js149
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/utils_spec.js48
-rw-r--r--spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js6
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap2
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap150
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js16
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap39
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js37
-rw-r--r--spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/shared/components/persisted_pagination_spec.js100
-rw-r--r--spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js69
-rw-r--r--spec/frontend/packages_and_registries/shared/components/registry_list_spec.js20
-rw-r--r--spec/frontend/packages_and_registries/shared/utils_spec.js16
-rw-r--r--spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js71
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_form_spec.js12
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js2
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js28
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_export_spec.js48
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js7
-rw-r--r--spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js12
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph_spec.js123
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/job_item_spec.js29
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js122
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage_spec.js247
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_mini_graph_spec.js179
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stage_spec.js255
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stages_spec.js10
-rw-r--r--spec/frontend/pipelines/components/pipeline_tabs_spec.js63
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_job_details_spec.js40
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_jobs_list_spec.js49
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js5
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js29
-rw-r--r--spec/frontend/pipelines/pipeline_graph/utils_spec.js6
-rw-r--r--spec/frontend/pipelines/pipeline_multi_actions_spec.js95
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js15
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js31
-rw-r--r--spec/frontend/pipelines/test_reports/test_reports_spec.js1
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js1
-rw-r--r--spec/frontend/pipelines/test_reports/test_summary_table_spec.js1
-rw-r--r--spec/frontend/profile/edit/components/profile_edit_app_spec.js181
-rw-r--r--spec/frontend/profile/edit/components/user_avatar_spec.js139
-rw-r--r--spec/frontend/projects/commit/components/branches_dropdown_spec.js1
-rw-r--r--spec/frontend/projects/commit/components/projects_dropdown_spec.js1
-rw-r--r--spec/frontend/projects/commits/components/author_select_spec.js1
-rw-r--r--spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap97
-rw-r--r--spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap116
-rw-r--r--spec/frontend/projects/components/shared/delete_button_spec.js114
-rw-r--r--spec/frontend/projects/components/shared/delete_modal_spec.js167
-rw-r--r--spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js22
-rw-r--r--spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js1
-rw-r--r--spec/frontend/projects/project_new_spec.js4
-rw-r--r--spec/frontend/projects/settings/components/shared_runners_toggle_spec.js33
-rw-r--r--spec/frontend/projects/settings_service_desk/components/custom_email_confirm_modal_spec.js52
-rw-r--r--spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js199
-rw-r--r--spec/frontend/projects/settings_service_desk/components/custom_email_spec.js134
-rw-r--r--spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js336
-rw-r--r--spec/frontend/projects/settings_service_desk/components/mock_data.js62
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js47
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js1
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js1
-rw-r--r--spec/frontend/releases/components/asset_links_form_spec.js1
-rw-r--r--spec/frontend/releases/components/confirm_delete_modal_spec.js1
-rw-r--r--spec/frontend/releases/components/tag_field_exsting_spec.js1
-rw-r--r--spec/frontend/repository/commits_service_spec.js13
-rw-r--r--spec/frontend/repository/components/blob_button_group_spec.js4
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js46
-rw-r--r--spec/frontend/repository/components/blob_viewers/image_viewer_spec.js30
-rw-r--r--spec/frontend/repository/components/blob_viewers/index_spec.js11
-rw-r--r--spec/frontend/repository/components/delete_blob_modal_spec.js44
-rw-r--r--spec/frontend/repository/components/table/parent_row_spec.js17
-rw-r--r--spec/frontend/repository/components/table/row_spec.js11
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js7
-rw-r--r--spec/frontend/repository/mixins/highlight_mixin_spec.js26
-rw-r--r--spec/frontend/repository/pages/index_spec.js6
-rw-r--r--spec/frontend/search/mock_data.js19
-rw-r--r--spec/frontend/search/sidebar/components/app_spec.js45
-rw-r--r--spec/frontend/search/sidebar/components/archived_filter_spec.js73
-rw-r--r--spec/frontend/search/sidebar/components/blobs_filters_spec.js28
-rw-r--r--spec/frontend/search/sidebar/components/checkbox_filter_spec.js1
-rw-r--r--spec/frontend/search/sidebar/components/confidentiality_filter_spec.js16
-rw-r--r--spec/frontend/search/sidebar/components/filters_spec.js85
-rw-r--r--spec/frontend/search/sidebar/components/filters_template_spec.js168
-rw-r--r--spec/frontend/search/sidebar/components/issues_filters_spec.js107
-rw-r--r--spec/frontend/search/sidebar/components/label_dropdown_items_spec.js1
-rw-r--r--spec/frontend/search/sidebar/components/label_filter_spec.js1
-rw-r--r--spec/frontend/search/sidebar/components/language_filter_spec.js71
-rw-r--r--spec/frontend/search/sidebar/components/merge_requests_filters_spec.js28
-rw-r--r--spec/frontend/search/sidebar/components/projects_filters_specs.js28
-rw-r--r--spec/frontend/search/sidebar/components/radio_filter_spec.js17
-rw-r--r--spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js1
-rw-r--r--spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js1
-rw-r--r--spec/frontend/search/sidebar/components/status_filter_spec.js16
-rw-r--r--spec/frontend/search/sort/components/app_spec.js1
-rw-r--r--spec/frontend/search/store/actions_spec.js59
-rw-r--r--spec/frontend/search/store/getters_spec.js32
-rw-r--r--spec/frontend/search/topbar/components/app_spec.js1
-rw-r--r--spec/frontend/search/topbar/components/group_filter_spec.js1
-rw-r--r--spec/frontend/search/topbar/components/project_filter_spec.js1
-rw-r--r--spec/frontend/search/topbar/components/searchable_dropdown_spec.js1
-rw-r--r--spec/frontend/service_desk/components/empty_state_with_any_issues_spec.js74
-rw-r--r--spec/frontend/service_desk/components/empty_state_without_any_issues_spec.js86
-rw-r--r--spec/frontend/service_desk/components/service_desk_list_app_spec.js295
-rw-r--r--spec/frontend/service_desk/mock_data.js118
-rw-r--r--spec/frontend/sessions/new/components/email_verification_spec.js251
-rw-r--r--spec/frontend/sessions/new/components/update_email_spec.js180
-rw-r--r--spec/frontend/set_status_modal/set_status_form_spec.js2
-rw-r--r--spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js2
-rw-r--r--spec/frontend/sidebar/components/confidential/confidentiality_dropdown_spec.js62
-rw-r--r--spec/frontend/sidebar/components/incidents/escalation_status_spec.js68
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_button_spec.js1
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js1
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_labels_view_spec.js1
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_spec.js1
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_title_spec.js1
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_value_spec.js1
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/labels_select_root_spec.js1
-rw-r--r--spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js1
-rw-r--r--spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js416
-rw-r--r--spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js62
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap6
-rw-r--r--spec/frontend/super_sidebar/components/context_header_spec.js50
-rw-r--r--spec/frontend/super_sidebar/components/context_switcher_spec.js17
-rw-r--r--spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js33
-rw-r--r--spec/frontend/super_sidebar/components/flyout_menu_spec.js25
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js63
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js98
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js159
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js63
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_autocomplete_items_spec.js1
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_default_issuables_spec.js140
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_default_items_spec.js115
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js78
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_scoped_items_spec.js1
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js155
-rw-r--r--spec/frontend/super_sidebar/components/global_search/mock_data.js18
-rw-r--r--spec/frontend/super_sidebar/components/global_search/store/getters_spec.js149
-rw-r--r--spec/frontend/super_sidebar/components/menu_section_spec.js57
-rw-r--r--spec/frontend/super_sidebar/components/nav_item_spec.js8
-rw-r--r--spec/frontend/super_sidebar/components/pinned_section_spec.js17
-rw-r--r--spec/frontend/super_sidebar/components/sidebar_menu_spec.js65
-rw-r--r--spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js17
-rw-r--r--spec/frontend/super_sidebar/components/super_sidebar_spec.js24
-rw-r--r--spec/frontend/super_sidebar/components/user_bar_spec.js55
-rw-r--r--spec/frontend/super_sidebar/components/user_name_group_spec.js16
-rw-r--r--spec/frontend/super_sidebar/mock_data.js28
-rw-r--r--spec/frontend/super_sidebar/utils_spec.js167
-rw-r--r--spec/frontend/tags/components/delete_tag_modal_spec.js25
-rw-r--r--spec/frontend/token_access/inbound_token_access_spec.js27
-rw-r--r--spec/frontend/token_access/outbound_token_access_spec.js6
-rw-r--r--spec/frontend/tracing/components/tracing_details_spec.js103
-rw-r--r--spec/frontend/tracing/components/tracing_empty_state_spec.js11
-rw-r--r--spec/frontend/tracing/components/tracing_list_filtered_search_spec.js38
-rw-r--r--spec/frontend/tracing/components/tracing_list_spec.js103
-rw-r--r--spec/frontend/tracing/components/tracing_table_list_spec.js16
-rw-r--r--spec/frontend/tracing/details_index_spec.js42
-rw-r--r--spec/frontend/tracing/filters_spec.js141
-rw-r--r--spec/frontend/tracking/internal_events_spec.js58
-rw-r--r--spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js73
-rw-r--r--spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js90
-rw-r--r--spec/frontend/usage_quotas/storage/components/usage_graph_spec.js12
-rw-r--r--spec/frontend/usage_quotas/storage/mock_data.js89
-rw-r--r--spec/frontend/usage_quotas/storage/utils_spec.js67
-rw-r--r--spec/frontend/user_lists/components/edit_user_list_spec.js1
-rw-r--r--spec/frontend/user_lists/components/new_user_list_spec.js1
-rw-r--r--spec/frontend/user_lists/components/user_list_spec.js1
-rw-r--r--spec/frontend/user_lists/components/user_lists_spec.js3
-rw-r--r--spec/frontend/users/profile/actions/components/user_actions_app_spec.js58
-rw-r--r--spec/frontend/users/profile/components/report_abuse_button_spec.js8
-rw-r--r--spec/frontend/vue3migration/compiler_spec.js4
-rw-r--r--spec/frontend/vue3migration/components/default_slot_with_comment.vue8
-rw-r--r--spec/frontend/vue3migration/components/simple_component.vue (renamed from spec/frontend/vue3migration/components/simple.vue)2
-rw-r--r--spec/frontend/vue3migration/components/slot_with_comment.vue8
-rw-r--r--spec/frontend/vue3migration/components/slots_with_same_name.vue8
-rw-r--r--spec/frontend/vue_compat_test_setup.js43
-rw-r--r--spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js1
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js63
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap26
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js48
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js18
-rw-r--r--spec/frontend/vue_merge_request_widget/mock_data.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js20
-rw-r--r--spec/frontend/vue_merge_request_widget/stores/mr_widget_store_spec.js26
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap1
-rw-r--r--spec/frontend/vue_shared/components/actions_button_spec.js119
-rw-r--r--spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap54
-rw-r--r--spec/frontend/vue_shared/components/badges/beta_badge_spec.js32
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/clipboard_button_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js101
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js49
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js296
-rw-r--r--spec/frontend/vue_shared/components/gl_modal_vuex_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js182
-rw-r--r--spec/frontend/vue_shared/components/groups_list/groups_list_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/groups_list/mock_data.js35
-rw-r--r--spec/frontend/vue_shared/components/help_popover_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js93
-rw-r--r--spec/frontend/vue_shared/components/markdown/header_spec.js93
-rw-r--r--spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/markdown/toolbar_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/metric_images/store/actions_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/modal_copy_button_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/notes/placeholder_note_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/page_size_selector_spec.js26
-rw-r--r--spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js84
-rw-r--r--spec/frontend/vue_shared/components/projects_list/projects_list_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/registry/persisted_dropdown_selection_spec.js52
-rw-r--r--spec/frontend/vue_shared/components/registry/registry_search_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/registry/title_area_spec.js63
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js15
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js80
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js119
-rw-r--r--spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js6
-rw-r--r--spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js27
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js37
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js24
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js350
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js3
-rw-r--r--spec/frontend/whats_new/components/app_spec.js1
-rw-r--r--spec/frontend/whats_new/components/feature_spec.js23
-rw-r--r--spec/frontend/work_items/components/item_state_spec.js66
-rw-r--r--spec/frontend/work_items/components/notes/work_item_add_note_spec.js8
-rw-r--r--spec/frontend/work_items/components/notes/work_item_comment_form_spec.js79
-rw-r--r--spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js179
-rw-r--r--spec/frontend/work_items/components/shared/work_item_link_child_metadata_spec.js (renamed from spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js)2
-rw-r--r--spec/frontend/work_items/components/shared/work_item_links_menu_spec.js (renamed from spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js)2
-rw-r--r--spec/frontend/work_items/components/work_item_actions_spec.js79
-rw-r--r--spec/frontend/work_items/components/work_item_assignees_spec.js4
-rw-r--r--spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js10
-rw-r--r--spec/frontend/work_items/components/work_item_created_updated_spec.js58
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js66
-rw-r--r--spec/frontend/work_items/components/work_item_links/okr_actions_split_button_spec.js14
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js169
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js3
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_spec.js12
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js10
-rw-r--r--spec/frontend/work_items/components/work_item_state_badge_spec.js32
-rw-r--r--spec/frontend/work_items/components/work_item_state_toggle_button_spec.js (renamed from spec/frontend/work_items/components/work_item_state_spec.js)69
-rw-r--r--spec/frontend/work_items/components/work_item_type_icon_spec.js7
-rw-r--r--spec/frontend/work_items/list/components/work_items_list_app_spec.js85
-rw-r--r--spec/frontend/work_items/mock_data.js70
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb13
-rw-r--r--spec/graphql/mutations/merge_requests/update_spec.rb14
-rw-r--r--spec/graphql/mutations/namespace/package_settings/update_spec.rb18
-rw-r--r--spec/graphql/mutations/work_items/linked_items/base_spec.rb17
-rw-r--r--spec/graphql/resolvers/group_labels_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/labels_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb37
-rw-r--r--spec/graphql/types/access_levels/deploy_key_type_spec.rb13
-rw-r--r--spec/graphql/types/access_levels/user_type_spec.rb41
-rw-r--r--spec/graphql/types/alert_management/alert_type_spec.rb1
-rw-r--r--spec/graphql/types/branch_protections/merge_access_level_type_spec.rb2
-rw-r--r--spec/graphql/types/branch_protections/push_access_level_type_spec.rb4
-rw-r--r--spec/graphql/types/branch_rules/branch_protection_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/detailed_status_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/pipeline_trigger_type_spec.rb17
-rw-r--r--spec/graphql/types/ci/runner_manager_type_spec.rb2
-rw-r--r--spec/graphql/types/commit_type_spec.rb10
-rw-r--r--spec/graphql/types/custom_emoji_type_spec.rb13
-rw-r--r--spec/graphql/types/diff_type_spec.rb27
-rw-r--r--spec/graphql/types/group_type_spec.rb9
-rw-r--r--spec/graphql/types/issue_type_enum_spec.rb2
-rw-r--r--spec/graphql/types/issue_type_spec.rb2
-rw-r--r--spec/graphql/types/merge_request_state_enum_spec.rb2
-rw-r--r--spec/graphql/types/namespace/package_settings_type_spec.rb2
-rw-r--r--spec/graphql/types/project_type_spec.rb4
-rw-r--r--spec/graphql/types/projects/branch_rule_type_spec.rb2
-rw-r--r--spec/graphql/types/users/autocompleted_user_type_spec.rb19
-rw-r--r--spec/graphql/types/work_items/linked_item_type_spec.rb13
-rw-r--r--spec/graphql/types/work_items/related_link_type_enum_spec.rb13
-rw-r--r--spec/graphql/types/work_items/widget_interface_spec.rb3
-rw-r--r--spec/graphql/types/work_items/widgets/linked_items_type_spec.rb12
-rw-r--r--spec/helpers/admin/application_settings/settings_helper_spec.rb20
-rw-r--r--spec/helpers/admin/broadcast_messages_helper_spec.rb (renamed from spec/helpers/broadcast_messages_helper_spec.rb)34
-rw-r--r--spec/helpers/admin/deploy_key_helper_spec.rb4
-rw-r--r--spec/helpers/application_helper_spec.rb46
-rw-r--r--spec/helpers/application_settings_helper_spec.rb4
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb79
-rw-r--r--spec/helpers/ci/variables_helper_spec.rb62
-rw-r--r--spec/helpers/commits_helper_spec.rb50
-rw-r--r--spec/helpers/environments_helper_spec.rb3
-rw-r--r--spec/helpers/events_helper_spec.rb155
-rw-r--r--spec/helpers/integrations_helper_spec.rb3
-rw-r--r--spec/helpers/issuables_helper_spec.rb82
-rw-r--r--spec/helpers/issues_helper_spec.rb13
-rw-r--r--spec/helpers/labels_helper_spec.rb15
-rw-r--r--spec/helpers/markup_helper_spec.rb6
-rw-r--r--spec/helpers/nav_helper_spec.rb36
-rw-r--r--spec/helpers/notes_helper_spec.rb13
-rw-r--r--spec/helpers/profiles_helper_spec.rb35
-rw-r--r--spec/helpers/projects/observability_helper_spec.rb22
-rw-r--r--spec/helpers/projects_helper_spec.rb117
-rw-r--r--spec/helpers/sessions_helper_spec.rb66
-rw-r--r--spec/helpers/sidebars_helper_spec.rb60
-rw-r--r--spec/helpers/snippets_helper_spec.rb6
-rw-r--r--spec/helpers/time_helper_spec.rb2
-rw-r--r--spec/helpers/todos_helper_spec.rb1
-rw-r--r--spec/helpers/tree_helper_spec.rb4
-rw-r--r--spec/helpers/users_helper_spec.rb126
-rw-r--r--spec/initializers/6_validations_spec.rb6
-rw-r--r--spec/initializers/action_cable_subscription_adapter_identifier_spec.rb5
-rw-r--r--spec/initializers/postgresql_cte_spec.rb17
-rw-r--r--spec/lib/api/entities/nuget/metadatum_spec.rb5
-rw-r--r--spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb1
-rw-r--r--spec/lib/api/entities/nuget/search_result_spec.rb1
-rw-r--r--spec/lib/api/entities/user_spec.rb16
-rw-r--r--spec/lib/api/ml/mlflow/api_helpers_spec.rb40
-rw-r--r--spec/lib/api/validations/validators/git_sha_spec.rb3
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb10
-rw-r--r--spec/lib/backup/database_spec.rb16
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb81
-rw-r--r--spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb42
-rw-r--r--spec/lib/banzai/filter/truncate_visible_filter_spec.rb2
-rw-r--r--spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb4
-rw-r--r--spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb47
-rw-r--r--spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb4
-rw-r--r--spec/lib/click_house/bind_index_manager_spec.rb33
-rw-r--r--spec/lib/click_house/query_builder_spec.rb334
-rw-r--r--spec/lib/click_house/redactor_spec.rb166
-rw-r--r--spec/lib/container_registry/gitlab_api_client_spec.rb83
-rw-r--r--spec/lib/csv_builder_spec.rb121
-rw-r--r--spec/lib/csv_builders/stream_spec.rb44
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/ee_my_batched_migration.txt2
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/my_batched_migration.txt2
-rw-r--r--spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb49
-rw-r--r--spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb2
-rw-r--r--spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb13
-rw-r--r--spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb153
-rw-r--r--spec/lib/gitlab/alert_management/payload_spec.rb12
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb2
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event_spec.rb2
-rw-r--r--spec/lib/gitlab/audit/auditor_spec.rb30
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb18
-rw-r--r--spec/lib/gitlab/auth/saml/auth_hash_spec.rb26
-rw-r--r--spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb59
-rw-r--r--spec/lib/gitlab/avatar_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_default_branch_protection_namespace_setting_spec.rb65
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_without_pipeline_artifacts_size_job_spec.rb113
-rw-r--r--spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb53
-rw-r--r--spec/lib/gitlab/background_migration/redis/backfill_project_pipeline_status_ttl_spec.rb11
-rw-r--r--spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb3
-rw-r--r--spec/lib/gitlab/blame_spec.rb6
-rw-r--r--spec/lib/gitlab/cache/json_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb2
-rw-r--r--spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb4
-rw-r--r--spec/lib/gitlab/checks/branch_check_spec.rb12
-rw-r--r--spec/lib/gitlab/checks/file_size_check/allow_existing_oversized_blobs_spec.rb86
-rw-r--r--spec/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs_spec.rb75
-rw-r--r--spec/lib/gitlab/checks/global_file_size_check_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/artifacts/decompressed_artifact_size_validator_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/components/instance_path_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb131
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules_spec.rb71
-rw-r--r--spec/lib/gitlab/ci/config/entry/need_spec.rb77
-rw-r--r--spec/lib/gitlab/ci/config/entry/needs_spec.rb137
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/external/context_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/config/external/file/base_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/config/external/file/component_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb23
-rw-r--r--spec/lib/gitlab/ci/config/external/rules_spec.rb96
-rw-r--r--spec/lib/gitlab/ci/config/header/input_spec.rb19
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/access_spec.rb (renamed from spec/lib/gitlab/ci/interpolation/access_spec.rb)11
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/block_spec.rb112
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/config_spec.rb86
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/context_spec.rb (renamed from spec/lib/gitlab/ci/interpolation/context_spec.rb)16
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb23
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb35
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb38
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb27
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb137
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb (renamed from spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb)56
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/template_spec.rb (renamed from spec/lib/gitlab/ci/interpolation/template_spec.rb)6
-rw-r--r--spec/lib/gitlab/ci/config/normalizer_spec.rb105
-rw-r--r--spec/lib/gitlab/ci/config/yaml/loader_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/config/yaml/result_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/yaml_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/decompressed_gzip_size_validator_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/input/arguments/base_spec.rb19
-rw-r--r--spec/lib/gitlab/ci/input/arguments/default_spec.rb53
-rw-r--r--spec/lib/gitlab/ci/input/arguments/options_spec.rb54
-rw-r--r--spec/lib/gitlab/ci/input/arguments/required_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/input/arguments/unknown_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/input/inputs_spec.rb126
-rw-r--r--spec/lib/gitlab/ci/interpolation/block_spec.rb39
-rw-r--r--spec/lib/gitlab/ci/interpolation/config_spec.rb49
-rw-r--r--spec/lib/gitlab/ci/jwt_v2/claim_mapper/repository_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/jwt_v2/claim_mapper_spec.rb40
-rw-r--r--spec/lib/gitlab/ci/jwt_v2_spec.rb78
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/project_config/repository_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/project_config_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/queue/metrics_spec.rb71
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/component_spec.rb148
-rw-r--r--spec/lib/gitlab/ci/status/stage/factory_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/stage/play_manual_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/tags/bulk_insert_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/variables/downstream/expandable_variable_generator_spec.rb59
-rw-r--r--spec/lib/gitlab/ci/variables/downstream/generator_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb36
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb22
-rw-r--r--spec/lib/gitlab/config/entry/validators_spec.rb33
-rw-r--r--spec/lib/gitlab/container_repository/tags/cache_spec.rb4
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb338
-rw-r--r--spec/lib/gitlab/data_builder/build_spec.rb32
-rw-r--r--spec/lib/gitlab/data_builder/deployment_spec.rb4
-rw-r--r--spec/lib/gitlab/data_builder/issuable_spec.rb27
-rw-r--r--spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb10
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb4
-rw-r--r--spec/lib/gitlab/database/bump_sequences_spec.rb83
-rw-r--r--spec/lib/gitlab/database/click_house_client_spec.rb13
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb2
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb151
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/prometheus_alert_indicator_spec.rb66
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/wal_rate_spec.rb29
-rw-r--r--spec/lib/gitlab/database/health_status_spec.rb16
-rw-r--r--spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb53
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb39
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/squasher_spec.rb65
-rw-r--r--spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb27
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb19
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb6
-rw-r--r--spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb114
-rw-r--r--spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb41
-rw-r--r--spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb20
-rw-r--r--spec/lib/gitlab/database/tables_truncate_spec.rb1
-rw-r--r--spec/lib/gitlab/database_spec.rb27
-rw-r--r--spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb38
-rw-r--r--spec/lib/gitlab/exclusive_lease_spec.rb328
-rw-r--r--spec/lib/gitlab/git/blame_spec.rb8
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb108
-rw-r--r--spec/lib/gitlab/git/diff_tree_spec.rb30
-rw-r--r--spec/lib/gitlab/git/object_pool_spec.rb25
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb93
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb2
-rw-r--r--spec/lib/gitlab/git/tree_spec.rb39
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb4
-rw-r--r--spec/lib/gitlab/gitaly_client/blob_service_spec.rb19
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb56
-rw-r--r--spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb26
-rw-r--r--spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb34
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb10
-rw-r--r--spec/lib/gitlab/github_import_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/pagination/connections_spec.rb2
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb11
-rw-r--r--spec/lib/gitlab/hook_data/issue_builder_spec.rb11
-rw-r--r--spec/lib/gitlab/hook_data/merge_request_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/http_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml15
-rw-r--r--spec/lib/gitlab/import_export/command_line_util_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb21
-rw-r--r--spec/lib/gitlab/import_export/file_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/internal_events/event_definitions_spec.rb4
-rw-r--r--spec/lib/gitlab/internal_events_spec.rb2
-rw-r--r--spec/lib/gitlab/jwt_authenticatable_spec.rb36
-rw-r--r--spec/lib/gitlab/kas_spec.rb79
-rw-r--r--spec/lib/gitlab/merge_requests/message_generator_spec.rb20
-rw-r--r--spec/lib/gitlab/metrics/dashboard/defaults_spec.rb7
-rw-r--r--spec/lib/gitlab/metrics/dashboard/finder_spec.rb178
-rw-r--r--spec/lib/gitlab/metrics/dashboard/importer_spec.rb55
-rw-r--r--spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb97
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb178
-rw-r--r--spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb148
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb58
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter_spec.rb59
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/panel_ids_inserter_spec.rb88
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb26
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter_spec.rb77
-rw-r--r--spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb99
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb29
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb15
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb149
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb78
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator_spec.rb146
-rw-r--r--spec/lib/gitlab/metrics/global_search_slis_spec.rb3
-rw-r--r--spec/lib/gitlab/metrics/subscribers/active_record_spec.rb2
-rw-r--r--spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb2
-rw-r--r--spec/lib/gitlab/null_request_store_spec.rb75
-rw-r--r--spec/lib/gitlab/pages/url_builder_spec.rb72
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb8
-rw-r--r--spec/lib/gitlab/plantuml_spec.rb2
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb2
-rw-r--r--spec/lib/gitlab/redis/cache_spec.rb4
-rw-r--r--spec/lib/gitlab/redis/cluster_shared_state_spec.rb (renamed from spec/lib/gitlab/redis/cluster_cache_spec.rb)4
-rw-r--r--spec/lib/gitlab/redis/etag_cache_spec.rb56
-rw-r--r--spec/lib/gitlab/regex_requires_app_spec.rb2
-rw-r--r--spec/lib/gitlab/regex_spec.rb17
-rw-r--r--spec/lib/gitlab/repository_size_checker_spec.rb5
-rw-r--r--spec/lib/gitlab/safe_request_store_spec.rb257
-rw-r--r--spec/lib/gitlab/search_results_spec.rb26
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb5
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control/client_spec.rb53
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control/pause_control_service_spec.rb178
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control/server_spec.rb76
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control/strategy_handler_spec.rb68
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb19
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb192
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb2
-rw-r--r--spec/lib/gitlab/time_tracking_formatter_spec.rb2
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb48
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metric_spec.rb8
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb40
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_bulk_imports_entities_metric_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb72
-rw-r--r--spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb113
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb97
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/having_constraints_spec.rb19
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb45
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/where_constraints_spec.rb18
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb30
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb165
-rw-r--r--spec/lib/gitlab/usage_data_counters/neovim_plugin_activity_unique_counter_spec.rb19
-rw-r--r--spec/lib/gitlab/usage_data_counters/visual_studio_extension_activity_unique_counter_spec.rb19
-rw-r--r--spec/lib/gitlab/with_request_store_spec.rb30
-rw-r--r--spec/lib/gitlab/x509/signature_spec.rb44
-rw-r--r--spec/lib/peek/views/click_house_spec.rb37
-rw-r--r--spec/lib/product_analytics/settings_spec.rb2
-rw-r--r--spec/lib/sbom/package_url/encoder_spec.rb15
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb3
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb1
-rw-r--r--spec/lib/sidebars/menu_spec.rb13
-rw-r--r--spec/lib/slack_markdown_sanitizer_spec.rb17
-rw-r--r--spec/lib/unnested_in_filters/rewriter_spec.rb2
-rw-r--r--spec/mailers/devise_mailer_spec.rb20
-rw-r--r--spec/mailers/emails/projects_spec.rb35
-rw-r--r--spec/mailers/notify_spec.rb62
-rw-r--r--spec/mailers/previews_spec.rb1
-rw-r--r--spec/migrations/20221219122320_copy_clickhouse_connection_string_to_encrypted_var_spec.rb2
-rw-r--r--spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb2
-rw-r--r--spec/migrations/20230612232000_queue_backfill_dismissal_reason_in_vulnerability_reads_spec.rb26
-rw-r--r--spec/migrations/20230712145557_queue_backfill_missing_vulnerability_dismissal_details_spec.rb26
-rw-r--r--spec/migrations/20230714015909_add_index_for_member_expiring_query_spec.rb16
-rw-r--r--spec/migrations/20230719083202_backfill_project_statistics_storage_size_without_pipeline_artifacts_size_spec.rb44
-rw-r--r--spec/migrations/20230723203612_backfill_default_branch_protection_application_setting_spec.rb50
-rw-r--r--spec/migrations/20230724071541_queue_backfill_default_branch_protection_namespace_setting_spec.rb26
-rw-r--r--spec/migrations/20230724164745_queue_delete_orphaned_transferred_project_approval_rules_spec.rb26
-rw-r--r--spec/migrations/20230728174927_add_epic_work_item_type_spec.rb91
-rw-r--r--spec/migrations/20230801150214_retry_cleanup_bigint_conversion_for_events_for_gitlab_com_spec.rb147
-rw-r--r--spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb26
-rw-r--r--spec/migrations/20230803125434_add_has_merge_request_on_vulnerability_reads_trigger_spec.rb178
-rw-r--r--spec/migrations/20230804053643_add_ticket_work_item_type_spec.rb97
-rw-r--r--spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb10
-rw-r--r--spec/migrations/20230809104753_swap_epic_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb149
-rw-r--r--spec/migrations/20230810103534_swap_suggestions_note_id_to_bigint_for_self_hosts_spec.rb149
-rw-r--r--spec/migrations/20230810123044_swap_snippet_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb125
-rw-r--r--spec/migrations/20230811103941_swap_vulnerability_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb133
-rw-r--r--spec/migrations/20230814144045_swap_timelogs_note_id_to_bigint_for_self_hosts_spec.rb125
-rw-r--r--spec/migrations/add_expiry_notified_at_to_member_spec.rb21
-rw-r--r--spec/migrations/cleanup_conversion_big_int_ci_build_needs_self_managed_spec.rb107
-rw-r--r--spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb35
-rw-r--r--spec/migrations/ensure_todos_bigint_backfill_completed_for_self_managed_spec.rb35
-rw-r--r--spec/migrations/swap_commit_user_mentions_note_id_to_bigint_for_self_managed_spec.rb123
-rw-r--r--spec/migrations/swap_events_target_id_to_bigint_for_gitlab_dot_com_spec.rb67
-rw-r--r--spec/migrations/swap_todos_note_id_to_bigint_for_self_managed_spec.rb159
-rw-r--r--spec/models/abuse_report_spec.rb28
-rw-r--r--spec/models/ai/service_access_token_spec.rb9
-rw-r--r--spec/models/application_setting_spec.rb120
-rw-r--r--spec/models/batched_git_ref_updates/deletion_spec.rb125
-rw-r--r--spec/models/ci/bridge_spec.rb182
-rw-r--r--spec/models/ci/build_spec.rb686
-rw-r--r--spec/models/ci/catalog/resource_spec.rb2
-rw-r--r--spec/models/ci/catalog/resources/component_spec.rb46
-rw-r--r--spec/models/ci/catalog/resources/version_spec.rb16
-rw-r--r--spec/models/ci/commit_with_pipeline_spec.rb2
-rw-r--r--spec/models/ci/job_annotation_spec.rb1
-rw-r--r--spec/models/ci/job_artifact_spec.rb78
-rw-r--r--spec/models/ci/persistent_ref_spec.rb28
-rw-r--r--spec/models/ci/pipeline_spec.rb274
-rw-r--r--spec/models/ci/processable_spec.rb64
-rw-r--r--spec/models/ci/runner_manager_spec.rb82
-rw-r--r--spec/models/ci/runner_spec.rb40
-rw-r--r--spec/models/clusters/cluster_spec.rb1
-rw-r--r--spec/models/commit_collection_spec.rb20
-rw-r--r--spec/models/commit_spec.rb3
-rw-r--r--spec/models/commit_status_spec.rb22
-rw-r--r--spec/models/concerns/approvable_spec.rb24
-rw-r--r--spec/models/concerns/bulk_insert_safe_spec.rb2
-rw-r--r--spec/models/concerns/bulk_insertable_associations_spec.rb2
-rw-r--r--spec/models/concerns/ci/partitionable/switch_spec.rb42
-rw-r--r--spec/models/concerns/ci/partitionable_spec.rb10
-rw-r--r--spec/models/concerns/cross_database_ignored_tables_spec.rb222
-rw-r--r--spec/models/concerns/cross_database_modification_spec.rb2
-rw-r--r--spec/models/concerns/enum_inheritance_spec.rb97
-rw-r--r--spec/models/concerns/integrations/reset_secret_fields_spec.rb12
-rw-r--r--spec/models/concerns/milestoneable_spec.rb25
-rw-r--r--spec/models/concerns/noteable_spec.rb4
-rw-r--r--spec/models/concerns/reset_on_union_error_spec.rb (renamed from spec/initializers/active_record_relation_union_reset_spec.rb)14
-rw-r--r--spec/models/concerns/resolvable_discussion_spec.rb43
-rw-r--r--spec/models/concerns/resolvable_note_spec.rb54
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb2
-rw-r--r--spec/models/concerns/where_composite_spec.rb2
-rw-r--r--spec/models/customer_relations/contact_spec.rb23
-rw-r--r--spec/models/deployment_spec.rb572
-rw-r--r--spec/models/discussion_spec.rb10
-rw-r--r--spec/models/environment_spec.rb299
-rw-r--r--spec/models/group_spec.rb121
-rw-r--r--spec/models/instance_configuration_spec.rb12
-rw-r--r--spec/models/integration_spec.rb87
-rw-r--r--spec/models/integrations/chat_message/issue_message_spec.rb10
-rw-r--r--spec/models/integrations/discord_spec.rb22
-rw-r--r--spec/models/integrations/every_integration_spec.rb6
-rw-r--r--spec/models/integrations/field_spec.rb27
-rw-r--r--spec/models/integrations/google_play_spec.rb41
-rw-r--r--spec/models/integrations/pumble_spec.rb2
-rw-r--r--spec/models/issue_link_spec.rb53
-rw-r--r--spec/models/issue_spec.rb12
-rw-r--r--spec/models/label_spec.rb80
-rw-r--r--spec/models/loose_foreign_keys/deleted_record_spec.rb30
-rw-r--r--spec/models/member_spec.rb13
-rw-r--r--spec/models/members/project_member_spec.rb2
-rw-r--r--spec/models/merge_request_spec.rb71
-rw-r--r--spec/models/metrics/dashboard/annotation_spec.rb39
-rw-r--r--spec/models/milestone_spec.rb2
-rw-r--r--spec/models/ml/experiment_spec.rb39
-rw-r--r--spec/models/ml/model_spec.rb73
-rw-r--r--spec/models/ml/model_version_spec.rb66
-rw-r--r--spec/models/namespace/aggregation_schedule_spec.rb18
-rw-r--r--spec/models/namespace/package_setting_spec.rb23
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb31
-rw-r--r--spec/models/namespace_spec.rb29
-rw-r--r--spec/models/namespaces/project_namespace_spec.rb77
-rw-r--r--spec/models/network/graph_spec.rb8
-rw-r--r--spec/models/note_spec.rb36
-rw-r--r--spec/models/operations/feature_flags/strategy_spec.rb24
-rw-r--r--spec/models/organizations/organization_spec.rb18
-rw-r--r--spec/models/packages/nuget/metadatum_spec.rb64
-rw-r--r--spec/models/packages/package_spec.rb86
-rw-r--r--spec/models/pages_deployment_spec.rb54
-rw-r--r--spec/models/performance_monitoring/prometheus_dashboard_spec.rb277
-rw-r--r--spec/models/plan_limits_spec.rb3
-rw-r--r--spec/models/pool_repository_spec.rb33
-rw-r--r--spec/models/postgresql/replication_slot_spec.rb2
-rw-r--r--spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb2
-rw-r--r--spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb2
-rw-r--r--spec/models/project_authorization_spec.rb229
-rw-r--r--spec/models/project_authorizations/changes_spec.rb326
-rw-r--r--spec/models/project_setting_spec.rb96
-rw-r--r--spec/models/project_spec.rb244
-rw-r--r--spec/models/project_statistics_spec.rb126
-rw-r--r--spec/models/project_team_spec.rb11
-rw-r--r--spec/models/release_spec.rb1
-rw-r--r--spec/models/repository_spec.rb134
-rw-r--r--spec/models/service_desk/custom_email_verification_spec.rb14
-rw-r--r--spec/models/system/broadcast_message_spec.rb (renamed from spec/models/broadcast_message_spec.rb)2
-rw-r--r--spec/models/user_spec.rb69
-rw-r--r--spec/models/users/calloutable_spec.rb11
-rw-r--r--spec/models/wiki_page_spec.rb12
-rw-r--r--spec/models/work_item_spec.rb25
-rw-r--r--spec/models/work_items/parent_link_spec.rb44
-rw-r--r--spec/models/work_items/related_work_item_link_spec.rb31
-rw-r--r--spec/models/work_items/type_spec.rb6
-rw-r--r--spec/models/work_items/widget_definition_spec.rb3
-rw-r--r--spec/models/work_items/widgets/linked_items_spec.rb25
-rw-r--r--spec/policies/ci/bridge_policy_spec.rb2
-rw-r--r--spec/policies/ci/build_policy_spec.rb29
-rw-r--r--spec/policies/deploy_key_policy_spec.rb100
-rw-r--r--spec/policies/metrics/dashboard/annotation_policy_spec.rb67
-rw-r--r--spec/policies/organizations/organization_policy_spec.rb18
-rw-r--r--spec/policies/packages/policies/project_policy_spec.rb25
-rw-r--r--spec/policies/project_policy_spec.rb66
-rw-r--r--spec/policies/work_item_policy_spec.rb22
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb15
-rw-r--r--spec/presenters/ml/model_presenter_spec.rb43
-rw-r--r--spec/presenters/ml/models_index_presenter_spec.rb33
-rw-r--r--spec/presenters/packages/npm/package_presenter_spec.rb33
-rw-r--r--spec/presenters/packages/nuget/v2/metadata_index_presenter_spec.rb35
-rw-r--r--spec/presenters/packages/nuget/v2/service_index_presenter_spec.rb54
-rw-r--r--spec/rack_servers/puma_spec.rb2
-rw-r--r--spec/requests/admin/abuse_reports_controller_spec.rb24
-rw-r--r--spec/requests/api/admin/batched_background_migrations_spec.rb2
-rw-r--r--spec/requests/api/admin/broadcast_messages_spec.rb (renamed from spec/requests/api/broadcast_messages_spec.rb)43
-rw-r--r--spec/requests/api/admin/plan_limits_spec.rb6
-rw-r--r--spec/requests/api/api_spec.rb2
-rw-r--r--spec/requests/api/ci/jobs_spec.rb2
-rw-r--r--spec/requests/api/ci/pipeline_schedules_spec.rb18
-rw-r--r--spec/requests/api/ci/pipelines_spec.rb39
-rw-r--r--spec/requests/api/commit_statuses_spec.rb1
-rw-r--r--spec/requests/api/commits_spec.rb8
-rw-r--r--spec/requests/api/composer_packages_spec.rb2
-rw-r--r--spec/requests/api/draft_notes_spec.rb20
-rw-r--r--spec/requests/api/files_spec.rb17
-rw-r--r--spec/requests/api/graphql/abuse_report_labels_spec.rb58
-rw-r--r--spec/requests/api/graphql/abuse_report_spec.rb50
-rw-r--r--spec/requests/api/graphql/achievements/user_achievements_query_spec.rb7
-rw-r--r--spec/requests/api/graphql/ci/application_setting_spec.rb6
-rw-r--r--spec/requests/api/graphql/ci/group_environment_scopes_spec.rb53
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb39
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb139
-rw-r--r--spec/requests/api/graphql/current_user/todos_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/current_user_query_spec.rb6
-rw-r--r--spec/requests/api/graphql/environments/deployments_spec.rb19
-rw-r--r--spec/requests/api/graphql/group/autocomplete_users_spec.rb48
-rw-r--r--spec/requests/api/graphql/group/work_items_spec.rb106
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/add_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb13
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb75
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_trigger/delete_spec.rb63
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_trigger/update_spec.rb71
-rw-r--r--spec/requests/api/graphql/mutations/issues/update_spec.rb5
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_time_estimate_spec.rb41
-rw-r--r--spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb44
-rw-r--r--spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb26
-rw-r--r--spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb30
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb10
-rw-r--r--spec/requests/api/graphql/mutations/work_items/create_spec.rb20
-rw-r--r--spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb131
-rw-r--r--spec/requests/api/graphql/mutations/work_items/subscribe_spec.rb73
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/alert_management/alerts_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/autocomplete_users_spec.rb99
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb22
-rw-r--r--spec/requests/api/graphql/project/work_items_spec.rb53
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb68
-rw-r--r--spec/requests/api/graphql/user/user_achievements_query_spec.rb5
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb73
-rw-r--r--spec/requests/api/internal/base_spec.rb15
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb6
-rw-r--r--spec/requests/api/internal/pages_spec.rb14
-rw-r--r--spec/requests/api/labels_spec.rb12
-rw-r--r--spec/requests/api/merge_requests_spec.rb41
-rw-r--r--spec/requests/api/metrics/dashboard/annotations_spec.rb73
-rw-r--r--spec/requests/api/metrics/user_starred_dashboards_spec.rb87
-rw-r--r--spec/requests/api/ml/mlflow/runs_spec.rb16
-rw-r--r--spec/requests/api/npm_group_packages_spec.rb3
-rw-r--r--spec/requests/api/npm_project_packages_spec.rb24
-rw-r--r--spec/requests/api/nuget_group_packages_spec.rb6
-rw-r--r--spec/requests/api/nuget_project_packages_spec.rb126
-rw-r--r--spec/requests/api/project_attributes.yml3
-rw-r--r--spec/requests/api/projects_spec.rb97
-rw-r--r--spec/requests/api/repositories_spec.rb46
-rw-r--r--spec/requests/api/settings_spec.rb15
-rw-r--r--spec/requests/api/snippets_spec.rb121
-rw-r--r--spec/requests/api/users_spec.rb2
-rw-r--r--spec/requests/groups/work_items_controller_spec.rb49
-rw-r--r--spec/requests/import/github_controller_spec.rb26
-rw-r--r--spec/requests/lfs_http_spec.rb2
-rw-r--r--spec/requests/organizations/organizations_controller_spec.rb58
-rw-r--r--spec/requests/projects/blob_spec.rb87
-rw-r--r--spec/requests/projects/merge_requests/creations_spec.rb59
-rw-r--r--spec/requests/projects/merge_requests_controller_spec.rb40
-rw-r--r--spec/requests/projects/merge_requests_discussions_spec.rb23
-rw-r--r--spec/requests/projects/metrics/dashboards/builder_spec.rb123
-rw-r--r--spec/requests/projects/ml/models_controller_spec.rb15
-rw-r--r--spec/requests/projects/noteable_notes_spec.rb8
-rw-r--r--spec/requests/projects/notes_controller_spec.rb27
-rw-r--r--spec/requests/projects/service_desk_controller_spec.rb8
-rw-r--r--spec/requests/projects/tracing_controller_spec.rb60
-rw-r--r--spec/requests/sessions_spec.rb29
-rw-r--r--spec/requests/verifies_with_email_spec.rb156
-rw-r--r--spec/routing/project_routing_spec.rb7
-rw-r--r--spec/rubocop/cop/database/avoid_inheritance_column_spec.rb23
-rw-r--r--spec/rubocop/cop/experiments_test_coverage_spec.rb169
-rw-r--r--spec/rubocop/cop/rspec/before_all_spec.rb74
-rw-r--r--spec/scripts/database/migration_collision_checker_spec.rb48
-rw-r--r--spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb17
-rw-r--r--spec/scripts/lib/glfm/update_example_snapshots_spec.rb2
-rw-r--r--spec/scripts/trigger-build_spec.rb22
-rw-r--r--spec/serializers/admin/abuse_report_details_entity_spec.rb3
-rw-r--r--spec/serializers/admin/abuse_report_entity_spec.rb1
-rw-r--r--spec/serializers/base_discussion_entity_spec.rb12
-rw-r--r--spec/serializers/deployment_entity_spec.rb20
-rw-r--r--spec/serializers/integrations/event_entity_spec.rb21
-rw-r--r--spec/serializers/profile/event_entity_spec.rb11
-rw-r--r--spec/serializers/project_note_entity_spec.rb14
-rw-r--r--spec/serializers/stage_entity_spec.rb4
-rw-r--r--spec/services/admin/abuse_reports/moderate_user_service_spec.rb (renamed from spec/services/admin/abuse_report_update_service_spec.rb)2
-rw-r--r--spec/services/admin/plan_limits/update_service_spec.rb127
-rw-r--r--spec/services/alert_management/process_prometheus_alert_service_spec.rb16
-rw-r--r--spec/services/application_settings/update_service_spec.rb12
-rw-r--r--spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb6
-rw-r--r--spec/services/auto_merge/base_service_spec.rb16
-rw-r--r--spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb23
-rw-r--r--spec/services/auto_merge_service_spec.rb43
-rw-r--r--spec/services/award_emojis/destroy_service_spec.rb4
-rw-r--r--spec/services/batched_git_ref_updates/cleanup_scheduler_service_spec.rb55
-rw-r--r--spec/services/batched_git_ref_updates/project_cleanup_service_spec.rb98
-rw-r--r--spec/services/bulk_imports/create_service_spec.rb6
-rw-r--r--spec/services/bulk_imports/get_importable_data_service_spec.rb6
-rw-r--r--spec/services/bulk_imports/relation_batch_export_service_spec.rb2
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb41
-rw-r--r--spec/services/ci/parse_annotations_artifact_service_spec.rb182
-rw-r--r--spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb36
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb14
-rw-r--r--spec/services/ci/pipeline_schedules/create_service_spec.rb8
-rw-r--r--spec/services/ci/pipeline_schedules/update_service_spec.rb12
-rw-r--r--spec/services/ci/retry_job_service_spec.rb78
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb16
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb34
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb9
-rw-r--r--spec/services/cohorts_service_spec.rb3
-rw-r--r--spec/services/commits/commit_patch_service_spec.rb3
-rw-r--r--spec/services/deployments/create_for_job_service_spec.rb18
-rw-r--r--spec/services/deployments/link_merge_requests_service_spec.rb6
-rw-r--r--spec/services/deployments/older_deployments_drop_service_spec.rb117
-rw-r--r--spec/services/deployments/update_environment_service_spec.rb92
-rw-r--r--spec/services/environments/create_for_job_service_spec.rb19
-rw-r--r--spec/services/environments/create_service_spec.rb3
-rw-r--r--spec/services/environments/update_service_spec.rb15
-rw-r--r--spec/services/git/base_hooks_service_spec.rb12
-rw-r--r--spec/services/grafana/proxy_service_spec.rb169
-rw-r--r--spec/services/groups/create_service_spec.rb27
-rw-r--r--spec/services/groups/group_links/create_service_spec.rb2
-rw-r--r--spec/services/groups/participants_service_spec.rb10
-rw-r--r--spec/services/groups/update_service_spec.rb208
-rw-r--r--spec/services/groups/update_shared_runners_service_spec.rb21
-rw-r--r--spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb8
-rw-r--r--spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb8
-rw-r--r--spec/services/issuable/bulk_update_service_spec.rb51
-rw-r--r--spec/services/issues/import_csv_service_spec.rb10
-rw-r--r--spec/services/issues/update_service_spec.rb91
-rw-r--r--spec/services/labels/available_labels_service_spec.rb69
-rw-r--r--spec/services/labels/create_service_spec.rb36
-rw-r--r--spec/services/labels/update_service_spec.rb36
-rw-r--r--spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb2
-rw-r--r--spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb2
-rw-r--r--spec/services/members/create_service_spec.rb7
-rw-r--r--spec/services/members/import_project_team_service_spec.rb93
-rw-r--r--spec/services/members/update_service_spec.rb37
-rw-r--r--spec/services/merge_requests/cleanup_refs_service_spec.rb1
-rw-r--r--spec/services/merge_requests/create_ref_service_spec.rb176
-rw-r--r--spec/services/merge_requests/ff_merge_service_spec.rb1
-rw-r--r--spec/services/merge_requests/merge_orchestration_service_spec.rb4
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb807
-rw-r--r--spec/services/merge_requests/merge_strategies/from_source_branch_spec.rb87
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb23
-rw-r--r--spec/services/merge_requests/squash_service_spec.rb25
-rw-r--r--spec/services/merge_requests/update_service_spec.rb33
-rw-r--r--spec/services/metrics/dashboard/annotations/create_service_spec.rb161
-rw-r--r--spec/services/metrics/dashboard/annotations/delete_service_spec.rb93
-rw-r--r--spec/services/metrics/dashboard/clone_dashboard_service_spec.rb195
-rw-r--r--spec/services/metrics/dashboard/cluster_dashboard_service_spec.rb60
-rw-r--r--spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb74
-rw-r--r--spec/services/metrics/dashboard/custom_dashboard_service_spec.rb167
-rw-r--r--spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb147
-rw-r--r--spec/services/metrics/dashboard/default_embed_service_spec.rb59
-rw-r--r--spec/services/metrics/dashboard/dynamic_embed_service_spec.rb158
-rw-r--r--spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb103
-rw-r--r--spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb279
-rw-r--r--spec/services/metrics/dashboard/panel_preview_service_spec.rb85
-rw-r--r--spec/services/metrics/dashboard/pod_dashboard_service_spec.rb74
-rw-r--r--spec/services/metrics/dashboard/system_dashboard_service_spec.rb82
-rw-r--r--spec/services/metrics/dashboard/transient_embed_service_spec.rb99
-rw-r--r--spec/services/metrics/dashboard/update_dashboard_service_spec.rb159
-rw-r--r--spec/services/metrics/users_starred_dashboards/create_service_spec.rb73
-rw-r--r--spec/services/metrics/users_starred_dashboards/delete_service_spec.rb41
-rw-r--r--spec/services/ml/experiment_tracking/candidate_repository_spec.rb9
-rw-r--r--spec/services/ml/find_or_create_experiment_service_spec.rb33
-rw-r--r--spec/services/ml/find_or_create_model_service_spec.rb45
-rw-r--r--spec/services/ml/find_or_create_model_version_service_spec.rb49
-rw-r--r--spec/services/namespace_settings/update_service_spec.rb59
-rw-r--r--spec/services/namespaces/package_settings/update_service_spec.rb6
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb56
-rw-r--r--spec/services/notification_service_spec.rb32
-rw-r--r--spec/services/packages/composer/create_package_service_spec.rb2
-rw-r--r--spec/services/packages/go/create_package_service_spec.rb2
-rw-r--r--spec/services/packages/ml_model/create_package_file_service_spec.rb21
-rw-r--r--spec/services/packages/nuget/update_package_from_metadata_service_spec.rb10
-rw-r--r--spec/services/packages/rubygems/process_gem_service_spec.rb9
-rw-r--r--spec/services/personal_access_tokens/revoke_token_family_service_spec.rb7
-rw-r--r--spec/services/post_receive_service_spec.rb2
-rw-r--r--spec/services/product_analytics/build_activity_graph_service_spec.rb2
-rw-r--r--spec/services/product_analytics/build_graph_service_spec.rb2
-rw-r--r--spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb12
-rw-r--r--spec/services/projects/create_service_spec.rb4
-rw-r--r--spec/services/projects/destroy_service_spec.rb17
-rw-r--r--spec/services/projects/fork_service_spec.rb5
-rw-r--r--spec/services/projects/participants_service_spec.rb146
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb85
-rw-r--r--spec/services/projects/transfer_service_spec.rb12
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb171
-rw-r--r--spec/services/projects/update_service_spec.rb145
-rw-r--r--spec/services/projects/update_statistics_service_spec.rb10
-rw-r--r--spec/services/prometheus/proxy_service_spec.rb240
-rw-r--r--spec/services/prometheus/proxy_variable_substitution_service_spec.rb204
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb58
-rw-r--r--spec/services/search_service_spec.rb6
-rw-r--r--spec/services/security/ci_configuration/sast_create_service_spec.rb9
-rw-r--r--spec/services/service_response_spec.rb6
-rw-r--r--spec/services/spam/spam_action_service_spec.rb17
-rw-r--r--spec/services/suggestions/apply_service_spec.rb69
-rw-r--r--spec/services/suggestions/create_service_spec.rb51
-rw-r--r--spec/services/suggestions/outdate_service_spec.rb21
-rw-r--r--spec/services/task_list_toggle_service_spec.rb124
-rw-r--r--spec/services/todo_service_spec.rb20
-rw-r--r--spec/services/todos/destroy/group_private_service_spec.rb34
-rw-r--r--spec/services/users/activity_service_spec.rb12
-rw-r--r--spec/services/users/destroy_service_spec.rb25
-rw-r--r--spec/services/users/email_verification/update_email_service_spec.rb119
-rw-r--r--spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb15
-rw-r--r--spec/services/users/migrate_records_to_ghost_user_service_spec.rb52
-rw-r--r--spec/services/users/refresh_authorized_projects_service_spec.rb28
-rw-r--r--spec/services/users/reject_service_spec.rb11
-rw-r--r--spec/services/users/update_service_spec.rb9
-rw-r--r--spec/services/web_hook_service_spec.rb27
-rw-r--r--spec/services/webauthn/authenticate_service_spec.rb12
-rw-r--r--spec/services/work_items/related_work_item_links/create_service_spec.rb37
-rw-r--r--spec/services/work_items/task_list_reference_removal_service_spec.rb2
-rw-r--r--spec/services/work_items/widgets/description_service/update_service_spec.rb9
-rw-r--r--spec/services/work_items/widgets/hierarchy_service/create_service_spec.rb2
-rw-r--r--spec/simplecov_env.rb60
-rw-r--r--spec/simplecov_env_core.rb64
-rw-r--r--spec/spec_helper.rb12
-rw-r--r--spec/support/database/auto_explain.rb135
-rw-r--r--spec/support/database/prevent_cross_database_modification.rb2
-rw-r--r--spec/support/database/query_recorder.rb17
-rw-r--r--spec/support/fast_quarantine.rb7
-rw-r--r--spec/support/formatters/json_formatter.rb3
-rw-r--r--spec/support/helpers/content_editor_helpers.rb4
-rw-r--r--spec/support/helpers/features/dom_helpers.rb13
-rw-r--r--spec/support/helpers/features/runners_helpers.rb13
-rw-r--r--spec/support/helpers/filter_spec_helper.rb4
-rw-r--r--spec/support/helpers/graphql_helpers.rb4
-rw-r--r--spec/support/helpers/kubernetes_helpers.rb4
-rw-r--r--spec/support/helpers/metrics_dashboard_helpers.rb8
-rw-r--r--spec/support/helpers/migrations_helpers/project_statistics_helper.rb37
-rw-r--r--spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb12
-rw-r--r--spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb35
-rw-r--r--spec/support/helpers/prometheus_helpers.rb22
-rw-r--r--spec/support/helpers/stub_gitlab_calls.rb4
-rw-r--r--spec/support/matchers/exceed_query_limit.rb19
-rw-r--r--spec/support/protected_branch_helpers.rb4
-rw-r--r--spec/support/rspec_order_todo.yml14
-rw-r--r--spec/support/shared_contexts/features/integrations/integrations_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/glfm/api_markdown_snapshot_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/graphql/types/query_type_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/lib/gitlab/background_migration/backfill_project_statistics.rb106
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb10
-rw-r--r--spec/support/shared_contexts/lib/sbom/package_url_shared_contexts.rb6
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb1
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb3
-rw-r--r--spec/support/shared_contexts/services/packages/rubygems/invalid_metadata.rb9
-rw-r--r--spec/support/shared_contexts/user_contribution_events_shared_context.rb52
-rw-r--r--spec/support/shared_examples/bulk_imports/visibility_level_examples.rb147
-rw-r--r--spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/ci/deployable_policy_shared_examples.rb25
-rw-r--r--spec/support/shared_examples/ci/deployable_policy_shared_examples_ee.rb57
-rw-r--r--spec/support/shared_examples/ci/deployable_shared_examples.rb582
-rw-r--r--spec/support/shared_examples/ci/deployable_shared_examples_ee.rb34
-rw-r--r--spec/support/shared_examples/ci/pipeline_schedules_create_or_update_shared_examples.rb121
-rw-r--r--spec/support/shared_examples/ci/stage_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/ci/waiting_for_approval_status_shared_examples.rb79
-rw-r--r--spec/support/shared_examples/controllers/internal_event_tracking_examples.rb12
-rw-r--r--spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/controllers/unique_visits_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/database_health_status_indicators/prometheus_alert_based_shared_examples.rb133
-rw-r--r--spec/support/shared_examples/deployments/create_for_job_shared_examples.rb (renamed from spec/services/deployments/create_for_build_service_spec.rb)83
-rw-r--r--spec/support/shared_examples/environments/create_for_job_shared_examples.rb (renamed from spec/services/environments/create_for_build_service_spec.rb)19
-rw-r--r--spec/support/shared_examples/features/access_tokens_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/features/deploy_token_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb2
-rw-r--r--spec/support/shared_examples/features/editable_merge_request_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/features/manage_applications_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb3
-rw-r--r--spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb3
-rw-r--r--spec/support/shared_examples/features/runners_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/sidebar_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/features/work_items_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/finders/issues_finder_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb59
-rw-r--r--spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb100
-rw-r--r--spec/support/shared_examples/helpers/runners_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/helpers/super_sidebar_shared_examples.rb37
-rw-r--r--spec/support/shared_examples/lib/gitlab/cache/json_cache_shared_examples.rb46
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb33
-rw-r--r--spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb130
-rw-r--r--spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb82
-rw-r--r--spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/models/issuable_link_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/requests/api/draft_notes_shared_examples.rb91
-rw-r--r--spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/requests/api/graphql/remote_development_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/hooks_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb120
-rw-r--r--spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb43
-rw-r--r--spec/support/shared_examples/requests/graphql_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/services/import_csv_service_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb81
-rw-r--r--spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb80
-rw-r--r--spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/notification_service_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb4
-rw-r--r--spec/support_specs/helpers/stub_feature_flags_spec.rb4
-rw-r--r--spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb43
-rw-r--r--spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb67
-rw-r--r--spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb29
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb5
-rw-r--r--spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/container_registry_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb82
-rw-r--r--spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/lock_writes_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/validate_config_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/gitaly_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/lfs/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/packages/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/snippets_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/terraform/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/user_management_rake_spec.rb17
-rw-r--r--spec/tasks/gitlab/workhorse_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/x509/update_rake_spec.rb2
-rw-r--r--spec/tasks/migrate/schema_check_rake_spec.rb2
-rw-r--r--spec/tooling/danger/bulk_database_actions_spec.rb124
-rw-r--r--spec/tooling/danger/database_spec.rb90
-rw-r--r--spec/tooling/danger/model_validations_spec.rb111
-rw-r--r--spec/tooling/danger/project_helper_spec.rb73
-rw-r--r--spec/tooling/danger/required_stops_spec.rb98
-rw-r--r--spec/tooling/danger/specs/project_factory_suggestion_spec.rb13
-rw-r--r--spec/validators/import/gitlab_projects/remote_file_validator_spec.rb26
-rw-r--r--spec/views/admin/application_settings/_ai_access.html.haml_spec.rb38
-rw-r--r--spec/views/admin/application_settings/general.html.haml_spec.rb18
-rw-r--r--spec/views/devise/registrations/new.html.haml_spec.rb30
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb21
-rw-r--r--spec/views/layouts/application.html.haml_spec.rb111
-rw-r--r--spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb39
-rw-r--r--spec/views/profiles/keys/_key.html.haml_spec.rb14
-rw-r--r--spec/views/projects/commits/show.html.haml_spec.rb34
-rw-r--r--spec/views/projects/issues/show.html.haml_spec.rb123
-rw-r--r--spec/views/projects/pages/_pages_settings.html.haml_spec.rb21
-rw-r--r--spec/views/pwa/manifest.json.erb_spec.rb36
-rw-r--r--spec/views/registrations/welcome/show.html.haml_spec.rb1
-rw-r--r--spec/views/shared/_label_row.html.haml_spec.rb16
-rw-r--r--spec/workers/background_migration/ci_database_worker_spec.rb3
-rw-r--r--spec/workers/background_migration_worker_spec.rb3
-rw-r--r--spec/workers/batched_git_ref_updates/cleanup_scheduler_worker_spec.rb31
-rw-r--r--spec/workers/batched_git_ref_updates/project_cleanup_worker_spec.rb33
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb34
-rw-r--r--spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb39
-rw-r--r--spec/workers/click_house/events_sync_worker_spec.rb57
-rw-r--r--spec/workers/clusters/agents/notify_git_push_worker_spec.rb12
-rw-r--r--spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb9
-rw-r--r--spec/workers/concerns/packages/error_handling_spec.rb81
-rw-r--r--spec/workers/concerns/worker_attributes_spec.rb4
-rw-r--r--spec/workers/environments/stop_job_success_worker_spec.rb (renamed from spec/workers/build_success_worker_spec.rb)4
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb1
-rw-r--r--spec/workers/integrations/group_mention_worker_spec.rb18
-rw-r--r--spec/workers/integrations/slack_event_worker_spec.rb3
-rw-r--r--spec/workers/loose_foreign_keys/cleanup_worker_spec.rb2
-rw-r--r--spec/workers/members/expiring_email_notification_worker_spec.rb50
-rw-r--r--spec/workers/members/expiring_worker_spec.rb27
-rw-r--r--spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb19
-rw-r--r--spec/workers/packages/debian/process_package_file_worker_spec.rb196
-rw-r--r--spec/workers/packages/helm/extraction_worker_spec.rb54
-rw-r--r--spec/workers/packages/nuget/extraction_worker_spec.rb126
-rw-r--r--spec/workers/packages/rubygems/extraction_worker_spec.rb80
-rw-r--r--spec/workers/process_commit_worker_spec.rb5
-rw-r--r--spec/workers/redis_migration_worker_spec.rb2
-rw-r--r--spec/workers/service_desk/custom_email_verification_cleanup_worker_spec.rb44
-rw-r--r--spec/workers/users/deactivate_dormant_users_worker_spec.rb55
-rw-r--r--spec/workers/web_hook_worker_spec.rb12
1593 files changed, 43890 insertions, 20791 deletions
diff --git a/spec/channels/noteable/notes_channel_spec.rb b/spec/channels/noteable/notes_channel_spec.rb
new file mode 100644
index 00000000000..a38155e7ffd
--- /dev/null
+++ b/spec/channels/noteable/notes_channel_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Noteable::NotesChannel, feature_category: :team_planning do
+ let_it_be(:project) { create(:project, :repository, :private) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+
+ describe '#subscribed' do
+ let(:subscribe_params) do
+ {
+ project_id: noteable.project_id,
+ noteable_type: noteable.class.underscore,
+ noteable_id: noteable.id
+ }
+ end
+
+ before do
+ stub_action_cable_connection current_user: developer
+ end
+
+ it 'rejects the subscription when noteable params are missing' do
+ subscribe(project_id: project.id)
+
+ expect(subscription).to be_rejected
+ end
+
+ context 'on an issue' do
+ let_it_be(:noteable) { create(:issue, project: project) }
+
+ it_behaves_like 'handle subscription based on user access'
+ end
+
+ context 'on a merge request' do
+ let_it_be(:noteable) { create(:merge_request, source_project: project) }
+
+ it_behaves_like 'handle subscription based on user access'
+ end
+ end
+end
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index a63e7158c2a..09e40d8f91a 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -248,13 +248,13 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
if Gitlab.ee?
[
%w[incident_management_close_incident status_page_publish] + described_class::DEFAULT_QUEUES,
- %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export] +
+ %w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export] +
described_class::DEFAULT_QUEUES
]
else
[
%w[incident_management_close_incident] + described_class::DEFAULT_QUEUES,
- %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import] +
+ %w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import] +
described_class::DEFAULT_QUEUES
]
end
diff --git a/spec/components/projects/ml/models_index_component_spec.rb b/spec/components/projects/ml/models_index_component_spec.rb
new file mode 100644
index 00000000000..e4599cc5eec
--- /dev/null
+++ b/spec/components/projects/ml/models_index_component_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Projects::Ml::ModelsIndexComponent, type: :component, feature_category: :mlops do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:model1) { build_stubbed(:ml_models, :with_latest_version_and_package, project: project) }
+ let_it_be(:model2) { build_stubbed(:ml_models, project: project) }
+ let_it_be(:models) { [model1, model2] }
+
+ subject(:component) do
+ described_class.new(models: models)
+ end
+
+ describe 'rendered' do
+ let(:element) { page.find("#js-index-ml-models") }
+
+ before do
+ render_inline component
+ end
+
+ it 'renders element with view_model' do
+ element = page.find("#js-index-ml-models")
+
+ expect(Gitlab::Json.parse(element['data-view-model'])).to eq({
+ 'models' => [
+ {
+ 'name' => model1.name,
+ 'version' => model1.latest_version.version,
+ 'path' => "/#{project.full_path}/-/packages/#{model1.latest_version.package_id}"
+ },
+ {
+ 'name' => model2.name,
+ 'version' => nil,
+ 'path' => nil
+ }
+ ]
+ })
+ end
+ end
+end
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index 35bfb829bf7..83498295676 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -58,7 +58,8 @@ RSpec.describe Admin::ClustersController, feature_category: :deployment_manageme
let(:total_count) { Clusters::Cluster.instance_type.page.total_count }
before do
- create_list(:cluster, 30, :provided_by_gcp, :production_environment, :instance)
+ allow(Clusters::Cluster).to receive(:default_per_page).and_return(1)
+ create_list(:cluster, 2, :provided_by_gcp, :production_environment, :instance)
end
it 'redirects to the page' do
diff --git a/spec/controllers/admin/labels_controller_spec.rb b/spec/controllers/admin/labels_controller_spec.rb
new file mode 100644
index 00000000000..c753426326d
--- /dev/null
+++ b/spec/controllers/admin/labels_controller_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::LabelsController, feature_category: :team_planning do
+ let_it_be(:admin) { create(:admin) }
+
+ before do
+ sign_in(admin)
+ end
+
+ describe 'DELETE #destroy' do
+ context 'when current user has ability to destroy the label' do
+ it 'removes the label' do
+ label = create(:admin_label)
+ delete :destroy, params: { id: label.to_param }
+
+ expect { label.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'does not remove the label if it is locked' do
+ label = create(:admin_label, lock_on_merge: true)
+ delete :destroy, params: { id: label.to_param }
+
+ expect(label.reload).to eq label
+ end
+
+ context 'when label is succesfuly destroyed' do
+ it 'redirects to the admin labels page' do
+ label = create(:admin_label)
+ delete :destroy, params: { id: label.to_param }
+
+ expect(response).to redirect_to(admin_labels_path)
+ end
+ end
+ end
+
+ context 'when current_user does not have ability to destroy the label' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, namespace: group) }
+ let(:another_user) { create(:user) }
+
+ before do
+ project.add_maintainer(another_user)
+
+ sign_in(another_user)
+ end
+
+ it 'responds with status 404' do
+ label = create(:admin_label)
+ delete :destroy, params: { id: label.to_param }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 4f3045eaa48..a66cb4364d7 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe AutocompleteController do
- let(:project) { create(:project) }
- let(:user) { project.first_owner }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.first_owner }
context 'GET users', feature_category: :user_management do
let!(:user2) { create(:user) }
@@ -25,6 +25,22 @@ RSpec.describe AutocompleteController do
expect(json_response.size).to eq(1)
expect(json_response.map { |u| u["username"] }).to include(user.username)
end
+
+ context "with push_code param" do
+ let(:reporter) { create(:user) }
+
+ before do
+ project.add_reporter(reporter)
+
+ get(:users, params: { project_id: project.id, push_code: 'true' })
+ end
+
+ it 'returns users that can push code', :aggregate_failures do
+ expect(json_response).to be_kind_of(Array)
+ expect(json_response.size).to eq(1)
+ expect(json_response.map { |user| user["username"] }).to match_array([user.username])
+ end
+ end
end
describe 'GET #users with unknown project' do
@@ -67,6 +83,7 @@ RSpec.describe AutocompleteController do
context 'non-member login for public project' do
let(:project) { create(:project, :public) }
+ let(:user) { project.first_owner }
before do
sign_in(non_member)
@@ -207,20 +224,6 @@ RSpec.describe AutocompleteController do
end
end
- context 'skip_users parameter included' do
- before do
- sign_in(user)
- end
-
- it 'skips the user IDs passed' do
- get(:users, params: { skip_users: [user, user2].map(&:id) })
-
- response_user_ids = json_response.map { |user| user['id'] }
-
- expect(response_user_ids).to contain_exactly(non_member.id)
- end
- end
-
context 'merge_request_iid parameter included' do
before do
sign_in(user)
diff --git a/spec/controllers/concerns/kas_cookie_spec.rb b/spec/controllers/concerns/kas_cookie_spec.rb
index c9490508690..355d1e04b86 100644
--- a/spec/controllers/concerns/kas_cookie_spec.rb
+++ b/spec/controllers/concerns/kas_cookie_spec.rb
@@ -89,12 +89,16 @@ RSpec.describe KasCookie, feature_category: :deployment_management do
end
context 'when KAS is on subdomain' do
- let_it_be(:kas_tunnel_url) { 'ws://kas.gitlab.example.com/k8s-proxy/' }
+ let_it_be(:kas_tunnel_url) { 'http://kas.gitlab.example.com/k8s-proxy/' }
it 'adds KAS url to CSP connect-src directive' do
expect(kas_csp_connect_src).to include(::Gitlab::Kas.tunnel_url)
end
+ it 'adds websocket connections' do
+ expect(kas_csp_connect_src).to include('ws://kas.gitlab.example.com/k8s-proxy/')
+ end
+
context 'when content_security_policy is disabled' do
let(:content_security_policy_enabled) { false }
@@ -104,6 +108,14 @@ RSpec.describe KasCookie, feature_category: :deployment_management do
end
end
+ context 'when KAS tunnel has ssl' do
+ let_it_be(:kas_tunnel_url) { 'https://kas.gitlab.example.com/k8s-proxy/' }
+
+ it 'adds websocket connections' do
+ expect(kas_csp_connect_src).to include('wss://kas.gitlab.example.com/k8s-proxy/')
+ end
+ end
+
context 'when KAS tunnel url is configured without trailing slash' do
let_it_be(:kas_tunnel_url) { 'ws://kas.gitlab.example.com/k8s-proxy' }
diff --git a/spec/controllers/concerns/onboarding/status_spec.rb b/spec/controllers/concerns/onboarding/status_spec.rb
index 3f6e597a235..b14346dc052 100644
--- a/spec/controllers/concerns/onboarding/status_spec.rb
+++ b/spec/controllers/concerns/onboarding/status_spec.rb
@@ -9,13 +9,13 @@ RSpec.describe Onboarding::Status, feature_category: :onboarding do
let_it_be(:source) { member.group }
describe '#continue_full_onboarding?' do
- subject { described_class.new(nil).continue_full_onboarding? }
+ subject { described_class.new(nil, {}, user).continue_full_onboarding? }
it { is_expected.to eq(false) }
end
describe '#single_invite?' do
- subject { described_class.new(user).single_invite? }
+ subject { described_class.new(nil, nil, user).single_invite? }
context 'when there is only one member for the user' do
context 'when the member source exists' do
@@ -39,7 +39,7 @@ RSpec.describe Onboarding::Status, feature_category: :onboarding do
end
describe '#last_invited_member' do
- subject { described_class.new(user).last_invited_member }
+ subject { described_class.new(nil, nil, user).last_invited_member }
it { is_expected.to eq(member) }
@@ -57,7 +57,7 @@ RSpec.describe Onboarding::Status, feature_category: :onboarding do
end
describe '#last_invited_member_source' do
- subject { described_class.new(user).last_invited_member_source }
+ subject { described_class.new(nil, nil, user).last_invited_member_source }
context 'when a member exists' do
it { is_expected.to eq(source) }
@@ -77,7 +77,7 @@ RSpec.describe Onboarding::Status, feature_category: :onboarding do
end
describe '#invite_with_tasks_to_be_done?' do
- subject { described_class.new(user).invite_with_tasks_to_be_done? }
+ subject { described_class.new(nil, nil, user).invite_with_tasks_to_be_done? }
context 'when there are tasks_to_be_done with one member' do
let_it_be(:member) { create(:group_member, user: user, tasks_to_be_done: tasks_to_be_done) }
diff --git a/spec/controllers/concerns/product_analytics_tracking_spec.rb b/spec/controllers/concerns/product_analytics_tracking_spec.rb
index 65c2c77c027..1394325014b 100644
--- a/spec/controllers/concerns/product_analytics_tracking_spec.rb
+++ b/spec/controllers/concerns/product_analytics_tracking_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_analytics do
+RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_analytics_data_management do
include TrackingHelpers
include SnowplowHelpers
@@ -14,178 +14,296 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
let!(:group) { create(:group) }
let_it_be(:project) { create(:project) }
- before do
- allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
- stub_const("#{described_class}::MIGRATED_EVENTS", ['an_event'])
- end
+ describe '.track_internal_event' do
+ controller(ApplicationController) do
+ include ProductAnalyticsTracking
- controller(ApplicationController) do
- include ProductAnalyticsTracking
-
- skip_before_action :authenticate_user!, only: :show
- track_event(
- :index,
- :show,
- name: 'an_event',
- action: 'an_action',
- label: 'a_label',
- destinations: [:redis_hll, :snowplow],
- conditions: [:custom_condition_one?, :custom_condition_two?]
- ) { |controller| controller.get_custom_id }
-
- def index
- render html: 'index'
- end
+ skip_before_action :authenticate_user!, only: [:show]
+ track_internal_event :index, :show, name: 'g_compliance_dashboard', conditions: [:custom_condition?]
- def new
- render html: 'new'
- end
+ def index
+ render html: 'index'
+ end
- def show
- render html: 'show'
- end
+ def show
+ render html: 'show'
+ end
- def get_custom_id
- 'some_custom_id'
- end
+ private
- private
+ def tracking_namespace_source
+ tracking_project_source.namespace
+ end
- def tracking_namespace_source
- Group.first
- end
+ def tracking_project_source
+ Project.first
+ end
- def tracking_project_source
- Project.first
+ def custom_condition?
+ true
+ end
end
- def custom_condition_one?
- true
+ def expect_internal_tracking(tracked_user: user)
+ expect(Gitlab::InternalEvents).to receive(:track_event).with('g_compliance_dashboard',
+ user: tracked_user,
+ project: project,
+ namespace: project.namespace).once
end
- def custom_condition_two?
- true
+ def expect_no_internal_tracking
+ expect(Gitlab::InternalEvents).not_to receive(:track_event)
end
- end
- def expect_redis_hll_tracking
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
- .with(event_name, values: instance_of(String))
- end
+ context 'when user is logged in' do
+ let(:namespace) { project.namespace }
- def expect_snowplow_tracking(user)
- context = Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name)
- .to_context.to_json
-
- expect_snowplow_event(
- category: anything,
- action: event_action,
- property: event_name,
- label: event_label,
- project: project,
- namespace: group,
- user: user,
- context: [context]
- )
- end
+ subject(:track_internal_event) { get :index }
- def expect_no_tracking
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ before do
+ sign_in(user)
+ end
- expect_no_snowplow_event
- end
+ it 'tracks the events if DNT is disabled' do
+ stub_do_not_track('0')
- context 'when user is logged in' do
- before do
- sign_in(user)
- end
+ expect_internal_tracking
- it 'tracks the event' do
- get :index
+ subject
+ end
- expect_redis_hll_tracking
- expect_snowplow_tracking(user)
- end
+ it 'does not track the event if DNT is enabled' do
+ stub_do_not_track('1')
- it 'tracks the event if DNT is not enabled' do
- stub_do_not_track('0')
+ expect_no_internal_tracking
- get :index
+ subject
+ end
- expect_redis_hll_tracking
- expect_snowplow_tracking(user)
- end
+ it 'does not track the event if the format is not HTML' do
+ expect_no_internal_tracking
- it 'does not track the event if DNT is enabled' do
- stub_do_not_track('1')
+ get :index, format: :json
+ end
- get :index
+ it 'does not track the event if a custom condition returns false' do
+ allow(controller).to receive(:custom_condition?).and_return(false)
- expect_no_tracking
+ expect_no_internal_tracking
+
+ subject
+ end
end
- it 'does not track the event if the format is not HTML' do
- get :index, format: :json
+ context 'when user is not logged in' do
+ let(:visitor_id) { SecureRandom.uuid }
- expect_no_tracking
- end
+ it 'tracks the event when there is a visitor id' do
+ cookies[:visitor_id] = { value: visitor_id, expires: 24.months }
- it 'does not track the event if a custom condition returns false' do
- allow(controller).to receive(:custom_condition_two?).and_return(false)
+ expect_internal_tracking(tracked_user: nil)
- get :index
+ get :show, params: { id: 1 }
+ end
+
+ context 'and there is no visitor_id' do
+ it 'does not track the event' do
+ expect_no_internal_tracking
- expect_no_tracking
+ subject
+ end
+ end
end
- it 'does not track the event for untracked actions' do
- get :new
+ context 'when there is no custom_id set' do
+ before do
+ allow(controller).to receive(:get_custom_id).and_return(nil)
+ end
+
+ it 'does not track' do
+ expect_no_internal_tracking
- expect_no_tracking
+ subject
+ end
end
end
- context 'when user is not logged in' do
- let(:visitor_id) { SecureRandom.uuid }
+ describe '.track_event' do
+ before do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ stub_const("#{described_class}::MIGRATED_EVENTS", ['an_event'])
+ end
+
+ controller(ApplicationController) do
+ include ProductAnalyticsTracking
+
+ skip_before_action :authenticate_user!, only: :show
+ track_event(
+ :index,
+ :show,
+ name: 'an_event',
+ action: 'an_action',
+ label: 'a_label',
+ destinations: [:redis_hll, :snowplow],
+ conditions: [:custom_condition_one?, :custom_condition_two?]
+ ) { |controller| controller.get_custom_id }
+
+ track_internal_event :internal_event, name: 'an_internal_event'
+
+ def index
+ render html: 'index'
+ end
+
+ def new
+ render html: 'new'
+ end
+
+ def show
+ render html: 'show'
+ end
+
+ def get_custom_id
+ 'some_custom_id'
+ end
+
+ private
- it 'tracks the event when there is a visitor id' do
- cookies[:visitor_id] = { value: visitor_id, expires: 24.months }
+ def tracking_namespace_source
+ Group.first
+ end
+
+ def tracking_project_source
+ Project.first
+ end
- get :show, params: { id: 1 }
+ def custom_condition_one?
+ true
+ end
- expect_redis_hll_tracking
- expect_snowplow_tracking(nil)
+ def custom_condition_two?
+ true
+ end
end
- end
- context 'when user is not logged in and there is no visitor_id' do
- it 'does not track the event' do
- get :index
+ def expect_redis_hll_tracking
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
+ .with(event_name, values: instance_of(String))
+ end
- expect_no_tracking
+ def expect_snowplow_tracking(user)
+ context = Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name)
+ .to_context.to_json
+
+ expect_snowplow_event(
+ category: anything,
+ action: event_action,
+ property: event_name,
+ label: event_label,
+ project: project,
+ namespace: group,
+ user: user,
+ context: [context]
+ )
end
- it 'tracks the event when there is custom id' do
- get :show, params: { id: 1 }
+ def expect_no_tracking
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- expect_redis_hll_tracking
- expect_snowplow_tracking(nil)
+ expect_no_snowplow_event
end
- context 'when there is no custom_id set' do
+ context 'when user is logged in' do
before do
- allow(controller).to receive(:get_custom_id).and_return(nil)
+ sign_in(user)
+ end
+
+ it 'tracks the event' do
+ get :index
+
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(user)
+ end
+
+ it 'tracks the event if DNT is not enabled' do
+ stub_do_not_track('0')
+
+ get :index
- get :show, params: { id: 2 }
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(user)
end
- it 'does not track the HLL event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ it 'does not track the event if DNT is enabled' do
+ stub_do_not_track('1')
+
+ get :index
+
+ expect_no_tracking
+ end
+
+ it 'does not track the event if the format is not HTML' do
+ get :index, format: :json
+
+ expect_no_tracking
end
- it 'tracks Snowplow event' do
+ it 'does not track the event if a custom condition returns false' do
+ allow(controller).to receive(:custom_condition_two?).and_return(false)
+
+ get :index
+
+ expect_no_tracking
+ end
+
+ it 'does not track the event for untracked actions' do
+ get :new
+
+ expect_no_tracking
+ end
+ end
+
+ context 'when user is not logged in' do
+ let(:visitor_id) { SecureRandom.uuid }
+
+ it 'tracks the event when there is a visitor id' do
+ cookies[:visitor_id] = { value: visitor_id, expires: 24.months }
+
+ get :show, params: { id: 1 }
+
+ expect_redis_hll_tracking
expect_snowplow_tracking(nil)
end
end
+
+ context 'when user is not logged in and there is no visitor_id' do
+ it 'does not track the event' do
+ get :index
+
+ expect_no_tracking
+ end
+
+ it 'tracks the event when there is custom id' do
+ get :show, params: { id: 1 }
+
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(nil)
+ end
+
+ context 'when there is no custom_id set' do
+ before do
+ allow(controller).to receive(:get_custom_id).and_return(nil)
+
+ get :show, params: { id: 2 }
+ end
+
+ it 'does not track the HLL event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ end
+
+ it 'tracks Snowplow event' do
+ expect_snowplow_tracking(nil)
+ end
+ end
+ end
end
end
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index be47b32ec4f..8fcbf4049a5 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -234,6 +234,26 @@ RSpec.describe GraphqlController, feature_category: :integrations do
post :execute
end
+ it 'calls the track visual studio extension api when trackable method' do
+ agent = 'code-completions-language-server-experiment (gl-visual-studio-extension:1.0.0.0; arch:X64;)'
+ request.env['HTTP_USER_AGENT'] = agent
+
+ expect(Gitlab::UsageDataCounters::VisualStudioExtensionActivityUniqueCounter)
+ .to receive(:track_api_request_when_trackable).with(user_agent: agent, user: user)
+
+ post :execute
+ end
+
+ it 'calls the track neovim plugin api when trackable method' do
+ agent = 'code-completions-language-server-experiment (Neovim:0.9.0; gitlab.vim (v0.1.0); arch:amd64; os:darwin)'
+ request.env['HTTP_USER_AGENT'] = agent
+
+ expect(Gitlab::UsageDataCounters::NeovimPluginActivityUniqueCounter)
+ .to receive(:track_api_request_when_trackable).with(user_agent: agent, user: user)
+
+ post :execute
+ end
+
context 'if using the GitLab CLI' do
it 'call trackable for the old UserAgent' do
agent = 'GLab - GitLab CLI'
@@ -379,6 +399,26 @@ RSpec.describe GraphqlController, feature_category: :integrations do
subject
end
+ it 'calls the track visual studio extension api when trackable method' do
+ agent = 'code-completions-language-server-experiment (gl-visual-studio-extension:1.0.0.0; arch:X64;)'
+ request.env['HTTP_USER_AGENT'] = agent
+
+ expect(Gitlab::UsageDataCounters::VisualStudioExtensionActivityUniqueCounter)
+ .to receive(:track_api_request_when_trackable).with(user_agent: agent, user: user)
+
+ subject
+ end
+
+ it 'calls the track neovim plugin api when trackable method' do
+ agent = 'code-completions-language-server-experiment (Neovim:0.9.0; gitlab.vim (v0.1.0); arch:amd64; os:darwin)'
+ request.env['HTTP_USER_AGENT'] = agent
+
+ expect(Gitlab::UsageDataCounters::NeovimPluginActivityUniqueCounter)
+ .to receive(:track_api_request_when_trackable).with(user_agent: agent, user: user)
+
+ subject
+ end
+
it 'calls the track gitlab cli when trackable method' do
agent = 'GLab - GitLab CLI'
request.env['HTTP_USER_AGENT'] = agent
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index 6c747d4f00f..16510c618ef 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -63,7 +63,8 @@ RSpec.describe Groups::ClustersController, feature_category: :deployment_managem
let(:total_count) { group.clusters.page.total_count }
before do
- create_list(:cluster, 30, :provided_by_gcp, :production_environment, cluster_type: :group_type, groups: [group])
+ allow(Clusters::Cluster).to receive(:default_per_page).and_return(1)
+ create_list(:cluster, 2, :provided_by_gcp, :production_environment, cluster_type: :group_type, groups: [group])
end
it 'redirects to the page' do
diff --git a/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb b/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
index ed79712f828..344e15557ce 100644
--- a/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
@@ -17,93 +17,78 @@ RSpec.describe Groups::DependencyProxyAuthController do
end
end
- context 'with valid JWT' do
- context 'user' do
- let_it_be(:user) { create(:user) }
+ context 'with JWT' do
+ let(:jwt) { build_jwt(user) }
+ let(:token_header) { "Bearer #{jwt.encoded}" }
- let(:jwt) { build_jwt(user) }
- let(:token_header) { "Bearer #{jwt.encoded}" }
+ before do
+ request.headers['HTTP_AUTHORIZATION'] = token_header
+ end
+
+ context 'with valid JWT' do
+ context 'user' do
+ let_it_be(:user) { create(:user) }
- before do
- request.headers['HTTP_AUTHORIZATION'] = token_header
+ it { is_expected.to have_gitlab_http_status(:success) }
end
- it { is_expected.to have_gitlab_http_status(:success) }
- end
+ context 'group bot user' do
+ let_it_be(:user) { create(:user, :project_bot) }
- context 'deploy token' do
- let_it_be(:user) { create(:deploy_token) }
+ it { is_expected.to have_gitlab_http_status(:success) }
+ end
- let(:jwt) { build_jwt(user) }
- let(:token_header) { "Bearer #{jwt.encoded}" }
+ context 'deploy token' do
+ let_it_be(:user) { create(:deploy_token) }
- before do
- request.headers['HTTP_AUTHORIZATION'] = token_header
+ it { is_expected.to have_gitlab_http_status(:success) }
end
-
- it { is_expected.to have_gitlab_http_status(:success) }
end
- end
- context 'with invalid JWT' do
- context 'bad user' do
- let(:jwt) { build_jwt(double('bad_user', id: 999)) }
- let(:token_header) { "Bearer #{jwt.encoded}" }
+ context 'with invalid JWT' do
+ context 'bad user' do
+ let(:jwt) { build_jwt(double('bad_user', id: 999)) }
- before do
- request.headers['HTTP_AUTHORIZATION'] = token_header
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
- it { is_expected.to have_gitlab_http_status(:unauthorized) }
- end
+ context 'token with no user id' do
+ let(:token_header) { "Bearer #{build_jwt.encoded}" }
- context 'token with no user id' do
- let(:token_header) { "Bearer #{build_jwt.encoded}" }
+ before do
+ request.headers['HTTP_AUTHORIZATION'] = token_header
+ end
- before do
- request.headers['HTTP_AUTHORIZATION'] = token_header
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
- it { is_expected.to have_gitlab_http_status(:unauthorized) }
- end
-
- context 'expired token' do
- let_it_be(:user) { create(:user) }
+ context 'expired token' do
+ let_it_be(:user) { create(:user) }
- let(:jwt) { build_jwt(user, expire_time: Time.zone.now - 1.hour) }
- let(:token_header) { "Bearer #{jwt.encoded}" }
+ let(:jwt) { build_jwt(user, expire_time: Time.zone.now - 1.hour) }
- before do
- request.headers['HTTP_AUTHORIZATION'] = token_header
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
- it { is_expected.to have_gitlab_http_status(:unauthorized) }
- end
-
- context 'expired deploy token' do
- let_it_be(:user) { create(:deploy_token, :expired) }
+ context 'group bot user from an expired token' do
+ let_it_be(:user) { create(:user, :project_bot) }
- let(:jwt) { build_jwt(user) }
- let(:token_header) { "Bearer #{jwt.encoded}" }
+ let(:jwt) { build_jwt(user, expire_time: Time.zone.now - 1.hour) }
- before do
- request.headers['HTTP_AUTHORIZATION'] = token_header
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
- it { is_expected.to have_gitlab_http_status(:unauthorized) }
- end
+ context 'expired deploy token' do
+ let_it_be(:user) { create(:deploy_token, :expired) }
- context 'revoked deploy token' do
- let_it_be(:user) { create(:deploy_token, :revoked) }
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
+ end
- let(:jwt) { build_jwt(user) }
- let(:token_header) { "Bearer #{jwt.encoded}" }
+ context 'revoked deploy token' do
+ let_it_be(:user) { create(:deploy_token, :revoked) }
- before do
- request.headers['HTTP_AUTHORIZATION'] = token_header
+ it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
-
- it { is_expected.to have_gitlab_http_status(:unauthorized) }
end
end
end
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index 89a75fb53f2..3fb5e08f065 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::DependencyProxyForContainersController do
+RSpec.describe Groups::DependencyProxyForContainersController, feature_category: :dependency_proxy do
include HttpBasicAuthHelpers
include DependencyProxyHelpers
include WorkhorseHelpers
diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb
index 916b2cf10dd..ebe379d948c 100644
--- a/spec/controllers/groups/labels_controller_spec.rb
+++ b/spec/controllers/groups/labels_controller_spec.rb
@@ -89,6 +89,13 @@ RSpec.describe Groups::LabelsController, feature_category: :team_planning do
expect { label.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
+ it 'does not remove the label if it is locked' do
+ label = create(:group_label, group: group, lock_on_merge: true)
+ delete :destroy, params: { group_id: group.to_param, id: label.to_param }
+
+ expect(label.reload).to eq label
+ end
+
context 'when label is succesfuly destroyed' do
it 'redirects to the group labels page' do
label = create(:group_label, group: group)
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 8617cc8af8f..87a30ed1234 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -351,6 +351,44 @@ RSpec.describe GroupsController, factory_default: :keep, feature_category: :code
end
end
+ context 'when creating a group with `default_branch_protection_defaults` attribute' do
+ before do
+ sign_in(user)
+ end
+
+ context 'when user has ability to write update_default_branch_protection' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :update_default_branch_protection, an_instance_of(Group)).and_return(true)
+ end
+
+ subject do
+ post :create, params: { group: { name: 'new_group', path: 'new_group', default_branch_protection_defaults: ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys } }, as: :json
+ end
+
+ context 'for users who have the ability to create a group with `default_branch_protection_defaults`' do
+ it 'creates group with the specified default branch protection level' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(Group.last.default_branch_protection_defaults).to eq(::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys)
+ end
+ end
+ end
+
+ context 'for users who do not have the ability to create a group with `default_branch_protection`' do
+ it 'does not create the group with the specified branch protection level' do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection) { false }
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(Group.last.default_branch_protection_defaults).not_to eq(::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys)
+ end
+ end
+ end
+
context 'when creating a group with captcha protection' do
before do
sign_in(user)
diff --git a/spec/controllers/import/manifest_controller_spec.rb b/spec/controllers/import/manifest_controller_spec.rb
index 69eb736375c..c06bd660cd2 100644
--- a/spec/controllers/import/manifest_controller_spec.rb
+++ b/spec/controllers/import/manifest_controller_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state, fea
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- before(:all) do
+ before_all do
group.add_maintainer(user)
end
diff --git a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
index 3c9d495c33c..48b315646de 100644
--- a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
+++ b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
@@ -37,8 +37,7 @@ RSpec.describe JiraConnect::AppDescriptorController, feature_category: :integrat
url: 'https://gitlab.com'
},
links: {
- documentation: 'http://test.host/help/integration/jira_development_panel#gitlabcom-1',
- feedback: 'https://gitlab.com/gitlab-org/gitlab/-/issues/413652'
+ documentation: 'http://test.host/help/integration/jira_development_panel#gitlabcom-1'
},
authentication: {
type: 'jwt'
@@ -91,19 +90,5 @@ RSpec.describe JiraConnect::AppDescriptorController, feature_category: :integrat
)
)
end
-
- context 'when feature flag jira_for_cloud_app_feedback_link is disabled' do
- before do
- stub_feature_flags(jira_for_cloud_app_feedback_link: false)
- end
-
- it 'does not include the feedback link' do
- get :show
-
- expect(descriptor[:links]).not_to include(
- feedback: 'https://gitlab.com/gitlab-org/gitlab/-/issues/413652'
- )
- end
- end
end
end
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index 75bdad8b19e..8d2face0233 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -5,6 +5,32 @@ require 'spec_helper'
RSpec.describe OmniauthCallbacksController, type: :controller, feature_category: :system_access do
include LoginHelpers
+ shared_examples 'store provider2FA value in session' do
+ before do
+ stub_omniauth_setting(allow_bypass_two_factor: true)
+ saml_config.args[:upstream_two_factor_authn_contexts] << "urn:oasis:names:tc:SAML:2.0:ac:classes:Password"
+ sign_in user
+ end
+
+ it "sets the session varible for provider 2FA" do
+ post :saml, params: { SAMLResponse: mock_saml_response }
+
+ expect(session[:provider_2FA]).to eq(true)
+ end
+
+ context 'when by_pass_two_factor_for_current_session feature flag is false' do
+ before do
+ stub_feature_flags(by_pass_two_factor_for_current_session: false)
+ end
+
+ it "does not set the session variable for provider 2FA" do
+ post :saml, params: { SAMLResponse: mock_saml_response }
+
+ expect(session[:provider_2FA]).to be_nil
+ end
+ end
+ end
+
describe 'omniauth' do
let(:user) { create(:omniauth_user, extern_uid: extern_uid, provider: provider) }
let(:additional_info) { {} }
@@ -593,6 +619,12 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
post :saml, params: { SAMLResponse: mock_saml_response }
end
+
+ context 'with IDP bypass two factor request' do
+ let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml') }
+
+ it_behaves_like 'store provider2FA value in session'
+ end
end
context 'with a blocked user trying to log in when there are hooks set up' do
@@ -623,9 +655,14 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
it 'authenticate with SAML module' do
expect(@controller).to receive(:omniauth_flow).with(Gitlab::Auth::Saml).and_call_original
post :saml_okta, params: { SAMLResponse: mock_saml_response }
+
expect(request.env['warden']).to be_authenticated
end
end
+
+ context 'with IDP bypass two factor request' do
+ it_behaves_like 'store provider2FA value in session'
+ end
end
describe 'enable admin mode' do
diff --git a/spec/controllers/profiles/preferences_controller_spec.rb b/spec/controllers/profiles/preferences_controller_spec.rb
index f5c97f63293..b4ffe0bc844 100644
--- a/spec/controllers/profiles/preferences_controller_spec.rb
+++ b/spec/controllers/profiles/preferences_controller_spec.rb
@@ -110,31 +110,14 @@ RSpec.describe Profiles::PreferencesController do
end
end
- context 'on disable_follow_users feature flag' do
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(disable_follow_users: false)
- end
+ context 'on enabled_following setting' do
+ it 'does not update enabled_following preference of user' do
+ prefs = { enabled_following: false }
- it 'does not update enabled_following preference of user' do
- prefs = { enabled_following: false }
-
- go params: prefs
- user.reload
-
- expect(user.enabled_following).to eq(true)
- end
- end
-
- context 'with feature flag enabled' do
- it 'does not update enabled_following preference of user' do
- prefs = { enabled_following: false }
-
- go params: prefs
- user.reload
+ go params: prefs
+ user.reload
- expect(user.enabled_following).to eq(false)
- end
+ expect(user.enabled_following).to eq(false)
end
end
end
diff --git a/spec/controllers/projects/autocomplete_sources_controller_spec.rb b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
index d0bfbeae78f..1745dfe3af0 100644
--- a/spec/controllers/projects/autocomplete_sources_controller_spec.rb
+++ b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
@@ -196,23 +196,28 @@ RSpec.describe Projects::AutocompleteSourcesController do
end
end
- shared_examples 'only public members are returned for public project' do
+ shared_examples 'returns all members of public project' do
before do
stub_feature_flags(disable_all_mention: false)
end
- it 'only returns public members' do
+ it 'returns members including those from invited private groups' do
get :members, format: :json, params: { namespace_id: group.path, project_id: public_project.path, type: issuable_type }
expect(members_by_username('all').symbolize_keys).to include(
username: 'all',
name: 'All Project and Group Members',
- count: 1)
+ count: 2)
expect(members_by_username(user.username).symbolize_keys).to include(
type: user.class.name,
name: user.name,
avatar_url: user.avatar_url)
+
+ expect(members_by_username(invited_private_member.username).symbolize_keys).to include(
+ type: invited_private_member.class.name,
+ name: invited_private_member.name,
+ avatar_url: invited_private_member.avatar_url)
end
context 'when `disable_all_mention` FF is enabled' do
@@ -234,7 +239,7 @@ RSpec.describe Projects::AutocompleteSourcesController do
let(:issuable_type) { private_issue.class.name }
end
- it_behaves_like 'only public members are returned for public project' do
+ it_behaves_like 'returns all members of public project' do
let(:issuable_type) { issue.class.name }
end
end
@@ -244,7 +249,7 @@ RSpec.describe Projects::AutocompleteSourcesController do
let(:issuable_type) { private_work_item.class.name }
end
- it_behaves_like 'only public members are returned for public project' do
+ it_behaves_like 'returns all members of public project' do
let(:issuable_type) { work_item.class.name }
end
end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 15b7ddd85ea..55296ed3d5c 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -57,7 +57,8 @@ RSpec.describe Projects::ClustersController, feature_category: :deployment_manag
let(:total_count) { project.clusters.page.total_count }
before do
- create_list(:cluster, 30, :provided_by_gcp, :production_environment, projects: [project])
+ allow(Clusters::Cluster).to receive(:default_per_page).and_return(1)
+ create_list(:cluster, 2, :provided_by_gcp, :production_environment, projects: [project])
end
it 'redirects to the page' do
diff --git a/spec/controllers/projects/compare_controller_spec.rb b/spec/controllers/projects/compare_controller_spec.rb
index 7dc9bcd9677..662d2920e12 100644
--- a/spec/controllers/projects/compare_controller_spec.rb
+++ b/spec/controllers/projects/compare_controller_spec.rb
@@ -282,6 +282,19 @@ RSpec.describe Projects::CompareController, feature_category: :source_code_manag
end
end
+ context 'when the from_ref and to_ref are the same' do
+ let(:from_project_id) { nil }
+ let(:from_ref) { 'master' }
+ let(:to_ref) { "master" }
+
+ it 'shows a message that refs are identical' do
+ show_request
+
+ expect(response).to be_successful
+ expect(response.body).to include('are the same')
+ end
+ end
+
context 'when the source ref is invalid' do
let(:from_project_id) { nil }
let(:from_ref) { "master%' AND 2554=4423 AND '%'='" }
diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb
index 0c8677ea4b9..8251b963563 100644
--- a/spec/controllers/projects/discussions_controller_spec.rb
+++ b/spec/controllers/projects/discussions_controller_spec.rb
@@ -2,18 +2,19 @@
require 'spec_helper'
-RSpec.describe Projects::DiscussionsController do
- let(:user) { create(:user) }
- let(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.source_project }
- let(:note) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
- let(:discussion) { note.discussion }
+RSpec.describe Projects::DiscussionsController, feature_category: :team_planning do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:note, reload: true) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
+ let_it_be(:user) { create(:user) }
let(:request_params) do
{
namespace_id: project.namespace,
project_id: project,
- merge_request_id: merge_request,
+ noteable_type: 'merge_requests',
+ noteable_id: merge_request,
id: note.discussion_id
}
end
@@ -32,7 +33,7 @@ RSpec.describe Projects::DiscussionsController do
end
context 'when user is authorized to read the MR' do
- before do
+ before_all do
project.add_reporter(user)
end
@@ -43,21 +44,20 @@ RSpec.describe Projects::DiscussionsController do
end
it 'returns status 404 if MR does not exists' do
- merge_request.destroy!
-
- get :show, params: request_params, session: { format: :json }
+ get :show, params: request_params.merge(noteable_id: non_existing_record_id), session: { format: :json }
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when user is authorized but note is LegacyDiffNote' do
- before do
+ before_all do
project.add_developer(user)
- note.update!(type: 'LegacyDiffNote')
end
it 'returns status 200' do
+ note.update!(type: 'LegacyDiffNote')
+
get :show, params: request_params, session: { format: :json }
expect(response).to have_gitlab_http_status(:ok)
@@ -79,7 +79,7 @@ RSpec.describe Projects::DiscussionsController do
end
context "when the user is authorized to resolve the discussion" do
- before do
+ before_all do
project.add_developer(user)
end
@@ -134,7 +134,6 @@ RSpec.describe Projects::DiscussionsController do
context 'diff discussion' do
let(:note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project) }
- let(:discussion) { note.discussion }
it "returns truncated diff lines" do
post :resolve, params: request_params
@@ -143,6 +142,40 @@ RSpec.describe Projects::DiscussionsController do
end
end
end
+
+ context 'on an Issue' do
+ let_it_be(:note, reload: true) { create(:discussion_note_on_issue, noteable: issue, project: project) }
+
+ let(:request_params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ noteable_type: 'issues',
+ noteable_id: issue,
+ id: note.discussion_id
+ }
+ end
+
+ it 'resolves the discussion and returns status 200' do
+ post :resolve, params: request_params
+
+ expect(note.reload.resolved_at).not_to be_nil
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when resolvable_issue_threads is disabled' do
+ before do
+ stub_feature_flags(resolvable_issue_threads: false)
+ end
+
+ it 'does not resolve the discussion and returns status 404' do
+ post :resolve, params: request_params
+
+ expect(note.reload.resolved_at).to be_nil
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
end
@@ -162,7 +195,7 @@ RSpec.describe Projects::DiscussionsController do
end
context "when the user is authorized to resolve the discussion" do
- before do
+ before_all do
project.add_developer(user)
end
@@ -199,18 +232,46 @@ RSpec.describe Projects::DiscussionsController do
expect(response).to have_gitlab_http_status(:ok)
end
- context "when vue_mr_discussions cookie is present" do
- before do
- cookies[:vue_mr_discussions] = 'true'
+ it "renders discussion with serializer" do
+ expect_next_instance_of(DiscussionSerializer) do |instance|
+ expect(instance).to receive(:represent)
+ .with(instance_of(Discussion), { context: instance_of(described_class), render_truncated_diff_lines: true })
end
- it "renders discussion with serializer" do
- expect_next_instance_of(DiscussionSerializer) do |instance|
- expect(instance).to receive(:represent)
- .with(instance_of(Discussion), { context: instance_of(described_class), render_truncated_diff_lines: true })
- end
+ delete :unresolve, params: request_params
+ end
+ end
+
+ context 'on an Issue' do
+ let_it_be(:note, reload: true) { create(:discussion_note_on_issue, noteable: issue, project: project) }
+
+ let(:request_params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ noteable_type: 'issues',
+ noteable_id: issue,
+ id: note.discussion_id
+ }
+ end
+
+ it 'unresolves the discussion and returns status 200' do
+ delete :unresolve, params: request_params
+
+ expect(note.reload.resolved_at).to be_nil
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ context 'when resolvable_issue_threads is disabled' do
+ before do
+ stub_feature_flags(resolvable_issue_threads: false)
+ end
+
+ it 'does not unresolve the discussion and returns status 404' do
delete :unresolve, params: request_params
+
+ expect(note.reload.resolved_at).not_to be_nil
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 5f03d721fe7..4b091e9221e 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -373,7 +373,7 @@ RSpec.describe Projects::EnvironmentsController, feature_category: :continuous_d
end
context 'when stop action' do
- it 'returns action url for single stop action' do
+ it 'returns job url for a stop action when job is build' do
action = create(:ci_build, :manual)
allow_any_instance_of(Environment)
@@ -387,6 +387,20 @@ RSpec.describe Projects::EnvironmentsController, feature_category: :continuous_d
project_job_url(project, action) })
end
+ it 'returns pipeline url for a stop action when job is bridge' do
+ action = create(:ci_bridge, :manual)
+
+ allow_any_instance_of(Environment)
+ .to receive_messages(available?: true, stop_with_actions!: [action])
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq(
+ { 'redirect_url' =>
+ project_pipeline_url(project, action.pipeline_id) })
+ end
+
it 'returns environment url for multiple stop actions' do
actions = create_list(:ci_build, 2, :manual)
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index f9ce77a44ba..7b576533ae5 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1102,6 +1102,14 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
end
end
+ context 'when trying to create an epic' do
+ it 'defaults to issue type' do
+ issue = post_new_issue(issue_type: 'epic')
+
+ expect(issue.work_item_type.base_type).to eq('issue')
+ end
+ end
+
context 'when create service return an unrecoverable error with http_status' do
let(:http_status) { 403 }
@@ -1786,7 +1794,19 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
it 'returns discussion json' do
get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
- expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion discussion_path individual_note resolvable resolved resolved_at resolved_by resolved_by_push commit_id for_commit project_id confidential])
+ expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion discussion_path individual_note resolvable commit_id for_commit project_id confidential resolve_path resolved resolved_at resolved_by resolved_by_push])
+ end
+
+ context 'when resolvable_issue_threads is disabled' do
+ before do
+ stub_feature_flags(resolvable_issue_threads: false)
+ end
+
+ it 'returns discussion json without resolved fields' do
+ get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
+
+ expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion discussion_path individual_note resolvable commit_id for_commit project_id confidential])
+ end
end
it 'renders the author status html if there is a status' do
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index 98982856d6c..74c16621fc5 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -297,6 +297,52 @@ RSpec.describe Projects::LabelsController, feature_category: :team_planning do
end
end
+ describe 'DELETE #destroy' do
+ context 'when current user has ability to destroy the label' do
+ before do
+ sign_in(user)
+ end
+
+ it 'removes the label' do
+ label = create(:label, project: project)
+ delete :destroy, params: { namespace_id: group.to_param, project_id: project.to_param, id: label.to_param }
+
+ expect { label.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'does not remove the label if it is locked' do
+ label = create(:label, project: project, lock_on_merge: true)
+ delete :destroy, params: { namespace_id: group.to_param, project_id: project.to_param, id: label.to_param }
+
+ expect(label.reload).to eq label
+ end
+
+ context 'when label is succesfuly destroyed' do
+ it 'redirects to the project labels page' do
+ label = create(:label, project: project)
+ delete :destroy, params: { namespace_id: group.to_param, project_id: project.to_param, id: label.to_param }
+
+ expect(response).to redirect_to(project_labels_path(project))
+ end
+ end
+ end
+
+ context 'when current_user does not have ability to destroy the label' do
+ let(:another_user) { create(:user) }
+
+ before do
+ sign_in(another_user)
+ end
+
+ it 'responds with status 404' do
+ label = create(:label, project: project)
+ delete :destroy, params: { namespace_id: group.to_param, project_id: project.to_param, id: label.to_param }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
def project_moved_message(redirect_route, project)
"Project '#{redirect_route.path}' was moved to '#{project.full_path}'. Please update any links and bookmarks that may still have the old path."
end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index f78d50bba24..0e3e3f31783 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -658,21 +658,17 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
let(:message) { 'My custom squash commit message' }
it 'passes the same message to SquashService', :sidekiq_inline do
- params = { squash: '1',
- squash_commit_message: message,
- sha: merge_request.diff_head_sha }
- expected_squash_params = { squash_commit_message: message,
- sha: merge_request.diff_head_sha,
- merge_request: merge_request }
-
- expect_next_instance_of(MergeRequests::SquashService, project: project, current_user: user, params: expected_squash_params) do |squash_service|
+ expect_next_instance_of(MergeRequests::SquashService,
+ merge_request: merge_request,
+ current_user: user,
+ commit_message: message) do |squash_service|
expect(squash_service).to receive(:execute).and_return({
status: :success,
squash_sha: SecureRandom.hex(20)
})
end
- merge_with_sha(params)
+ merge_with_sha(squash: '1', squash_commit_message: message, sha: merge_request.diff_head_sha)
end
end
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 940f6fed906..500fab471ef 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
}
end
- describe 'GET index' do
+ describe 'GET index', :freeze_time do
let(:request_params) do
{
namespace_id: project.namespace,
@@ -31,10 +31,13 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
let(:parsed_response) { json_response.with_indifferent_access }
let(:note_json) { parsed_response[:notes].first }
+ let(:last_fetched_at) { Time.zone.at(3.hours.ago.to_i) }
before do
sign_in(user)
project.add_developer(user)
+
+ request.headers['X-Last-Fetched-At'] = microseconds(last_fetched_at)
end
specify { expect(get(:index, params: request_params)).to have_request_urgency(:medium) }
@@ -46,10 +49,6 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
end
it 'passes last_fetched_at from headers to NotesFinder and MergeIntoNotesService' do
- last_fetched_at = Time.zone.at(3.hours.ago.to_i) # remove nanoseconds
-
- request.headers['X-Last-Fetched-At'] = microseconds(last_fetched_at)
-
expect(NotesFinder).to receive(:new)
.with(anything, hash_including(last_fetched_at: last_fetched_at))
.and_call_original
@@ -61,6 +60,14 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
get :index, params: request_params
end
+ it 'returns status 400 when last_fetched_at is not present' do
+ request.headers['X-Last-Fetched-At'] = nil
+
+ get :index, params: request_params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
context 'when user notes_filter is present' do
let(:notes_json) { parsed_response[:notes] }
let!(:comment) { create(:note, noteable: issue, project: project) }
diff --git a/spec/controllers/projects/pages_controller_spec.rb b/spec/controllers/projects/pages_controller_spec.rb
index ded5dd57e3e..34ec8d8d575 100644
--- a/spec/controllers/projects/pages_controller_spec.rb
+++ b/spec/controllers/projects/pages_controller_spec.rb
@@ -182,44 +182,29 @@ RSpec.describe Projects::PagesController, feature_category: :pages do
create(:project_setting, project: project, pages_unique_domain_enabled: false)
end
- context 'with pages_unique_domain feature flag disabled' do
- it 'does not update pages unique domain' do
- stub_feature_flags(pages_unique_domain: false)
+ it 'updates pages_https_only and pages_unique_domain and redirects back to pages settings' do
+ expect { patch :update, params: request_params }
+ .to change { project.project_setting.reload.pages_unique_domain_enabled }
+ .from(false).to(true)
- expect { patch :update, params: request_params }
- .not_to change { project.project_setting.reload.pages_unique_domain_enabled }
- end
+ expect(project.project_setting.pages_unique_domain).not_to be_nil
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(project_pages_path(project))
end
- context 'with pages_unique_domain feature flag enabled' do
- before do
- stub_feature_flags(pages_unique_domain: true)
- end
+ context 'when it fails to update' do
+ it 'adds an error message' do
+ expect_next_instance_of(Projects::UpdateService) do |service|
+ expect(service)
+ .to receive(:execute)
+ .and_return(status: :error, message: 'some error happened')
+ end
- it 'updates pages_https_only and pages_unique_domain and redirects back to pages settings' do
expect { patch :update, params: request_params }
- .to change { project.project_setting.reload.pages_unique_domain_enabled }
- .from(false).to(true)
+ .not_to change { project.project_setting.reload.pages_unique_domain_enabled }
- expect(project.project_setting.pages_unique_domain).not_to be_nil
- expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_pages_path(project))
- end
-
- context 'when it fails to update' do
- it 'adds an error message' do
- expect_next_instance_of(Projects::UpdateService) do |service|
- expect(service)
- .to receive(:execute)
- .and_return(status: :error, message: 'some error happened')
- end
-
- expect { patch :update, params: request_params }
- .not_to change { project.project_setting.reload.pages_unique_domain_enabled }
-
- expect(response).to redirect_to(project_pages_path(project))
- expect(flash[:alert]).to eq('some error happened')
- end
+ expect(flash[:alert]).to eq('some error happened')
end
end
end
diff --git a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
deleted file mode 100644
index 02407e31756..00000000000
--- a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
+++ /dev/null
@@ -1,287 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::PerformanceMonitoring::DashboardsController, feature_category: :metrics do
- let_it_be(:user) { create(:user) }
- let_it_be(:namespace) { create(:namespace) }
-
- let_it_be(:project) { create(:project, :repository, namespace: namespace) }
- let(:repository) { project.repository }
- let(:branch) { double(name: branch_name) }
- let(:commit_message) { 'test' }
- let(:branch_name) { "#{Time.current.to_i}_dashboard_new_branch" }
- let(:dashboard) { 'config/prometheus/common_metrics.yml' }
- let(:file_name) { 'custom_dashboard.yml' }
- let(:params) do
- {
- namespace_id: namespace,
- project_id: project,
- dashboard: dashboard,
- file_name: file_name,
- commit_message: commit_message,
- branch: branch_name,
- format: :json
- }
- end
-
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- describe 'POST #create' do
- context 'authenticated user' do
- before do
- sign_in(user)
- end
-
- context 'project with repository feature' do
- context 'with rights to push to the repository' do
- before do
- project.add_maintainer(user)
- end
-
- context 'valid parameters' do
- it 'delegates cloning to ::Metrics::Dashboard::CloneDashboardService' do
- allow(controller).to receive(:repository).and_return(repository)
- allow(repository).to receive(:find_branch).and_return(branch)
- dashboard_attrs = {
- dashboard: dashboard,
- file_name: file_name,
- commit_message: commit_message,
- branch: branch_name
- }
-
- service_instance = instance_double(::Metrics::Dashboard::CloneDashboardService)
- expect(::Metrics::Dashboard::CloneDashboardService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
- expect(service_instance).to receive(:execute).and_return(status: :success, http_status: :created, dashboard: { path: 'dashboard/path' })
-
- post :create, params: params
- end
-
- context 'request format json' do
- it 'returns services response' do
- allow(::Metrics::Dashboard::CloneDashboardService).to receive(:new).and_return(double(execute: { status: :success, dashboard: { path: ".gitlab/dashboards/#{file_name}" }, http_status: :created }))
- allow(controller).to receive(:repository).and_return(repository)
- allow(repository).to receive(:find_branch).and_return(branch)
-
- post :create, params: params
-
- expect(response).to have_gitlab_http_status :created
- expect(controller).to set_flash[:notice].to eq("Your dashboard has been copied. You can <a href=\"/-/ide/project/#{project.full_path}/edit/#{branch_name}/-/.gitlab/dashboards/#{file_name}\">edit it here</a>.")
- expect(json_response).to eq('status' => 'success', 'dashboard' => { 'path' => ".gitlab/dashboards/#{file_name}" })
- end
-
- context 'Metrics::Dashboard::CloneDashboardService failure' do
- it 'returns json with failure message', :aggregate_failures do
- allow(::Metrics::Dashboard::CloneDashboardService).to receive(:new).and_return(double(execute: { status: :error, message: 'something went wrong', http_status: :bad_request }))
-
- post :create, params: params
-
- expect(response).to have_gitlab_http_status :bad_request
- expect(json_response).to eq('error' => 'something went wrong')
- end
- end
-
- %w(commit_message file_name dashboard).each do |param|
- context "param #{param} is missing" do
- let(param.to_s) { nil }
-
- it 'responds with bad request status and error message', :aggregate_failures do
- post :create, params: params
-
- expect(response).to have_gitlab_http_status :bad_request
- expect(json_response).to eq('error' => "Request parameter #{param} is missing.")
- end
- end
- end
-
- context "param branch_name is missing" do
- let(:branch_name) { nil }
-
- it 'responds with bad request status and error message', :aggregate_failures do
- post :create, params: params
-
- expect(response).to have_gitlab_http_status :bad_request
- expect(json_response).to eq('error' => "Request parameter branch is missing.")
- end
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns 404 not found' do
- post :create, params: params
-
- expect(response).to have_gitlab_http_status :not_found
- end
- end
- end
- end
- end
-
- context 'without rights to push to repository' do
- before do
- project.add_guest(user)
- end
-
- it 'responds with :forbidden status code' do
- post :create, params: params
-
- expect(response).to have_gitlab_http_status :forbidden
- end
- end
- end
-
- context 'project without repository feature' do
- let_it_be(:project) { create(:project, namespace: namespace) }
-
- it 'responds with :not_found status code' do
- post :create, params: params
-
- expect(response).to have_gitlab_http_status :not_found
- end
- end
- end
- end
-
- describe 'PUT #update' do
- context 'authenticated user' do
- before do
- sign_in(user)
- end
-
- let(:file_content) do
- {
- "dashboard" => "Dashboard Title",
- "panel_groups" => [{
- "group" => "Group Title",
- "panels" => [{
- "type" => "area-chart",
- "title" => "Chart Title",
- "y_label" => "Y-Axis",
- "metrics" => [{
- "id" => "metric_of_ages",
- "unit" => "count",
- "label" => "Metric of Ages",
- "query_range" => "http_requests_total"
- }]
- }]
- }]
- }
- end
-
- let(:params) do
- {
- namespace_id: namespace,
- project_id: project,
- dashboard: dashboard,
- file_name: file_name,
- file_content: file_content,
- commit_message: commit_message,
- branch: branch_name,
- format: :json
- }
- end
-
- context 'project with repository feature' do
- context 'with rights to push to the repository' do
- before do
- project.add_maintainer(user)
- end
-
- context 'valid parameters' do
- context 'request format json' do
- let(:update_dashboard_service_params) { params.except(:namespace_id, :project_id, :format) }
-
- let(:update_dashboard_service_results) do
- {
- status: :success,
- http_status: :created,
- dashboard: {
- path: ".gitlab/dashboards/custom_dashboard.yml",
- display_name: "custom_dashboard.yml",
- default: false,
- system_dashboard: false
- }
- }
- end
-
- let(:update_dashboard_service) { instance_double(::Metrics::Dashboard::UpdateDashboardService, execute: update_dashboard_service_results) }
-
- it 'returns path to new file' do
- allow(controller).to receive(:repository).and_return(repository)
- allow(repository).to receive(:find_branch).and_return(branch)
- allow(::Metrics::Dashboard::UpdateDashboardService).to receive(:new).with(project, user, update_dashboard_service_params).and_return(update_dashboard_service)
-
- put :update, params: params
-
- expect(response).to have_gitlab_http_status :created
- expect(controller).to set_flash[:notice].to eq("Your dashboard has been updated. You can <a href=\"/-/ide/project/#{project.full_path}/edit/#{branch_name}/-/.gitlab/dashboards/#{file_name}\">edit it here</a>.")
- expect(json_response).to eq('status' => 'success', 'dashboard' => { 'default' => false, 'display_name' => "custom_dashboard.yml", 'path' => ".gitlab/dashboards/#{file_name}", 'system_dashboard' => false })
- end
-
- context 'UpdateDashboardService failure' do
- it 'returns json with failure message' do
- allow(::Metrics::Dashboard::UpdateDashboardService).to receive(:new).and_return(double(execute: { status: :error, message: 'something went wrong', http_status: :bad_request }))
-
- put :update, params: params
-
- expect(response).to have_gitlab_http_status :bad_request
- expect(json_response).to eq('error' => 'something went wrong')
- end
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns 404 not found' do
- put :update, params: params
-
- expect(response).to have_gitlab_http_status :not_found
- end
- end
- end
- end
-
- context 'missing branch' do
- let(:branch_name) { nil }
-
- it 'raises responds with :bad_request status code and error message' do
- put :update, params: params
-
- expect(response).to have_gitlab_http_status :bad_request
- expect(json_response).to eq('error' => "Request parameter branch is missing.")
- end
- end
- end
-
- context 'without rights to push to repository' do
- before do
- project.add_guest(user)
- end
-
- it 'responds with :forbidden status code' do
- put :update, params: params
-
- expect(response).to have_gitlab_http_status :forbidden
- end
- end
- end
-
- context 'project without repository feature' do
- let_it_be(:project) { create(:project, namespace: namespace) }
-
- it 'responds with :not_found status code' do
- put :update, params: params
-
- expect(response).to have_gitlab_http_status :not_found
- end
- end
- end
- end
-end
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
index e15f07a4e22..cd828c956a0 100644
--- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
+ let_it_be_with_reload(:project) { create(:project, :public, :repository) }
+ let_it_be_with_reload(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
before do
project.add_developer(user)
@@ -144,8 +144,7 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
end
end
- # Move this from `shared_context` to `describe` when `ci_refactoring_pipeline_schedule_create_service` is removed.
- shared_context 'POST #create' do # rubocop:disable RSpec/ContextWording
+ describe 'POST #create' do
describe 'functionality' do
before do
project.add_developer(user)
@@ -176,6 +175,20 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
expect(v.variable_type).to eq("file")
end
end
+
+ context 'when the user is not allowed to create a pipeline schedule with variables' do
+ before do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ it 'does not create a new schedule' do
+ expect { go }
+ .to not_change { Ci::PipelineSchedule.count }
+ .and not_change { Ci::PipelineScheduleVariable.count }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
context 'when variables_attributes has two variables and duplicated' do
@@ -188,8 +201,8 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
it 'returns an error that the keys of variable are duplicated' do
expect { go }
- .to change { Ci::PipelineSchedule.count }.by(0)
- .and change { Ci::PipelineScheduleVariable.count }.by(0)
+ .to not_change { Ci::PipelineSchedule.count }
+ .and not_change { Ci::PipelineScheduleVariable.count }
expect(assigns(:schedule).errors['variables']).not_to be_empty
end
@@ -227,16 +240,6 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
end
end
- it_behaves_like 'POST #create'
-
- context 'when the FF ci_refactoring_pipeline_schedule_create_service is disabled' do
- before do
- stub_feature_flags(ci_refactoring_pipeline_schedule_create_service: false)
- end
-
- it_behaves_like 'POST #create'
- end
-
describe 'PUT #update' do
describe 'functionality' do
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
@@ -266,6 +269,22 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
expect(pipeline_schedule.variables.last.key).to eq('AAA')
expect(pipeline_schedule.variables.last.value).to eq('AAA123')
end
+
+ context 'when the user is not allowed to update pipeline schedule variables' do
+ before do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ it 'does not update the schedule' do
+ expect { go }
+ .to not_change { Ci::PipelineScheduleVariable.count }
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ pipeline_schedule.reload
+ expect(pipeline_schedule.variables).to be_empty
+ end
+ end
end
context 'when params include two duplicated variables' do
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index a1dbd27f49a..63c870eb133 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -325,6 +325,14 @@ RSpec.describe Projects::Settings::CiCdController, feature_category: :continuous
end
end
+ context 'when changing forward_deployment_rollback_allowed' do
+ let(:params) { { ci_cd_settings_attributes: { forward_deployment_rollback_allowed: false } } }
+
+ it 'changes forward deployment rollback allowed' do
+ expect { subject }.to change { project.reload.ci_forward_deployment_rollback_allowed }.from(true).to(false)
+ end
+ end
+
context 'when max_artifacts_size is specified' do
let(:params) { { max_artifacts_size: 10 } }
diff --git a/spec/controllers/projects/settings/integrations_controller_spec.rb b/spec/controllers/projects/settings/integrations_controller_spec.rb
index 8c1cdf784aa..49851c82cc5 100644
--- a/spec/controllers/projects/settings/integrations_controller_spec.rb
+++ b/spec/controllers/projects/settings/integrations_controller_spec.rb
@@ -379,6 +379,26 @@ RSpec.describe Projects::Settings::IntegrationsController, feature_category: :in
end
end
end
+
+ context 'with chat notification integration which masks channel params' do
+ let_it_be(:integration) do
+ create(:discord_integration, project: project, note_channel: 'https://discord.com/api/webhook/note')
+ end
+
+ let(:message) { 'Discord Notifications settings saved and active.' }
+
+ it_behaves_like 'integration update'
+
+ context 'with masked channel param' do
+ let(:integration_params) { { active: true, note_channel: '************' } }
+
+ it_behaves_like 'integration update'
+
+ it 'does not update the channel' do
+ expect(integration.reload.note_channel).to eq('https://discord.com/api/webhook/note')
+ end
+ end
+ end
end
describe 'as JSON' do
diff --git a/spec/controllers/projects/tree_controller_spec.rb b/spec/controllers/projects/tree_controller_spec.rb
index ffec670e97d..a409030e359 100644
--- a/spec/controllers/projects/tree_controller_spec.rb
+++ b/spec/controllers/projects/tree_controller_spec.rb
@@ -90,8 +90,15 @@ RSpec.describe Projects::TreeController, feature_category: :source_code_manageme
context 'and explicitly requesting a branch' do
let(:ref_type) { 'heads' }
+ it 'checks for tree with ref_type' do
+ allow(project.repository).to receive(:tree).and_call_original
+ expect(project.repository).to receive(:tree).with(id, '', ref_type: 'heads').and_call_original
+ request
+ end
+
it 'finds the branch' do
expect(requested_ref_double).not_to receive(:find)
+
request
expect(response).to be_ok
end
@@ -100,6 +107,12 @@ RSpec.describe Projects::TreeController, feature_category: :source_code_manageme
context 'and explicitly requesting a tag' do
let(:ref_type) { 'tags' }
+ it 'checks for tree with ref_type' do
+ allow(project.repository).to receive(:tree).and_call_original
+ expect(project.repository).to receive(:tree).with(id, '', ref_type: 'tags').and_call_original
+ request
+ end
+
it 'finds the tag' do
expect(requested_ref_double).not_to receive(:find)
request
@@ -110,7 +123,13 @@ RSpec.describe Projects::TreeController, feature_category: :source_code_manageme
end
context "valid branch, no path" do
- let(:id) { 'master' }
+ let(:id) { 'flatten-dir' }
+
+ it 'checks for tree without ref_type' do
+ allow(project.repository).to receive(:tree).and_call_original
+ expect(project.repository).to receive(:tree).with(RepoHelpers.another_sample_commit.id, '').and_call_original
+ request
+ end
it 'responds with success' do
request
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 46913cfa649..7d7bebb7106 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -974,7 +974,8 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
project: {
project_setting_attributes: {
show_default_award_emojis: boolean_value,
- enforce_auth_checks_on_uploads: boolean_value
+ enforce_auth_checks_on_uploads: boolean_value,
+ emails_enabled: boolean_value
}
}
}
@@ -983,6 +984,8 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
expect(project.show_default_award_emojis?).to eq(result)
expect(project.enforce_auth_checks_on_uploads?).to eq(result)
+ expect(project.emails_enabled?).to eq(result)
+ expect(project.emails_disabled?).to eq(!result)
end
end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 9e69566d18f..57ae1d5a1db 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -439,6 +439,12 @@ RSpec.describe SearchController, feature_category: :global_search do
it_behaves_like 'with external authorization service enabled', :autocomplete, { term: 'hello' }
it_behaves_like 'support for active record query timeouts', :autocomplete, { term: 'hello' }, :project, :json
+ it 'raises an error if search term is missing' do
+ expect do
+ get :autocomplete
+ end.to raise_error(ActionController::ParameterMissing)
+ end
+
it 'returns an empty array when given abusive search term' do
get :autocomplete, params: { term: ('hal' * 4000), scope: 'projects' }
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index a09b3318c25..ce9703753cf 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -538,6 +538,26 @@ RSpec.describe SessionsController, feature_category: :system_access do
expect(AuthenticationEvent.last.provider).to eq("two-factor-via-webauthn-device")
end
end
+
+ context 'when the user is locked and submits a valid verification token' do
+ let(:user) { create(:user) }
+ let(:user_params) { { verification_token: 'token' } }
+ let(:session_params) { { verification_user_id: user.id } }
+ let(:post_action) { post(:create, params: { user: user_params }, session: session_params) }
+
+ before do
+ encrypted_token = Devise.token_generator.digest(User, user.email, 'token')
+ user.update!(locked_at: Time.current, unlock_token: encrypted_token)
+ end
+
+ it_behaves_like 'known sign in'
+
+ it 'successfully logs in a user' do
+ post_action
+
+ expect(subject.current_user).to eq user
+ end
+ end
end
context 'when login fails' do
diff --git a/spec/controllers/snippets/notes_controller_spec.rb b/spec/controllers/snippets/notes_controller_spec.rb
index 578973d5b3d..834415a5c87 100644
--- a/spec/controllers/snippets/notes_controller_spec.rb
+++ b/spec/controllers/snippets/notes_controller_spec.rb
@@ -14,6 +14,10 @@ RSpec.describe Snippets::NotesController, feature_category: :team_planning do
let(:note_on_public) { create(:note_on_personal_snippet, noteable: public_snippet) }
describe 'GET index' do
+ before do
+ request.headers['X-Last-Fetched-At'] = 0
+ end
+
context 'when a snippet is public' do
before do
note_on_public
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index c22292cb82c..3c99393b14b 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Database schema', feature_category: :database do
# but in Search::NamespaceIndexAssignment model, only `search_index_id` is used as foreign key and indexed
search_namespace_index_assignments: [%w[search_index_id index_type]],
slack_integrations_scopes: [%w[slack_api_scope_id]],
- namespaces: %w[organization_id] # this index is added in an async manner, hence it needs to be ignored in the first phase.
+ notes: %w[namespace_id] # this index is added in an async manner, hence it needs to be ignored in the first phase.
}.with_indifferent_access.freeze
TABLE_PARTITIONS = %w[ci_builds_metadata].freeze
@@ -44,7 +44,7 @@ RSpec.describe 'Database schema', feature_category: :database do
broadcast_messages: %w[namespace_id],
chat_names: %w[chat_id team_id user_id integration_id],
chat_teams: %w[team_id],
- ci_builds: %w[erased_by_id trigger_request_id partition_id],
+ ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id],
ci_namespace_monthly_usages: %w[namespace_id],
ci_pipeline_variables: %w[partition_id],
ci_pipelines: %w[partition_id],
@@ -82,12 +82,13 @@ RSpec.describe 'Database schema', feature_category: :database do
merge_requests_compliance_violations: %w[target_project_id],
merge_request_diff_commits: %w[commit_author_id committer_id],
namespaces: %w[owner_id parent_id],
- notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id confirmed_by_id discussion_id],
+ notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id confirmed_by_id discussion_id namespace_id],
notification_settings: %w[source_id],
oauth_access_grants: %w[resource_owner_id application_id],
oauth_access_tokens: %w[resource_owner_id application_id],
oauth_applications: %w[owner_id],
- p_ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id],
+ p_ci_builds: %w[erased_by_id trigger_request_id partition_id],
+ p_batched_git_ref_updates_deletions: %w[project_id partition_id],
product_analytics_events_experimental: %w[event_id txn_id user_id],
project_build_artifacts_size_refreshes: %w[last_job_artifact_id],
project_data_transfers: %w[project_id namespace_id],
@@ -109,11 +110,12 @@ RSpec.describe 'Database schema', feature_category: :database do
todos: %w[target_id commit_id],
uploads: %w[model_id],
user_agent_details: %w[subject_id],
- users: %w[color_scheme_id created_by_id theme_id email_opted_in_source_id],
+ users: %w[color_scheme_id created_by_id theme_id email_opted_in_source_id managing_group_id],
users_star_projects: %w[user_id],
vulnerability_identifiers: %w[external_id],
vulnerability_scanners: %w[external_id],
security_scans: %w[pipeline_id], # foreign key is not added as ci_pipeline table will be moved into different db soon
+ dependency_list_exports: %w[pipeline_id], # foreign key is not added as ci_pipeline table is in different db
vulnerability_reads: %w[cluster_agent_id],
# See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87584
# Fixes performance issues with the deletion of web-hooks with many log entries
@@ -194,18 +196,23 @@ RSpec.describe 'Database schema', feature_category: :database do
IGNORED_LIMIT_ENUMS = {
'Analytics::CycleAnalytics::Stage' => %w[start_event_identifier end_event_identifier],
'Ci::Bridge' => %w[failure_reason],
+ 'Ci::Bridge::Partitioned' => %w[failure_reason],
'Ci::Build' => %w[failure_reason],
+ 'Ci::Build::Partitioned' => %w[failure_reason],
'Ci::BuildMetadata' => %w[timeout_source],
'Ci::BuildTraceChunk' => %w[data_store],
'Ci::DailyReportResult' => %w[param_type],
'Ci::JobArtifact' => %w[file_type],
'Ci::Pipeline' => %w[source config_source failure_reason],
'Ci::Processable' => %w[failure_reason],
+ 'Ci::Processable::Partitioned' => %w[failure_reason],
'Ci::Runner' => %w[access_level],
'Ci::Stage' => %w[status],
'Clusters::Cluster' => %w[platform_type provider_type],
'CommitStatus' => %w[failure_reason],
+ 'CommitStatus::Partitioned' => %w[failure_reason],
'GenericCommitStatus' => %w[failure_reason],
+ 'GenericCommitStatus::Partitioned' => %w[failure_reason],
'InternalId' => %w[usage],
'List' => %w[list_type],
'NotificationSetting' => %w[level],
diff --git a/spec/factories/ci/bridge.rb b/spec/factories/ci/bridge.rb
index 49ac74f6f86..1f953ba0c2f 100644
--- a/spec/factories/ci/bridge.rb
+++ b/spec/factories/ci/bridge.rb
@@ -1,7 +1,11 @@
# frozen_string_literal: true
+require_relative 'deployable'
+
FactoryBot.define do
factory :ci_bridge, class: 'Ci::Bridge', parent: :ci_processable do
+ instance_eval ::Factories::Ci::Deployable.traits
+
name { 'bridge' }
created_at { '2013-10-29 09:50:00 CET' }
status { :created }
@@ -45,6 +49,10 @@ FactoryBot.define do
status { 'created' }
end
+ trait :running do
+ status { 'running' }
+ end
+
trait :started do
started_at { '2013-10-29 09:51:28 CET' }
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index dc75e17499c..7325ab30989 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -1,7 +1,11 @@
# frozen_string_literal: true
+require_relative 'deployable'
+
FactoryBot.define do
factory :ci_build, class: 'Ci::Build', parent: :ci_processable do
+ instance_eval ::Factories::Ci::Deployable.traits
+
name { 'test' }
add_attribute(:protected) { false }
created_at { 'Di 29. Okt 09:50:00 CET 2013' }
@@ -137,122 +141,6 @@ FactoryBot.define do
self.when { 'manual' }
end
- trait :teardown_environment do
- environment { 'staging' }
- options do
- {
- script: %w(ls),
- environment: { name: 'staging',
- action: 'stop',
- url: 'http://staging.example.com/$CI_JOB_NAME' }
- }
- end
- end
-
- trait :environment_with_deployment_tier do
- environment { 'test_portal' }
- options do
- {
- script: %w(ls),
- environment: { name: 'test_portal',
- action: 'start',
- url: 'http://staging.example.com/$CI_JOB_NAME',
- deployment_tier: 'testing' }
- }
- end
- end
-
- trait :deploy_to_production do
- environment { 'production' }
-
- options do
- {
- script: %w(ls),
- environment: { name: 'production',
- url: 'http://prd.example.com/$CI_JOB_NAME' }
- }
- end
- end
-
- trait :start_review_app do
- environment { 'review/$CI_COMMIT_REF_NAME' }
-
- options do
- {
- script: %w(ls),
- environment: { name: 'review/$CI_COMMIT_REF_NAME',
- url: 'http://staging.example.com/$CI_JOB_NAME',
- on_stop: 'stop_review_app' }
- }
- end
- end
-
- trait :stop_review_app do
- name { 'stop_review_app' }
- environment { 'review/$CI_COMMIT_REF_NAME' }
-
- options do
- {
- script: %w(ls),
- environment: { name: 'review/$CI_COMMIT_REF_NAME',
- url: 'http://staging.example.com/$CI_JOB_NAME',
- action: 'stop' }
- }
- end
- end
-
- trait :prepare_staging do
- name { 'prepare staging' }
- environment { 'staging' }
-
- options do
- {
- script: %w(ls),
- environment: { name: 'staging', action: 'prepare' }
- }
- end
-
- set_expanded_environment_name
- end
-
- trait :start_staging do
- name { 'start staging' }
- environment { 'staging' }
-
- options do
- {
- script: %w(ls),
- environment: { name: 'staging', action: 'start' }
- }
- end
-
- set_expanded_environment_name
- end
-
- trait :stop_staging do
- name { 'stop staging' }
- environment { 'staging' }
-
- options do
- {
- script: %w(ls),
- environment: { name: 'staging', action: 'stop' }
- }
- end
-
- set_expanded_environment_name
- end
-
- trait :set_expanded_environment_name do
- after(:build) do |build, evaluator|
- build.assign_attributes(
- metadata_attributes: {
- expanded_environment_name: build.expanded_environment_name
- }
- )
- end
- end
-
trait :allowed_to_fail do
allow_failure { true }
end
@@ -311,20 +199,6 @@ FactoryBot.define do
trigger_request factory: :ci_trigger_request
end
- trait :with_deployment do
- after(:build) do |build, evaluator|
- ##
- # Build deployment/environment relations if environment name is set
- # to the job. If `build.deployment` has already been set, it doesn't
- # build a new instance.
- Environments::CreateForBuildService.new.execute(build)
- end
-
- after(:create) do |build, evaluator|
- Deployments::CreateForBuildService.new.execute(build)
- end
- end
-
trait :tag do
tag { true }
end
diff --git a/spec/factories/ci/catalog/resources/components.rb b/spec/factories/ci/catalog/resources/components.rb
new file mode 100644
index 00000000000..3eeb2f4251a
--- /dev/null
+++ b/spec/factories/ci/catalog/resources/components.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :catalog_resource_component, class: 'Ci::Catalog::Resources::Component' do
+ version factory: :catalog_resource_version
+ catalog_resource { version.catalog_resource }
+ project { version.project }
+ name { catalog_resource.name }
+ end
+end
diff --git a/spec/factories/ci/catalog/resources/versions.rb b/spec/factories/ci/catalog/resources/versions.rb
new file mode 100644
index 00000000000..d5057969273
--- /dev/null
+++ b/spec/factories/ci/catalog/resources/versions.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :catalog_resource_version, class: 'Ci::Catalog::Resources::Version' do
+ catalog_resource
+ project { catalog_resource.project }
+ release { association :release, project: project }
+ end
+end
diff --git a/spec/factories/ci/deployable.rb b/spec/factories/ci/deployable.rb
new file mode 100644
index 00000000000..15b37e44e07
--- /dev/null
+++ b/spec/factories/ci/deployable.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+module Factories
+ module Ci
+ module Deployable
+ def self.traits
+ <<-RUBY
+ trait :teardown_environment do
+ environment { 'staging' }
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'staging',
+ action: 'stop',
+ url: 'http://staging.example.com/$CI_JOB_NAME' }
+ }
+ end
+ end
+
+ trait :environment_with_deployment_tier do
+ environment { 'test_portal' }
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'test_portal',
+ action: 'start',
+ url: 'http://staging.example.com/$CI_JOB_NAME',
+ deployment_tier: 'testing' }
+ }
+ end
+ end
+
+ trait :deploy_to_production do
+ environment { 'production' }
+
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'production',
+ url: 'http://prd.example.com/$CI_JOB_NAME' }
+ }
+ end
+ end
+
+ trait :start_review_app do
+ environment { 'review/$CI_COMMIT_REF_NAME' }
+
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'review/$CI_COMMIT_REF_NAME',
+ url: 'http://staging.example.com/$CI_JOB_NAME',
+ on_stop: 'stop_review_app' }
+ }
+ end
+ end
+
+ trait :stop_review_app do
+ name { 'stop_review_app' }
+ environment { 'review/$CI_COMMIT_REF_NAME' }
+
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'review/$CI_COMMIT_REF_NAME',
+ url: 'http://staging.example.com/$CI_JOB_NAME',
+ action: 'stop' }
+ }
+ end
+ end
+
+ trait :prepare_staging do
+ name { 'prepare staging' }
+ environment { 'staging' }
+
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'staging', action: 'prepare' }
+ }
+ end
+
+ set_expanded_environment_name
+ end
+
+ trait :start_staging do
+ name { 'start staging' }
+ environment { 'staging' }
+
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'staging', action: 'start' }
+ }
+ end
+
+ set_expanded_environment_name
+ end
+
+ trait :stop_staging do
+ name { 'stop staging' }
+ environment { 'staging' }
+
+ options do
+ {
+ script: %w(ls),
+ environment: { name: 'staging', action: 'stop' }
+ }
+ end
+
+ set_expanded_environment_name
+ end
+
+ trait :set_expanded_environment_name do
+ after(:build) do |job, evaluator|
+ job.assign_attributes(
+ metadata_attributes: {
+ expanded_environment_name: job.expanded_environment_name
+ }
+ )
+ end
+ end
+
+ trait :with_deployment do
+ after(:build) do |job, evaluator|
+ ##
+ # Build deployment/environment relations if environment name is set
+ # to the job. If `job.deployment` has already been set, it doesn't
+ # build a new instance.
+ Environments::CreateForJobService.new.execute(job)
+ end
+
+ after(:create) do |job, evaluator|
+ Deployments::CreateForJobService.new.execute(job)
+ end
+ end
+ RUBY
+ end
+ end
+ end
+end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 5e049e0375b..1c418f646f6 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -478,5 +478,15 @@ FactoryBot.define do
artifact.file_sha256 = Digest::SHA256.file(artifact.file.path).hexdigest
end
end
+
+ trait :annotations do
+ file_type { :annotations }
+ file_format { :gzip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/gl-annotations.json.gz'), 'application/x-gzip')
+ end
+ end
end
end
diff --git a/spec/factories/ci/reports/sbom/components.rb b/spec/factories/ci/reports/sbom/components.rb
index 8f2c00b695a..76bfbe13acb 100644
--- a/spec/factories/ci/reports/sbom/components.rb
+++ b/spec/factories/ci/reports/sbom/components.rb
@@ -9,12 +9,14 @@ FactoryBot.define do
transient do
purl_type { 'npm' }
+ namespace { nil }
end
purl do
::Sbom::PackageUrl.new(
type: purl_type,
name: name,
+ namespace: namespace,
version: version
).to_s
end
diff --git a/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb b/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb
deleted file mode 100644
index 1d2c460144d..00000000000
--- a/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :schema_inconsistency, class: '::Gitlab::Database::SchemaValidation::SchemaInconsistency' do
- issue factory: :issue
-
- object_name { 'name' }
- table_name { 'table' }
- valitador_name { 'validator' }
- diff { 'diff' }
- end
-end
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 5b4839df2d3..807df94e115 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -65,18 +65,14 @@ FactoryBot.define do
end
end
- trait :allow_descendants_override_disabled_shared_runners do
- allow_descendants_override_disabled_shared_runners { true }
- end
-
- trait :disabled_and_unoverridable do
+ trait :shared_runners_disabled_and_unoverridable do
shared_runners_disabled
allow_descendants_override_disabled_shared_runners { false }
end
- trait :disabled_and_overridable do
+ trait :shared_runners_disabled_and_overridable do
shared_runners_disabled
- allow_descendants_override_disabled_shared_runners
+ allow_descendants_override_disabled_shared_runners { true }
end
trait :shared_runners_enabled do
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index a89edc19cc7..b74b81d4db9 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -254,6 +254,13 @@ FactoryBot.define do
active { false }
end
+ factory :discord_integration, class: 'Integrations::Discord' do
+ chat_notification
+ project
+ active { true }
+ type { 'Integrations::Discord' }
+ end
+
factory :mattermost_integration, class: 'Integrations::Mattermost' do
chat_notification
project
@@ -319,6 +326,7 @@ FactoryBot.define do
package_name { 'com.gitlab.foo.bar' }
service_account_key_file_name { 'service_account.json' }
service_account_key { File.read('spec/fixtures/service_account.json') }
+ google_play_protected_refs { true }
end
factory :squash_tm_integration, class: 'Integrations::SquashTm' do
diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb
index 062e5294e4f..3f17d4d5a97 100644
--- a/spec/factories/issues.rb
+++ b/spec/factories/issues.rb
@@ -93,6 +93,14 @@ FactoryBot.define do
association :work_item_type, :default, :test_case
end
+ trait :epic do
+ association :work_item_type, :default, :epic
+ end
+
+ trait :ticket do
+ association :work_item_type, :default, :ticket
+ end
+
factory :incident do
association :work_item_type, :default, :incident
diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb
index f6f06a99494..4bd41c1faa1 100644
--- a/spec/factories/keys.rb
+++ b/spec/factories/keys.rb
@@ -30,7 +30,15 @@ FactoryBot.define do
key { SSHData::PrivateKey::RSA.generate(3072, unsafe_allow_small_key: true).public_key.openssh }
end
- factory :deploy_key, class: 'DeployKey'
+ factory :deploy_key, class: 'DeployKey' do
+ trait :private do
+ public { false }
+ end
+
+ trait :public do
+ public { true }
+ end
+ end
factory :group_deploy_key, class: 'GroupDeployKey' do
user
diff --git a/spec/factories/labels.rb b/spec/factories/labels.rb
index 250c92c0038..e592565eef7 100644
--- a/spec/factories/labels.rb
+++ b/spec/factories/labels.rb
@@ -37,4 +37,6 @@ FactoryBot.define do
end
factory :admin_label, traits: [:base_label], class: 'Label'
+
+ factory :abuse_report_label, traits: [:base_label], class: 'Admin::AbuseReportLabel'
end
diff --git a/spec/factories/metrics/dashboard/annotations.rb b/spec/factories/metrics/dashboard/annotations.rb
index 2e5c373918e..50c9ed01fd8 100644
--- a/spec/factories/metrics/dashboard/annotations.rb
+++ b/spec/factories/metrics/dashboard/annotations.rb
@@ -5,11 +5,5 @@ FactoryBot.define do
description { "Dashbaord annoation description" }
dashboard_path { "custom_dashbaord.yml" }
starting_at { Time.current }
- environment
-
- trait :with_cluster do
- cluster
- environment { nil }
- end
end
end
diff --git a/spec/factories/ml/model_versions.rb b/spec/factories/ml/model_versions.rb
index 5ae0446b78d..456d1b1e913 100644
--- a/spec/factories/ml/model_versions.rb
+++ b/spec/factories/ml/model_versions.rb
@@ -2,14 +2,14 @@
FactoryBot.define do
factory :ml_model_versions, class: '::Ml::ModelVersion' do
- sequence(:version) { |n| "version#{n}" }
+ sequence(:version) { |n| "1.0.#{n}-alpha+test" }
model { association :ml_models }
project { model.project }
trait :with_package do
package do
- association :ml_model_package, name: model.name, version: version, project_id: project.id
+ association :ml_model_package, name: model.name, version: version, project: project
end
end
end
diff --git a/spec/factories/ml/models.rb b/spec/factories/ml/models.rb
index 2d1b29289a5..158c26499b0 100644
--- a/spec/factories/ml/models.rb
+++ b/spec/factories/ml/models.rb
@@ -6,5 +6,17 @@ FactoryBot.define do
project
default_experiment { association :ml_experiments, project_id: project.id, name: name }
+
+ trait :with_versions do
+ versions { Array.new(2) { association(:ml_model_versions, model: instance) } }
+ end
+
+ trait :with_latest_version_and_package do
+ transient do
+ version { association(:ml_model_versions, :with_package, model: instance) }
+ end
+ versions { [version] }
+ latest_version { version }
+ end
end
end
diff --git a/spec/factories/namespace_package_settings.rb b/spec/factories/namespace_package_settings.rb
index 042808f042f..9d794e794a4 100644
--- a/spec/factories/namespace_package_settings.rb
+++ b/spec/factories/namespace_package_settings.rb
@@ -10,6 +10,9 @@ FactoryBot.define do
generic_duplicates_allowed { true }
generic_duplicate_exception_regex { 'foo' }
+ nuget_duplicates_allowed { true }
+ nuget_duplicate_exception_regex { 'foo' }
+
trait :group do
namespace { association(:group) }
end
diff --git a/spec/factories/namespaces.rb b/spec/factories/namespaces.rb
index e88bb634898..ee994d32f10 100644
--- a/spec/factories/namespaces.rb
+++ b/spec/factories/namespaces.rb
@@ -40,9 +40,5 @@ FactoryBot.define do
trait :shared_runners_disabled do
shared_runners_enabled { false }
end
-
- trait :allow_descendants_override_disabled_shared_runners do
- allow_descendants_override_disabled_shared_runners { true }
- end
end
end
diff --git a/spec/factories/project_group_links.rb b/spec/factories/project_group_links.rb
index 84c590e3ea1..5edd57d5fe1 100644
--- a/spec/factories/project_group_links.rb
+++ b/spec/factories/project_group_links.rb
@@ -12,8 +12,10 @@ FactoryBot.define do
trait(:developer) { group_access { Gitlab::Access::DEVELOPER } }
trait(:maintainer) { group_access { Gitlab::Access::MAINTAINER } }
- after(:create) do |project_group_link, evaluator|
- AuthorizedProjectUpdate::ProjectRecalculateService.new(project_group_link.project).execute
+ after(:create) do |project_group_link|
+ project_group_link.run_after_commit_or_now do
+ AuthorizedProjectUpdate::ProjectRecalculateService.new(project_group_link.project).execute
+ end
end
end
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 6e3e119ddab..0111083298c 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -55,6 +55,7 @@ FactoryBot.define do
import_correlation_id { nil }
import_last_error { nil }
forward_deployment_enabled { nil }
+ forward_deployment_rollback_allowed { nil }
restrict_user_defined_variables { nil }
ci_outbound_job_token_scope_enabled { nil }
ci_inbound_job_token_scope_enabled { nil }
@@ -99,6 +100,10 @@ FactoryBot.define do
project.set_runners_token(evaluator.runners_token) if evaluator.runners_token.present?
end
+ to_create do |project|
+ project.project_namespace.save! if project.valid?
+ end
+
after(:create) do |project, evaluator|
# Normally the class Projects::CreateService is used for creating
# projects, and this class takes care of making sure the owner and current
@@ -109,7 +114,9 @@ FactoryBot.define do
end
if project.group
- AuthorizedProjectUpdate::ProjectRecalculateService.new(project).execute
+ project.run_after_commit_or_now do
+ AuthorizedProjectUpdate::ProjectRecalculateService.new(project).execute
+ end
end
# assign the delegated `#ci_cd_settings` attributes after create
diff --git a/spec/factories/service_desk/custom_email_verification.rb b/spec/factories/service_desk/custom_email_verification.rb
index 3f3a2ea570d..a3b72da2e9e 100644
--- a/spec/factories/service_desk/custom_email_verification.rb
+++ b/spec/factories/service_desk/custom_email_verification.rb
@@ -7,5 +7,9 @@ FactoryBot.define do
project
triggerer factory: :user
triggered_at { Time.current }
+
+ trait :overdue do
+ triggered_at { (ServiceDesk::CustomEmailVerification::TIMEFRAME + 1).minutes.ago }
+ end
end
end
diff --git a/spec/factories/broadcast_messages.rb b/spec/factories/system/broadcast_messages.rb
index 0602ce31136..4742d183ab7 100644
--- a/spec/factories/broadcast_messages.rb
+++ b/spec/factories/system/broadcast_messages.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :broadcast_message do
+ factory :broadcast_message, class: 'System::BroadcastMessage' do
message { "MyText" }
starts_at { 1.day.ago }
ends_at { 1.day.from_now }
diff --git a/spec/factories/todos.rb b/spec/factories/todos.rb
index 760367539fc..12d77abe5c9 100644
--- a/spec/factories/todos.rb
+++ b/spec/factories/todos.rb
@@ -45,6 +45,10 @@ FactoryBot.define do
action { Todo::MEMBER_ACCESS_REQUESTED }
end
+ trait :review_submitted do
+ action { Todo::REVIEW_SUBMITTED }
+ end
+
trait :pending do
state { :pending }
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index a9d5da93bc5..67c857165fc 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -148,6 +148,11 @@ FactoryBot.define do
end
end
+ trait :invalid do
+ first_name { 'A' * 130 } # Exceed `first_name` character limit in model to make it invalid
+ to_create { |user| user.save!(validate: false) }
+ end
+
transient do
developer_projects { [] }
maintainer_projects { [] }
diff --git a/spec/factories/work_items.rb b/spec/factories/work_items.rb
index 1e47dc0e348..4a2186f2fcf 100644
--- a/spec/factories/work_items.rb
+++ b/spec/factories/work_items.rb
@@ -58,11 +58,24 @@ FactoryBot.define do
association :work_item_type, :default, :key_result
end
+ trait :epic do
+ association :work_item_type, :default, :epic
+ end
+
+ trait :ticket do
+ association :work_item_type, :default, :ticket
+ end
+
before(:create, :build) do |work_item, evaluator|
if evaluator.namespace.present?
work_item.project = nil
work_item.namespace = evaluator.namespace
end
end
+
+ # Service Desk Ticket
+ factory :ticket do
+ association :work_item_type, :default, :ticket
+ end
end
end
diff --git a/spec/factories/work_items/related_work_item_links.rb b/spec/factories/work_items/related_work_item_links.rb
new file mode 100644
index 00000000000..327323af803
--- /dev/null
+++ b/spec/factories/work_items/related_work_item_links.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :work_item_link, class: 'WorkItems::RelatedWorkItemLink' do
+ source factory: :work_item
+ target factory: :work_item
+ end
+end
diff --git a/spec/fast_spec_helper.rb b/spec/fast_spec_helper.rb
index 47a90efab1e..d03f8b18b3e 100644
--- a/spec/fast_spec_helper.rb
+++ b/spec/fast_spec_helper.rb
@@ -32,3 +32,5 @@ ActiveSupport::XmlMini.backend = 'Nokogiri'
# Consider tweaking configuration in `spec/support/rspec.rb` which is also
# used by `spec/spec_helper.rb`.
+
+require_relative('../jh/spec/fast_spec_helper') if Gitlab.jh?
diff --git a/spec/features/abuse_report_spec.rb b/spec/features/abuse_report_spec.rb
index ae3859280b1..f934736ced9 100644
--- a/spec/features/abuse_report_spec.rb
+++ b/spec/features/abuse_report_spec.rb
@@ -13,10 +13,19 @@ RSpec.describe 'Abuse reports', :js, feature_category: :insider_threat do
before do
sign_in(reporter1)
stub_feature_flags(moved_mr_sidebar: false)
- stub_feature_flags(user_profile_overflow_menu_vue: false)
end
describe 'report abuse to administrator' do
+ shared_examples 'cancel report' do
+ it 'redirects backs to user profile when cancel button is clicked' do
+ fill_and_submit_abuse_category_form
+
+ click_link 'Cancel'
+
+ expect(page).to have_current_path(user_path(abusive_user))
+ end
+ end
+
context 'when reporting an issue for abuse' do
before do
visit project_issue_path(project, issue)
@@ -46,54 +55,102 @@ RSpec.describe 'Abuse reports', :js, feature_category: :insider_threat do
it_behaves_like 'reports the user with an abuse category'
end
- context 'when reporting a user profile for abuse' do
- let_it_be(:reporter2) { create(:user) }
+ describe 'when user_profile_overflow_menu FF turned on' do
+ context 'when reporting a user profile for abuse' do
+ let_it_be(:reporter2) { create(:user) }
- before do
- visit user_path(abusive_user)
- end
+ before do
+ visit user_path(abusive_user)
+ find_by_testid('base-dropdown-toggle').click
+ end
- it_behaves_like 'reports the user with an abuse category'
+ it_behaves_like 'reports the user with an abuse category'
- it 'allows the reporter to report the same user for different abuse categories' do
- visit user_path(abusive_user)
+ it 'allows the reporter to report the same user for different abuse categories' do
+ visit user_path(abusive_user)
- fill_and_submit_abuse_category_form
- fill_and_submit_report_abuse_form
+ find_by_testid('base-dropdown-toggle').click
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
- expect(page).to have_content 'Thank you for your report'
+ expect(page).to have_content 'Thank you for your report'
- visit user_path(abusive_user)
+ visit user_path(abusive_user)
- fill_and_submit_abuse_category_form("They're being offensive or abusive.")
- fill_and_submit_report_abuse_form
+ find_by_testid('base-dropdown-toggle').click
+ fill_and_submit_abuse_category_form("They're being offensive or abusive.")
+ fill_and_submit_report_abuse_form
- expect(page).to have_content 'Thank you for your report'
- end
+ expect(page).to have_content 'Thank you for your report'
+ end
- it 'allows multiple users to report the same user' do
- fill_and_submit_abuse_category_form
- fill_and_submit_report_abuse_form
+ it 'allows multiple users to report the same user' do
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
- expect(page).to have_content 'Thank you for your report'
+ expect(page).to have_content 'Thank you for your report'
- gitlab_sign_out
- gitlab_sign_in(reporter2)
+ gitlab_sign_out
+ gitlab_sign_in(reporter2)
- visit user_path(abusive_user)
+ visit user_path(abusive_user)
- fill_and_submit_abuse_category_form
- fill_and_submit_report_abuse_form
+ find_by_testid('base-dropdown-toggle').click
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
- expect(page).to have_content 'Thank you for your report'
+ expect(page).to have_content 'Thank you for your report'
+ end
+
+ it_behaves_like 'cancel report'
end
+ end
- it 'redirects backs to user profile when cancel button is clicked' do
- fill_and_submit_abuse_category_form
+ describe 'when user_profile_overflow_menu FF turned off' do
+ context 'when reporting a user profile for abuse' do
+ let_it_be(:reporter2) { create(:user) }
- click_link 'Cancel'
+ before do
+ stub_feature_flags(user_profile_overflow_menu_vue: false)
+ visit user_path(abusive_user)
+ end
- expect(page).to have_current_path(user_path(abusive_user))
+ it_behaves_like 'reports the user with an abuse category'
+
+ it 'allows the reporter to report the same user for different abuse categories' do
+ visit user_path(abusive_user)
+
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
+
+ expect(page).to have_content 'Thank you for your report'
+
+ visit user_path(abusive_user)
+
+ fill_and_submit_abuse_category_form("They're being offensive or abusive.")
+ fill_and_submit_report_abuse_form
+
+ expect(page).to have_content 'Thank you for your report'
+ end
+
+ it 'allows multiple users to report the same user' do
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
+
+ expect(page).to have_content 'Thank you for your report'
+
+ gitlab_sign_out
+ gitlab_sign_in(reporter2)
+
+ visit user_path(abusive_user)
+
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
+
+ expect(page).to have_content 'Thank you for your report'
+ end
+
+ it_behaves_like 'cancel report'
end
end
@@ -102,7 +159,7 @@ RSpec.describe 'Abuse reports', :js, feature_category: :insider_threat do
before do
visit project_merge_request_path(project, merge_request)
- find('[data-testid="merge-request-actions"]').click
+ find_by_testid('merge-request-actions').click
end
it_behaves_like 'reports the user with an abuse category'
diff --git a/spec/features/admin/admin_abuse_reports_spec.rb b/spec/features/admin/admin_abuse_reports_spec.rb
index 9739ea53f81..18bc851558d 100644
--- a/spec/features/admin/admin_abuse_reports_spec.rb
+++ b/spec/features/admin/admin_abuse_reports_spec.rb
@@ -2,27 +2,29 @@
require 'spec_helper'
-RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do
+RSpec.describe "Admin::AbuseReports", :js, feature_category: :insider_threat do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create(:admin) }
- context 'as an admin' do
- describe 'displayed reports' do
- include FilteredSearchHelpers
+ let_it_be(:open_report) { create(:abuse_report, created_at: 5.days.ago, updated_at: 2.days.ago, category: 'spam', user: user) }
+ let_it_be(:open_report2) { create(:abuse_report, created_at: 4.days.ago, updated_at: 3.days.ago, category: 'phishing') }
+ let_it_be(:closed_report) { create(:abuse_report, :closed, user: user, category: 'spam') }
- let_it_be(:open_report) { create(:abuse_report, created_at: 5.days.ago, updated_at: 2.days.ago) }
- let_it_be(:open_report2) { create(:abuse_report, created_at: 4.days.ago, updated_at: 3.days.ago, category: 'phishing') }
- let_it_be(:closed_report) { create(:abuse_report, :closed) }
+ describe 'as an admin' do
+ before do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ end
- let(:abuse_report_row_selector) { '[data-testid="abuse-report-row"]' }
+ context 'when abuse_reports_list feature flag is enabled' do
+ include FilteredSearchHelpers
before do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
-
visit admin_abuse_reports_path
end
+ let(:abuse_report_row_selector) { '[data-testid="abuse-report-row"]' }
+
it 'only includes open reports by default' do
expect_displayed_reports_count(2)
@@ -68,7 +70,8 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do
end
it 'can be sorted by created_at and updated_at in desc and asc order', :aggregate_failures do
- # created_at desc (default)
+ sort_by 'Created date'
+ # created_at desc
expect(report_rows[0].text).to include(report_text(open_report2))
expect(report_rows[1].text).to include(report_text(open_report))
@@ -78,25 +81,90 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do
expect(report_rows[0].text).to include(report_text(open_report))
expect(report_rows[1].text).to include(report_text(open_report2))
- # updated_at ascending
+ # updated_at asc
sort_by 'Updated date'
expect(report_rows[0].text).to include(report_text(open_report2))
expect(report_rows[1].text).to include(report_text(open_report))
- # updated_at descending
+ # updated_at desc
toggle_sort_direction
expect(report_rows[0].text).to include(report_text(open_report))
expect(report_rows[1].text).to include(report_text(open_report2))
end
+ context 'when multiple reports for the same user are created' do
+ let_it_be(:open_report3) { create(:abuse_report, category: 'spam', user: user) }
+ let_it_be(:closed_report2) { create(:abuse_report, :closed, user: user, category: 'spam') }
+
+ it 'aggregates open reports by user & category', :aggregate_failures do
+ expect_displayed_reports_count(2)
+
+ expect_aggregated_report_shown(open_report, 2)
+ expect_report_shown(open_report2)
+ end
+
+ it 'can sort aggregated reports by number_of_reports in desc order only', :aggregate_failures do
+ sort_by 'Number of Reports'
+
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
+
+ toggle_sort_direction
+
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
+ end
+
+ it 'can sort aggregated reports by created_at and updated_at in desc and asc order', :aggregate_failures do
+ # number_of_reports desc (default)
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
+
+ # created_at desc
+ sort_by 'Created date'
+
+ expect(report_rows[0].text).to include(report_text(open_report2))
+ expect(report_rows[1].text).to include(aggregated_report_text(open_report, 2))
+
+ # created_at asc
+ toggle_sort_direction
+
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
+
+ sort_by 'Updated date'
+
+ # updated_at asc
+ expect(report_rows[0].text).to include(report_text(open_report2))
+ expect(report_rows[1].text).to include(aggregated_report_text(open_report, 2))
+
+ # updated_at desc
+ toggle_sort_direction
+
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
+ end
+
+ it 'does not aggregate closed reports', :aggregate_failures do
+ filter %w[Status Closed]
+
+ expect_displayed_reports_count(2)
+ expect_report_shown(closed_report, closed_report2)
+ end
+ end
+
def report_rows
page.all(abuse_report_row_selector)
end
def report_text(report)
- "#{report.user.name} reported for #{report.category}"
+ "#{report.user.name} reported for #{report.category} by #{report.reporter.name}"
+ end
+
+ def aggregated_report_text(report, count)
+ "#{report.user.name} reported for #{report.category} by #{count} users"
end
def expect_report_shown(*reports)
@@ -111,6 +179,12 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do
end
end
+ def expect_aggregated_report_shown(*reports, count)
+ reports.each do |r|
+ expect(page).to have_content(aggregated_report_text(r, count))
+ end
+ end
+
def expect_displayed_reports_count(count)
expect(page).to have_css(abuse_report_row_selector, count: count)
end
@@ -138,71 +212,30 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do
before do
stub_feature_flags(abuse_reports_list: false)
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ visit admin_abuse_reports_path
end
- describe 'if a user has been reported for abuse' do
- let_it_be(:abuse_report) { create(:abuse_report, user: user) }
-
- describe 'in the abuse report view' do
- before do
- visit admin_abuse_reports_path
- end
-
- it 'presents information about abuse report' do
- expect(page).to have_content('Abuse Reports')
-
- expect(page).to have_content(user.name)
- expect(page).to have_content(abuse_report.reporter.name)
- expect(page).to have_content(abuse_report.message)
- expect(page).to have_link(user.name, href: user_path(user))
- end
-
- it 'present actions items' do
- expect(page).to have_link('Remove user & report')
- expect(page).to have_link('Block user')
- expect(page).to have_link('Remove user')
- end
- end
+ it 'displays all abuse reports', :aggregate_failures do
+ expect_report_shown(open_report)
+ expect_report_actions_shown(open_report)
- describe 'in the profile page of the user' do
- it 'shows a link to view user in the admin area' do
- visit user_path(user)
+ expect_report_shown(open_report2)
+ expect_report_actions_shown(open_report2)
- expect(page).to have_link 'View user in admin area', href: admin_user_path(user)
- end
- end
+ expect_report_shown(closed_report)
+ expect_report_actions_shown(closed_report)
end
- describe 'if an admin has been reported for abuse' do
+ context 'when an admin has been reported for abuse' do
let_it_be(:admin_abuse_report) { create(:abuse_report, user: admin) }
- describe 'in the abuse report view' do
- before do
- visit admin_abuse_reports_path
- end
-
- it 'presents information about abuse report' do
- page.within(:table_row, { "User" => admin.name }) do
- expect(page).to have_content(admin.name)
- expect(page).to have_content(admin_abuse_report.reporter.name)
- expect(page).to have_content(admin_abuse_report.message)
- expect(page).to have_link(admin.name, href: user_path(admin))
- end
- end
-
- it 'does not present actions items' do
- page.within(:table_row, { "User" => admin.name }) do
- expect(page).not_to have_link('Remove user & report')
- expect(page).not_to have_link('Block user')
- expect(page).not_to have_link('Remove user')
- end
- end
+ it 'displays the abuse report without actions' do
+ expect_report_shown(admin_abuse_report)
+ expect_report_actions_not_shown(admin_abuse_report)
end
end
- describe 'if a many users have been reported for abuse' do
+ context 'when multiple users have been reported for abuse' do
let(:report_count) { AbuseReport.default_per_page + 3 }
before do
@@ -211,8 +244,8 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do
end
end
- describe 'in the abuse report view' do
- it 'presents information about abuse report' do
+ context 'in the abuse report view', :aggregate_failures do
+ it 'adds pagination' do
visit admin_abuse_reports_path
expect(page).to have_selector('.pagination')
@@ -221,12 +254,8 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do
end
end
- describe 'filtering by user' do
- let!(:user2) { create(:user) }
- let!(:abuse_report) { create(:abuse_report, user: user) }
- let!(:abuse_report_2) { create(:abuse_report, user: user2) }
-
- it 'shows only single user report' do
+ context 'when filtering reports' do
+ it 'can be filtered by reported-user', :aggregate_failures do
visit admin_abuse_reports_path
page.within '.filter-form' do
@@ -234,14 +263,39 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do
wait_for_requests
page.within '.dropdown-menu-user' do
- click_link user2.name
+ click_link user.name
end
wait_for_requests
end
- expect(page).to have_content(user2.name)
- expect(page).not_to have_content(user.name)
+ expect_report_shown(open_report)
+ expect_report_shown(closed_report)
+ end
+ end
+
+ def expect_report_shown(report)
+ page.within(:table_row, { "User" => report.user.name, "Reported by" => report.reporter.name }) do
+ expect(page).to have_content(report.user.name)
+ expect(page).to have_content(report.reporter.name)
+ expect(page).to have_content(report.message)
+ expect(page).to have_link(report.user.name, href: user_path(report.user))
+ end
+ end
+
+ def expect_report_actions_shown(report)
+ page.within(:table_row, { "User" => report.user.name, "Reported by" => report.reporter.name }) do
+ expect(page).to have_link('Remove user & report')
+ expect(page).to have_link('Block user')
+ expect(page).to have_link('Remove user')
+ end
+ end
+
+ def expect_report_actions_not_shown(report)
+ page.within(:table_row, { "User" => report.user.name, "Reported by" => report.reporter.name }) do
+ expect(page).not_to have_link('Remove user & report')
+ expect(page).not_to have_link('Block user')
+ expect(page).not_to have_link('Remove user')
end
end
end
diff --git a/spec/features/admin/admin_labels_spec.rb b/spec/features/admin/admin_labels_spec.rb
index 68d63ac321e..47dc8577037 100644
--- a/spec/features/admin/admin_labels_spec.rb
+++ b/spec/features/admin/admin_labels_spec.rb
@@ -50,6 +50,10 @@ RSpec.describe 'admin issues labels', feature_category: :team_planning do
expect(page).to have_content("Define your default set of project labels")
expect(page).not_to have_content('bug')
expect(page).not_to have_content('feature_label')
+
+ page.within '.js-admin-labels-count' do
+ expect(page).to have_content('0')
+ end
end
end
@@ -113,7 +117,7 @@ RSpec.describe 'admin issues labels', feature_category: :team_planning do
click_link 'Delete label'
end
- expect(page).to have_content('Label was removed')
+ expect(page).to have_content("#{bug_label.title} was removed").and have_no_content("#{bug_label.title}</span>")
end
end
end
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 7fb2202ca1d..af6ba318ac6 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -117,8 +117,8 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
describe 'search' do
before_all do
- create(:ci_runner, :instance, description: 'runner-foo')
- create(:ci_runner, :instance, description: 'runner-bar')
+ create(:ci_runner, :instance, description: 'runner foo')
+ create(:ci_runner, :instance, description: 'runner bar')
end
before do
@@ -133,23 +133,23 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
end
it 'shows runners' do
- expect(page).to have_content("runner-foo")
- expect(page).to have_content("runner-bar")
+ expect(page).to have_content("runner foo")
+ expect(page).to have_content("runner bar")
end
it 'shows correct runner when description matches' do
- input_filtered_search_keys('runner-foo')
+ input_filtered_search_keys('runner foo')
expect(page).to have_link('All 1')
expect(page).to have_link('Instance 1')
- expect(page).to have_content("runner-foo")
- expect(page).not_to have_content("runner-bar")
+ expect(page).to have_content("runner foo")
+ expect(page).not_to have_content("runner bar")
end
context 'when description does not match' do
before do
- input_filtered_search_keys('runner-baz')
+ input_filtered_search_keys('runner baz')
end
it_behaves_like 'shows no runners found'
diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
index 0350c8ab066..543dc2cc2a6 100644
--- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb
+++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js, feature_category: :s
name = 'Hello World'
visit admin_user_impersonation_tokens_path(user_id: user.username)
+ click_button 'Add new token'
fill_in "Token name", with: name
# Set date to 1st of next month
diff --git a/spec/features/admin/broadcast_messages_spec.rb b/spec/features/admin/broadcast_messages_spec.rb
index fca4cdb0ff4..b89ebc34d6a 100644
--- a/spec/features/admin/broadcast_messages_spec.rb
+++ b/spec/features/admin/broadcast_messages_spec.rb
@@ -12,6 +12,12 @@ RSpec.describe 'Admin Broadcast Messages', :js, feature_category: :onboarding do
# create
visit admin_broadcast_messages_path
+ click_button('Add new message')
+
+ page.within(preview_container) do
+ expect(page).to have_content('Your message here')
+ end
+
fill_in 'Message', with: 'test message'
wait_for_requests
@@ -24,10 +30,6 @@ RSpec.describe 'Admin Broadcast Messages', :js, feature_category: :onboarding do
wait_for_requests
- page.within(preview_container) do
- expect(page).to have_content('Your message here')
- end
-
page.within(first_message_container) do
expect(page).to have_content('test message')
end
@@ -53,10 +55,6 @@ RSpec.describe 'Admin Broadcast Messages', :js, feature_category: :onboarding do
wait_for_requests
- page.within(preview_container) do
- expect(page).to have_content('Your message here')
- end
-
page.within(first_message_container) do
expect(page).to have_content('changed test message')
end
diff --git a/spec/features/admin_variables_spec.rb b/spec/features/admin_variables_spec.rb
index 744d18a3b6d..91e7a46849c 100644
--- a/spec/features/admin_variables_spec.rb
+++ b/spec/features/admin_variables_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'Instance variables', :js, feature_category: :secrets_management
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
+ stub_feature_flags(ci_variable_drawer: false)
visit page_path
wait_for_requests
end
@@ -29,4 +30,14 @@ RSpec.describe 'Instance variables', :js, feature_category: :secrets_management
it_behaves_like 'variable list', is_admin: true
end
+
+ context 'when ci_variable_drawer FF is enabled' do
+ before do
+ stub_feature_flags(ci_variable_drawer: true)
+ visit page_path
+ wait_for_requests
+ end
+
+ it_behaves_like 'variable list drawer', is_admin: true
+ end
end
diff --git a/spec/features/broadcast_messages_spec.rb b/spec/features/broadcast_messages_spec.rb
index 2e0f4e3b83b..98f87face15 100644
--- a/spec/features/broadcast_messages_spec.rb
+++ b/spec/features/broadcast_messages_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
+ include Spec::Support::Helpers::ModalHelpers
+
let_it_be(:user) { create(:user) }
let(:path) { explore_projects_path }
@@ -127,6 +129,8 @@ RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
find("[data-testid='delete-message-#{message.id}']").click
end
+ accept_gl_confirm(button_text: 'Delete message')
+
visit path
expect_no_broadcast_message
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 04b7f1ca821..747d09f5d08 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -117,9 +117,7 @@ RSpec.describe 'Dashboard Projects', feature_category: :groups_and_projects do
it 'shows the empty state when there are no starred projects' do
visit(starred_dashboard_projects_path)
- element = page.find('.row.empty-state')
-
- expect(element).to have_content("You don't have starred projects yet.")
+ expect(page).to have_text(s_("StarredProjectsEmptyState|You don't have starred projects yet."))
end
it 'shows only starred projects' do
diff --git a/spec/features/dashboard/todos/todos_filtering_spec.rb b/spec/features/dashboard/todos/todos_filtering_spec.rb
index ea8c7e800c5..990b2f18120 100644
--- a/spec/features/dashboard/todos/todos_filtering_spec.rb
+++ b/spec/features/dashboard/todos/todos_filtering_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe 'Dashboard > User filters todos', :js, feature_category: :team_pl
before do
create(:todo, :build_failed, user: user_1, author: user_2, project: project_1, target: merge_request)
create(:todo, :marked, user: user_1, author: user_2, project: project_1, target: issue1)
- create(:todo, :review_requested, user: user_1, author: user_2, project: project_1, target: issue1)
+ create(:todo, :review_requested, user: user_1, author: user_2, project: project_1, target: merge_request)
end
it 'filters by Assigned' do
diff --git a/spec/features/group_variables_spec.rb b/spec/features/group_variables_spec.rb
index 3e87c90e7dc..b4a0678cb5f 100644
--- a/spec/features/group_variables_spec.rb
+++ b/spec/features/group_variables_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe 'Group variables', :js, feature_category: :secrets_management do
before do
group.add_owner(user)
gitlab_sign_in(user)
+
+ stub_feature_flags(ci_variable_drawer: false)
visit page_path
wait_for_requests
end
@@ -27,4 +29,14 @@ RSpec.describe 'Group variables', :js, feature_category: :secrets_management do
it_behaves_like 'variable list'
end
+
+ context 'when ci_variable_drawer FF is enabled' do
+ before do
+ stub_feature_flags(ci_variable_drawer: true)
+ visit page_path
+ wait_for_requests
+ end
+
+ it_behaves_like 'variable list drawer'
+ end
end
diff --git a/spec/features/groups/container_registry_spec.rb b/spec/features/groups/container_registry_spec.rb
index ab8d8238bdc..d68b4ccf8f8 100644
--- a/spec/features/groups/container_registry_spec.rb
+++ b/spec/features/groups/container_registry_spec.rb
@@ -49,6 +49,7 @@ RSpec.describe 'Container Registry', :js, feature_category: :container_registry
it 'list page has a list of images' do
visit_container_registry
+ expect(page).to have_content '1 Image repository'
expect(page).to have_content 'my/image'
end
diff --git a/spec/features/groups/labels/edit_spec.rb b/spec/features/groups/labels/edit_spec.rb
index 2cbe44e11bf..6e056d35435 100644
--- a/spec/features/groups/labels/edit_spec.rb
+++ b/spec/features/groups/labels/edit_spec.rb
@@ -32,6 +32,6 @@ RSpec.describe 'Edit group label', feature_category: :team_planning do
click_link 'Delete label'
end
- expect(page).to have_content("#{label.title} deleted permanently")
+ expect(page).to have_content("#{label.title} was removed").and have_no_content("#{label.title}</span>")
end
end
diff --git a/spec/features/groups/settings/group_badges_spec.rb b/spec/features/groups/settings/group_badges_spec.rb
index 4a4cb297fcf..1f16a288882 100644
--- a/spec/features/groups/settings/group_badges_spec.rb
+++ b/spec/features/groups/settings/group_badges_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
page.within '.badge-settings' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
expect(rows[0]).to have_content badge_1.link_url
expect(rows[1]).to have_content badge_2.link_url
@@ -33,6 +33,7 @@ RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
context 'adding a badge', :js do
it 'user can preview a badge' do
+ click_button 'Add badge'
page.within '.badge-settings form' do
fill_in 'badge-link-url', with: badge_link_url
fill_in 'badge-image-url', with: badge_image_url
@@ -44,6 +45,7 @@ RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
end
it do
+ click_button 'Add badge'
page.within '.badge-settings' do
fill_in 'badge-link-url', with: badge_link_url
fill_in 'badge-image-url', with: badge_image_url
@@ -51,7 +53,7 @@ RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
click_button 'Add badge'
wait_for_requests
- within '.card-body' do
+ within '.gl-card-body' do
expect(find('a')[:href]).to eq badge_link_url
expect(find('a img')[:src]).to eq badge_image_url
end
@@ -63,32 +65,35 @@ RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
it 'form is shown when clicking edit button in list' do
page.within '.badge-settings' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
rows[1].find('[aria-label="Edit"]').click
+ end
- within 'form' do
- expect(find('#badge-link-url').value).to eq badge_2.link_url
- expect(find('#badge-image-url').value).to eq badge_2.image_url
- end
+ page.within '.gl-modal' do
+ expect(find('#badge-link-url').value).to eq badge_2.link_url
+ expect(find('#badge-image-url').value).to eq badge_2.image_url
end
end
it 'updates a badge when submitting the edit form' do
page.within '.badge-settings' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
rows[1].find('[aria-label="Edit"]').click
- within 'form' do
- fill_in 'badge-link-url', with: badge_link_url
- fill_in 'badge-image-url', with: badge_image_url
+ end
- click_button 'Save changes'
- wait_for_requests
- end
+ page.within '.gl-modal' do
+ fill_in 'badge-link-url', with: badge_link_url
+ fill_in 'badge-image-url', with: badge_image_url
- rows = all('.card-body > div')
+ click_button 'Save changes'
+ wait_for_requests
+ end
+
+ page.within '.badge-settings' do
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
expect(rows[1]).to have_content badge_link_url
end
@@ -102,7 +107,7 @@ RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
it 'shows a modal when deleting a badge' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
click_delete_button(rows[1])
@@ -112,14 +117,14 @@ RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
it 'deletes a badge when confirming the modal' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
click_delete_button(rows[1])
find('.modal .btn-danger').click
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 1
expect(rows[0]).to have_content badge_1.link_url
end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
index 2be0c95addd..d6feb008d47 100644
--- a/spec/features/incidents/incident_details_spec.rb
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
private
def dropdown_options
- widget.all('[data-testid="status-dropdown-item"]', count: 3)
+ widget.all('[data-testid="escalation-status-dropdown"] .gl-new-dropdown-item', count: 3)
end
def select_resolved(options)
diff --git a/spec/features/incidents/user_views_incident_spec.rb b/spec/features/incidents/user_views_incident_spec.rb
index bbf579b09a8..65bd88582db 100644
--- a/spec/features/incidents/user_views_incident_spec.rb
+++ b/spec/features/incidents/user_views_incident_spec.rb
@@ -75,35 +75,4 @@ RSpec.describe "User views incident", feature_category: :incident_management do
expect(page).not_to have_button('Incident actions')
end
end
-
- describe 'user status' do
- context 'when showing status of the author of the incident' do
- subject { visit(incident_project_issues_path(project, incident)) }
-
- it_behaves_like 'showing user status' do
- let(:user_with_status) { user }
- end
- end
-
- context 'when status message has an emoji', :js do
- let_it_be(:message) { 'My status with an emoji' }
- let_it_be(:message_emoji) { 'basketball' }
- let_it_be(:status) { create(:user_status, user: user, emoji: 'smirk', message: "#{message} :#{message_emoji}:") }
-
- it 'correctly renders the emoji' do
- wait_for_requests
-
- tooltip_span = page.first(".user-status-emoji[title^='#{message}']")
- tooltip_span.hover
-
- wait_for_requests
-
- tooltip = page.find('.tooltip .tooltip-inner')
-
- page.within(tooltip) do
- expect(page).to have_emoji(message_emoji)
- end
- end
- end
- end
end
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index 57270e8f7c7..0a06a052bc2 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe 'Dropdown assignee', :js, feature_category: :team_planning do
sign_in(subgroup_user)
end
- it 'shows inherited, direct, and invited group members but not descendent members', :aggregate_failures do
+ it 'shows inherited, direct, and invited group members including descendent members', :aggregate_failures do
visit issues_group_path(subgroup)
select_tokens 'Assignee', '='
@@ -100,8 +100,8 @@ RSpec.describe 'Dropdown assignee', :js, feature_category: :team_planning do
expect(page).to have_text group_user.name
expect(page).to have_text subgroup_user.name
expect(page).to have_text invited_to_group_group_user.name
- expect(page).not_to have_text subsubgroup_user.name
- expect(page).not_to have_text invited_to_project_group_user.name
+ expect(page).to have_text subsubgroup_user.name
+ expect(page).to have_text invited_to_project_group_user.name
visit project_issues_path(subgroup_project)
@@ -113,5 +113,33 @@ RSpec.describe 'Dropdown assignee', :js, feature_category: :team_planning do
expect(page).to have_text invited_to_group_group_user.name
expect(page).not_to have_text subsubgroup_user.name
end
+
+ context 'when new_graphql_users_autocomplete is disabled' do
+ before do
+ stub_feature_flags(new_graphql_users_autocomplete: false)
+ end
+
+ it 'shows inherited, direct, and invited group members but not descendent members', :aggregate_failures do
+ visit issues_group_path(subgroup)
+
+ select_tokens 'Assignee', '='
+
+ expect(page).to have_text group_user.name
+ expect(page).to have_text subgroup_user.name
+ expect(page).to have_text invited_to_group_group_user.name
+ expect(page).not_to have_text subsubgroup_user.name
+ expect(page).not_to have_text invited_to_project_group_user.name
+
+ visit project_issues_path(subgroup_project)
+
+ select_tokens 'Assignee', '='
+
+ expect(page).to have_text group_user.name
+ expect(page).to have_text subgroup_user.name
+ expect(page).to have_text invited_to_project_group_user.name
+ expect(page).to have_text invited_to_group_group_user.name
+ expect(page).not_to have_text subsubgroup_user.name
+ end
+ end
end
end
diff --git a/spec/features/issues/incident_issue_spec.rb b/spec/features/issues/incident_issue_spec.rb
index 145b51d207a..5197f5d1e33 100644
--- a/spec/features/issues/incident_issue_spec.rb
+++ b/spec/features/issues/incident_issue_spec.rb
@@ -70,10 +70,6 @@ RSpec.describe 'Incident Detail', :js, feature_category: :team_planning do
# Linked Issues/MRs and comment box are hidden on page
expect(hidden_items.count).to eq(0)
-
- # does not show the edit title and description button
- edit_button = find_all('[aria-label="Edit title and description"]', wait: false)
- expect(edit_button.count).to eq(0)
end
end
end
diff --git a/spec/features/issues/note_polling_spec.rb b/spec/features/issues/note_polling_spec.rb
index 23f9347d726..a390dca6822 100644
--- a/spec/features/issues/note_polling_spec.rb
+++ b/spec/features/issues/note_polling_spec.rb
@@ -10,17 +10,31 @@ RSpec.describe 'Issue notes polling', :js, feature_category: :team_planning do
let(:issue) { create(:issue, project: project) }
describe 'creates' do
- before do
+ it 'displays the new comment' do
visit project_issue_path(project, issue)
close_rich_text_promo_popover_if_present
- end
- it 'displays the new comment' do
note = create(:note, noteable: issue, project: project, note: 'Looks good!')
wait_for_requests
expect(page).to have_selector("#note_#{note.id}", text: 'Looks good!')
end
+
+ context 'when action_cable_notes is disabled' do
+ before do
+ stub_feature_flags(action_cable_notes: false)
+ end
+
+ it 'displays the new comment' do
+ visit project_issue_path(project, issue)
+ close_rich_text_promo_popover_if_present
+
+ note = create(:note, noteable: issue, project: project, note: 'Looks good!')
+ wait_for_requests
+
+ expect(page).to have_selector("#note_#{note.id}", text: 'Looks good!')
+ end
+ end
end
describe 'updates' do
diff --git a/spec/features/issues/service_desk_spec.rb b/spec/features/issues/service_desk_spec.rb
index 923967c52c0..1b99c8b39d3 100644
--- a/spec/features/issues/service_desk_spec.rb
+++ b/spec/features/issues/service_desk_spec.rb
@@ -184,6 +184,42 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_des
stub_feature_flags(frontend_caching: true)
end
+ context 'when there are no issues' do
+ describe 'service desk empty state' do
+ it 'displays the large empty state, documentation, and the email address' do
+ visit service_desk_project_issues_path(project)
+
+ aggregate_failures do
+ expect(page).to have_css('.empty-state')
+ expect(page).to have_text('Use Service Desk to connect with your users')
+ expect(page).to have_link('Learn more about Service Desk', href: help_page_path('user/project/service_desk/index'))
+ expect(page).not_to have_link('Enable Service Desk')
+ expect(page).to have_content(project.service_desk_address)
+ end
+ end
+
+ context 'when user does not have permission to edit project settings' do
+ before do
+ user_2 = create(:user)
+
+ project.add_guest(user_2)
+ sign_in(user_2)
+ visit service_desk_project_issues_path(project)
+ end
+
+ it 'displays the large info box and the documentation link' do
+ aggregate_failures do
+ expect(page).to have_css('.empty-state')
+ expect(page).to have_text('Use Service Desk to connect with your users')
+ expect(page).to have_link('Learn more about Service Desk', href: help_page_path('user/project/service_desk/index'))
+ expect(page).not_to have_link('Enable Service Desk')
+ expect(page).not_to have_content(project.service_desk_address)
+ end
+ end
+ end
+ end
+ end
+
context 'when there are issues' do
let_it_be(:project) { create(:project, :private, service_desk_enabled: true) }
let_it_be(:other_user) { create(:user) }
@@ -197,7 +233,7 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_des
it 'displays the small info box, documentation, a button to configure service desk, and the address' do
aggregate_failures do
- expect(page).to have_link('Learn more', href: help_page_path('user/project/service_desk'))
+ expect(page).to have_link('Learn more about Service Desk', href: help_page_path('user/project/service_desk/index'))
expect(page).not_to have_link('Enable Service Desk')
expect(page).to have_content(project.service_desk_address)
end
diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
index ecb899a7ca2..0c50b7b2475 100644
--- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
@@ -112,6 +112,18 @@ RSpec.describe 'User creates branch and merge request on issue page', :js, featu
expect(page).to have_selector('.ref-selector', text: branch_name)
expect(page).to have_current_path project_tree_path(project, branch_name), ignore_query: true
end
+
+ context 'when source branch is non-default' do
+ let(:source_branch) { 'feature' }
+
+ it 'creates a branch' do
+ select_dropdown_option('create-branch', branch_name, source_branch)
+ wait_for_requests
+
+ expect(page).to have_selector('.ref-selector', text: branch_name)
+ expect(page).to have_current_path project_tree_path(project, branch_name), ignore_query: true
+ end
+ end
end
context 'when branch name is invalid' do
@@ -231,12 +243,13 @@ RSpec.describe 'User creates branch and merge request on issue page', :js, featu
private
- def select_dropdown_option(option, branch_name = nil)
+ def select_dropdown_option(option, branch_name = nil, source_branch = nil)
find('.create-mr-dropdown-wrap .dropdown-toggle').click
find("li[data-value='#{option}']").click
- if branch_name
- find('.js-branch-name').set(branch_name)
+ if branch_name || source_branch
+ find('.js-branch-name').set(branch_name) if branch_name
+ find('.js-ref').set(source_branch) if source_branch
# Javascript debounces AJAX calls.
# So we have to wait until AJAX requests are started.
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index 0938f9c7d12..45d95db8ff1 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe "Issues > User edits issue", :js, feature_category: :team_plannin
click_button("Switch to rich text editing")
end
- expect(issuable_form).not_to have_selector(content_editor_focused_selector)
+ expect(issuable_form).to have_selector(content_editor_focused_selector)
refresh
@@ -142,11 +142,11 @@ RSpec.describe "Issues > User edits issue", :js, feature_category: :team_plannin
click_button("Switch to plain text editing")
end
- expect(issuable_form).not_to have_selector(markdown_field_focused_selector)
+ expect(issuable_form).to have_selector(markdown_field_focused_selector)
end
end
- describe 'update labels' do
+ describe 'update labels', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/345229' do
it 'will not send ajax request when no data is changed' do
page.within '.labels' do
click_on 'Edit'
diff --git a/spec/features/issues/user_toggles_subscription_spec.rb b/spec/features/issues/user_toggles_subscription_spec.rb
index 00b04c10d33..af8a31afd5f 100644
--- a/spec/features/issues/user_toggles_subscription_spec.rb
+++ b/spec/features/issues/user_toggles_subscription_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe "User toggles subscription", :js, feature_category: :team_plannin
expect(subscription_button).to have_css("button.is-checked")
# Toggle subscription.
- find('[data-testid="subscription-toggle"]').click
+ subscription_button.find('button').click
wait_for_requests
# Check we're unsubscribed.
@@ -42,7 +42,7 @@ RSpec.describe "User toggles subscription", :js, feature_category: :team_plannin
end
context 'when project emails are disabled' do
- let(:project) { create(:project_empty_repo, :public, emails_disabled: true) }
+ let_it_be(:project) { create(:project_empty_repo, :public, emails_enabled: false) }
it 'is disabled' do
expect(page).to have_content('Disabled by project owner')
@@ -66,7 +66,7 @@ RSpec.describe "User toggles subscription", :js, feature_category: :team_plannin
expect(subscription_button).to have_css("button:not(.is-checked)")
# Toggle subscription.
- find('[data-testid="subscription-toggle"]').click
+ subscription_button.find('button').click
wait_for_requests
# Check we're subscribed.
diff --git a/spec/features/issues/user_views_issue_spec.rb b/spec/features/issues/user_views_issue_spec.rb
index 17ff3e0c702..00aa7685e9d 100644
--- a/spec/features/issues/user_views_issue_spec.rb
+++ b/spec/features/issues/user_views_issue_spec.rb
@@ -39,42 +39,4 @@ RSpec.describe "User views issue", feature_category: :team_planning do
expect(page).not_to have_link('Close issue')
end
end
-
- describe 'user status' do
- subject { visit(project_issue_path(project, issue)) }
-
- context 'when showing status of the author of the issue' do
- it_behaves_like 'showing user status' do
- let(:user_with_status) { user }
- end
- end
-
- context 'when showing status of a user who commented on an issue', :js do
- it_behaves_like 'showing user status' do
- let(:user_with_status) { user }
- end
- end
-
- context 'when status message has an emoji', :js do
- let_it_be(:message) { 'My status with an emoji' }
- let_it_be(:message_emoji) { 'basketball' }
- let_it_be(:status) { create(:user_status, user: user, emoji: 'smirk', message: "#{message} :#{message_emoji}:") }
-
- it 'correctly renders the emoji' do
- wait_for_requests
-
- tooltip_span = page.first(".user-status-emoji[title^='#{message}']")
-
- tooltip_span.hover
-
- wait_for_requests
-
- tooltip = page.find('.tooltip .tooltip-inner')
-
- page.within(tooltip) do
- expect(page).to have_emoji(message_emoji)
- end
- end
- end
- end
end
diff --git a/spec/features/markdown/gitlab_flavored_markdown_spec.rb b/spec/features/markdown/gitlab_flavored_markdown_spec.rb
index 36b02b17924..cd011315ed0 100644
--- a/spec/features/markdown/gitlab_flavored_markdown_spec.rb
+++ b/spec/features/markdown/gitlab_flavored_markdown_spec.rb
@@ -57,16 +57,21 @@ RSpec.describe "GitLab Flavored Markdown", feature_category: :team_planning do
describe "for issues", :js do
before do
- @other_issue = create(:issue,
- author: user,
- assignees: [user],
- project: project)
- @issue = create(:issue,
- author: user,
- assignees: [user],
- project: project,
- title: "fix #{@other_issue.to_reference}",
- description: "ask #{fred.to_reference} for details")
+ @other_issue = create(
+ :issue,
+ author: user,
+ assignees: [user],
+ project: project
+ )
+
+ @issue = create(
+ :issue,
+ author: user,
+ assignees: [user],
+ project: project,
+ title: "fix #{@other_issue.to_reference}",
+ description: "ask #{fred.to_reference} for details"
+ )
@note = create(:note_on_issue, noteable: @issue, project: @issue.project, note: "Hello world")
end
@@ -112,10 +117,12 @@ RSpec.describe "GitLab Flavored Markdown", feature_category: :team_planning do
describe "for milestones" do
before do
- @milestone = create(:milestone,
- project: project,
- title: "fix #{issue.to_reference}",
- description: "ask #{fred.to_reference} for details")
+ @milestone = create(
+ :milestone,
+ project: project,
+ title: "fix #{issue.to_reference}",
+ description: "ask #{fred.to_reference} for details"
+ )
end
it "renders title in milestones#index" do
diff --git a/spec/features/merge_request/creating_mr_for_projects_with_different_visibility_spec.rb b/spec/features/merge_request/creating_mr_for_projects_with_different_visibility_spec.rb
new file mode 100644
index 00000000000..3597c2ed1c7
--- /dev/null
+++ b/spec/features/merge_request/creating_mr_for_projects_with_different_visibility_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge Request > Selecting projects with different visibility', feature_category: :source_code_management do
+ include ProjectForksHelper
+
+ let_it_be(:public_project) { create(:project, :public, :small_repo) }
+ let_it_be(:internal_project) { create(:project, :internal, :small_repo) }
+ let_it_be(:private_project) { create(:project, :private, :small_repo) }
+ let(:private_fork_public_project) do
+ fork_project(public_project, nil, target_project: create(:project, :private, :small_repo))
+ end
+
+ let(:private_fork_internal_project) do
+ fork_project(internal_project, nil, target_project: create(:project, :private, :small_repo))
+ end
+
+ let(:internal_fork_public_project) do
+ fork_project(public_project, nil, target_project: create(:project, :internal, :small_repo))
+ end
+
+ let(:user) { source_project.creator }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'warnings for more permissive visibility in target project', :js do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:source_project, :target_project, :warning_message) do
+ ref(:private_fork_internal_project) |
+ ref(:internal_project) |
+ _('This merge request is from a private project to an internal project.')
+
+ ref(:private_fork_public_project) |
+ ref(:public_project) |
+ _('This merge request is from a private project to a public project.')
+
+ ref(:internal_fork_public_project) |
+ ref(:public_project) |
+ _('This merge request is from an internal project to a public project.')
+ end
+
+ with_them do
+ it 'shows a warning message' do
+ visit project_new_merge_request_path(source_project,
+ merge_request: { source_branch: 'master', target_project_id: target_project.id })
+ expect(page).to have_content(warning_message)
+ end
+ end
+
+ describe 'warnings for more permissive repository access level in target project' do
+ let(:source_project) do
+ fork_project(internal_project, nil, target_project: create(:project, :internal, :small_repo))
+ end
+
+ let(:target_project) { internal_project }
+
+ let(:warning_message) do
+ "Project #{source_project.name_with_namespace} has more restricted access settings than " \
+ "#{target_project.name_with_namespace}. To avoid exposing private changes, make sure " \
+ "you're submitting changes to the correct project."
+ end
+
+ context 'when the source repository access level is private' do
+ before do
+ source_access_level = Featurable::PRIVATE
+ source_project.project_feature.update!(
+ repository_access_level: source_access_level,
+ merge_requests_access_level: source_access_level,
+ builds_access_level: source_access_level
+ )
+ end
+
+ it 'shows a warning' do
+ visit project_new_merge_request_path(source_project,
+ merge_request: { source_branch: 'master', target_project_id: target_project.id })
+ expect(page).to have_content(warning_message)
+ end
+
+ context 'when target project is private' do
+ let(:source_project) do
+ fork_project(private_project, nil, target_project: create(:project, :private, :small_repo))
+ end
+
+ let(:target_project) { private_project }
+
+ it 'does not show a warning' do
+ visit project_new_merge_request_path(source_project,
+ merge_request: { source_branch: 'master', target_project_id: target_project.id })
+
+ expect(page).not_to have_content(warning_message)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_edits_mr_spec.rb b/spec/features/merge_request/user_edits_mr_spec.rb
index ab7183775b9..bf237e07ac8 100644
--- a/spec/features/merge_request/user_edits_mr_spec.rb
+++ b/spec/features/merge_request/user_edits_mr_spec.rb
@@ -184,11 +184,6 @@ RSpec.describe 'Merge request > User edits MR', feature_category: :code_review_w
it 'allows to unselect "Remove source branch"', :js do
expect(merge_request.merge_params['force_remove_source_branch']).to be_truthy
- begin
- visit edit_project_merge_request_path(target_project, merge_request)
- rescue Selenium::WebDriver::Error::UnexpectedAlertOpenError
- end
-
uncheck 'Delete source branch when merge request is accepted'
click_button 'Save changes'
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 8c4dbf5ebfd..add8e9f30de 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -28,12 +28,26 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
let(:expected_detached_mr_tag) { 'merge request' }
before do
+ # rubocop:disable RSpec/AvoidConditionalStatements
+ stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
+ # rubocop:enable RSpec/AvoidConditionalStatements
+
stub_application_setting(auto_devops_enabled: false)
stub_ci_pipeline_yaml_file(YAML.dump(config))
project.add_maintainer(user)
sign_in(user)
end
+ # rubocop:disable RSpec/AvoidConditionalStatements
+ def mr_widget_title
+ if Gitlab.ee?
+ 'to be merged automatically when all merge checks pass'
+ else
+ 'to be merged automatically when the pipeline succeeds'
+ end
+ end
+ # rubocop:enable RSpec/AvoidConditionalStatements
+
context 'when a user created a merge request in the parent project' do
let!(:merge_request) do
create(
@@ -163,7 +177,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when detached merge request pipeline is pending' do
it 'waits the head pipeline' do
- expect(page).to have_content('to be merged automatically when the pipeline succeeds')
+ expect(page).to have_content mr_widget_title
expect(page).to have_button('Cancel auto-merge')
end
end
@@ -177,7 +191,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
it 'waits the head pipeline' do
- expect(page).to have_content('to be merged automatically when the pipeline succeeds')
+ expect(page).to have_content mr_widget_title
expect(page).to have_button('Cancel auto-merge')
end
end
@@ -388,7 +402,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when detached merge request pipeline is pending' do
it 'waits the head pipeline' do
- expect(page).to have_content('to be merged automatically when the pipeline succeeds')
+ expect(page).to have_content mr_widget_title
expect(page).to have_button('Cancel auto-merge')
end
end
@@ -397,7 +411,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
before do
detached_merge_request_pipeline.reload.succeed!
- wait_for_requests
+ refresh
end
it 'merges the merge request' do
@@ -414,7 +428,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
it 'waits the head pipeline' do
- expect(page).to have_content('to be merged automatically when the pipeline succeeds')
+ expect(page).to have_content mr_widget_title
expect(page).to have_button('Cancel auto-merge')
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 3cac24838a3..75df93d1a6c 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -197,7 +197,8 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
it 'shows head pipeline information' do
within '.ci-widget-content' do
- expect(page).to have_content("Pipeline ##{pipeline.id} pending " \
+ expect(page).to have_content("Pipeline ##{pipeline.id} pending")
+ expect(page).to have_content("Pipeline pending " \
"for #{pipeline.short_sha} " \
"on #{pipeline.ref}")
end
@@ -227,7 +228,8 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
shared_examples 'pipeline widget' do
it 'shows head pipeline information', :sidekiq_might_not_need_inline do
within '.ci-widget-content' do
- expect(page).to have_content("Merge request pipeline ##{pipeline.id} pending for #{pipeline.short_sha}")
+ expect(page).to have_content("Merge request pipeline ##{pipeline.id} pending")
+ expect(page).to have_content("Merge request pipeline pending for #{pipeline.short_sha}")
end
end
end
@@ -266,7 +268,8 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
shared_examples 'pipeline widget' do
it 'shows head pipeline information', :sidekiq_might_not_need_inline do
within '.ci-widget-content' do
- expect(page).to have_content("Merged result pipeline ##{pipeline.id} pending for #{pipeline.short_sha}")
+ expect(page).to have_content("Merged result pipeline ##{pipeline.id} pending")
+ expect(page).to have_content("Merged result pipeline pending for #{pipeline.short_sha}")
end
end
end
diff --git a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
index 5756218d20f..9883434eb68 100644
--- a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
+++ b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
@@ -97,8 +97,8 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js, feature_cat
describe 'build list build item' do
let(:build_item) do
- find('.mini-pipeline-graph-dropdown-item')
- first('.mini-pipeline-graph-dropdown-item')
+ find('.pipeline-job-item')
+ first('.pipeline-job-item')
end
it 'visits the build page when clicked' do
diff --git a/spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb b/spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb
new file mode 100644
index 00000000000..2c7567b1b40
--- /dev/null
+++ b/spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request note updates in real time', :js, feature_category: :code_review_workflow do
+ include NoteInteractionHelpers
+ include ContentEditorHelpers
+
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ before do
+ visit project_merge_request_path(project, merge_request)
+ close_rich_text_promo_popover_if_present
+ end
+
+ describe 'new notes' do
+ it 'displays the new note' do
+ note = create(:note, noteable: merge_request, project: project, note: 'Looks good!')
+
+ expect(page).to have_selector("#note_#{note.id}", text: 'Looks good!')
+ end
+ end
+
+ describe 'updated notes' do
+ let(:note_text) { "Hello World" }
+ let(:updated_text) { "Bye World" }
+ let!(:existing_note) do
+ create(:discussion_note_on_merge_request, noteable: merge_request, project: project, note: note_text)
+ end
+
+ it 'displays the updated note', :aggregate_failures do
+ expect(page).to have_selector("#note_#{existing_note.id}", text: note_text)
+
+ existing_note.update!(note: updated_text)
+ expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text)
+
+ existing_note.resolve!(merge_request.author)
+ expect(page).to have_selector(
+ "#note_#{existing_note.id} .discussion-resolved-text",
+ text: /\AResolved .* by #{merge_request.author.name}\z/
+ )
+ end
+ end
+end
diff --git a/spec/features/nav/top_nav_tooltip_spec.rb b/spec/features/nav/top_nav_tooltip_spec.rb
index 17828778112..1afd1981a86 100644
--- a/spec/features/nav/top_nav_tooltip_spec.rb
+++ b/spec/features/nav/top_nav_tooltip_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'top nav tooltips', :js, feature_category: :navigation do
end
it 'clicking new dropdown hides tooltip', :aggregate_failures,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/382786' do
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/382786' do
btn = '#js-onboarding-new-project-link'
page.find(btn).hover
diff --git a/spec/features/oauth_provider_authorize_spec.rb b/spec/features/oauth_provider_authorize_spec.rb
index 7638563b4a3..310a2e4c2de 100644
--- a/spec/features/oauth_provider_authorize_spec.rb
+++ b/spec/features/oauth_provider_authorize_spec.rb
@@ -9,11 +9,13 @@ RSpec.describe 'OAuth Provider', feature_category: :system_access do
before do
sign_in(user)
- visit oauth_authorization_path(client_id: application.uid,
- redirect_uri: application.redirect_uri.split.first,
- response_type: 'code',
- state: 'my_state',
- scope: 'read_user')
+ visit oauth_authorization_path(
+ client_id: application.uid,
+ redirect_uri: application.redirect_uri.split.first,
+ response_type: 'code',
+ state: 'my_state',
+ scope: 'read_user'
+ )
end
it_behaves_like 'Secure OAuth Authorizations'
diff --git a/spec/features/participants_autocomplete_spec.rb b/spec/features/participants_autocomplete_spec.rb
index d8501116134..dbeca601617 100644
--- a/spec/features/participants_autocomplete_spec.rb
+++ b/spec/features/participants_autocomplete_spec.rb
@@ -62,8 +62,7 @@ RSpec.describe 'Member autocomplete', :js, feature_category: :groups_and_project
context 'adding a new note on a Merge Request' do
let(:noteable) do
- create(:merge_request, source_project: project,
- target_project: project, author: author)
+ create(:merge_request, source_project: project, target_project: project, author: author)
end
before do
diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb
index e190dfda937..b6c96555767 100644
--- a/spec/features/profile_spec.rb
+++ b/spec/features/profile_spec.rb
@@ -38,8 +38,7 @@ RSpec.describe 'Profile account page', :js, feature_category: :user_profile do
expect(page).to have_content('Account scheduled for removal')
expect(
- Users::GhostUserMigration.where(user: user,
- initiator_user: user)
+ Users::GhostUserMigration.where(user: user, initiator_user: user)
).to be_exists
end
@@ -71,7 +70,7 @@ RSpec.describe 'Profile account page', :js, feature_category: :user_profile do
previous_token = ''
- within('[data-testid="feed-token-container"]') do
+ within_testid('feed-token-container') do
previous_token = find_field('Feed token').value
click_link('reset this token')
@@ -79,7 +78,7 @@ RSpec.describe 'Profile account page', :js, feature_category: :user_profile do
accept_gl_confirm
- within('[data-testid="feed-token-container"]') do
+ within_testid('feed-token-container') do
click_button('Click to reveal')
expect(find_field('Feed token').value).not_to eq(previous_token)
@@ -93,7 +92,7 @@ RSpec.describe 'Profile account page', :js, feature_category: :user_profile do
previous_token = ''
- within('[data-testid="incoming-email-token-container"]') do
+ within_testid('incoming-email-token-container') do
previous_token = find_field('Incoming email token').value
click_link('reset this token')
@@ -101,7 +100,7 @@ RSpec.describe 'Profile account page', :js, feature_category: :user_profile do
accept_gl_confirm
- within('[data-testid="incoming-email-token-container"]') do
+ within_testid('incoming-email-token-container') do
click_button('Click to reveal')
expect(find_field('Incoming email token').value).not_to eq(previous_token)
diff --git a/spec/features/profiles/active_sessions_spec.rb b/spec/features/profiles/active_sessions_spec.rb
index 0de4ad47f9a..2e800ae88b6 100644
--- a/spec/features/profiles/active_sessions_spec.rb
+++ b/spec/features/profiles/active_sessions_spec.rb
@@ -57,9 +57,7 @@ RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state, fe
using_session :session1 do
visit profile_active_sessions_path
- expect(page).to(
- have_selector('ul.list-group li.list-group-item', text: 'Signed in on',
- count: 2))
+ expect(page).to(have_selector('ul.list-group li.list-group-item', text: 'Signed in on', count: 2))
expect(page).to have_content(
'127.0.0.1 ' \
diff --git a/spec/features/profiles/gpg_keys_spec.rb b/spec/features/profiles/gpg_keys_spec.rb
index f39d9ddaf56..38abf9d20b0 100644
--- a/spec/features/profiles/gpg_keys_spec.rb
+++ b/spec/features/profiles/gpg_keys_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Profile > GPG Keys', feature_category: :user_profile do
end
it 'saves the new key' do
+ click_button('Add new key')
fill_in('Key', with: GpgHelpers::User2.public_key)
click_button('Add key')
@@ -24,6 +25,7 @@ RSpec.describe 'Profile > GPG Keys', feature_category: :user_profile do
end
it 'with multiple subkeys' do
+ click_button('Add new key')
fill_in('Key', with: GpgHelpers::User3.public_key)
click_button('Add key')
@@ -52,7 +54,10 @@ RSpec.describe 'Profile > GPG Keys', feature_category: :user_profile do
click_link('Remove')
- expect(page).to have_content('Your GPG keys (0)')
+ expect(page).to have_content('Your GPG keys')
+ page.within('.gl-new-card-count') do
+ expect(page).to have_content('0')
+ end
end
it 'user revokes a key via the key index' do
@@ -63,7 +68,10 @@ RSpec.describe 'Profile > GPG Keys', feature_category: :user_profile do
click_link('Revoke')
- expect(page).to have_content('Your GPG keys (0)')
+ expect(page).to have_content('Your GPG keys')
+ page.within('.gl-new-card-count') do
+ expect(page).to have_content('0')
+ end
expect(gpg_signature.reload).to have_attributes(
verification_status: 'unknown_key',
diff --git a/spec/features/profiles/keys_spec.rb b/spec/features/profiles/keys_spec.rb
index ae61f1cf492..cb270b669d3 100644
--- a/spec/features/profiles/keys_spec.rb
+++ b/spec/features/profiles/keys_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do
end
it 'auto-populates the title', :js do
+ click_button('Add new key')
fill_in('Key', with: attributes_for(:key).fetch(:key))
expect(page).to have_field("Title", with: "dummy@gitlab.com")
@@ -23,11 +24,12 @@ RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do
it 'saves the new key' do
attrs = attributes_for(:key)
+ click_button('Add new key')
fill_in('Key', with: attrs[:key])
fill_in('Title', with: attrs[:title])
click_button('Add key')
- expect(page).to have_content("Title: #{attrs[:title]}")
+ expect(page).to have_content(format(s_('Profiles|SSH Key: %{title}'), title: attrs[:title]))
expect(page).to have_content(attrs[:key])
expect(find('[data-testid="breadcrumb-current-link"]')).to have_link(attrs[:title])
end
@@ -35,6 +37,7 @@ RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do
it 'shows a confirmable warning if the key begins with an algorithm name that is unsupported' do
attrs = attributes_for(:key)
+ click_button('Add new key')
fill_in('Key', with: 'unsupported-ssh-rsa key')
fill_in('Title', with: attrs[:title])
click_button('Add key')
@@ -60,6 +63,7 @@ RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do
it 'shows a validation error' do
attrs = attributes_for(:key)
+ click_button('Add new key')
fill_in('Key', with: attrs[:key])
fill_in('Title', with: attrs[:title])
click_button('Add key')
@@ -79,13 +83,16 @@ RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do
def destroy_key(path, action, confirmation_button)
visit path
- page.click_button(action)
+ page.find("button[aria-label=\"#{action}\"]").click
page.within('.modal') do
page.click_button(confirmation_button)
end
- expect(page).to have_content('Your SSH keys (0)')
+ expect(page).to have_content('Your SSH keys')
+ page.within('.gl-new-card-count') do
+ expect(page).to have_content('0')
+ end
end
describe 'User removes a key', :js do
@@ -111,11 +118,13 @@ RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do
let!(:commit) { project.commit('ssh-signed-commit') }
let!(:signature) do
- create(:ssh_signature,
- project: project,
- key: key,
- key_fingerprint_sha256: key.fingerprint_sha256,
- commit_sha: commit.sha)
+ create(
+ :ssh_signature,
+ project: project,
+ key: key,
+ key_fingerprint_sha256: key.fingerprint_sha256,
+ commit_sha: commit.sha
+ )
end
before do
diff --git a/spec/features/profiles/list_users_comment_template_spec.rb b/spec/features/profiles/list_users_comment_template_spec.rb
index 85e455ba988..b2faee3ae04 100644
--- a/spec/features/profiles/list_users_comment_template_spec.rb
+++ b/spec/features/profiles/list_users_comment_template_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Profile > Comment templates > List users comment templates', :js
it 'shows the user a list of their comment templates' do
visit profile_comment_templates_path
- expect(page).to have_content('My comment templates (1)')
+ expect(page).to have_content('My comment templates')
expect(page).to have_content(saved_reply.name)
expect(page).to have_content(saved_reply.content)
end
diff --git a/spec/features/profiles/oauth_applications_spec.rb b/spec/features/profiles/oauth_applications_spec.rb
index d088f73f9df..0b1d67d00c9 100644
--- a/spec/features/profiles/oauth_applications_spec.rb
+++ b/spec/features/profiles/oauth_applications_spec.rb
@@ -25,15 +25,21 @@ RSpec.describe 'Profile > Applications', feature_category: :user_profile do
visit oauth_applications_path
page.within('.oauth-applications') do
- expect(page).to have_content('Your applications (1)')
+ page.within('.gl-new-card-count') do
+ expect(page).to have_content('1')
+ end
click_button 'Destroy'
end
accept_gl_confirm(button_text: 'Destroy')
expect(page).to have_content('The application was deleted successfully')
- expect(page).to have_content('Your applications (0)')
- expect(page).to have_content('Authorized applications (0)')
+ page.within('.oauth-applications .gl-new-card-count') do
+ expect(page).to have_content('0')
+ end
+ page.within('.oauth-authorized-applications .gl-new-card-count') do
+ expect(page).to have_content('0')
+ end
end
end
@@ -57,7 +63,9 @@ RSpec.describe 'Profile > Applications', feature_category: :user_profile do
it 'displays the correct authorized applications' do
visit oauth_applications_path
- expect(page).to have_content('Authorized applications (2)')
+ page.within('.oauth-authorized-applications .gl-new-card-count') do
+ expect(page).to have_content('2')
+ end
page.within('div.oauth-authorized-applications') do
# Ensure the correct user's token details are displayed
@@ -85,7 +93,9 @@ RSpec.describe 'Profile > Applications', feature_category: :user_profile do
accept_gl_confirm(button_text: 'Revoke application')
expect(page).to have_content('The application was revoked access.')
- expect(page).to have_content('Authorized applications (0)')
+ page.within('.oauth-authorized-applications .gl-new-card-count') do
+ expect(page).to have_content('0')
+ end
end
it 'deletes an anonymous authorized application' do
@@ -93,14 +103,18 @@ RSpec.describe 'Profile > Applications', feature_category: :user_profile do
visit oauth_applications_path
page.within('.oauth-authorized-applications') do
- expect(page).to have_content('Authorized applications (1)')
+ page.within('.oauth-authorized-applications .gl-new-card-count') do
+ expect(page).to have_content('1')
+ end
click_button 'Revoke'
end
accept_gl_confirm(button_text: 'Revoke application')
expect(page).to have_content('The application was revoked access.')
- expect(page).to have_content('Authorized applications (0)')
+ page.within('.oauth-authorized-applications .gl-new-card-count') do
+ expect(page).to have_content('0')
+ end
end
end
end
diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb
index 65fe1330be2..094855393be 100644
--- a/spec/features/profiles/personal_access_tokens_spec.rb
+++ b/spec/features/profiles/personal_access_tokens_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
name = 'My PAT'
visit profile_personal_access_tokens_path
+
+ click_button 'Add new token'
fill_in "Token name", with: name
# Set date to 1st of next month
@@ -43,6 +45,8 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
it "displays an error message" do
number_tokens_before = PersonalAccessToken.count
visit profile_personal_access_tokens_path
+
+ click_button 'Add new token'
fill_in "Token name", with: 'My PAT'
click_on "Create personal access token"
@@ -145,6 +149,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
visit profile_personal_access_tokens_path({ name: name, scopes: scopes })
+ click_button 'Add new token'
expect(page).to have_field("Token name", with: name)
expect(find("#personal_access_token_scopes_api")).to be_checked
expect(find("#personal_access_token_scopes_read_user")).to be_checked
diff --git a/spec/features/profiles/user_changes_notified_of_own_activity_spec.rb b/spec/features/profiles/user_changes_notified_of_own_activity_spec.rb
index 89887cb4772..bdaf6262566 100644
--- a/spec/features/profiles/user_changes_notified_of_own_activity_spec.rb
+++ b/spec/features/profiles/user_changes_notified_of_own_activity_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Profile > Notifications > User changes notified_of_own_activity setting', :js,
-feature_category: :user_profile do
+ feature_category: :user_profile do
let(:user) { create(:user) }
before do
diff --git a/spec/features/profiles/user_creates_comment_template_spec.rb b/spec/features/profiles/user_creates_comment_template_spec.rb
index 44e2b932c00..dcaf47088b0 100644
--- a/spec/features/profiles/user_creates_comment_template_spec.rb
+++ b/spec/features/profiles/user_creates_comment_template_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Profile > Comment templates > User creates comment template', :j
end
it 'shows the user a list of their saved replies' do
+ click_button 'Add new'
find('[data-testid="comment-template-name-input"]').set('test')
find('[data-testid="comment-template-content-input"]').set('Test content')
@@ -22,7 +23,7 @@ RSpec.describe 'Profile > Comment templates > User creates comment template', :j
wait_for_requests
- expect(page).to have_content('My comment templates (1)')
+ expect(page).to have_content('My comment templates')
expect(page).to have_content('test')
expect(page).to have_content('Test content')
end
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index de8719630ee..a756c524cbb 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
page.within('.rspec-full-name') do
expect(page).to have_css '.gl-field-error-outline'
expect(find('.gl-field-error')).not_to have_selector('.hidden')
- expect(find('.gl-field-error')).to have_content('Using emojis in names seems fun, but please try to set a status message instead')
+ expect(find('.gl-field-error')).to have_content('Using emoji in names seems fun, but please try to set a status message instead')
end
end
diff --git a/spec/features/profiles/user_manages_emails_spec.rb b/spec/features/profiles/user_manages_emails_spec.rb
index b875dfec217..35f2ccf0f34 100644
--- a/spec/features/profiles/user_manages_emails_spec.rb
+++ b/spec/features/profiles/user_manages_emails_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'User manages emails', feature_category: :user_profile do
it 'adds an email', :aggregate_failures do
fill_in('email_email', with: 'my@email.com')
- click_button('Add')
+ click_button('Add email address')
email = user.emails.find_by(email: 'my@email.com')
@@ -37,7 +37,7 @@ RSpec.describe 'User manages emails', feature_category: :user_profile do
it 'does not add an email that is the primary email of another user', :aggregate_failures do
fill_in('email_email', with: other_user.email)
- click_button('Add')
+ click_button('Add email address')
email = user.emails.find_by(email: other_user.email)
@@ -51,7 +51,7 @@ RSpec.describe 'User manages emails', feature_category: :user_profile do
it 'removes an email', :aggregate_failures do
fill_in('email_email', with: 'my@email.com')
- click_button('Add')
+ click_button('Add email address')
email = user.emails.find_by(email: 'my@email.com')
diff --git a/spec/features/profiles/user_visits_notifications_tab_spec.rb b/spec/features/profiles/user_visits_notifications_tab_spec.rb
index 1295a0b6150..7d858e3c92c 100644
--- a/spec/features/profiles/user_visits_notifications_tab_spec.rb
+++ b/spec/features/profiles/user_visits_notifications_tab_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'User visits the notifications tab', :js, feature_category: :user
end
context 'when project emails are disabled' do
- let(:project) { create(:project, emails_disabled: true) }
+ let_it_be(:project) { create(:project, emails_enabled: false) }
it 'notification button is disabled' do
expect(page).to have_selector('[data-testid="notification-dropdown"] .disabled')
diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb
index c4f78bf4ea3..e2fa924af67 100644
--- a/spec/features/project_variables_spec.rb
+++ b/spec/features/project_variables_spec.rb
@@ -12,6 +12,8 @@ RSpec.describe 'Project variables', :js, feature_category: :secrets_management d
sign_in(user)
project.add_maintainer(user)
project.variables << variable
+
+ stub_feature_flags(ci_variable_drawer: false)
visit page_path
wait_for_requests
end
@@ -49,4 +51,14 @@ RSpec.describe 'Project variables', :js, feature_category: :secrets_management d
expect(find('.js-ci-variable-row:first-child [data-label="Environments"]').text).to eq('review/*')
end
end
+
+ context 'when ci_variable_drawer FF is enabled' do
+ before do
+ stub_feature_flags(ci_variable_drawer: true)
+ visit page_path
+ wait_for_requests
+ end
+
+ it_behaves_like 'variable list drawer'
+ end
end
diff --git a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
index 1990526b5fc..cd7601aa94e 100644
--- a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
+++ b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
@@ -25,8 +25,6 @@ RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled
end
it 'displays suggest_gitlab_ci_yml popover' do
- page.find(:css, '.gitlab-ci-yml-selector').click
-
popover_selector = '.suggest-gitlab-ci-yml'
expect(page).to have_css(popover_selector, visible: true)
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index 43da57c16d1..b09aa91f4ab 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'Pipeline Editor', :js, feature_category: :pipeline_composition d
let(:default_branch) { 'main' }
let(:other_branch) { 'test' }
let(:branch_with_invalid_ci) { 'despair' }
+ let(:branch_without_ci) { 'empty' }
let(:default_content) { 'Default' }
@@ -45,6 +46,7 @@ RSpec.describe 'Pipeline Editor', :js, feature_category: :pipeline_composition d
project.repository.create_file(user, project.ci_config_path_or_default, default_content, message: 'Create CI file for main', branch_name: default_branch)
project.repository.create_file(user, project.ci_config_path_or_default, valid_content, message: 'Create CI file for test', branch_name: other_branch)
project.repository.create_file(user, project.ci_config_path_or_default, invalid_content, message: 'Create CI file for test', branch_name: branch_with_invalid_ci)
+ project.repository.create_file(user, 'index.js', "file", message: 'New js file', branch_name: branch_without_ci)
visit project_ci_pipeline_editor_path(project)
wait_for_requests
@@ -62,6 +64,31 @@ RSpec.describe 'Pipeline Editor', :js, feature_category: :pipeline_composition d
end
end
+ describe 'when there are no CI config file' do
+ before do
+ visit project_ci_pipeline_editor_path(project, branch_name: branch_without_ci)
+ end
+
+ it 'renders the empty page', :aggregate_failures do
+ expect(page).to have_content 'Optimize your workflow with CI/CD Pipelines'
+ expect(page).to have_selector '[data-testid="create_new_ci_button"]'
+ end
+
+ context 'when clicking on the create new CI button' do
+ before do
+ click_button 'Configure pipeline'
+ end
+
+ it 'renders the source editor with default content', :aggregate_failures do
+ expect(page).to have_selector('#source-editor-')
+
+ page.within('#source-editor-') do
+ expect(page).to have_content('This file is a template, and might need editing before it works on your project.')
+ end
+ end
+ end
+ end
+
describe 'When CI yml has valid syntax' do
before do
visit project_ci_pipeline_editor_path(project, branch_name: other_branch)
@@ -149,15 +176,6 @@ RSpec.describe 'Pipeline Editor', :js, feature_category: :pipeline_composition d
end
shared_examples 'default branch switcher behavior' do
- def switch_to_branch(branch)
- find('[data-testid="branch-selector"]').click
-
- page.within '[data-testid="branch-selector"]' do
- click_button branch
- wait_for_requests
- end
- end
-
it 'displays current branch' do
page.within('[data-testid="branch-selector"]') do
expect(page).to have_content(default_branch)
@@ -195,12 +213,20 @@ RSpec.describe 'Pipeline Editor', :js, feature_category: :pipeline_composition d
end
describe 'Branch Switcher' do
+ def switch_to_branch(branch)
+ # close button for the popover
+ find('[data-testid="close-button"]').click
+ find('[data-testid="branch-selector"]').click
+
+ page.within '[data-testid="branch-selector"]' do
+ click_button branch
+ wait_for_requests
+ end
+ end
+
before do
visit project_ci_pipeline_editor_path(project)
wait_for_requests
-
- # close button for the popover
- find('[data-testid="close-button"]').click
end
it_behaves_like 'default branch switcher behavior'
@@ -262,6 +288,24 @@ RSpec.describe 'Pipeline Editor', :js, feature_category: :pipeline_composition d
end
describe 'Commit Form' do
+ context 'when targetting the main branch' do
+ it 'does not show the option to create a Merge request', :aggregate_failures do
+ expect(page).not_to have_selector('[data-testid="new-mr-checkbox"]')
+ expect(page).not_to have_content('Start a new merge request with these changes')
+ end
+ end
+
+ context 'when targetting any non-main branch' do
+ before do
+ find('#source-branch-field').set('new_branch', clear: :backspace)
+ end
+
+ it 'shows the option to create a Merge request', :aggregate_failures do
+ expect(page).to have_selector('[data-testid="new-mr-checkbox"]')
+ expect(page).to have_content('Start a new merge request with these changes')
+ end
+ end
+
it 'is preserved when changing tabs' do
find('#commit-message').set('message', clear: :backspace)
find('#source-branch-field').set('new_branch', clear: :backspace)
diff --git a/spec/features/projects/commit/mini_pipeline_graph_spec.rb b/spec/features/projects/commit/mini_pipeline_graph_spec.rb
index d2104799e79..5bb3d1af924 100644
--- a/spec/features/projects/commit/mini_pipeline_graph_spec.rb
+++ b/spec/features/projects/commit/mini_pipeline_graph_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Mini Pipeline Graph in Commit View', :js, feature_category: :source_code_management do
let(:project) { create(:project, :public, :repository) }
- context 'when commit has pipelines' do
+ context 'when commit has pipelines and feature flag is enabled' do
let(:pipeline) do
create(
:ci_pipeline,
@@ -24,6 +24,33 @@ RSpec.describe 'Mini Pipeline Graph in Commit View', :js, feature_category: :sou
wait_for_requests
end
+ it 'displays the graphql pipeline stage' do
+ expect(page).to have_selector('[data-testid="pipeline-stage"]')
+
+ build.drop
+ end
+ end
+
+ context 'when commit has pipelines and feature flag is disabled' do
+ let(:pipeline) do
+ create(
+ :ci_pipeline,
+ status: :running,
+ project: project,
+ ref: project.default_branch,
+ sha: project.commit.sha
+ )
+ end
+
+ let(:build) { create(:ci_build, pipeline: pipeline, status: :running) }
+
+ before do
+ stub_feature_flags(ci_graphql_pipeline_mini_graph: false)
+ build.run
+ visit project_commit_path(project, project.commit.id)
+ wait_for_requests
+ end
+
it 'display icon with status' do
expect(page).to have_selector('.ci-status-icon-running')
end
diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb
index 493435d3439..0a77c671fce 100644
--- a/spec/features/projects/container_registry_spec.rb
+++ b/spec/features/projects/container_registry_spec.rb
@@ -67,6 +67,7 @@ RSpec.describe 'Container Registry', :js, feature_category: :groups_and_projects
it 'list page has a list of images' do
visit_container_registry
+ expect(page).to have_content '1 Image repository'
expect(page).to have_content 'my/image'
end
@@ -189,8 +190,7 @@ RSpec.describe 'Container Registry', :js, feature_category: :groups_and_projects
it 'pagination is preserved after navigating back from details' do
visit_next_page
click_link 'my/image'
- breadcrumb = find '.breadcrumbs'
- breadcrumb.click_link 'Container Registry'
+ page.go_back
expect(page).to have_content 'my/image'
end
end
diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb
index a74cde35be6..55b15ad95d1 100644
--- a/spec/features/projects/files/dockerfile_dropdown_spec.rb
+++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb
@@ -12,20 +12,16 @@ RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', :js, fe
end
it 'user can pick a Dockerfile file from the dropdown' do
- expect(page).to have_css('.dockerfile-selector')
+ click_button 'Apply a template'
- find('.js-dockerfile-selector').click
-
- wait_for_requests
-
- within '.dockerfile-selector' do
- find('.dropdown-input-field').set('HTTPd')
- find('.dropdown-content li', text: 'HTTPd').click
+ within '.gl-new-dropdown-panel' do
+ find('.gl-listbox-search-input').set('HTTPd')
+ find('.gl-new-dropdown-contents li', text: 'HTTPd').click
end
wait_for_requests
- expect(page).to have_css('.dockerfile-selector .dropdown-toggle-text', text: 'Apply a template')
+ expect(page).to have_css('.gl-new-dropdown-button-text', text: 'HTTPd')
expect(find('.monaco-editor')).to have_content('COPY ./ /usr/local/apache2/htdocs/')
end
end
diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb
index 36b02b9b948..b1f7f1c5716 100644
--- a/spec/features/projects/files/gitignore_dropdown_spec.rb
+++ b/spec/features/projects/files/gitignore_dropdown_spec.rb
@@ -12,20 +12,16 @@ RSpec.describe 'Projects > Files > User wants to add a .gitignore file', :js, fe
end
it 'user can pick a .gitignore file from the dropdown' do
- expect(page).to have_css('.gitignore-selector')
+ click_button 'Apply a template'
- find('.js-gitignore-selector').click
-
- wait_for_requests
-
- within '.gitignore-selector' do
- find('.dropdown-input-field').set('rails')
- find('.dropdown-content li', text: 'Rails').click
+ within '.gl-new-dropdown-panel' do
+ find('.gl-listbox-search-input').set('rails')
+ find('.gl-new-dropdown-contents li', text: 'Rails').click
end
wait_for_requests
- expect(page).to have_css('.gitignore-selector .dropdown-toggle-text', text: 'Apply a template')
+ expect(page).to have_css('.gl-new-dropdown-button-text', text: 'Rails')
expect(find('.monaco-editor')).to have_content('/.bundle')
expect(find('.monaco-editor')).to have_content('config/initializers/secret_token.rb')
end
diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
index 929554ff0d6..7bfff6b68e8 100644
--- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
+++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
@@ -16,20 +16,16 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js
end
it 'user can pick a template from the dropdown' do
- expect(page).to have_css('.gitlab-ci-yml-selector')
+ click_button 'Apply a template'
- find('.js-gitlab-ci-yml-selector').click
-
- wait_for_requests
-
- within '.gitlab-ci-yml-selector' do
- find('.dropdown-input-field').set('Jekyll')
- find('.dropdown-content li', text: 'Jekyll').click
+ within '.gl-new-dropdown-panel' do
+ find('.gl-listbox-search-input').set('Jekyll')
+ find('.gl-new-dropdown-contents li', text: 'Jekyll').click
end
wait_for_requests
- expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
+ expect(page).to have_css('.gl-new-dropdown-button-text', text: 'Jekyll')
expect(find('.monaco-editor')).to have_content('This file is a template, and might need editing before it works on your project')
expect(find('.monaco-editor')).to have_content('jekyll build -d test')
end
@@ -40,7 +36,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js
it 'uses the given template' do
wait_for_requests
- expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
+ expect(page).to have_css('.gl-new-dropdown-button-text', text: 'Jekyll')
expect(find('.monaco-editor')).to have_content('This file is a template, and might need editing before it works on your project')
expect(find('.monaco-editor')).to have_content('jekyll build -d test')
end
@@ -52,7 +48,7 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js
it 'leaves the editor empty' do
wait_for_requests
- expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
+ expect(page).to have_css('.gl-new-dropdown-button-text', text: 'Apply a template')
expect(find('.monaco-editor')).to have_content('')
end
end
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
index 8ec9adaeb9a..95e96159744 100644
--- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb
+++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
@@ -20,8 +20,6 @@ RSpec.describe 'Projects > Files > Project owner creates a license file', :js, f
fill_in :file_name, with: 'LICENSE'
- expect(page).to have_selector('.license-selector')
-
select_template('MIT License')
file_content = first('.file-editor')
@@ -44,7 +42,6 @@ RSpec.describe 'Projects > Files > Project owner creates a license file', :js, f
expect(page).to have_current_path(
project_new_blob_path(project, 'master'), ignore_query: true)
expect(find('#file_name').value).to eq('LICENSE')
- expect(page).to have_selector('.license-selector')
select_template('MIT License')
@@ -62,9 +59,9 @@ RSpec.describe 'Projects > Files > Project owner creates a license file', :js, f
end
def select_template(template)
- page.within('.js-license-selector-wrap') do
+ page.within('.gl-new-dropdown') do
click_button 'Apply a template'
- click_link template
+ find('.gl-new-dropdown-contents li', text: template).click
wait_for_requests
end
end
diff --git a/spec/features/projects/files/template_selector_menu_spec.rb b/spec/features/projects/files/template_selector_menu_spec.rb
index 46c4b69bc89..920da6e72ce 100644
--- a/spec/features/projects/files/template_selector_menu_spec.rb
+++ b/spec/features/projects/files/template_selector_menu_spec.rb
@@ -58,7 +58,7 @@ end
def check_template_selector_menu_display(is_visible)
count = is_visible ? 1 : 0
- expect(page).to have_css('.template-selectors-menu', count: count)
+ expect(page).to have_css('[data-testid="template-selector"]', count: count)
end
def create_and_edit_file(file_name)
diff --git a/spec/features/projects/files/undo_template_spec.rb b/spec/features/projects/files/undo_template_spec.rb
index 4b6e6b7282c..d6f9acc68a0 100644
--- a/spec/features/projects/files/undo_template_spec.rb
+++ b/spec/features/projects/files/undo_template_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Projects > Files > Template Undo Button', :js, feature_category:
context 'editing a matching file and applying a template' do
before do
visit project_edit_blob_path(project, File.join(project.default_branch, "LICENSE"))
- select_file_template('.js-license-selector', 'Apache License 2.0')
+ select_file_template('Apache License 2.0')
end
it 'reverts template application' do
@@ -42,8 +42,8 @@ def check_content_reverted(template_content)
expect(page).not_to have_content(template_content)
end
-def select_file_template(template_selector_selector, template_name)
- find(template_selector_selector).click
- find('.dropdown-content li', text: template_name).click
+def select_file_template(template_name)
+ click_button 'Apply a template'
+ find('.gl-new-dropdown-contents li', text: template_name).click
wait_for_requests
end
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index 3b30a620257..e93c9427c91 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -155,7 +155,7 @@ RSpec.describe "User browses files", :js, feature_category: :groups_and_projects
click_link("d")
end
- expect(page).to have_link("..", href: project_tree_path(project, "markdown/"))
+ expect(page).to have_link("..", href: project_tree_path(project, "markdown"))
page.within(".tree-table") do
click_link("README.md")
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 7d734d5d2df..c159b40003c 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -14,23 +14,80 @@ RSpec.describe 'Project fork', feature_category: :groups_and_projects do
end
shared_examples 'fork button on project page' do
- it 'allows user to fork project from the project page' do
- visit project_path(project)
+ context 'when the user has access to only one namespace and has already forked the project', :js do
+ before do
+ fork_project(project, user, repository: true, namespace: user.namespace)
+ end
- expect(page).not_to have_css('a.disabled', text: 'Fork')
- end
+ it 'allows user to go to their fork' do
+ visit project_path(project)
- context 'user has exceeded personal project limit' do
- before do
- user.update!(projects_limit: 0)
+ path = namespace_project_path(user, user.fork_of(project))
+
+ fork_button = find_link 'Fork'
+ expect(fork_button['href']).to include(path)
+ expect(fork_button['class']).not_to include('disabled')
end
+ end
- it 'disables fork button on project page' do
+ shared_examples 'fork button creates new fork' do
+ it 'allows user to fork the project from the project page' do
visit project_path(project)
- expect(page).to have_css('a.disabled', text: 'Fork')
+ path = new_project_fork_path(project)
+
+ fork_button = find_link 'Fork'
+ expect(fork_button['href']).to include(path)
+ expect(fork_button['class']).not_to include('disabled')
+ end
+
+ context 'when the user cannot fork the project' do
+ let(:project) do
+ # Disabling the repository makes sure that the user cannot fork the project
+ create(:project, :public, :repository, :repository_disabled, description: 'some description')
+ end
+
+ it 'disables fork button on project page' do
+ visit project_path(project)
+
+ path = new_project_fork_path(project)
+
+ fork_button = find_link 'Fork'
+ expect(fork_button['href']).to include(path)
+ expect(fork_button['class']).to include('disabled')
+ end
+ end
+
+ context 'user has exceeded personal project limit' do
+ before do
+ user.update!(projects_limit: 0)
+ end
+
+ it 'disables fork button on project page' do
+ visit project_path(project)
+
+ path = new_project_fork_path(project)
+
+ fork_button = find_link 'Fork'
+ expect(fork_button['href']).to include(path)
+ expect(fork_button['class']).to include('disabled')
+ end
end
end
+
+ context 'when the user has not already forked the project', :js do
+ it_behaves_like 'fork button creates new fork'
+ end
+
+ context 'when the user has access to more than one namespace', :js do
+ let(:group) { create(:group) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ it_behaves_like 'fork button creates new fork'
+ end
end
shared_examples 'create fork page' do |fork_page_text|
@@ -42,11 +99,11 @@ RSpec.describe 'Project fork', feature_category: :groups_and_projects do
context 'forking is enabled' do
let(:forking_access_level) { ProjectFeature::ENABLED }
- it 'enables fork button' do
+ it 'enables fork button', :js do
visit project_path(project)
- expect(page).to have_css('a', text: 'Fork')
- expect(page).not_to have_css('a.disabled', text: 'Select')
+ fork_button = find_link 'Fork'
+ expect(fork_button['class']).not_to include('disabled')
end
it 'renders new project fork page' do
@@ -60,11 +117,13 @@ RSpec.describe 'Project fork', feature_category: :groups_and_projects do
context 'forking is disabled' do
let(:forking_access_level) { ProjectFeature::DISABLED }
- it 'render a disabled fork button' do
+ it 'render a disabled fork button', :js do
visit project_path(project)
- expect(page).to have_css('a.disabled', text: 'Fork')
- expect(page).to have_css('a.count', text: '0')
+ fork_button = find_link 'Fork'
+
+ expect(fork_button['class']).to include('disabled')
+ expect(page).to have_selector('[data-testid="forks-count"]')
end
it 'does not render new project fork page' do
@@ -82,11 +141,13 @@ RSpec.describe 'Project fork', feature_category: :groups_and_projects do
end
context 'user is not a team member' do
- it 'render a disabled fork button' do
+ it 'render a disabled fork button', :js do
visit project_path(project)
- expect(page).to have_css('a.disabled', text: 'Fork')
- expect(page).to have_css('a.count', text: '0')
+ fork_button = find_link 'Fork'
+
+ expect(fork_button['class']).to include('disabled')
+ expect(page).to have_selector('[data-testid="forks-count"]')
end
it 'does not render new project fork page' do
@@ -101,12 +162,13 @@ RSpec.describe 'Project fork', feature_category: :groups_and_projects do
project.add_developer(user)
end
- it 'enables fork button' do
+ it 'enables fork button', :js do
visit project_path(project)
- expect(page).to have_css('a', text: 'Fork')
- expect(page).to have_css('a.count', text: '0')
- expect(page).not_to have_css('a.disabled', text: 'Fork')
+ fork_button = find_link 'Fork'
+
+ expect(fork_button['class']).not_to include('disabled')
+ expect(page).to have_selector('[data-testid="forks-count"]')
end
it 'renders new project fork page' do
@@ -185,7 +247,8 @@ RSpec.describe 'Project fork', feature_category: :groups_and_projects do
visit project_path(project)
- expect(page).to have_css('.fork-count', text: 2)
+ forks_count_button = find('[data-testid="forks-count"]')
+ expect(forks_count_button).to have_content("2")
end
end
end
@@ -195,7 +258,9 @@ private
def create_fork(group_obj = group)
visit project_path(project)
- find('.fork-btn').click
+
+ click_link 'Fork'
+
submit_form(group_obj)
wait_for_requests
end
diff --git a/spec/features/projects/labels/user_edits_labels_spec.rb b/spec/features/projects/labels/user_edits_labels_spec.rb
index f90f215f9fc..bf1182cfddd 100644
--- a/spec/features/projects/labels/user_edits_labels_spec.rb
+++ b/spec/features/projects/labels/user_edits_labels_spec.rb
@@ -36,6 +36,6 @@ RSpec.describe "User edits labels", feature_category: :team_planning do
click_link 'Delete label'
end
- expect(page).to have_content('Label was removed')
+ expect(page).to have_content("#{label.title} was removed").and have_no_content("#{label.title}</span>")
end
end
diff --git a/spec/features/projects/labels/user_removes_labels_spec.rb b/spec/features/projects/labels/user_removes_labels_spec.rb
index 55dc52b8ccf..d0175c53951 100644
--- a/spec/features/projects/labels/user_removes_labels_spec.rb
+++ b/spec/features/projects/labels/user_removes_labels_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe "User removes labels", feature_category: :team_planning do
first(:link, "Delete label").click
- expect(page).to have_content("Label was removed").and have_no_content(label.title)
+ expect(page).to have_content("#{label.title} was removed").and have_no_content("#{label.title}</span>")
end
end
diff --git a/spec/features/projects/members/import_project_members_spec.rb b/spec/features/projects/members/import_project_members_spec.rb
new file mode 100644
index 00000000000..20cf42cd135
--- /dev/null
+++ b/spec/features/projects/members/import_project_members_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Members > Import project members', :js, feature_category: :groups_and_projects do
+ include Features::MembersHelpers
+ include ListboxHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user_mike) { create(:user, name: 'Mike') }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) do
+ create(:project, group: group).tap do |p|
+ p.add_maintainer(user)
+ p.add_developer(create(:user))
+ end
+ end
+
+ let_it_be(:project2) do
+ create(:project).tap do |p|
+ p.add_maintainer(user)
+ p.add_reporter(user_mike)
+ end
+ end
+
+ before do
+ sign_in(user)
+
+ visit(project_project_members_path(project))
+ end
+
+ it 'imports a team from another project' do
+ select_project(project2)
+ submit_import
+
+ expect(find_member_row(user_mike)).to have_content('Reporter')
+ end
+
+ it 'fails to import the other team when source project does not exist' do
+ select_project(project2)
+ submit_import { project2.destroy! }
+
+ within import_project_members_modal_selector do
+ expect(page).to have_content('Unable to import project members')
+ end
+ end
+
+ it 'fails to import some members' do
+ group.add_owner(user_mike)
+
+ select_project(project2)
+ submit_import
+
+ within import_project_members_modal_selector do
+ expect(page).to have_content "The following 1 out of 2 members could not be added"
+ expect(page).to have_content "@#{user_mike.username}: Access level should be greater than or equal to " \
+ "Owner inherited membership from group #{group.name}"
+ end
+ end
+
+ def select_project(source_project)
+ click_on 'Import from a project'
+ click_on 'Select a project'
+ wait_for_requests
+
+ select_listbox_item(source_project.name_with_namespace)
+ end
+
+ def submit_import
+ yield if block_given? # rubocop:disable RSpec/AvoidConditionalStatements
+
+ click_button 'Import project members'
+ wait_for_requests
+ end
+
+ def import_project_members_modal_selector
+ '[data-testid="import-project-members-modal"]'
+ end
+end
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/members/user_manages_project_members_spec.rb
index df571e13979..b1c3132767c 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/members/user_manages_project_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User manages project members', feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Settings > User manages project members', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
include Spec::Support::Helpers::ModalHelpers
include ListboxHelpers
@@ -20,7 +20,7 @@ RSpec.describe 'Projects > Settings > User manages project members', feature_cat
sign_in(user)
end
- it 'cancels a team member', :js do
+ it 'cancels a team member' do
visit(project_project_members_path(project))
show_actions_for_username(user_dmitriy)
@@ -37,24 +37,7 @@ RSpec.describe 'Projects > Settings > User manages project members', feature_cat
expect(members_table).not_to have_content(user_dmitriy.username)
end
- it 'imports a team from another project', :js do
- project2.add_maintainer(user)
- project2.add_reporter(user_mike)
-
- visit(project_project_members_path(project))
-
- click_on 'Import from a project'
- click_on 'Select a project'
- wait_for_requests
-
- select_listbox_item(project2.name_with_namespace)
- click_button 'Import project members'
- wait_for_requests
-
- expect(find_member_row(user_mike)).to have_content('Reporter')
- end
-
- it 'shows all members of project shared group', :js do
+ it 'shows all members of project shared group' do
group.add_owner(user)
group.add_developer(user_dmitriy)
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index d05b7649f94..6e6d9ff4af9 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -46,7 +46,8 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
end
it 'shows a message if multiple levels are restricted' do
- Gitlab::CurrentSettings.update!(
+ stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::PUBLIC)
+ stub_application_setting(
restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
)
@@ -56,15 +57,21 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
expect(page).to have_content 'Other visibility settings have been disabled by the administrator.'
end
- it 'shows a message if all levels are restricted' do
- Gitlab::CurrentSettings.update!(
- restricted_visibility_levels: Gitlab::VisibilityLevel.values
- )
+ context 'with prevent_visibility_restriction feature flag off' do
+ before do
+ stub_feature_flags(prevent_visibility_restriction: false)
+ end
- visit new_project_path
- click_link 'Create blank project'
+ it 'shows a message if all levels are restricted' do
+ Gitlab::CurrentSettings.update!(
+ restricted_visibility_levels: Gitlab::VisibilityLevel.values
+ )
- expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
+ visit new_project_path
+ click_link 'Create blank project'
+
+ expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
+ end
end
end
diff --git a/spec/features/projects/pages/user_adds_domain_spec.rb b/spec/features/projects/pages/user_adds_domain_spec.rb
index ae459197b38..14b01cb63d2 100644
--- a/spec/features/projects/pages/user_adds_domain_spec.rb
+++ b/spec/features/projects/pages/user_adds_domain_spec.rb
@@ -178,7 +178,12 @@ RSpec.describe 'User adds pages domain', :js, feature_category: :pages do
visit project_pages_path(project)
within('#content-body') { click_link 'Edit' }
- expect(page).to have_field :domain_dns, with: "#{domain.domain} ALIAS namespace1.example.com."
+ expect(page).to have_field :domain_dns, with: format(
+ "%{domain} ALIAS %{namespace}.%{pages_host}.",
+ domain: domain.domain,
+ namespace: domain.project.root_namespace.path,
+ pages_host: Settings.pages.host
+ )
end
end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 25eddf64f99..26fcd8ca3ca 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -559,7 +559,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
find(dropdown_selector).click
within('.js-builds-dropdown-list') do
- build_element = page.find('.mini-pipeline-graph-dropdown-item')
+ build_element = page.find('.pipeline-job-item')
expect(build_element['title']).to eq('build - failed - (unknown failure)')
end
end
@@ -818,7 +818,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
describe 'when the `ios_specific_templates` experiment is enabled and the "Set up a runner" button is clicked' do
before do
stub_experiments(ios_specific_templates: :candidate)
- create(:project_setting, project: project, target_platforms: %w(ios))
+ project.project_setting.update!(target_platforms: %w(ios))
visit project_pipelines_path(project)
click_button 'Set up a runner'
end
diff --git a/spec/features/projects/settings/access_tokens_spec.rb b/spec/features/projects/settings/access_tokens_spec.rb
index 210815f341c..9025bd9052e 100644
--- a/spec/features/projects/settings/access_tokens_spec.rb
+++ b/spec/features/projects/settings/access_tokens_spec.rb
@@ -49,6 +49,7 @@ RSpec.describe 'Project > Settings > Access Tokens', :js, feature_category: :use
it 'shows Owner option' do
visit resource_settings_access_tokens_path
+ click_button 'Add new token'
expect(role_dropdown_options).to include('Owner')
end
end
@@ -63,6 +64,7 @@ RSpec.describe 'Project > Settings > Access Tokens', :js, feature_category: :use
it 'does not show Owner option for a maintainer' do
visit resource_settings_access_tokens_path
+ click_button 'Add new token'
expect(role_dropdown_options).not_to include('Owner')
end
end
@@ -81,6 +83,7 @@ RSpec.describe 'Project > Settings > Access Tokens', :js, feature_category: :use
it 'shows access token creation form and text' do
visit project_settings_access_tokens_path(personal_project)
+ click_button 'Add new token'
expect(page).to have_selector('#js-new-access-token-form')
end
end
diff --git a/spec/features/projects/settings/packages_settings_spec.rb b/spec/features/projects/settings/packages_settings_spec.rb
index 564a71e9a23..5277ede8e52 100644
--- a/spec/features/projects/settings/packages_settings_spec.rb
+++ b/spec/features/projects/settings/packages_settings_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe 'Projects > Settings > Packages', :js, feature_category: :groups_
let(:packages_enabled) { false }
it 'does not show up in UI' do
- expect(page).not_to have_selector('[data-testid="toggle-label"]', text: 'Packages')
+ expect(page).not_to have_selector('[data-testid="toggle-label"]', text: 'Package registry')
end
end
end
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index ef1c03f4f27..59d9e6d105e 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -65,6 +65,46 @@ RSpec.describe "Projects > Settings > Pipelines settings", feature_category: :gr
expect(checkbox).not_to be_checked
end
+ it 'disables forward deployment rollback allowed when forward deployment enabled is unchecked', :js do
+ visit project_settings_ci_cd_path(project)
+
+ forward_deployment_checkbox = find_field('project_ci_cd_settings_attributes_forward_deployment_enabled')
+ forward_deployment_rollback_checkbox =
+ find_field('project_ci_cd_settings_attributes_forward_deployment_rollback_allowed')
+ expect(forward_deployment_checkbox).to be_checked
+ expect(forward_deployment_rollback_checkbox).not_to be_disabled
+
+ forward_deployment_checkbox.click
+
+ expect(forward_deployment_rollback_checkbox).to be_disabled
+
+ forward_deployment_checkbox.click
+
+ expect(forward_deployment_rollback_checkbox).not_to be_disabled
+ end
+
+ it 'updates forward_deployment_rollback_allowed' do
+ visit project_settings_ci_cd_path(project)
+
+ checkbox = find_field('project_ci_cd_settings_attributes_forward_deployment_rollback_allowed')
+ expect(checkbox).to be_checked
+
+ checkbox.set(false)
+
+ page.within '#js-general-pipeline-settings' do
+ click_on 'Save changes'
+ end
+
+ expect(page.status_code).to eq(200)
+
+ page.within '#js-general-pipeline-settings' do
+ expect(page).to have_button('Save changes', disabled: false)
+ end
+
+ checkbox = find_field('project_ci_cd_settings_attributes_forward_deployment_rollback_allowed')
+ expect(checkbox).not_to be_checked
+ end
+
describe 'Auto DevOps' do
context 'when auto devops is turned on instance-wide' do
before do
diff --git a/spec/features/projects/settings/project_badges_spec.rb b/spec/features/projects/settings/project_badges_spec.rb
index 1f170300155..a66bf5cd3a9 100644
--- a/spec/features/projects/settings/project_badges_spec.rb
+++ b/spec/features/projects/settings/project_badges_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Project Badges', feature_category: :groups_and_projects do
page.within '.badge-settings' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
expect(rows[0]).to have_content group_badge.link_url
expect(rows[1]).to have_content project_badge.link_url
@@ -33,6 +33,7 @@ RSpec.describe 'Project Badges', feature_category: :groups_and_projects do
context 'adding a badge', :js do
it 'user can preview a badge' do
+ click_button 'Add badge'
page.within '.badge-settings form' do
fill_in 'badge-link-url', with: badge_link_url
fill_in 'badge-image-url', with: badge_image_url
@@ -44,6 +45,7 @@ RSpec.describe 'Project Badges', feature_category: :groups_and_projects do
end
it do
+ click_button 'Add badge'
page.within '.badge-settings' do
fill_in 'badge-link-url', with: badge_link_url
fill_in 'badge-image-url', with: badge_image_url
@@ -51,7 +53,7 @@ RSpec.describe 'Project Badges', feature_category: :groups_and_projects do
click_button 'Add badge'
wait_for_requests
- within '.card-body' do
+ within '.gl-card-body' do
expect(find('a')[:href]).to eq badge_link_url
expect(find('a img')[:src]).to eq badge_image_url
end
@@ -63,32 +65,35 @@ RSpec.describe 'Project Badges', feature_category: :groups_and_projects do
it 'form is shown when clicking edit button in list' do
page.within '.badge-settings' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
rows[1].find('[aria-label="Edit"]').click
+ end
- within 'form' do
- expect(find('#badge-link-url').value).to eq project_badge.link_url
- expect(find('#badge-image-url').value).to eq project_badge.image_url
- end
+ page.within '.gl-modal' do
+ expect(find('#badge-link-url').value).to eq project_badge.link_url
+ expect(find('#badge-image-url').value).to eq project_badge.image_url
end
end
it 'updates a badge when submitting the edit form' do
page.within '.badge-settings' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
rows[1].find('[aria-label="Edit"]').click
- within 'form' do
- fill_in 'badge-link-url', with: badge_link_url
- fill_in 'badge-image-url', with: badge_image_url
+ end
- click_button 'Save changes'
- wait_for_requests
- end
+ page.within '.gl-modal' do
+ fill_in 'badge-link-url', with: badge_link_url
+ fill_in 'badge-image-url', with: badge_image_url
- rows = all('.card-body > div')
+ click_button 'Save changes'
+ wait_for_requests
+ end
+
+ page.within '.badge-settings' do
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
expect(rows[1]).to have_content badge_link_url
end
@@ -102,7 +107,7 @@ RSpec.describe 'Project Badges', feature_category: :groups_and_projects do
it 'shows a modal when deleting a badge' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
click_delete_button(rows[1])
@@ -112,14 +117,14 @@ RSpec.describe 'Project Badges', feature_category: :groups_and_projects do
it 'deletes a badge when confirming the modal' do
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 2
click_delete_button(rows[1])
find('.modal .btn-danger').click
wait_for_requests
- rows = all('.card-body > div')
+ rows = all('.gl-card-body tbody tr')
expect(rows.length).to eq 1
expect(rows[0]).to have_content group_badge.link_url
end
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index d53aefe5a4e..838ac67ee3d 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -156,6 +156,7 @@ RSpec.describe 'Projects > Settings > Repository settings', feature_category: :g
before do
visit project_settings_repository_path(project)
+ click_button 'Add new'
end
it 'shows push mirror settings', :js do
diff --git a/spec/features/projects/settings/secure_files_spec.rb b/spec/features/projects/settings/secure_files_spec.rb
index 7ff1a5f3568..5f94e215a5f 100644
--- a/spec/features/projects/settings/secure_files_spec.rb
+++ b/spec/features/projects/settings/secure_files_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe 'Secure Files', :js, feature_category: :groups_and_projects do
within '#js-secure-files' do
expect(page).to have_content(file.name)
- find('button.btn-danger-secondary').click
+ find('[data-testid="delete-button"]').click
end
expect(page).to have_content("Delete #{file.name}?")
diff --git a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
index 0006762a971..4e8f42ae792 100644
--- a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
+++ b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
@@ -91,6 +91,7 @@ RSpec.describe "User interacts with deploy keys", :js, feature_category: :groups
deploy_key_title = attributes_for(:key)[:title]
deploy_key_body = attributes_for(:key)[:key]
+ click_button("Add new key")
fill_in("deploy_key_title", with: deploy_key_title)
fill_in("deploy_key_key", with: deploy_key_body)
@@ -102,6 +103,16 @@ RSpec.describe "User interacts with deploy keys", :js, feature_category: :groups
expect(page).to have_content(deploy_key_title)
end
end
+
+ it "click on cancel hides the form" do
+ click_button("Add new key")
+
+ expect(page).to have_css('.gl-new-card-add-form')
+
+ click_button("Cancel")
+
+ expect(page).not_to have_css('.gl-new-card-add-form')
+ end
end
context "attaching existing keys" do
diff --git a/spec/features/projects/settings/user_renames_a_project_spec.rb b/spec/features/projects/settings/user_renames_a_project_spec.rb
index 5a58c049601..b7ae595a3a9 100644
--- a/spec/features/projects/settings/user_renames_a_project_spec.rb
+++ b/spec/features/projects/settings/user_renames_a_project_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe 'Projects > Settings > User renames a project', feature_category:
it 'shows error for invalid project name' do
change_name(project, '🧮 foo bar ☁️')
expect(page).to have_field 'Project name', with: '🧮 foo bar ☁️'
- expect(page).not_to have_content "Name can contain only letters, digits, emojis '_', '.', dash and space. It must start with letter, digit, emoji or '_'."
+ expect(page).not_to have_content "Name can contain only letters, digits, emoji '_', '.', dash and space. It must start with letter, digit, emoji or '_'."
end
end
end
diff --git a/spec/features/projects/settings/user_searches_in_settings_spec.rb b/spec/features/projects/settings/user_searches_in_settings_spec.rb
index 978b678c334..1ca4b761788 100644
--- a/spec/features/projects/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/projects/settings/user_searches_in_settings_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe 'User searches project settings', :js, feature_category: :groups_
visit project_settings_access_tokens_path(project)
end
- it_behaves_like 'can highlight results', 'Expiration date'
+ it_behaves_like 'can highlight results', 'Token name'
end
context 'in Repository page' do
diff --git a/spec/features/projects/settings/visibility_settings_spec.rb b/spec/features/projects/settings/visibility_settings_spec.rb
index 7d41b60199c..890f514d3da 100644
--- a/spec/features/projects/settings/visibility_settings_spec.rb
+++ b/spec/features/projects/settings/visibility_settings_spec.rb
@@ -30,11 +30,11 @@ RSpec.describe 'Projects > Settings > Visibility settings', :js, feature_categor
context 'disable email notifications' do
it 'is visible' do
- expect(page).to have_selector('.js-emails-disabled', visible: true)
+ expect(page).to have_selector('.js-emails-enabled', visible: true)
end
it 'accepts the changed state' do
- find('.js-emails-disabled input[type="checkbox"]').click
+ find('.js-emails-enabled input[type="checkbox"]').click
expect { save_permissions_group }.to change { updated_emails_disabled? }.to(true)
end
@@ -59,7 +59,7 @@ RSpec.describe 'Projects > Settings > Visibility settings', :js, feature_categor
context 'disable email notifications' do
it 'is not available' do
- expect(page).not_to have_selector('.js-emails-disabled', visible: true)
+ expect(page).not_to have_selector('.js-emails-enabled', visible: true)
end
end
end
diff --git a/spec/features/projects/show/user_manages_notifications_spec.rb b/spec/features/projects/show/user_manages_notifications_spec.rb
index 455b931e7f3..bbf31c1e1e1 100644
--- a/spec/features/projects/show/user_manages_notifications_spec.rb
+++ b/spec/features/projects/show/user_manages_notifications_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe 'Projects > Show > User manages notifications', :js, feature_cate
end
context 'when project emails are disabled' do
- let(:project) { create(:project, :public, :repository, emails_disabled: true) }
+ let_it_be(:project) { create(:project, :public, :repository, emails_enabled: false) }
it 'is disabled' do
visit project_path(project)
diff --git a/spec/features/projects/user_changes_project_visibility_spec.rb b/spec/features/projects/user_changes_project_visibility_spec.rb
index f27a659f65f..24f24229f9c 100644
--- a/spec/features/projects/user_changes_project_visibility_spec.rb
+++ b/spec/features/projects/user_changes_project_visibility_spec.rb
@@ -66,8 +66,8 @@ RSpec.describe 'User changes public project visibility', :js, feature_category:
let(:project) { create(:project, :empty_repo, :public) }
it 'saves without confirmation' do
- expect(page).to have_selector('.js-emails-disabled', visible: true)
- find('.js-emails-disabled input[type="checkbox"]').click
+ expect(page).to have_selector('.js-emails-enabled', visible: true)
+ find('.js-emails-enabled input[type="checkbox"]').click
page.within('#js-shared-permissions') do
click_button 'Save changes'
diff --git a/spec/features/projects/work_items/work_item_spec.rb b/spec/features/projects/work_items/work_item_spec.rb
index e996a76b1c5..618d3e2efd0 100644
--- a/spec/features/projects/work_items/work_item_spec.rb
+++ b/spec/features/projects/work_items/work_item_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do
end
it_behaves_like 'work items title'
- it_behaves_like 'work items status'
+ it_behaves_like 'work items toggle status button'
it_behaves_like 'work items assignees'
it_behaves_like 'work items labels'
it_behaves_like 'work items comments', :issue
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index 9244cafbc0b..ee5d92b7cdb 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -40,6 +40,8 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
it 'allows to create a protected branch with name containing HTML tags' do
visit project_protected_branches_path(project)
+
+ show_add_form
set_defaults
set_protected_branch_name('foo<b>bar<\b>')
click_on "Protect"
@@ -89,6 +91,8 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
describe "explicit protected branches" do
it "allows creating explicit protected branches" do
visit project_protected_branches_path(project)
+
+ show_add_form
set_defaults
set_protected_branch_name('some->branch')
click_on "Protect"
@@ -100,6 +104,8 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
it "shows success alert once protected branch is created" do
visit project_protected_branches_path(project)
+
+ show_add_form
set_defaults
set_protected_branch_name('some->branch')
click_on "Protect"
@@ -112,6 +118,8 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
project.repository.add_branch(admin, 'some-branch', commit.id)
visit project_protected_branches_path(project)
+
+ show_add_form
set_defaults
set_protected_branch_name('some-branch')
click_on "Protect"
@@ -124,6 +132,8 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
it "displays an error message if the named branch does not exist" do
visit project_protected_branches_path(project)
+
+ show_add_form
set_defaults
set_protected_branch_name('some-branch')
click_on "Protect"
@@ -135,6 +145,8 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
describe "wildcard protected branches" do
it "allows creating protected branches with a wildcard" do
visit project_protected_branches_path(project)
+
+ show_add_form
set_defaults
set_protected_branch_name('*-stable')
click_on "Protect"
@@ -149,6 +161,8 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
project.repository.add_branch(admin, 'staging-stable', 'master')
visit project_protected_branches_path(project)
+
+ show_add_form
set_defaults
set_protected_branch_name('*-stable')
click_on "Protect"
@@ -164,6 +178,8 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
project.repository.add_branch(admin, 'development', 'master')
visit project_protected_branches_path(project)
+
+ show_add_form
set_protected_branch_name('*-stable')
set_defaults
click_on "Protect"
diff --git a/spec/features/protected_tags_spec.rb b/spec/features/protected_tags_spec.rb
index 45315f53fd6..f5b463d63fa 100644
--- a/spec/features/protected_tags_spec.rb
+++ b/spec/features/protected_tags_spec.rb
@@ -15,6 +15,8 @@ RSpec.describe 'Protected Tags', :js, :with_license, feature_category: :source_c
describe "explicit protected tags" do
it "allows creating explicit protected tags" do
visit project_protected_tags_path(project)
+ click_button('Add tag')
+
set_protected_tag_name('some-tag')
set_allowed_to('create')
click_on_protect
@@ -29,6 +31,7 @@ RSpec.describe 'Protected Tags', :js, :with_license, feature_category: :source_c
project.repository.add_tag(user, 'some-tag', commit.id)
visit project_protected_tags_path(project)
+ click_button('Add tag')
set_protected_tag_name('some-tag')
set_allowed_to('create')
click_on_protect
@@ -38,6 +41,7 @@ RSpec.describe 'Protected Tags', :js, :with_license, feature_category: :source_c
it "displays an error message if the named tag does not exist" do
visit project_protected_tags_path(project)
+ click_button('Add tag')
set_protected_tag_name('some-tag')
set_allowed_to('create')
click_on_protect
@@ -49,6 +53,7 @@ RSpec.describe 'Protected Tags', :js, :with_license, feature_category: :source_c
describe "wildcard protected tags" do
it "allows creating protected tags with a wildcard" do
visit project_protected_tags_path(project)
+ click_button('Add tag')
set_protected_tag_name('*-stable')
set_allowed_to('create')
click_on_protect
@@ -63,12 +68,16 @@ RSpec.describe 'Protected Tags', :js, :with_license, feature_category: :source_c
project.repository.add_tag(user, 'staging-stable', 'master')
visit project_protected_tags_path(project)
+ click_button('Add tag')
set_protected_tag_name('*-stable')
set_allowed_to('create')
click_on_protect
+ within("#js-protected-tags-settings .gl-new-card-count") do
+ expect(page).to have_content("2")
+ end
+
within(".protected-tags-list") do
- expect(page).to have_content("Protected tags (2)")
expect(page).to have_content("2 matching tags")
end
end
@@ -79,11 +88,13 @@ RSpec.describe 'Protected Tags', :js, :with_license, feature_category: :source_c
project.repository.add_tag(user, 'development', 'master')
visit project_protected_tags_path(project)
+ click_button('Add tag')
set_protected_tag_name('*-stable')
set_allowed_to('create')
click_on_protect
visit project_protected_tags_path(project)
+ click_button('Add tag')
click_on "2 matching tags"
within(".protected-tags-list") do
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 903211ec250..52df4bfece2 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -26,16 +26,18 @@ RSpec.describe 'Triggers', :js, feature_category: :continuous_integration do
describe 'triggers page' do
describe 'create trigger workflow' do
it 'prevents adding new trigger with no description' do
+ click_button 'Add new token'
fill_in 'trigger_description', with: ''
- click_button 'Add trigger'
+ click_button 'Create pipeline trigger token'
# See if input has error due to empty value
expect(page.find('form.gl-show-field-errors .gl-field-error')).to be_visible
end
it 'adds new trigger with description' do
+ click_button 'Add new token'
fill_in 'trigger_description', with: 'trigger desc'
- click_button 'Add trigger'
+ click_button 'Create pipeline trigger token'
aggregate_failures 'display creation notice and trigger is created' do
expect(page.find('[data-testid="alert-info"]')).to have_content 'Trigger was created successfully.'
@@ -100,6 +102,7 @@ RSpec.describe 'Triggers', :js, feature_category: :continuous_integration do
describe 'show triggers workflow' do
it 'contains trigger description placeholder' do
+ click_button 'Add new token'
expect(page.find('#trigger_description')['placeholder']).to eq 'Trigger description'
end
diff --git a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
index cc296259b80..cd181f73473 100644
--- a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
+++ b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
@@ -6,34 +6,63 @@ RSpec.describe 'User uploads avatar to profile', feature_category: :user_profile
let!(:user) { create(:user) }
let(:avatar_file_path) { Rails.root.join('spec', 'fixtures', 'dk.png') }
- before do
- stub_feature_flags(edit_user_profile_vue: false)
- sign_in user
- visit profile_path
- end
+ shared_examples 'upload avatar' do
+ it 'shows the new avatar immediately in the header and setting sidebar', :js do
+ expect(page.find('.avatar-image .gl-avatar')['src']).not_to include(
+ "/uploads/-/system/user/avatar/#{user.id}/avatar.png"
+ )
+ find('.js-user-avatar-input', visible: false).set(avatar_file_path)
+
+ click_button 'Set new profile picture'
+ click_button 'Update profile settings'
- it 'they see their new avatar on their profile' do
- attach_file('user_avatar', avatar_file_path, visible: false)
- click_button 'Update profile settings'
+ wait_for_all_requests
- visit user_path(user)
+ data_uri = find('.avatar-image .gl-avatar')['src']
+ expect(page.find('.header-user-avatar')['src']).to eq data_uri
+ expect(page.find('[data-testid="sidebar-user-avatar"]')['src']).to eq data_uri
+
+ visit profile_path
+
+ expect(page.find('.avatar-image .gl-avatar')['src']).to include(
+ "/uploads/-/system/user/avatar/#{user.id}/avatar.png"
+ )
+ end
+ end
- expect(page).to have_selector(%(img[src$="/uploads/-/system/user/avatar/#{user.id}/dk.png?width=96"]))
+ context 'with "edit_user_profile_vue" turned on' do
+ before do
+ sign_in_and_visit_profile
+ end
- # Cheating here to verify something that isn't user-facing, but is important
- expect(user.reload.avatar.file).to exist
+ it_behaves_like 'upload avatar'
end
- it 'their new avatar is immediately visible in the header and setting sidebar', :js do
- find('.js-user-avatar-input', visible: false).set(avatar_file_path)
+ context 'with "edit_user_profile_vue" turned off' do
+ before do
+ stub_feature_flags(edit_user_profile_vue: false)
+ sign_in_and_visit_profile
+ end
- click_button 'Set new profile picture'
- click_button 'Update profile settings'
+ it 'they see their new avatar on their profile' do
+ attach_file('user_avatar', avatar_file_path, visible: false)
+ click_button 'Update profile settings'
- wait_for_all_requests
+ visit user_path(user)
- data_uri = find('.avatar-image .gl-avatar')['src']
- expect(page.find('.header-user-avatar')['src']).to eq data_uri
- expect(page.find('[data-testid="sidebar-user-avatar"]')['src']).to eq data_uri
+ expect(page).to have_selector(%(img[src$="/uploads/-/system/user/avatar/#{user.id}/dk.png?width=96"]))
+
+ # Cheating here to verify something that isn't user-facing, but is important
+ expect(user.reload.avatar.file).to exist
+ end
+
+ it_behaves_like 'upload avatar'
+ end
+
+ private
+
+ def sign_in_and_visit_profile
+ sign_in user
+ visit profile_path
end
end
diff --git a/spec/features/users/email_verification_on_login_spec.rb b/spec/features/users/email_verification_on_login_spec.rb
index 1854e812b73..7675de28f86 100644
--- a/spec/features/users/email_verification_on_login_spec.rb
+++ b/spec/features/users/email_verification_on_login_spec.rb
@@ -2,10 +2,12 @@
require 'spec_helper'
-RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting, feature_category: :system_access do
+RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting, :js, feature_category: :system_access do
include EmailHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:user) { create(:user) }
+ let_it_be(:another_user) { create(:user) }
+ let_it_be(:new_email) { build_stubbed(:user).email }
let(:require_email_verification_enabled) { user }
@@ -33,7 +35,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
# Expect to see the verification form on the login page
expect(page).to have_current_path(new_user_session_path)
- expect(page).to have_content('Help us protect your account')
+ expect(page).to have_content(s_('IdentityVerification|Help us protect your account'))
# Expect an instructions email to be sent with a code
code = expect_instructions_email_and_extract_code
@@ -41,7 +43,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
# Signing in again prompts for the code and doesn't send a new one
gitlab_sign_in(user)
expect(page).to have_current_path(new_user_session_path)
- expect(page).to have_content('Help us protect your account')
+ expect(page).to have_content(s_('IdentityVerification|Help us protect your account'))
# Verify the code
verify_code(code)
@@ -54,7 +56,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
# Expect a confirmation page with a meta refresh tag for 3 seconds to the root
expect(page).to have_current_path(users_successful_verification_path)
- expect(page).to have_content('Verification successful')
+ expect(page).to have_content(s_('IdentityVerification|Verification successful'))
expect(page).to have_selector("meta[http-equiv='refresh'][content='3; url=#{root_path}']", visible: false)
end
end
@@ -69,7 +71,8 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
code = expect_instructions_email_and_extract_code
# Request a new code
- click_link 'Resend code'
+ click_button s_('IdentityVerification|Resend code')
+ expect(page).to have_content(s_('IdentityVerification|A new code has been sent.'))
expect_log_message('Instructions Sent', 2)
new_code = expect_instructions_email_and_extract_code
@@ -83,22 +86,63 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
gitlab_sign_in(user)
# It shows a resend button
- expect(page).to have_link 'Resend code'
+ expect(page).to have_button s_('IdentityVerification|Resend code')
# Resend more than the rate limited amount of times
10.times do
- click_link 'Resend code'
+ click_button s_('IdentityVerification|Resend code')
end
- # Expect the link to be gone
- expect(page).not_to have_link 'Resend code'
+ # Expect an error alert
+ expect(page).to have_content format(s_("IdentityVerification|You've reached the maximum amount of resends. "\
+ 'Wait %{interval} and try again.'), interval: 'about 1 hour')
+ end
+ end
- # Wait for 1 hour
- travel 1.hour
+ describe 'updating the email address' do
+ it 'offers to update the email address' do
+ perform_enqueued_jobs do
+ # When logging in
+ gitlab_sign_in(user)
- # Now it's visible again
- gitlab_sign_in(user)
- expect(page).to have_link 'Resend code'
+ # Expect an instructions email to be sent with a code
+ code = expect_instructions_email_and_extract_code
+
+ # It shows an update email button
+ expect(page).to have_button s_('IdentityVerification|Update email')
+
+ # Click Update email button
+ click_button s_('IdentityVerification|Update email')
+
+ # Try to update with another user's email address
+ fill_in _('Email'), with: another_user.email
+ click_button s_('IdentityVerification|Update email')
+ expect(page).to have_content('Email has already been taken')
+
+ # Update to a unique email address
+ fill_in _('Email'), with: new_email
+ click_button s_('IdentityVerification|Update email')
+ expect(page).to have_content(s_('IdentityVerification|A new code has been sent to ' \
+ 'your updated email address.'))
+ expect_log_message('Instructions Sent', 2)
+
+ new_code = expect_email_changed_notification_to_old_address_and_instructions_email_to_new_address
+
+ # Verify the old code is different from the new code
+ expect(code).not_to eq(new_code)
+ verify_code(new_code)
+
+ # Expect the user to be unlocked
+ expect_user_to_be_unlocked
+ expect_user_to_be_confirmed
+
+ # When logging in again
+ gitlab_sign_out
+ gitlab_sign_in(user)
+
+ # It does not show an update email button anymore
+ expect(page).not_to have_button s_('IdentityVerification|Update email')
+ end
end
end
@@ -118,8 +162,9 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
# Expect an error message
expect_log_message('Failed Attempt', reason: 'rate_limited')
- expect(page).to have_content("You've reached the maximum amount of tries. "\
- 'Wait 10 minutes or send a new code and try again.')
+ expect(page).to have_content(
+ format(s_("IdentityVerification|You've reached the maximum amount of tries. "\
+ 'Wait %{interval} or send a new code and try again.'), interval: '10 minutes'))
# Wait for 10 minutes
travel 10.minutes
@@ -139,7 +184,8 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
# Expect an error message
expect_log_message('Failed Attempt', reason: 'invalid')
- expect(page).to have_content('The code is incorrect. Enter it again, or send a new code.')
+ expect(page).to have_content(s_('IdentityVerification|The code is incorrect. '\
+ 'Enter it again, or send a new code.'))
end
it 'verifies expired codes' do
@@ -156,7 +202,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
# Expect an error message
expect_log_message('Failed Attempt', reason: 'expired')
- expect(page).to have_content('The code has expired. Send a new code and try again.')
+ expect(page).to have_content(s_('IdentityVerification|The code has expired. Send a new code and try again.'))
end
end
end
@@ -250,7 +296,8 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
it 'shows an error message on on the login page' do
expect(page).to have_current_path(new_user_session_path)
- expect(page).to have_content('Maximum login attempts exceeded. Wait 10 minutes and try again.')
+ expect(page).to have_content(format(s_('IdentityVerification|Maximum login attempts exceeded. '\
+ 'Wait %{interval} and try again.'), interval: '10 minutes'))
end
end
@@ -271,7 +318,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
stub_feature_flags(require_email_verification: false)
# Resending and veryfying the code work as expected
- click_link 'Resend code'
+ click_button s_('IdentityVerification|Resend code')
new_code = expect_instructions_email_and_extract_code
verify_code(code)
@@ -283,7 +330,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
verify_code(new_code)
expect(page).to have_content(s_('IdentityVerification|The code has expired. Send a new code and try again.'))
- click_link 'Resend code'
+ click_button s_('IdentityVerification|Resend code')
another_code = expect_instructions_email_and_extract_code
verify_code(another_code)
@@ -341,6 +388,28 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
end
end
+ def expect_user_to_be_confirmed
+ aggregate_failures do
+ expect(user.email).to eq(new_email)
+ expect(user.unconfirmed_email).to be_nil
+ end
+ end
+
+ def expect_email_changed_notification_to_old_address_and_instructions_email_to_new_address
+ changed_email = ActionMailer::Base.deliveries[0]
+ instructions_email = ActionMailer::Base.deliveries[1]
+
+ expect(changed_email.to).to match_array([user.email])
+ expect(changed_email.subject).to eq('Email Changed')
+
+ expect(instructions_email.to).to match_array([new_email])
+ expect(instructions_email.subject).to eq(s_('IdentityVerification|Verify your identity'))
+
+ reset_delivered_emails!
+
+ instructions_email.body.parts.first.to_s[/\d{#{Users::EmailVerification::GenerateTokenService::TOKEN_LENGTH}}/o]
+ end
+
def expect_instructions_email_and_extract_code
mail = find_email_for(user)
expect(mail.to).to match_array([user.email])
diff --git a/spec/features/users/google_syndication_csp_spec.rb b/spec/features/users/google_syndication_csp_spec.rb
new file mode 100644
index 00000000000..e71539f87c8
--- /dev/null
+++ b/spec/features/users/google_syndication_csp_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Google Syndication content security policy', feature_category: :purchase do
+ include ContentSecurityPolicyHelpers
+
+ let_it_be(:connect_src) { 'https://other-cdn.test' }
+
+ let_it_be(:google_analytics_src) do
+ 'localhost https://cdn.cookielaw.org https://*.onetrust.com *.google-analytics.com ' \
+ '*.analytics.google.com *.googletagmanager.com'
+ end
+
+ let_it_be(:allowed_src) do
+ '*.google.com/pagead/landing pagead2.googlesyndication.com/pagead/landing'
+ end
+
+ let(:extra) { { google_tag_manager_nonce_id: 'google_tag_manager_nonce_id' } }
+
+ let(:csp) do
+ ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.connect_src(*connect_src.split)
+ end
+ end
+
+ subject { response_headers['Content-Security-Policy'] }
+
+ before do
+ setup_csp_for_controller(SessionsController, csp, any_time: true)
+ stub_config(extra: extra)
+ visit new_user_session_path
+ end
+
+ context 'when self-hosted' do
+ context 'when there is no CSP config' do
+ let(:extra) { {} }
+ let(:csp) { ActionDispatch::ContentSecurityPolicy.new }
+
+ it { is_expected.to be_blank }
+ end
+
+ context 'when connect-src CSP config exists' do
+ it { is_expected.to include("connect-src #{connect_src} #{google_analytics_src}") }
+ it { is_expected.not_to include(allowed_src) }
+ end
+ end
+
+ context 'when SaaS', :saas do
+ context 'when connect-src CSP config exists' do
+ it { is_expected.to include("connect-src #{connect_src} #{google_analytics_src} #{allowed_src}") }
+ end
+ end
+end
diff --git a/spec/features/users/rss_spec.rb b/spec/features/users/rss_spec.rb
index 39b6d049e43..2db58ce04a1 100644
--- a/spec/features/users/rss_spec.rb
+++ b/spec/features/users/rss_spec.rb
@@ -6,28 +6,53 @@ RSpec.describe 'User RSS', feature_category: :user_profile do
let(:user) { create(:user) }
let(:path) { user_path(create(:user)) }
- before do
- stub_feature_flags(user_profile_overflow_menu_vue: false)
- end
-
- context 'when signed in' do
+ describe 'with "user_profile_overflow_menu_vue" feature flag off' do
before do
- sign_in(user)
- visit path
+ stub_feature_flags(user_profile_overflow_menu_vue: false)
end
- it_behaves_like "it has an RSS button with current_user's feed token"
- end
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ visit path
+ end
- context 'when signed out' do
- before do
- visit path
+ it_behaves_like "it has an RSS button with current_user's feed token"
end
- it_behaves_like "it has an RSS button without a feed token"
+ context 'when signed out' do
+ before do
+ visit path
+ end
+
+ it_behaves_like "it has an RSS button without a feed token"
+ end
end
- # TODO: implement tests before the FF "user_profile_overflow_menu_vue" is turned on
- # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/122971
- # Related Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/416974
+ describe 'with "user_profile_overflow_menu_vue" feature flag on', :js do
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ visit path
+ end
+
+ it 'shows the RSS link with overflow menu' do
+ find('[data-testid="base-dropdown-toggle"').click
+
+ expect(page).to have_link 'Subscribe', href: /feed_token=glft-.*-#{user.id}/
+ end
+ end
+
+ context 'when signed out' do
+ before do
+ visit path
+ end
+
+ it 'has an RSS without a feed token' do
+ find('[data-testid="base-dropdown-toggle"').click
+
+ expect(page).not_to have_link 'Subscribe', href: /feed_token=glft-.*-#{user.id}/
+ end
+ end
+ end
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 850dd0bbc5d..450b9fa46b1 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -36,7 +36,7 @@ RSpec.shared_examples 'Signup name validation' do |field, max_length, label|
it 'shows an error message if the username contains emojis' do
simulate_input("##{field}", 'Ehsan 🦋')
- expect(page).to have_content("Invalid input, please avoid emojis")
+ expect(page).to have_content("Invalid input, please avoid emoji")
end
end
end
@@ -176,7 +176,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'shows an error message if the username contains emojis' do
simulate_input('#new_user_username', 'ehsan😀')
- expect(page).to have_content("Invalid input, please avoid emojis")
+ expect(page).to have_content("Invalid input, please avoid emoji")
end
it 'shows a pending message if the username availability is being fetched',
diff --git a/spec/finders/abuse_reports_finder_spec.rb b/spec/finders/abuse_reports_finder_spec.rb
index ee93d042ca2..0b641d0cb08 100644
--- a/spec/finders/abuse_reports_finder_spec.rb
+++ b/spec/finders/abuse_reports_finder_spec.rb
@@ -2,142 +2,205 @@
require 'spec_helper'
-RSpec.describe AbuseReportsFinder, '#execute' do
- let_it_be(:user1) { create(:user) }
- let_it_be(:user2) { create(:user) }
- let_it_be(:reporter) { create(:user) }
- let_it_be(:abuse_report_1) { create(:abuse_report, id: 20, category: 'spam', user: user1) }
- let_it_be(:abuse_report_2) do
- create(:abuse_report, :closed, id: 30, category: 'phishing', user: user2, reporter: reporter)
- end
+RSpec.describe AbuseReportsFinder, feature_category: :insider_threat do
+ let_it_be(:user_1) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
- let(:params) { {} }
+ let_it_be(:reporter_1) { create(:user) }
+ let_it_be(:reporter_2) { create(:user) }
- subject { described_class.new(params).execute }
+ let_it_be(:abuse_report_1) do
+ create(:abuse_report, :open, category: 'spam', user: user_1, reporter: reporter_1, id: 1)
+ end
- context 'when params is empty' do
- it 'returns all abuse reports' do
- expect(subject).to match_array([abuse_report_1, abuse_report_2])
- end
+ let_it_be(:abuse_report_2) do
+ create(:abuse_report, :closed, category: 'phishing', user: user_2, reporter: reporter_2, id: 2)
end
- context 'when params[:user_id] is present' do
- let(:params) { { user_id: user2 } }
+ let(:params) { {} }
- it 'returns abuse reports for the specified user' do
- expect(subject).to match_array([abuse_report_2])
- end
- end
+ subject(:finder) { described_class.new(params).execute }
- shared_examples 'returns filtered reports' do |filter_field|
- it "returns abuse reports filtered by #{filter_field}_id" do
- expect(subject).to match_array(filtered_reports)
+ describe '#execute' do
+ context 'when params is empty' do
+ it 'returns all abuse reports' do
+ expect(finder).to match_array([abuse_report_1, abuse_report_2])
+ end
end
- context "when no user has username = params[:#{filter_field}]" do
- before do
- allow(User).to receive_message_chain(:by_username, :pick)
- .with(params[filter_field])
- .with(:id)
- .and_return(nil)
+ shared_examples 'returns filtered reports' do |filter_field|
+ it "returns abuse reports filtered by #{filter_field}_id" do
+ expect(finder).to match_array(filtered_reports)
end
- it 'returns all abuse reports' do
- expect(subject).to match_array([abuse_report_1, abuse_report_2])
+ context "when no user has username = params[:#{filter_field}]" do
+ before do
+ allow(User).to receive_message_chain(:by_username, :pick)
+ .with(params[filter_field])
+ .with(:id)
+ .and_return(nil)
+ end
+
+ it 'returns all abuse reports' do
+ expect(finder).to match_array([abuse_report_1, abuse_report_2])
+ end
end
end
- end
- context 'when params[:user] is present' do
- it_behaves_like 'returns filtered reports', :user do
- let(:params) { { user: user1.username } }
- let(:filtered_reports) { [abuse_report_1] }
+ context 'when params[:user] is present' do
+ it_behaves_like 'returns filtered reports', :user do
+ let(:params) { { user: user_1.username } }
+ let(:filtered_reports) { [abuse_report_1] }
+ end
end
- end
- context 'when params[:reporter] is present' do
- it_behaves_like 'returns filtered reports', :reporter do
- let(:params) { { reporter: reporter.username } }
- let(:filtered_reports) { [abuse_report_2] }
+ context 'when params[:reporter] is present' do
+ it_behaves_like 'returns filtered reports', :reporter do
+ let(:params) { { reporter: reporter_1.username } }
+ let(:filtered_reports) { [abuse_report_1] }
+ end
end
- end
- context 'when params[:status] is present' do
- context 'when value is "open"' do
+ context 'when params[:status] = open' do
let(:params) { { status: 'open' } }
it 'returns only open abuse reports' do
- expect(subject).to match_array([abuse_report_1])
+ expect(finder).to match_array([abuse_report_1])
end
end
- context 'when value is "closed"' do
+ context 'when params[:status] = closed' do
let(:params) { { status: 'closed' } }
it 'returns only closed abuse reports' do
- expect(subject).to match_array([abuse_report_2])
+ expect(finder).to match_array([abuse_report_2])
end
end
- context 'when value is not a valid status' do
+ context 'when params[:status] is not a valid status' do
let(:params) { { status: 'partial' } }
it 'defaults to returning open abuse reports' do
- expect(subject).to match_array([abuse_report_1])
+ expect(finder).to match_array([abuse_report_1])
end
end
- context 'when abuse_reports_list feature flag is disabled' do
- before do
- stub_feature_flags(abuse_reports_list: false)
- end
+ context 'when params[:category] is present' do
+ let(:params) { { category: 'phishing' } }
- it 'does not filter by status' do
- expect(subject).to match_array([abuse_report_1, abuse_report_2])
+ it 'returns abuse reports with the specified category' do
+ expect(subject).to match_array([abuse_report_2])
end
end
- end
- context 'when params[:category] is present' do
- let(:params) { { category: 'phishing' } }
+ describe 'aggregating reports' do
+ context 'when multiple open reports exist' do
+ let(:params) { { status: 'open' } }
- it 'returns abuse reports with the specified category' do
- expect(subject).to match_array([abuse_report_2])
- end
- end
+ # same category and user as abuse_report_1 -> will get aggregated
+ let_it_be(:abuse_report_3) do
+ create(:abuse_report, :open, category: abuse_report_1.category, user: abuse_report_1.user, id: 3)
+ end
- describe 'sorting' do
- let(:params) { { sort: 'created_at_asc' } }
+ # different category, but same user as abuse_report_1 -> won't get aggregated
+ let_it_be(:abuse_report_4) do
+ create(:abuse_report, :open, category: 'phishing', user: abuse_report_1.user, id: 4)
+ end
- it 'returns reports sorted by the specified sort attribute' do
- expect(subject).to eq [abuse_report_1, abuse_report_2]
- end
+ it 'aggregates open reports by user and category' do
+ expect(finder).to match_array([abuse_report_1, abuse_report_4])
+ end
+
+ it 'sorts by aggregated_count in descending order and created_at in descending order' do
+ expect(finder).to eq([abuse_report_1, abuse_report_4])
+ end
+
+ it 'returns count with aggregated reports' do
+ expect(finder[0].count).to eq(2)
+ end
+
+ context 'when a different sorting attribute is given' do
+ let(:params) { { status: 'open', sort: 'created_at_desc' } }
- context 'when sort is not specified' do
- let(:params) { {} }
+ it 'returns reports sorted by the specified sort attribute' do
+ expect(subject).to eq([abuse_report_4, abuse_report_1])
+ end
+ end
- it "returns reports sorted by #{described_class::DEFAULT_SORT}" do
- expect(subject).to eq [abuse_report_2, abuse_report_1]
+ context 'when params[:sort] is invalid' do
+ let(:params) { { status: 'open', sort: 'invalid' } }
+
+ it 'sorts reports by aggregated_count in descending order' do
+ expect(finder).to eq([abuse_report_1, abuse_report_4])
+ end
+ end
end
- end
- context 'when sort is not supported' do
- let(:params) { { sort: 'superiority' } }
+ context 'when multiple closed reports exist' do
+ let(:params) { { status: 'closed' } }
+
+ # same user and category as abuse_report_2 -> won't get aggregated
+ let_it_be(:abuse_report_5) do
+ create(:abuse_report, :closed, category: abuse_report_2.category, user: abuse_report_2.user, id: 5)
+ end
+
+ it 'does not aggregate closed reports' do
+ expect(finder).to match_array([abuse_report_2, abuse_report_5])
+ end
+
+ it 'sorts reports by created_at in descending order' do
+ expect(finder).to eq([abuse_report_5, abuse_report_2])
+ end
+
+ context 'when a different sorting attribute is given' do
+ let(:params) { { status: 'closed', sort: 'created_at_asc' } }
- it "returns reports sorted by #{described_class::DEFAULT_SORT}" do
- expect(subject).to eq [abuse_report_2, abuse_report_1]
+ it 'returns reports sorted by the specified sort attribute' do
+ expect(subject).to eq([abuse_report_2, abuse_report_5])
+ end
+ end
+
+ context 'when params[:sort] is invalid' do
+ let(:params) { { status: 'closed', sort: 'invalid' } }
+
+ it 'sorts reports by created_at in descending order' do
+ expect(finder).to eq([abuse_report_5, abuse_report_2])
+ end
+ end
end
end
- context 'when abuse_reports_list feature flag is disabled' do
- let_it_be(:abuse_report_3) { create(:abuse_report, id: 10) }
-
+ context 'when legacy view is enabled' do
before do
stub_feature_flags(abuse_reports_list: false)
end
- it 'returns reports sorted by id in descending order' do
- expect(subject).to eq [abuse_report_2, abuse_report_1, abuse_report_3]
+ context 'when params is empty' do
+ it 'returns all abuse reports' do
+ expect(subject).to match_array([abuse_report_1, abuse_report_2])
+ end
+ end
+
+ context 'when params[:user_id] is present' do
+ let(:params) { { user_id: user_1 } }
+
+ it 'returns abuse reports for the specified user' do
+ expect(subject).to match_array([abuse_report_1])
+ end
+ end
+
+ context 'when sorting' do
+ it 'returns reports sorted by id in descending order' do
+ expect(subject).to match_array([abuse_report_2, abuse_report_1])
+ end
+ end
+
+ context 'when any of the new filters are present such as params[:status]' do
+ let(:params) { { status: 'open' } }
+
+ it 'returns all abuse reports' do
+ expect(subject).to match_array([abuse_report_1, abuse_report_2])
+ end
end
end
end
diff --git a/spec/finders/admin/abuse_report_labels_finder_spec.rb b/spec/finders/admin/abuse_report_labels_finder_spec.rb
new file mode 100644
index 00000000000..01b0e742ed1
--- /dev/null
+++ b/spec/finders/admin/abuse_report_labels_finder_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::AbuseReportLabelsFinder, feature_category: :insider_threat do
+ let_it_be(:current_user) { create(:admin) }
+ let_it_be(:project_label) { create(:label) }
+ let_it_be(:label_one) { create(:abuse_report_label, title: 'Uno') }
+ let_it_be(:label_two) { create(:abuse_report_label, title: 'Dos') }
+
+ let(:params) { {} }
+
+ subject(:finder) { described_class.new(current_user, params) }
+
+ describe '#execute', :enable_admin_mode do
+ context 'when current user is admin' do
+ context 'when params is empty' do
+ it 'returns all abuse report labels sorted by title in ascending order' do
+ expect(finder.execute).to eq([label_two, label_one])
+ end
+ end
+
+ context 'when search_term param is present' do
+ let(:params) { { search_term: 'un' } }
+
+ it 'returns matching abuse report labels' do
+ expect(finder.execute).to match_array([label_one])
+ end
+ end
+ end
+
+ context 'when current user is not an admin' do
+ let_it_be(:current_user) { create(:user) }
+
+ it 'returns nothing' do
+ expect(finder.execute).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/finders/autocomplete/group_users_finder_spec.rb b/spec/finders/autocomplete/group_users_finder_spec.rb
new file mode 100644
index 00000000000..78d0663ada6
--- /dev/null
+++ b/spec/finders/autocomplete/group_users_finder_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Autocomplete::GroupUsersFinder, feature_category: :team_planning do
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+
+ let_it_be(:parent_group_project) { create(:project, namespace: parent_group) }
+ let_it_be(:group_project) { create(:project, namespace: group) }
+ let_it_be(:subgroup_project) { create(:project, namespace: subgroup) }
+
+ let(:finder) { described_class.new(group: group) }
+
+ describe '#execute' do
+ context 'with group members' do
+ let_it_be(:parent_group_member) { create(:user).tap { |u| parent_group.add_developer(u) } }
+ let_it_be(:group_member) { create(:user).tap { |u| group.add_developer(u) } }
+ let_it_be(:subgroup_member) { create(:user).tap { |u| subgroup.add_developer(u) } }
+
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:other_group_member) { create(:user).tap { |u| other_group.add_developer(u) } }
+
+ it 'returns members of groups in the hierarchy' do
+ expect(finder.execute).to contain_exactly(
+ parent_group_member,
+ group_member,
+ subgroup_member
+ )
+ end
+ end
+
+ context 'with project members' do
+ let_it_be(:parent_group_project_member) { create(:user).tap { |u| parent_group_project.add_developer(u) } }
+ let_it_be(:group_project_member) { create(:user).tap { |u| group_project.add_developer(u) } }
+ let_it_be(:subgroup_project_member) { create(:user).tap { |u| subgroup_project.add_developer(u) } }
+
+ it 'returns members of descendant projects' do
+ expect(finder.execute).to contain_exactly(
+ group_project_member,
+ subgroup_project_member
+ )
+ end
+ end
+
+ context 'with invited group members' do
+ let_it_be(:invited_group) { create(:group) }
+ let_it_be(:invited_group_user) { create(:user).tap { |u| invited_group.add_developer(u) } }
+
+ it 'returns members of groups invited to this group' do
+ create(:group_group_link, shared_group: group, shared_with_group: invited_group)
+
+ expect(finder.execute).to contain_exactly(invited_group_user)
+ end
+
+ it 'returns members of groups invited to an ancestor group' do
+ create(:group_group_link, shared_group: parent_group, shared_with_group: invited_group)
+
+ expect(finder.execute).to contain_exactly(invited_group_user)
+ end
+
+ it 'returns members of groups invited to a descendant group' do
+ create(:group_group_link, shared_group: subgroup, shared_with_group: invited_group)
+
+ expect(finder.execute).to contain_exactly(invited_group_user)
+ end
+
+ it 'returns members of groups invited to a child project' do
+ create(:project_group_link, project: group_project, group: invited_group)
+
+ expect(finder.execute).to contain_exactly(invited_group_user)
+ end
+
+ it 'returns members of groups invited to a descendant project' do
+ create(:project_group_link, project: subgroup_project, group: invited_group)
+
+ expect(finder.execute).to contain_exactly(invited_group_user)
+ end
+
+ it 'does not return members of groups invited to a project of an ancestor group' do
+ create(:project_group_link, project: parent_group_project, group: invited_group)
+
+ expect(finder.execute).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/finders/autocomplete/users_finder_spec.rb b/spec/finders/autocomplete/users_finder_spec.rb
index 57f804e471f..e4337e52306 100644
--- a/spec/finders/autocomplete/users_finder_spec.rb
+++ b/spec/finders/autocomplete/users_finder_spec.rb
@@ -88,13 +88,20 @@ RSpec.describe Autocomplete::UsersFinder do
let(:parent) { create(:group, :public, parent: grandparent) }
let(:child) { create(:group, :public, parent: parent) }
let(:group) { parent }
+ let(:child_project) { create(:project, group: group) }
let!(:grandparent_user) { create(:group_member, :developer, group: grandparent).user }
let!(:parent_user) { create(:group_member, :developer, group: parent).user }
let!(:child_user) { create(:group_member, :developer, group: child).user }
-
- it 'includes users from parent groups as well' do
- expect(subject).to match_array([grandparent_user, parent_user])
+ let!(:child_project_user) { create(:project_member, :developer, project: child_project).user }
+
+ it 'includes users from parent groups, descendant groups, and descendant projects' do
+ expect(subject).to contain_exactly(
+ grandparent_user,
+ parent_user,
+ child_user,
+ child_project_user
+ )
end
end
@@ -114,12 +121,6 @@ RSpec.describe Autocomplete::UsersFinder do
end
end
- context 'when filtered by skip_users' do
- let(:params) { { skip_users: [omniauth_user.id, current_user.id, blocked_user] } }
-
- it { is_expected.to match_array([user1, external_user]) }
- end
-
context 'when todos exist' do
let!(:pending_todo1) { create(:todo, user: current_user, author: user1, state: :pending) }
let!(:pending_todo2) { create(:todo, user: external_user, author: omniauth_user, state: :pending) }
diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb
index 65003ea97ef..5a803ee2a0d 100644
--- a/spec/finders/deployments_finder_spec.rb
+++ b/spec/finders/deployments_finder_spec.rb
@@ -185,39 +185,6 @@ RSpec.describe DeploymentsFinder, feature_category: :deployment_management do
end
end
end
-
- context 'when remove_deployments_api_ref_sort is disabled' do
- before do
- stub_feature_flags(remove_deployments_api_ref_sort: false)
- end
-
- where(:order_by, :sort, :ordered_deployments) do
- 'created_at' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
- 'created_at' | 'desc' | [:deployment_3, :deployment_2, :deployment_1]
- 'id' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
- 'id' | 'desc' | [:deployment_3, :deployment_2, :deployment_1]
- 'iid' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
- 'iid' | 'desc' | [:deployment_3, :deployment_2, :deployment_1]
- 'ref' | 'asc' | [:deployment_2, :deployment_1, :deployment_3] # ref sorts when remove_deployments_api_ref_sort feature flag is disabled
- 'ref' | 'desc' | [:deployment_3, :deployment_1, :deployment_2] # ref sorts when remove_deployments_api_ref_sort feature flag is disabled
- 'updated_at' | 'asc' | [:deployment_2, :deployment_3, :deployment_1]
- 'updated_at' | 'desc' | [:deployment_1, :deployment_3, :deployment_2]
- 'finished_at' | 'asc' | described_class::InefficientQueryError
- 'finished_at' | 'desc' | described_class::InefficientQueryError
- 'invalid' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
- 'iid' | 'err' | [:deployment_1, :deployment_2, :deployment_3]
- end
-
- with_them do
- it 'returns the deployments ordered' do
- if ordered_deployments == described_class::InefficientQueryError
- expect { subject }.to raise_error(described_class::InefficientQueryError)
- else
- expect(subject).to eq(ordered_deployments.map { |name| public_send(name) })
- end
- end
- end
- end
end
describe 'transform `created_at` sorting to `id` sorting' do
diff --git a/spec/finders/group_members_finder_spec.rb b/spec/finders/group_members_finder_spec.rb
index 4ac4dc3ba37..18473a5e70b 100644
--- a/spec/finders/group_members_finder_spec.rb
+++ b/spec/finders/group_members_finder_spec.rb
@@ -51,55 +51,51 @@ RSpec.describe GroupMembersFinder, '#execute', feature_category: :groups_and_pro
user4_sub_group: create(:group_member, :developer, group: sub_group, user: user4, expires_at: 1.day.from_now),
user4_group: create(:group_member, :developer, group: group, user: user4, expires_at: 2.days.from_now),
user4_public_shared_group: create(:group_member, :developer, group: public_shared_group, user: user4),
- user4_private_shared_group: create(:group_member, :developer, group: private_shared_group, user: user4),
- user5_private_shared_group: create(:group_member, :developer, group: private_shared_group, user: user5_2fa)
+ user4_private_shared_group: create(:group_member, :developer, group: private_shared_group, user: user4),
+ user5_private_shared_group: create(:group_member, :developer, group: private_shared_group, user: user5_2fa)
}
end
- shared_examples 'member relations' do
- it 'raises an error if a non-supported relation type is used' do
- expect do
- described_class.new(group).execute(include_relations: [:direct, :invalid_relation_type])
- end.to raise_error(ArgumentError, "invalid_relation_type is not a valid relation type. Valid relation types are direct, inherited, descendants, shared_from_groups.")
- end
+ it 'raises an error if a non-supported relation type is used' do
+ expect do
+ described_class.new(group).execute(include_relations: [:direct, :invalid_relation_type])
+ end.to raise_error(ArgumentError, "invalid_relation_type is not a valid relation type. Valid relation types are direct, inherited, descendants, shared_from_groups.")
+ end
- using RSpec::Parameterized::TableSyntax
-
- where(:subject_relations, :subject_group, :expected_members) do
- [] | :group | []
- GroupMembersFinder::DEFAULT_RELATIONS | :group | [:user1_group, :user2_group, :user3_group, :user4_group]
- [:direct] | :group | [:user1_group, :user2_group, :user3_group, :user4_group]
- [:inherited] | :group | []
- [:descendants] | :group | [:user1_sub_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group]
- [:shared_from_groups] | :group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group]
- [:direct, :inherited, :descendants, :shared_from_groups] | :group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group]
- [] | :sub_group | []
- GroupMembersFinder::DEFAULT_RELATIONS | :sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group]
- [:direct] | :sub_group | [:user1_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group]
- [:inherited] | :sub_group | [:user1_group, :user2_group, :user3_group, :user4_group]
- [:descendants] | :sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group]
- [:shared_from_groups] | :sub_group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group]
- [:direct, :inherited, :descendants, :shared_from_groups] | :sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group]
- [] | :sub_sub_group | []
- GroupMembersFinder::DEFAULT_RELATIONS | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group]
- [:direct] | :sub_sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group]
- [:inherited] | :sub_sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group]
- [:descendants] | :sub_sub_group | []
- [:shared_from_groups] | :sub_sub_group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group]
- [:direct, :inherited, :descendants, :shared_from_groups] | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group]
- end
+ using RSpec::Parameterized::TableSyntax
+
+ where(:subject_relations, :subject_group, :expected_members) do
+ [] | :group | []
+ GroupMembersFinder::DEFAULT_RELATIONS | :group | [:user1_group, :user2_group, :user3_group, :user4_group]
+ [:direct] | :group | [:user1_group, :user2_group, :user3_group, :user4_group]
+ [:inherited] | :group | []
+ [:descendants] | :group | [:user1_sub_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group]
+ [:shared_from_groups] | :group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group]
+ [:direct, :inherited, :descendants, :shared_from_groups] | :group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group]
+ [] | :sub_group | []
+ GroupMembersFinder::DEFAULT_RELATIONS | :sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:direct] | :sub_group | [:user1_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group]
+ [:inherited] | :sub_group | [:user1_group, :user2_group, :user3_group, :user4_group]
+ [:descendants] | :sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group]
+ [:shared_from_groups] | :sub_group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group]
+ [:direct, :inherited, :descendants, :shared_from_groups] | :sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group]
+ [] | :sub_sub_group | []
+ GroupMembersFinder::DEFAULT_RELATIONS | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:direct] | :sub_sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group]
+ [:inherited] | :sub_sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:descendants] | :sub_sub_group | []
+ [:shared_from_groups] | :sub_sub_group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group]
+ [:direct, :inherited, :descendants, :shared_from_groups] | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group]
+ end
- with_them do
- it 'returns correct members' do
- result = described_class.new(groups[subject_group]).execute(include_relations: subject_relations)
+ with_them do
+ it 'returns correct members' do
+ result = described_class.new(groups[subject_group]).execute(include_relations: subject_relations)
- expect(result.to_a).to match_array(expected_members.map { |name| members[name] })
- end
+ expect(result.to_a).to match_array(expected_members.map { |name| members[name] })
end
end
- it_behaves_like 'member relations'
-
it 'returns the correct access level of the members shared through group sharing' do
shared_members_access = described_class
.new(groups[:group])
@@ -110,14 +106,6 @@ RSpec.describe GroupMembersFinder, '#execute', feature_category: :groups_and_pro
correct_access_levels = ([Gitlab::Access::DEVELOPER] * 3) << Gitlab::Access::REPORTER
expect(shared_members_access).to match_array(correct_access_levels)
end
-
- context 'when members_with_shared_group_access feature flag is disabled' do
- before do
- stub_feature_flags(members_with_shared_group_access: false)
- end
-
- it_behaves_like 'member relations'
- end
end
context 'search' do
diff --git a/spec/finders/group_projects_finder_spec.rb b/spec/finders/group_projects_finder_spec.rb
index 87e579dbeec..31c7e031e00 100644
--- a/spec/finders/group_projects_finder_spec.rb
+++ b/spec/finders/group_projects_finder_spec.rb
@@ -34,8 +34,8 @@ RSpec.describe GroupProjectsFinder do
end
end
- context "only owned" do
- let(:options) { { only_owned: true } }
+ context "when shared projects are excluded" do
+ let(:options) { { exclude_shared: true } }
context 'with subgroups projects' do
before do
@@ -146,8 +146,8 @@ RSpec.describe GroupProjectsFinder do
end
end
- context "only owned" do
- let(:options) { { only_owned: true } }
+ context "when shared projects are excluded" do
+ let(:options) { { exclude_shared: true } }
context "without external user" do
before do
@@ -247,8 +247,8 @@ RSpec.describe GroupProjectsFinder do
it { is_expected.to contain_exactly(shared_project_3, shared_project_2, shared_project_1) }
end
- context "only owned" do
- let(:options) { { only_owned: true } }
+ context "when shared projects are excluded" do
+ let(:options) { { exclude_shared: true } }
it { is_expected.to contain_exactly(private_project, public_project) }
end
@@ -265,8 +265,8 @@ RSpec.describe GroupProjectsFinder do
it { is_expected.to contain_exactly(shared_project_3, shared_project_1) }
end
- context "only owned" do
- let(:options) { { only_owned: true } }
+ context "when shared projects are excluded" do
+ let(:options) { { exclude_shared: true } }
it { is_expected.to contain_exactly(public_project) }
end
@@ -284,8 +284,8 @@ RSpec.describe GroupProjectsFinder do
it { is_expected.to match_array([shared_project_3, shared_project_1]) }
end
- context "only owned" do
- let(:options) { { only_owned: true } }
+ context "when shared projects are excluded" do
+ let(:options) { { exclude_shared: true } }
context 'with subgroups projects' do
before do
diff --git a/spec/finders/labels_finder_spec.rb b/spec/finders/labels_finder_spec.rb
index e344591dd5d..41224f0a5c5 100644
--- a/spec/finders/labels_finder_spec.rb
+++ b/spec/finders/labels_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe LabelsFinder do
+RSpec.describe LabelsFinder, feature_category: :team_planning do
describe '#execute' do
let_it_be(:group_1) { create(:group) }
let_it_be(:group_2) { create(:group) }
@@ -20,10 +20,12 @@ RSpec.describe LabelsFinder do
let_it_be(:project_label_2) { create(:label, project: project_2, title: 'Label 2') }
let_it_be(:project_label_4) { create(:label, project: project_4, title: 'Label 4') }
let_it_be(:project_label_5) { create(:label, project: project_5, title: 'Label 5') }
+ let_it_be(:project_label_locked) { create(:label, project: project_1, title: 'Label Locked', lock_on_merge: true) }
let_it_be(:group_label_1) { create(:group_label, group: group_1, title: 'Label 1 (group)') }
let_it_be(:group_label_2) { create(:group_label, group: group_1, title: 'Group Label 2') }
let_it_be(:group_label_3) { create(:group_label, group: group_2, title: 'Group Label 3') }
+ let_it_be(:group_label_locked) { create(:group_label, group: group_1, title: 'Group Label Locked', lock_on_merge: true) }
let_it_be(:private_group_label_1) { create(:group_label, group: private_group_1, title: 'Private Group Label 1') }
let_it_be(:private_subgroup_label_1) { create(:group_label, group: private_subgroup_1, title: 'Private Sub Group Label 1') }
@@ -42,7 +44,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user)
- expect(finder.execute).to match_array([group_label_2, group_label_3, project_label_1, group_label_1, project_label_2, project_label_4])
+ expect(finder.execute).to match_array([group_label_2, group_label_3, group_label_locked, project_label_1, group_label_1, project_label_2, project_label_4, project_label_locked])
end
it 'returns labels available if nil title is supplied' do
@@ -50,7 +52,7 @@ RSpec.describe LabelsFinder do
# params[:title] will return `nil` regardless whether it is specified
finder = described_class.new(user, title: nil)
- expect(finder.execute).to match_array([group_label_2, group_label_3, project_label_1, group_label_1, project_label_2, project_label_4])
+ expect(finder.execute).to match_array([group_label_2, group_label_3, group_label_locked, project_label_1, group_label_1, project_label_2, project_label_4, project_label_locked])
end
end
@@ -60,7 +62,7 @@ RSpec.describe LabelsFinder do
::Projects::UpdateService.new(project_1, user, archived: true).execute
finder = described_class.new(user, **group_params(group_1))
- expect(finder.execute).to match_array([group_label_2, group_label_1, project_label_5])
+ expect(finder.execute).to match_array([group_label_2, group_label_1, project_label_5, group_label_locked])
end
context 'when only_group_labels is true' do
@@ -69,7 +71,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user, only_group_labels: true, **group_params(group_1))
- expect(finder.execute).to match_array([group_label_2, group_label_1])
+ expect(finder.execute).to match_array([group_label_2, group_label_1, group_label_locked])
end
end
@@ -249,7 +251,7 @@ RSpec.describe LabelsFinder do
it 'returns labels available for the project' do
finder = described_class.new(user, project_id: project_1.id)
- expect(finder.execute).to match_array([group_label_2, project_label_1, group_label_1])
+ expect(finder.execute).to match_array([group_label_2, group_label_locked, project_label_1, project_label_locked, group_label_1])
end
context 'as an administrator' do
@@ -330,6 +332,14 @@ RSpec.describe LabelsFinder do
end
end
+ context 'filter by locked labels' do
+ it 'returns labels that are locked' do
+ finder = described_class.new(user, locked_labels: true)
+
+ expect(finder.execute).to match_array([project_label_locked, group_label_locked])
+ end
+ end
+
context 'external authorization' do
it_behaves_like 'a finder with external authorization service' do
let!(:subject) { create(:label, project: project) }
diff --git a/spec/finders/metrics/dashboards/annotations_finder_spec.rb b/spec/finders/metrics/dashboards/annotations_finder_spec.rb
deleted file mode 100644
index 7c5932dde1e..00000000000
--- a/spec/finders/metrics/dashboards/annotations_finder_spec.rb
+++ /dev/null
@@ -1,108 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboards::AnnotationsFinder do
- describe '#execute' do
- subject(:annotations) { described_class.new(dashboard: dashboard, params: params).execute }
-
- let_it_be(:current_user) { create(:user) }
-
- let(:path) { 'config/prometheus/common_metrics.yml' }
- let(:params) { {} }
- let(:environment) { create(:environment) }
- let(:dashboard) { PerformanceMonitoring::PrometheusDashboard.new(path: path, environment: environment) }
-
- context 'there are no annotations records' do
- it 'returns empty array' do
- expect(annotations).to be_empty
- end
- end
-
- context 'with annotation records' do
- let!(:nine_minutes_old_annotation) { create(:metrics_dashboard_annotation, environment: environment, starting_at: 9.minutes.ago, dashboard_path: path) }
- let!(:fifteen_minutes_old_annotation) { create(:metrics_dashboard_annotation, environment: environment, starting_at: 15.minutes.ago, dashboard_path: path) }
- let!(:just_created_annotation) { create(:metrics_dashboard_annotation, environment: environment, dashboard_path: path) }
- let!(:annotation_for_different_env) { create(:metrics_dashboard_annotation, dashboard_path: path) }
- let!(:annotation_for_different_dashboard) { create(:metrics_dashboard_annotation, dashboard_path: '.gitlab/dashboards/test.yml') }
-
- it 'loads annotations' do
- expect(annotations).to match_array [fifteen_minutes_old_annotation, nine_minutes_old_annotation, just_created_annotation]
- end
-
- context 'when the from filter is present' do
- let(:params) do
- {
- from: 14.minutes.ago
- }
- end
-
- it 'loads only younger annotations' do
- expect(annotations).to match_array [nine_minutes_old_annotation, just_created_annotation]
- end
- end
-
- context 'when the to filter is present' do
- let(:params) do
- {
- to: 5.minutes.ago
- }
- end
-
- it 'loads only older annotations' do
- expect(annotations).to match_array [fifteen_minutes_old_annotation, nine_minutes_old_annotation]
- end
- end
-
- context 'when from and to filters are present' do
- context 'and to is bigger than from' do
- let(:params) do
- {
- from: 14.minutes.ago,
- to: 5.minutes.ago
- }
- end
-
- it 'loads only annotations assigned to this interval' do
- expect(annotations).to match_array [nine_minutes_old_annotation]
- end
- end
-
- context 'and from is bigger than to' do
- let(:params) do
- {
- to: 14.minutes.ago,
- from: 5.minutes.ago
- }
- end
-
- it 'ignores to parameter and returns annotations starting at from filter' do
- expect(annotations).to match_array [just_created_annotation]
- end
- end
-
- context 'when from or to filters are empty strings' do
- let(:params) do
- {
- from: '',
- to: ''
- }
- end
-
- it 'ignores this parameters' do
- expect(annotations).to match_array [fifteen_minutes_old_annotation, nine_minutes_old_annotation, just_created_annotation]
- end
- end
- end
-
- context 'dashboard environment is missing' do
- let(:dashboard) { PerformanceMonitoring::PrometheusDashboard.new(path: path, environment: nil) }
-
- it 'returns empty relation', :aggregate_failures do
- expect(annotations).to be_kind_of ::ActiveRecord::Relation
- expect(annotations).to be_empty
- end
- end
- end
- end
-end
diff --git a/spec/finders/metrics/users_starred_dashboards_finder_spec.rb b/spec/finders/metrics/users_starred_dashboards_finder_spec.rb
deleted file mode 100644
index 4136cf1123a..00000000000
--- a/spec/finders/metrics/users_starred_dashboards_finder_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::UsersStarredDashboardsFinder do
- describe '#execute' do
- subject(:starred_dashboards) { described_class.new(user: user, project: project, params: params).execute }
-
- let_it_be(:user) { create(:user) }
-
- let(:project) { create(:project) }
- let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
- let(:params) { {} }
-
- context 'there are no starred dashboard records' do
- it 'returns empty array' do
- expect(starred_dashboards).to be_empty
- end
- end
-
- context 'with annotation records' do
- let!(:starred_dashboard_1) { create(:metrics_users_starred_dashboard, user: user, project: project) }
- let!(:starred_dashboard_2) { create(:metrics_users_starred_dashboard, user: user, project: project, dashboard_path: dashboard_path) }
- let!(:other_project_dashboard) { create(:metrics_users_starred_dashboard, user: user, dashboard_path: dashboard_path) }
- let!(:other_user_dashboard) { create(:metrics_users_starred_dashboard, project: project, dashboard_path: dashboard_path) }
-
- context 'user without read access to project' do
- it 'returns empty relation' do
- expect(starred_dashboards).to be_empty
- end
- end
-
- context 'user with read access to project' do
- before do
- project.add_reporter(user)
- end
-
- it 'loads starred dashboards' do
- expect(starred_dashboards).to contain_exactly starred_dashboard_1, starred_dashboard_2
- end
-
- context 'when the dashboard_path filter is present' do
- let(:params) do
- {
- dashboard_path: dashboard_path
- }
- end
-
- it 'loads filtered starred dashboards' do
- expect(starred_dashboards).to contain_exactly starred_dashboard_2
- end
- end
- end
- end
- end
-end
diff --git a/spec/finders/packages/go/version_finder_spec.rb b/spec/finders/packages/go/version_finder_spec.rb
index b67842d1e05..a4d40c0a59a 100644
--- a/spec/finders/packages/go/version_finder_spec.rb
+++ b/spec/finders/packages/go/version_finder_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Packages::Go::VersionFinder do
let(:finder) { described_class.new mod }
- before :all do
+ before_all do
create :go_module_commit, :files, project: project, tag: 'v1.0.0', files: { 'README.md' => 'Hi' }
create :go_module_commit, :module, project: project, tag: 'v1.0.1'
create :go_module_commit, :package, project: project, tag: 'v1.0.2', path: 'pkg'
diff --git a/spec/finders/packages/nuget/package_finder_spec.rb b/spec/finders/packages/nuget/package_finder_spec.rb
index 6a6eebca778..792e543e424 100644
--- a/spec/finders/packages/nuget/package_finder_spec.rb
+++ b/spec/finders/packages/nuget/package_finder_spec.rb
@@ -1,13 +1,13 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Packages::Nuget::PackageFinder do
+RSpec.describe Packages::Nuget::PackageFinder, feature_category: :package_registry do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:project) { create(:project, namespace: subgroup) }
let_it_be_with_refind(:package1) { create(:nuget_package, project: project) }
- let_it_be(:package2) { create(:nuget_package, name: package1.name, version: '2.0.0-ABC', project: project) }
+ let_it_be(:package2) { create(:nuget_package, :with_metadatum, name: package1.name, version: '2.0.0+ABC', project: project) }
let_it_be(:package3) { create(:nuget_package, name: 'Another.Dummy.Package', project: project) }
let_it_be(:other_package_1) { create(:nuget_package, name: package1.name, version: package1.version) }
let_it_be(:other_package_2) { create(:nuget_package, name: package1.name, version: package2.version) }
@@ -15,9 +15,18 @@ RSpec.describe Packages::Nuget::PackageFinder do
let(:package_name) { package1.name }
let(:package_version) { nil }
let(:limit) { 50 }
+ let(:client_version) { nil }
describe '#execute!' do
- subject { described_class.new(user, target, package_name: package_name, package_version: package_version, limit: limit).execute }
+ subject { described_class.new(user, target, package_name: package_name, package_version: package_version, limit: limit, client_version: client_version).execute }
+
+ shared_examples 'calling with_nuget_version_or_normalized_version scope' do |with_normalized:|
+ it 'calls with_nuget_version_or_normalized_version scope with the correct arguments' do
+ expect(::Packages::Package).to receive(:with_nuget_version_or_normalized_version).with(package_version, with_normalized: with_normalized).and_call_original
+
+ subject
+ end
+ end
shared_examples 'handling all the conditions' do
it { is_expected.to match_array([package1, package2]) }
@@ -43,13 +52,13 @@ RSpec.describe Packages::Nuget::PackageFinder do
end
context 'with valid version' do
- let(:package_version) { '2.0.0-ABC' }
+ let(:package_version) { '2.0.0+ABC' }
it { is_expected.to match_array([package2]) }
end
context 'with varying case version' do
- let(:package_version) { '2.0.0-abC' }
+ let(:package_version) { '2.0.0+abC' }
it { is_expected.to match_array([package2]) }
end
@@ -60,6 +69,16 @@ RSpec.describe Packages::Nuget::PackageFinder do
it { is_expected.to be_empty }
end
+ context 'with normalized version' do
+ let(:package_version) { '2.0.0' }
+
+ before do
+ package2.nuget_metadatum.update_column(:normalized_version, package_version)
+ end
+
+ it { is_expected.to match_array([package2]) }
+ end
+
context 'with limit hit' do
let_it_be(:package4) { create(:nuget_package, name: package1.name, project: project) }
let_it_be(:package5) { create(:nuget_package, name: package1.name, project: project) }
@@ -76,22 +95,34 @@ RSpec.describe Packages::Nuget::PackageFinder do
it { is_expected.to match_array([package1, package2]) }
end
- context 'with prefix wildcard' do
- let(:package_name) { "%#{package1.name[3..]}" }
+ context 'with client version less than 3' do
+ let(:package_version) { '2.0.0+abc' }
+ let(:client_version) { '2.8.6' }
- it { is_expected.to match_array([package1, package2]) }
+ it_behaves_like 'calling with_nuget_version_or_normalized_version scope', with_normalized: false
end
- context 'with suffix wildcard' do
- let(:package_name) { "#{package1.name[0..-3]}%" }
+ context 'with client version greater than or equal to 3' do
+ let(:package_version) { '2.0.0+abc' }
+ let(:client_version) { '3.5' }
- it { is_expected.to match_array([package1, package2]) }
+ it_behaves_like 'calling with_nuget_version_or_normalized_version scope', with_normalized: true
end
- context 'with surrounding wildcards' do
- let(:package_name) { "%#{package1.name[3..-3]}%" }
+ context 'with no client version' do
+ let(:package_version) { '2.0.0+abc' }
- it { is_expected.to match_array([package1, package2]) }
+ it_behaves_like 'calling with_nuget_version_or_normalized_version scope', with_normalized: true
+ end
+
+ context 'when nuget_normalized_version feature flag is disabled' do
+ let(:package_version) { '2.0.0+abc' }
+
+ before do
+ stub_feature_flags(nuget_normalized_version: false)
+ end
+
+ it_behaves_like 'calling with_nuget_version_or_normalized_version scope', with_normalized: false
end
end
@@ -130,5 +161,12 @@ RSpec.describe Packages::Nuget::PackageFinder do
it { is_expected.to be_empty }
end
+
+ context 'when package name is blank' do
+ let(:target) { project }
+ let(:package_name) { nil }
+
+ it { is_expected.to be_empty }
+ end
end
end
diff --git a/spec/finders/packages/pipelines_finder_spec.rb b/spec/finders/packages/pipelines_finder_spec.rb
new file mode 100644
index 00000000000..4fa6be5f0d1
--- /dev/null
+++ b/spec/finders/packages/pipelines_finder_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::PipelinesFinder, feature_category: :package_registry do
+ subject { described_class.new(pipeline_ids).execute }
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:p1) { create(:ci_pipeline, project: project) }
+ let_it_be(:p2) { create(:ci_pipeline, project: project) }
+ let_it_be(:p3) { create(:ci_pipeline, project: project) }
+
+ let(:pipeline_ids) { [p1.id, p3.id] }
+
+ describe '#execute' do
+ it 'returns only pipelines that match the given IDs, in descending order' do
+ expect(subject.map(&:id)).to eq([p3.id, p1.id])
+ end
+
+ it 'returns only selected columns' do
+ expect(subject.first.attributes.keys.map(&:to_sym)).to eq(::Packages::PipelinesFinder::COLUMNS)
+ end
+ end
+end
diff --git a/spec/finders/projects/ml/model_finder_spec.rb b/spec/finders/projects/ml/model_finder_spec.rb
index 386d690a8d2..48333ae49e5 100644
--- a/spec/finders/projects/ml/model_finder_spec.rb
+++ b/spec/finders/projects/ml/model_finder_spec.rb
@@ -3,38 +3,23 @@
require 'spec_helper'
RSpec.describe Projects::Ml::ModelFinder, feature_category: :mlops do
- let_it_be(:model1_a) { create(:ml_model_package) }
- let_it_be(:project) { model1_a.project }
- let_it_be(:model1_b) do
- create(:ml_model_package, name: model1_a.name, project: project)
- end
-
- let_it_be(:model2) do
- create(:ml_model_package, status: :pending_destruction, project: project)
- end
-
- let_it_be(:model3) { create(:ml_model_package) }
- let_it_be(:model4) { create(:generic_package, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:model1) { create(:ml_models, :with_versions, project: project) }
+ let_it_be(:model2) { create(:ml_models, :with_versions, project: project) }
+ let_it_be(:model3) { create(:ml_models) }
- subject { described_class.new(project).execute.to_a }
+ subject(:models) { described_class.new(project).execute.to_a }
- it 'returns the most recent version of a model' do
- is_expected.to include(model1_b)
+ it 'returns models for project' do
+ is_expected.to match_array([model1, model2])
end
- it 'does not return older versions of a model' do
- is_expected.not_to include(model1_a)
- end
-
- it 'does not return models pending destruction' do
- is_expected.not_to include(model2)
+ it 'including the latest version', :aggregate_failures do
+ expect(models[0].association_cached?(:latest_version)).to be(true)
+ expect(models[1].association_cached?(:latest_version)).to be(true)
end
it 'does not return models belonging to a different project' do
is_expected.not_to include(model3)
end
-
- it 'does not return packages that are not ml_model' do
- is_expected.not_to include(model4)
- end
end
diff --git a/spec/finders/repositories/tree_finder_spec.rb b/spec/finders/repositories/tree_finder_spec.rb
index 0d70d5f92d3..42b4047c4e8 100644
--- a/spec/finders/repositories/tree_finder_spec.rb
+++ b/spec/finders/repositories/tree_finder_spec.rb
@@ -26,10 +26,10 @@ RSpec.describe Repositories::TreeFinder do
end
it "accepts a gitaly_pagination argument" do
- expect(repository).to receive(:tree).with(anything, anything, recursive: nil, pagination_params: { limit: 20, page_token: nil }).and_call_original
+ expect(repository).to receive(:tree).with(anything, anything, recursive: nil, rescue_not_found: nil, pagination_params: { limit: 20, page_token: nil }).and_call_original
expect(tree_finder.execute(gitaly_pagination: true)).to be_an(Array)
- expect(repository).to receive(:tree).with(anything, anything, recursive: nil).and_call_original
+ expect(repository).to receive(:tree).with(anything, anything, recursive: nil, rescue_not_found: nil).and_call_original
expect(tree_finder.execute(gitaly_pagination: false)).to be_an(Array)
end
diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb
index 9f4b7612be5..a5cd90b444e 100644
--- a/spec/finders/snippets_finder_spec.rb
+++ b/spec/finders/snippets_finder_spec.rb
@@ -106,12 +106,32 @@ RSpec.describe SnippetsFinder do
expect(snippets).to contain_exactly(public_personal_snippet)
end
- it 'returns all snippets for an admin in admin mode', :enable_admin_mode do
+ it 'returns all personal snippets for an admin in admin mode', :enable_admin_mode do
snippets = described_class.new(admin, author: user).execute
expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
end
+ it 'returns all snippets (everything) for an admin when all_available="true" passed in' do
+ allow(admin).to receive(:can_read_all_resources?).and_return(true)
+
+ snippets = described_class.new(admin, author: user, all_available: true).execute
+
+ expect(snippets).to contain_exactly(
+ private_project_snippet,
+ internal_project_snippet,
+ public_project_snippet,
+ private_personal_snippet,
+ internal_personal_snippet,
+ public_personal_snippet)
+ end
+
+ it 'returns all snippets for non-admin user, even when all_available="true" passed in' do
+ snippets = described_class.new(user, author: user, all_available: true).execute
+
+ expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
+ end
+
it 'returns all public and internal snippets for an admin without admin mode' do
snippets = described_class.new(admin, author: user).execute
diff --git a/spec/finders/work_items/namespace_work_items_finder_spec.rb b/spec/finders/work_items/namespace_work_items_finder_spec.rb
new file mode 100644
index 00000000000..71fde0546ee
--- /dev/null
+++ b/spec/finders/work_items/namespace_work_items_finder_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::NamespaceWorkItemsFinder, feature_category: :team_planning do
+ include AdminModeHelper
+
+ describe '#execute' do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:sub_group) { create(:group, :private, parent: group) }
+ let_it_be(:project) { create(:project, :repository, :public, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:reporter) { create(:user).tap { |user| group.add_reporter(user) } }
+ let_it_be(:guest) { create(:user).tap { |user| group.add_guest(user) } }
+ let_it_be(:guest_author) { create(:user).tap { |user| group.add_guest(user) } }
+ let_it_be(:banned_user) { create(:banned_user) }
+
+ let_it_be(:project_work_item) { create(:work_item, project: project) }
+ let_it_be(:sub_group_work_item) do
+ create(:work_item, namespace: sub_group, author: reporter)
+ end
+
+ let_it_be(:group_work_item) do
+ create(:work_item, namespace: group, author: reporter)
+ end
+
+ let_it_be(:group_confidential_work_item, reload: true) do
+ create(:work_item, :confidential, namespace: group, author: guest_author)
+ end
+
+ let_it_be(:sub_group_confidential_work_item, reload: true) do
+ create(:work_item, :confidential, namespace: sub_group, author: guest_author)
+ end
+
+ let_it_be(:hidden_work_item) do
+ create(:work_item, :confidential, namespace: group, author: banned_user.user)
+ end
+
+ let_it_be(:other_work_item) { create(:work_item) }
+ let(:finder_params) { {} }
+ let(:current_user) { user }
+ let(:namespace) { nil }
+
+ subject do
+ described_class.new(current_user, finder_params.merge(
+ namespace_id: namespace
+ )).execute
+ end
+
+ context 'when no parent is provided' do
+ it { is_expected.to be_empty }
+ end
+
+ context 'when the namespace is private' do
+ let(:namespace) { sub_group }
+
+ context 'when the user cannot read the namespace' do
+ it { is_expected.to be_empty }
+ end
+
+ context 'when the user can not see confidential work_items' do
+ let(:current_user) { guest }
+
+ it { is_expected.to contain_exactly(sub_group_work_item) }
+
+ context 'when the user is the author of the work item' do
+ let(:current_user) { guest_author }
+
+ it { is_expected.to contain_exactly(sub_group_work_item, sub_group_confidential_work_item) }
+ end
+
+ context 'when the user is assigned to a confidential work item' do
+ before do
+ sub_group_confidential_work_item.update!(assignees: [current_user])
+ end
+
+ it { is_expected.to contain_exactly(sub_group_work_item, sub_group_confidential_work_item) }
+ end
+ end
+
+ context 'when the user can see confidential work_items' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to contain_exactly(sub_group_work_item, sub_group_confidential_work_item) }
+ end
+ end
+
+ context 'when the namespace is public' do
+ let(:namespace) { group }
+
+ context 'when user is admin' do
+ let(:current_user) { create(:user, :admin).tap { |u| enable_admin_mode!(u) } }
+
+ it { is_expected.to contain_exactly(group_work_item, group_confidential_work_item, hidden_work_item) }
+ end
+
+ context 'with an anonymous user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to contain_exactly(group_work_item) }
+ end
+
+ context 'when the user can not see confidential work_items' do
+ it { is_expected.to contain_exactly(group_work_item) }
+
+ context 'when the user is the author of the work item' do
+ let(:current_user) { guest_author }
+
+ it { is_expected.to contain_exactly(group_work_item, group_confidential_work_item) }
+ end
+
+ context 'when the user is assigned to a confidential work item' do
+ before do
+ group_confidential_work_item.update!(assignees: [current_user])
+ end
+
+ it { is_expected.to contain_exactly(group_work_item, group_confidential_work_item) }
+ end
+ end
+
+ context 'when the user can see confidential work_items' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to contain_exactly(group_work_item, group_confidential_work_item) }
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/entities/discussion.json b/spec/fixtures/api/schemas/entities/discussion.json
index f91571fbc48..625f9b86187 100644
--- a/spec/fixtures/api/schemas/entities/discussion.json
+++ b/spec/fixtures/api/schemas/entities/discussion.json
@@ -118,6 +118,9 @@
"null"
]
},
+ "resolved_by_push": {
+ "type": "boolean"
+ },
"resolved_at": {
"type": [
"string",
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json
index 9c80f1621ad..c27ea2840a7 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json
@@ -20,6 +20,7 @@
"dependencyGroups",
"id",
"packageContent",
+ "description",
"summary",
"version"
],
@@ -36,6 +37,9 @@
"packageContent": {
"type": "string"
},
+ "description": {
+ "type": "string"
+ },
"summary": {
"type": "string"
},
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
index 94b5ad48a1e..d17a3176433 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
@@ -49,6 +49,7 @@
"dependencyGroups",
"id",
"packageContent",
+ "description",
"summary",
"version"
],
@@ -65,6 +66,9 @@
"packageContent": {
"type": "string"
},
+ "description": {
+ "type": "string"
+ },
"summary": {
"type": "string"
},
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json
index 41ad7379d73..74f2913e5b9 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json
@@ -16,6 +16,7 @@
"@type",
"authors",
"id",
+ "description",
"summary",
"title",
"totalDownloads",
@@ -32,6 +33,9 @@
"id": {
"type": "string"
},
+ "description": {
+ "type": "string"
+ },
"summary": {
"type": "string"
},
diff --git a/spec/fixtures/gl-annotations.json.gz b/spec/fixtures/gl-annotations.json.gz
new file mode 100644
index 00000000000..493b51f2243
--- /dev/null
+++ b/spec/fixtures/gl-annotations.json.gz
Binary files differ
diff --git a/spec/fixtures/lib/backup/design_repo.refs b/spec/fixtures/lib/backup/design_repo.refs
new file mode 100644
index 00000000000..0df0c6916cb
--- /dev/null
+++ b/spec/fixtures/lib/backup/design_repo.refs
@@ -0,0 +1,2 @@
+c3cd4d7bd73a51a0f22045c3a4c871c435dc959d HEAD
+c3cd4d7bd73a51a0f22045c3a4c871c435dc959d refs/heads/master
diff --git a/spec/fixtures/lib/backup/personal_snippet_repo.refs b/spec/fixtures/lib/backup/personal_snippet_repo.refs
new file mode 100644
index 00000000000..ece8aa8f40f
--- /dev/null
+++ b/spec/fixtures/lib/backup/personal_snippet_repo.refs
@@ -0,0 +1,2 @@
+3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e HEAD
+3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e refs/heads/master
diff --git a/spec/fixtures/lib/backup/project_repo.refs b/spec/fixtures/lib/backup/project_repo.refs
new file mode 100644
index 00000000000..a075e52264c
--- /dev/null
+++ b/spec/fixtures/lib/backup/project_repo.refs
@@ -0,0 +1,2 @@
+393a7d860a5a4c3cc736d7eb00604e3472bb95ec HEAD
+393a7d860a5a4c3cc736d7eb00604e3472bb95ec refs/heads/master
diff --git a/spec/fixtures/lib/backup/project_snippet_repo.refs b/spec/fixtures/lib/backup/project_snippet_repo.refs
new file mode 100644
index 00000000000..5a2c600a876
--- /dev/null
+++ b/spec/fixtures/lib/backup/project_snippet_repo.refs
@@ -0,0 +1,2 @@
+6e44ba56a4748be361a841e759c20e421a1651a1 HEAD
+6e44ba56a4748be361a841e759c20e421a1651a1 refs/heads/master
diff --git a/spec/fixtures/lib/backup/wiki_repo.refs b/spec/fixtures/lib/backup/wiki_repo.refs
new file mode 100644
index 00000000000..dab2adaf520
--- /dev/null
+++ b/spec/fixtures/lib/backup/wiki_repo.refs
@@ -0,0 +1,2 @@
+c74b9948d0088d703ee1fafeddd9ed9add2901ea HEAD
+c74b9948d0088d703ee1fafeddd9ed9add2901ea refs/heads/master
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
index 42f9cc31c3a..29dd2ce0e84 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
@@ -11,13 +11,13 @@ milestone: "13.9"
introduced_by_url:
time_frame: 7d
data_source:
-data_category: operational
+data_category: optional
instrumentation_class: Count
performance_indicator_type: []
distribution:
- ce
-# Add here corresponding tiers
-# tier:
-# - free
-# - premium
-# - ultimate
+- ee
+tier:
+- free
+- premium
+- ultimate
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
index e123056d771..cf2a10a46d1 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
@@ -1,6 +1,5 @@
---
key_path: counts_weekly.test_metric
-name: test metric name
description:
product_section:
product_stage:
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
deleted file mode 100644
index 87c4e68f19e..00000000000
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
+++ /dev/null
@@ -1,24 +0,0 @@
----
-# See Usage Ping metrics dictionary docs https://docs.gitlab.com/ee/development/usage_ping/metrics_dictionary.html
-key_path: counts_weekly.test_metric
-name: test metric name
-description:
-product_section:
-product_stage:
-product_group:
-value_type: number
-status: active
-milestone: "13.9"
-introduced_by_url:
-time_frame: 7d
-data_source:
-data_category: optional
-instrumentation_class: Count
-performance_indicator_type: []
-distribution:
-- ce
-- ee
-tier:
-- free
-- premium
-- ultimate
diff --git a/spec/fixtures/migrations/db/migrate/database_migration.txt b/spec/fixtures/migrations/db/migrate/database_migration.txt
new file mode 100644
index 00000000000..594d06f8431
--- /dev/null
+++ b/spec/fixtures/migrations/db/migrate/database_migration.txt
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class DatabaseMigration < Gitlab::Database::Migration[2.0]
+ def up
+ add_column :dast_site_profiles, :scan_file_path, :text
+ end
+
+ def down
+ remove_column :dast_site_profiles, :scan_file_path, :text
+ end
+end
diff --git a/spec/fixtures/migrations/db/migrate/database_migration_two.txt b/spec/fixtures/migrations/db/migrate/database_migration_two.txt
new file mode 100644
index 00000000000..236529b4b2d
--- /dev/null
+++ b/spec/fixtures/migrations/db/migrate/database_migration_two.txt
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class Gitlab::Database::DatabaseMigration < Gitlab::Database::Migration[2.0]
+ def up
+ add_column :dast_site_profiles, :scan_file_path, :text
+ end
+
+ def down
+ remove_column :dast_site_profiles, :scan_file_path, :text
+ end
+end
diff --git a/spec/fixtures/migrations/db/post_migrate/database_clash_migration.txt b/spec/fixtures/migrations/db/post_migrate/database_clash_migration.txt
new file mode 100644
index 00000000000..a936dafd69f
--- /dev/null
+++ b/spec/fixtures/migrations/db/post_migrate/database_clash_migration.txt
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class ::ClashMigration < Gitlab::Database::Migration[2.0]
+ def up
+ add_column :dast_site_profiles, :scan_file_path, :text
+ end
+
+ def down
+ remove_column :dast_site_profiles, :scan_file_path, :text
+ end
+end
diff --git a/spec/fixtures/migrations/db/post_migrate/database_clash_migration_two.txt b/spec/fixtures/migrations/db/post_migrate/database_clash_migration_two.txt
new file mode 100644
index 00000000000..6af29a498bd
--- /dev/null
+++ b/spec/fixtures/migrations/db/post_migrate/database_clash_migration_two.txt
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class Gitlab::ClashMigrationTwo < Gitlab::Database::Migration[2.0]
+ def up
+ add_column :dast_site_profiles, :scan_file_path, :text
+ end
+
+ def down
+ remove_column :dast_site_profiles, :scan_file_path, :text
+ end
+end
diff --git a/spec/fixtures/migrations/elasticsearch/elasticsearch_clash_migration.txt b/spec/fixtures/migrations/elasticsearch/elasticsearch_clash_migration.txt
new file mode 100644
index 00000000000..e723d9f44e7
--- /dev/null
+++ b/spec/fixtures/migrations/elasticsearch/elasticsearch_clash_migration.txt
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class ClashMigration < Elastic::Migration
+ include Elastic::MigrationCreateIndex
+
+ retry_on_failure
+
+ def document_type
+ :epic
+ end
+
+ def target_class
+ Epic
+ end
+end
diff --git a/spec/fixtures/migrations/elasticsearch/elasticsearch_clash_migration_two.txt b/spec/fixtures/migrations/elasticsearch/elasticsearch_clash_migration_two.txt
new file mode 100644
index 00000000000..6af29a498bd
--- /dev/null
+++ b/spec/fixtures/migrations/elasticsearch/elasticsearch_clash_migration_two.txt
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class Gitlab::ClashMigrationTwo < Gitlab::Database::Migration[2.0]
+ def up
+ add_column :dast_site_profiles, :scan_file_path, :text
+ end
+
+ def down
+ remove_column :dast_site_profiles, :scan_file_path, :text
+ end
+end
diff --git a/spec/fixtures/migrations/elasticsearch/elasticsearch_migration.txt b/spec/fixtures/migrations/elasticsearch/elasticsearch_migration.txt
new file mode 100644
index 00000000000..122d3ef6d2b
--- /dev/null
+++ b/spec/fixtures/migrations/elasticsearch/elasticsearch_migration.txt
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class ElasticsearchMigration < Elastic::Migration
+ include Elastic::MigrationUpdateMappingsHelper
+
+ private
+
+ def index_name
+ Project.__elasticsearch__.index_name
+ end
+
+ def new_mappings
+ {
+ readme_content: {
+ type: 'text'
+ },
+ ci_catalog: {
+ type: 'boolean'
+ }
+ }
+ end
+end
diff --git a/spec/frontend/__helpers__/shared_test_setup.js b/spec/frontend/__helpers__/shared_test_setup.js
index 0217835b2a3..53a43626691 100644
--- a/spec/frontend/__helpers__/shared_test_setup.js
+++ b/spec/frontend/__helpers__/shared_test_setup.js
@@ -35,7 +35,7 @@ Vue.config.productionTip = false;
Vue.use(Translate);
-const JQUERY_MATCHERS_TO_EXCLUDE = ['toHaveLength', 'toExist'];
+const JQUERY_MATCHERS_TO_EXCLUDE = ['toBeEmpty', 'toHaveLength', 'toExist'];
// custom-jquery-matchers was written for an old Jest version, we need to make it compatible
Object.entries(jqueryMatchers).forEach(([matcherName, matcherFactory]) => {
diff --git a/spec/frontend/__mocks__/jed/index.js b/spec/frontend/__mocks__/jed/index.js
new file mode 100644
index 00000000000..fa2be5aa6e2
--- /dev/null
+++ b/spec/frontend/__mocks__/jed/index.js
@@ -0,0 +1,17 @@
+/**
+ * ## Why are we mocking Jed?
+ *
+ * https://gitlab.com/gitlab-org/gitlab/-/issues/390934#note_1494028934
+ *
+ * It's possible that some environments run a specific locale. If the unit
+ * tests run under this condition, hardcoded values will fail. To make
+ * tests more deterministic across environments, let's skip loading translations
+ * in FE unit tests.
+ */
+const Jed = jest.requireActual('jed');
+
+export default class MockJed extends Jed {
+ constructor() {
+ super({});
+ }
+}
diff --git a/spec/frontend/access_tokens/components/access_token_table_app_spec.js b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
index 2fa14810578..5236f38dc35 100644
--- a/spec/frontend/access_tokens/components/access_token_table_app_spec.js
+++ b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
@@ -91,24 +91,6 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expect(cells.at(0).text()).toBe(noTokensMessage);
});
- it('should show a title indicating the amount of tokens', () => {
- createComponent();
-
- expect(wrapper.find('h5').text()).toBe(
- sprintf(__('Active %{accessTokenTypePlural} (%{totalAccessTokens})'), {
- accessTokenTypePlural,
- totalAccessTokens: defaultActiveAccessTokens.length,
- }),
- );
- });
-
- it('should render information section', () => {
- const info = 'This is my information';
- createComponent({ information: info });
-
- expect(wrapper.findByTestId('information-section').text()).toBe(info);
- });
-
describe('table headers', () => {
it('should include `Action` column', () => {
createComponent();
diff --git a/spec/frontend/access_tokens/components/new_access_token_app_spec.js b/spec/frontend/access_tokens/components/new_access_token_app_spec.js
index fb92cc34ce9..d51ac638f0e 100644
--- a/spec/frontend/access_tokens/components/new_access_token_app_spec.js
+++ b/spec/frontend/access_tokens/components/new_access_token_app_spec.js
@@ -23,6 +23,8 @@ describe('~/access_tokens/components/new_access_token_app', () => {
};
const findButtonEl = () => document.querySelector('[type=submit]');
+ const findGlAlertError = () => wrapper.findByTestId('error-message');
+ const findGlAlertSuccess = () => wrapper.findByTestId('success-message');
const triggerSuccess = async (newToken = 'new token') => {
wrapper
@@ -57,7 +59,7 @@ describe('~/access_tokens/components/new_access_token_app', () => {
it('should render nothing', () => {
expect(wrapper.findComponent(InputCopyToggleVisibility).exists()).toBe(false);
- expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+ expect(findGlAlertError().exists()).toBe(false);
});
describe('on success', () => {
@@ -65,10 +67,12 @@ describe('~/access_tokens/components/new_access_token_app', () => {
const newToken = '12345';
await triggerSuccess(newToken);
- expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+ expect(findGlAlertError().exists()).toBe(false);
+ expect(findGlAlertSuccess().exists()).toBe(true);
const InputCopyToggleVisibilityComponent = wrapper.findComponent(InputCopyToggleVisibility);
expect(InputCopyToggleVisibilityComponent.props('value')).toBe(newToken);
+ expect(InputCopyToggleVisibilityComponent.props('readonly')).toBe(true);
expect(InputCopyToggleVisibilityComponent.props('copyButtonTitle')).toBe(
sprintf(__('Copy %{accessTokenType}'), { accessTokenType }),
);
@@ -81,7 +85,7 @@ describe('~/access_tokens/components/new_access_token_app', () => {
const newToken = '12345';
await triggerSuccess(newToken);
- expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+ expect(findGlAlertError().exists()).toBe(false);
const inputAttributes = wrapper
.findByLabelText(sprintf(__('Your new %{accessTokenType}'), { accessTokenType }))
@@ -134,7 +138,7 @@ describe('~/access_tokens/components/new_access_token_app', () => {
expect(wrapper.findComponent(InputCopyToggleVisibility).exists()).toBe(false);
- let GlAlertComponent = wrapper.findComponent(GlAlert);
+ let GlAlertComponent = findGlAlertError();
expect(GlAlertComponent.props('title')).toBe(__('The form contains the following errors:'));
expect(GlAlertComponent.props('variant')).toBe('danger');
let itemEls = wrapper.findAll('li');
diff --git a/spec/frontend/access_tokens/components/token_spec.js b/spec/frontend/access_tokens/components/token_spec.js
index f62f7d72e3b..ad92366c3b6 100644
--- a/spec/frontend/access_tokens/components/token_spec.js
+++ b/spec/frontend/access_tokens/components/token_spec.js
@@ -50,6 +50,7 @@ describe('Token', () => {
formInputGroupProps: {
id: defaultPropsData.inputId,
},
+ readonly: true,
value: defaultPropsData.token,
copyButtonTitle: defaultPropsData.copyButtonTitle,
});
diff --git a/spec/frontend/access_tokens/index_spec.js b/spec/frontend/access_tokens/index_spec.js
index c1158e0d124..7d4d73b00b2 100644
--- a/spec/frontend/access_tokens/index_spec.js
+++ b/spec/frontend/access_tokens/index_spec.js
@@ -50,7 +50,6 @@ describe('access tokens', () => {
initialActiveAccessTokens,
// Default values
- information: undefined,
noActiveTokensMessage: sprintf(__('This user has no active %{accessTokenTypePlural}.'), {
accessTokenTypePlural,
}),
@@ -59,14 +58,12 @@ describe('access tokens', () => {
});
it('mounts the component and provides all values', () => {
- const information = 'Additional information';
const noActiveTokensMessage = 'This group has no active access tokens.';
setHTMLFixture(
`<div id="js-access-token-table-app"
data-access-token-type="${accessTokenType}"
data-access-token-type-plural="${accessTokenTypePlural}"
data-initial-active-access-tokens=${JSON.stringify(initialActiveAccessTokens)}
- data-information="${information}"
data-no-active-tokens-message="${noActiveTokensMessage}"
data-show-role
>
@@ -82,7 +79,6 @@ describe('access tokens', () => {
accessTokenType,
accessTokenTypePlural,
initialActiveAccessTokens,
- information,
noActiveTokensMessage,
showRole: true,
});
diff --git a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
index fa051f7a43a..68225468941 100644
--- a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
+++ b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
@@ -1,6 +1,7 @@
import { GlModal, GlFilteredSearch } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import getDiffWithCommit from 'test_fixtures/merge_request_diffs/with_commit.json';
import AddReviewItemsModal from '~/add_context_commits_modal/components/add_context_commits_modal_wrapper.vue';
diff --git a/spec/frontend/admin/abuse_report/components/report_actions_spec.js b/spec/frontend/admin/abuse_report/components/report_actions_spec.js
index ec7dd31a046..6dd6d0e55c5 100644
--- a/spec/frontend/admin/abuse_report/components/report_actions_spec.js
+++ b/spec/frontend/admin/abuse_report/components/report_actions_spec.js
@@ -154,7 +154,7 @@ describe('ReportActions', () => {
beforeEach(async () => {
jest.spyOn(axios, 'put');
- axiosMock.onPut(report.updatePath).replyOnce(responseStatus, responseData);
+ axiosMock.onPut(report.moderateUserPath).replyOnce(responseStatus, responseData);
selectAction(params.user_action);
setCloseReport(params.close);
@@ -169,7 +169,7 @@ describe('ReportActions', () => {
});
it('does a put call with the right data', () => {
- expect(axios.put).toHaveBeenCalledWith(report.updatePath, params);
+ expect(axios.put).toHaveBeenCalledWith(report.moderateUserPath, params);
});
it('closes the drawer', () => {
@@ -191,4 +191,31 @@ describe('ReportActions', () => {
);
});
});
+
+ describe('when moderateUserPath is not present', () => {
+ it('sends the request to updatePath', async () => {
+ jest.spyOn(axios, 'put');
+ axiosMock.onPut(report.updatePath).replyOnce(HTTP_STATUS_OK, {});
+
+ const reportWithoutModerateUserPath = { ...report };
+ delete reportWithoutModerateUserPath.moderateUserPath;
+
+ createComponent({ report: reportWithoutModerateUserPath });
+
+ clickActionsButton();
+
+ await nextTick();
+
+ selectAction(params.user_action);
+ selectReason(params.reason);
+
+ await nextTick();
+
+ submitForm();
+
+ await waitForPromises();
+
+ expect(axios.put).toHaveBeenCalledWith(report.updatePath, expect.any(Object));
+ });
+ });
});
diff --git a/spec/frontend/admin/abuse_report/mock_data.js b/spec/frontend/admin/abuse_report/mock_data.js
index 8c0ae223c87..8ff0c7d507a 100644
--- a/spec/frontend/admin/abuse_report/mock_data.js
+++ b/spec/frontend/admin/abuse_report/mock_data.js
@@ -51,5 +51,6 @@ export const mockAbuseReport = {
screenshot:
'/uploads/-/system/abuse_report/screenshot/27/Screenshot_2023-03-30_at_16.56.37.png',
updatePath: '/admin/abuse_reports/27',
+ moderateUserPath: '/admin/abuse_reports/27/moderate_user',
},
};
diff --git a/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js b/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js
index 03bf510f3ad..8482faccca0 100644
--- a/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js
+++ b/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js
@@ -94,4 +94,19 @@ describe('AbuseReportRow', () => {
it('renders abuse category', () => {
expect(findAbuseCategory().exists()).toBe(true);
});
+
+ describe('aggregated report', () => {
+ const mockAggregatedAbuseReport = mockAbuseReports[1];
+ const { reportedUser, category, count } = mockAggregatedAbuseReport;
+
+ beforeEach(() => {
+ createComponent({ report: mockAggregatedAbuseReport });
+ });
+
+ it('displays title with number of aggregated reports', () => {
+ expect(findAbuseReportTitle().text()).toMatchInterpolatedText(
+ `${reportedUser.name} reported for ${category} by ${count} users`,
+ );
+ });
+ });
});
diff --git a/spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js b/spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js
index 1f3f2caa995..dda9263d094 100644
--- a/spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js
+++ b/spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js
@@ -8,8 +8,10 @@ import {
FILTERED_SEARCH_TOKEN_REPORTER,
FILTERED_SEARCH_TOKEN_STATUS,
FILTERED_SEARCH_TOKEN_CATEGORY,
- DEFAULT_SORT,
- SORT_OPTIONS,
+ DEFAULT_SORT_STATUS_OPEN,
+ DEFAULT_SORT_STATUS_CLOSED,
+ SORT_OPTIONS_STATUS_OPEN,
+ SORT_OPTIONS_STATUS_CLOSED,
} from '~/admin/abuse_reports/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
@@ -53,8 +55,8 @@ describe('AbuseReportsFilteredSearchBar', () => {
recentSearchesStorageKey: 'abuse_reports',
searchInputPlaceholder: 'Filter reports',
tokens: [...FILTERED_SEARCH_TOKENS, categoryToken],
- initialSortBy: DEFAULT_SORT,
- sortOptions: SORT_OPTIONS,
+ initialSortBy: DEFAULT_SORT_STATUS_OPEN,
+ sortOptions: SORT_OPTIONS_STATUS_OPEN,
});
});
@@ -88,6 +90,10 @@ describe('AbuseReportsFilteredSearchBar', () => {
expect(findFilteredSearchBar().props('initialFilterValue')).toMatchObject([
{
+ type: FILTERED_SEARCH_TOKEN_STATUS.type,
+ value: { data: 'closed', operator: '=' },
+ },
+ {
type: FILTERED_SEARCH_TOKEN_USER.type,
value: { data: 'mr_abuser', operator: '=' },
},
@@ -95,16 +101,12 @@ describe('AbuseReportsFilteredSearchBar', () => {
type: FILTERED_SEARCH_TOKEN_REPORTER.type,
value: { data: 'ms_nitch', operator: '=' },
},
- {
- type: FILTERED_SEARCH_TOKEN_STATUS.type,
- value: { data: 'closed', operator: '=' },
- },
]);
});
describe('initial sort', () => {
it.each(
- SORT_OPTIONS.flatMap(({ sortDirection: { descending, ascending } }) => [
+ SORT_OPTIONS_STATUS_OPEN.flatMap(({ sortDirection: { descending, ascending } }) => [
descending,
ascending,
]),
@@ -115,16 +117,20 @@ describe('AbuseReportsFilteredSearchBar', () => {
createComponent();
- expect(findFilteredSearchBar().props('initialSortBy')).toEqual(sortBy);
+ if (sortBy) {
+ expect(findFilteredSearchBar().props('initialSortBy')).toEqual(sortBy);
+ } else {
+ expect(findFilteredSearchBar().props('initialSortBy')).toEqual(DEFAULT_SORT_STATUS_OPEN);
+ }
},
);
- it(`uses ${DEFAULT_SORT} as initialSortBy when sort query param is invalid`, () => {
+ it(`uses ${DEFAULT_SORT_STATUS_OPEN} as initialSortBy when sort query param is invalid`, () => {
setWindowLocation(`?sort=unknown`);
createComponent();
- expect(findFilteredSearchBar().props('initialSortBy')).toEqual(DEFAULT_SORT);
+ expect(findFilteredSearchBar().props('initialSortBy')).toEqual(DEFAULT_SORT_STATUS_OPEN);
});
});
@@ -161,26 +167,39 @@ describe('AbuseReportsFilteredSearchBar', () => {
(filterToken) => {
createComponentAndFilter([filterToken]);
const { type, value } = filterToken;
- expect(redirectTo).toHaveBeenCalledWith(`https://localhost/?${type}=${value.data}`); // eslint-disable-line import/no-deprecated
+
+ // eslint-disable-next-line import/no-deprecated
+ expect(redirectTo).toHaveBeenCalledWith(
+ `https://localhost/?${type}=${value.data}&sort=${DEFAULT_SORT_STATUS_OPEN}`,
+ );
},
);
it('ignores search query param', () => {
const searchFilterToken = { type: FILTERED_SEARCH_TERM, value: { data: 'ignored' } };
createComponentAndFilter([USER_FILTER_TOKEN, searchFilterToken]);
- expect(redirectTo).toHaveBeenCalledWith('https://localhost/?user=mr_abuser'); // eslint-disable-line import/no-deprecated
+
+ // eslint-disable-next-line import/no-deprecated
+ expect(redirectTo).toHaveBeenCalledWith(
+ `https://localhost/?user=mr_abuser&sort=${DEFAULT_SORT_STATUS_OPEN}`,
+ );
});
it('redirects without page query param', () => {
createComponentAndFilter([USER_FILTER_TOKEN], '?page=2');
- expect(redirectTo).toHaveBeenCalledWith('https://localhost/?user=mr_abuser'); // eslint-disable-line import/no-deprecated
+
+ // eslint-disable-next-line import/no-deprecated
+ expect(redirectTo).toHaveBeenCalledWith(
+ `https://localhost/?user=mr_abuser&sort=${DEFAULT_SORT_STATUS_OPEN}`,
+ );
});
it('redirects with existing sort query param', () => {
- createComponentAndFilter([USER_FILTER_TOKEN], `?sort=${DEFAULT_SORT}`);
+ createComponentAndFilter([USER_FILTER_TOKEN], `?sort=${DEFAULT_SORT_STATUS_OPEN}`);
+
// eslint-disable-next-line import/no-deprecated
expect(redirectTo).toHaveBeenCalledWith(
- `https://localhost/?user=mr_abuser&sort=${DEFAULT_SORT}`,
+ `https://localhost/?user=mr_abuser&sort=${DEFAULT_SORT_STATUS_OPEN}`,
);
});
});
@@ -222,4 +241,42 @@ describe('AbuseReportsFilteredSearchBar', () => {
);
});
});
+
+ describe('sortOptions', () => {
+ describe('when status is closed', () => {
+ beforeEach(() => {
+ setWindowLocation('?status=closed');
+
+ createComponent();
+ });
+
+ it('only shows created_at & updated_at as sorting options', () => {
+ expect(findFilteredSearchBar().props('sortOptions')).toMatchObject(
+ SORT_OPTIONS_STATUS_CLOSED,
+ );
+ });
+
+ it('initially sorts by created_at_desc', () => {
+ expect(findFilteredSearchBar().props('initialSortBy')).toEqual(DEFAULT_SORT_STATUS_CLOSED);
+ });
+ });
+
+ describe('when status is open', () => {
+ beforeEach(() => {
+ setWindowLocation('?status=open');
+
+ createComponent();
+ });
+
+ it('shows number of reports as an additional sorting option', () => {
+ expect(findFilteredSearchBar().props('sortOptions')).toMatchObject(
+ SORT_OPTIONS_STATUS_OPEN,
+ );
+ });
+
+ it('initially sorts by number_of_reports_desc', () => {
+ expect(findFilteredSearchBar().props('initialSortBy')).toEqual(DEFAULT_SORT_STATUS_OPEN);
+ });
+ });
+ });
});
diff --git a/spec/frontend/admin/abuse_reports/mock_data.js b/spec/frontend/admin/abuse_reports/mock_data.js
index 1ea6ea7d131..33a28a21cca 100644
--- a/spec/frontend/admin/abuse_reports/mock_data.js
+++ b/spec/frontend/admin/abuse_reports/mock_data.js
@@ -6,6 +6,7 @@ export const mockAbuseReports = [
reporter: { name: 'Ms. Admin' },
reportedUser: { name: 'Mr. Abuser' },
reportPath: '/admin/abuse_reports/1',
+ count: 1,
},
{
category: 'phishing',
@@ -14,5 +15,6 @@ export const mockAbuseReports = [
reporter: { name: 'Ms. Reporter' },
reportedUser: { name: 'Mr. Phisher' },
reportPath: '/admin/abuse_reports/2',
+ count: 2,
},
];
diff --git a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
index de56e843eb9..019027be6c6 100644
--- a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
+++ b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
@@ -52,7 +52,7 @@ describe('DevopsScore', () => {
it('contains a link to the feature documentation', () => {
expect(findDocsLink().exists()).toBe(true);
expect(findDocsLink().attributes('href')).toBe(
- '/help/user/admin_area/analytics/dev_ops_reports',
+ '/help/administration/analytics/dev_ops_reports',
);
});
});
diff --git a/spec/frontend/admin/applications/components/delete_application_spec.js b/spec/frontend/admin/applications/components/delete_application_spec.js
index e0282b8c149..1e76ddf3d57 100644
--- a/spec/frontend/admin/applications/components/delete_application_spec.js
+++ b/spec/frontend/admin/applications/components/delete_application_spec.js
@@ -26,7 +26,7 @@ describe('DeleteApplication', () => {
};
const findModal = () => wrapper.findComponent(GlModal);
- const findForm = () => wrapper.find('form');
+ const findForm = () => wrapper.findComponent({ ref: 'deleteForm' });
beforeEach(() => {
setHTMLFixture(`
@@ -62,7 +62,7 @@ describe('DeleteApplication', () => {
let formSubmitSpy;
beforeEach(() => {
- formSubmitSpy = jest.spyOn(wrapper.vm.$refs.deleteForm, 'submit');
+ formSubmitSpy = jest.spyOn(findForm().element, 'submit');
findModal().vm.$emit('primary');
});
diff --git a/spec/frontend/admin/broadcast_messages/components/message_form_spec.js b/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
index b937a58a742..7a7295ff3a1 100644
--- a/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
+++ b/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
@@ -44,6 +44,7 @@ describe('MessageForm', () => {
const findShowInCli = () => wrapper.findComponent('[data-testid=show-in-cli-checkbox]');
const findTargetSelect = () => wrapper.findComponent('[data-testid=target-select]');
const findTargetPath = () => wrapper.findComponent('[data-testid=target-path-input]');
+ const emitSubmitForm = () => findForm().vm.$emit('submit', { preventDefault: () => {} });
function createComponent({ broadcastMessage = {} } = {}) {
wrapper = mount(MessageForm, {
@@ -79,7 +80,7 @@ describe('MessageForm', () => {
it('renders the placeholder text when the user message is blank', () => {
createComponent({ broadcastMessage: { message: ' ' } });
- expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.messagePlaceholder);
+ expect(wrapper.text()).toContain(MessageForm.i18n.messagePlaceholder);
});
});
@@ -129,13 +130,18 @@ describe('MessageForm', () => {
it('triggers displaying target path and target roles when selecting different options', async () => {
createComponent();
+ const targetPath = findTargetPath();
const options = findTargetSelect().findAll('option');
await options.at(1).setSelected();
- expect(findTargetPath().isVisible()).toBe(true);
+ expect(targetPath.isVisible()).toBe(true);
+ expect(targetPath.text()).toContain(MessageForm.i18n.targetPathDescription);
+ expect(targetPath.text()).not.toContain(MessageForm.i18n.targetPathWithRolesReminder);
expect(findTargetRoles().isVisible()).toBe(false);
await options.at(2).setSelected();
- expect(findTargetPath().isVisible()).toBe(true);
+ expect(targetPath.isVisible()).toBe(true);
+ expect(targetPath.text()).toContain(MessageForm.i18n.targetPathDescription);
+ expect(targetPath.text()).toContain(MessageForm.i18n.targetPathWithRolesReminder);
expect(findTargetRoles().isVisible()).toBe(true);
});
@@ -157,12 +163,12 @@ describe('MessageForm', () => {
describe('form submit button', () => {
it('renders the "add" text when the message is not persisted', () => {
createComponent({ broadcastMessage: { id: undefined } });
- expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.add);
+ expect(wrapper.text()).toContain(MessageForm.i18n.add);
});
it('renders the "update" text when the message is persisted', () => {
createComponent({ broadcastMessage: { id: 100 } });
- expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.update);
+ expect(wrapper.text()).toContain(MessageForm.i18n.update);
});
it('is disabled when the user message is blank', () => {
@@ -196,56 +202,86 @@ describe('MessageForm', () => {
ends_at: defaultProps.endsAt,
};
- it('sends a create request for a new message form', async () => {
- createComponent({ broadcastMessage: { id: undefined } });
- findForm().vm.$emit('submit', { preventDefault: () => {} });
- await waitForPromises();
+ describe('when creating a new message', () => {
+ beforeEach(() => {
+ createComponent({ broadcastMessage: { id: undefined } });
+ });
+
+ it('sends a create request for a new message form', async () => {
+ emitSubmitForm();
+ await waitForPromises();
- expect(axiosMock.history.post).toHaveLength(2);
- expect(axiosMock.history.post[1]).toMatchObject({
- url: messagesPath,
- data: JSON.stringify(defaultPayload),
+ expect(axiosMock.history.post).toHaveLength(2);
+ expect(axiosMock.history.post[1]).toMatchObject({
+ url: messagesPath,
+ data: JSON.stringify(defaultPayload),
+ });
});
- });
- it('shows an error alert if the create request fails', async () => {
- createComponent({ broadcastMessage: { id: undefined } });
- axiosMock.onPost(messagesPath).replyOnce(HTTP_STATUS_BAD_REQUEST);
- findForm().vm.$emit('submit', { preventDefault: () => {} });
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: wrapper.vm.$options.i18n.addError,
- }),
- );
- });
-
- it('sends an update request for a persisted message form', async () => {
- const id = 1337;
- createComponent({ broadcastMessage: { id } });
- findForm().vm.$emit('submit', { preventDefault: () => {} });
- await waitForPromises();
-
- expect(axiosMock.history.patch).toHaveLength(1);
- expect(axiosMock.history.patch[0]).toMatchObject({
- url: `${messagesPath}/${id}`,
- data: JSON.stringify(defaultPayload),
+ it('shows an error alert if the create request fails', async () => {
+ axiosMock.onPost(messagesPath).replyOnce(HTTP_STATUS_BAD_REQUEST);
+ emitSubmitForm();
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: MessageForm.i18n.addError,
+ }),
+ );
});
});
- it('shows an error alert if the update request fails', async () => {
- const id = 1337;
- createComponent({ broadcastMessage: { id } });
- axiosMock.onPost(`${messagesPath}/${id}`).replyOnce(HTTP_STATUS_BAD_REQUEST);
- findForm().vm.$emit('submit', { preventDefault: () => {} });
- await waitForPromises();
+ describe('when editing an existing message', () => {
+ const mockId = 1337;
+
+ beforeEach(() => {
+ createComponent({ broadcastMessage: { id: mockId } });
+ });
+
+ it('sends an update request for a persisted message form', async () => {
+ emitSubmitForm();
+ await waitForPromises();
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: wrapper.vm.$options.i18n.updateError,
- }),
- );
+ expect(axiosMock.history.patch).toHaveLength(1);
+ expect(axiosMock.history.patch[0]).toMatchObject({
+ url: `${messagesPath}/${mockId}`,
+ data: JSON.stringify(defaultPayload),
+ });
+ });
+
+ it('shows an error alert if the update request fails', async () => {
+ axiosMock.onPost(`${messagesPath}/${mockId}`).replyOnce(HTTP_STATUS_BAD_REQUEST);
+ emitSubmitForm();
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: MessageForm.i18n.updateError,
+ }),
+ );
+ });
+
+ it('does not submit if target roles is required, and later does submit when validation is corrected', async () => {
+ const options = findTargetSelect().findAll('option');
+ await options.at(2).setSelected();
+
+ emitSubmitForm();
+ await waitForPromises();
+
+ expect(axiosMock.history.patch).toHaveLength(0);
+ expect(wrapper.text()).toContain(MessageForm.i18n.targetRolesValidationMsg);
+
+ await findTargetRoles().find('input[type="checkbox"]').setChecked();
+
+ emitSubmitForm();
+ await waitForPromises();
+
+ expect(axiosMock.history.patch).toHaveLength(1);
+ expect(axiosMock.history.patch[0]).toMatchObject({
+ url: `${messagesPath}/${mockId}`,
+ data: JSON.stringify({ ...defaultPayload, target_access_levels: [10] }),
+ });
+ });
});
});
});
diff --git a/spec/frontend/admin/broadcast_messages/components/messages_table_spec.js b/spec/frontend/admin/broadcast_messages/components/messages_table_spec.js
index 6d536b2d0e4..c92820735ff 100644
--- a/spec/frontend/admin/broadcast_messages/components/messages_table_spec.js
+++ b/spec/frontend/admin/broadcast_messages/components/messages_table_spec.js
@@ -1,3 +1,4 @@
+import { GlModal } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MessagesTable from '~/admin/broadcast_messages/components/messages_table.vue';
import { MOCK_MESSAGES } from '../mock_data';
@@ -8,6 +9,7 @@ describe('MessagesTable', () => {
const findRows = () => wrapper.findAll('[data-testid="message-row"]');
const findTargetRoles = () => wrapper.find('[data-testid="target-roles-th"]');
const findDeleteButton = (id) => wrapper.find(`[data-testid="delete-message-${id}"]`);
+ const findModal = () => wrapper.findComponent(GlModal);
function createComponent(props = {}) {
wrapper = mount(MessagesTable, {
@@ -34,6 +36,8 @@ describe('MessagesTable', () => {
const { id } = MOCK_MESSAGES[0];
createComponent();
findDeleteButton(id).element.click();
+ findModal().vm.$emit('primary');
+
expect(wrapper.emitted('delete-message')).toHaveLength(1);
expect(wrapper.emitted('delete-message')[0]).toEqual([id]);
});
diff --git a/spec/frontend/admin/deploy_keys/components/table_spec.js b/spec/frontend/admin/deploy_keys/components/table_spec.js
index a05654a1d25..07d0f045509 100644
--- a/spec/frontend/admin/deploy_keys/components/table_spec.js
+++ b/spec/frontend/admin/deploy_keys/components/table_spec.js
@@ -1,5 +1,5 @@
import { merge } from 'lodash';
-import { GlLoadingIcon, GlEmptyState, GlPagination, GlModal } from '@gitlab/ui';
+import { GlCard, GlLoadingIcon, GlEmptyState, GlPagination, GlModal } from '@gitlab/ui';
import { nextTick } from 'vue';
import responseBody from 'test_fixtures/api/deploy_keys/index.json';
@@ -45,6 +45,8 @@ describe('DeployKeysTable', () => {
});
};
+ const findCard = () => wrapper.findComponent(GlCard);
+ const findCardTitle = () => findCard().find('.gl-new-card-title-wrapper');
const findEditButton = (index) =>
wrapper.findAllByLabelText(DeployKeysTable.i18n.edit, { selector: 'a' }).at(index);
const findRemoveButton = (index) =>
@@ -60,7 +62,7 @@ describe('DeployKeysTable', () => {
expect(wrapper.findByText(expectedDeployKey.title).exists()).toBe(true);
expect(
- wrapper.findByText(expectedDeployKey.fingerprint_sha256, { selector: 'span' }).exists(),
+ wrapper.findByText(expectedDeployKey.fingerprint_sha256, { selector: 'div' }).exists(),
).toBe(true);
expect(timeAgoTooltip.exists()).toBe(true);
expect(timeAgoTooltip.props('time')).toBe(expectedDeployKey.created_at);
@@ -70,7 +72,7 @@ describe('DeployKeysTable', () => {
};
const expectDeployKeyWithFingerprintIsRendered = (expectedDeployKey, expectedRowIndex) => {
- expect(wrapper.findByText(expectedDeployKey.fingerprint, { selector: 'span' }).exists()).toBe(
+ expect(wrapper.findByText(expectedDeployKey.fingerprint, { selector: 'div' }).exists()).toBe(
true,
);
expectDeployKeyIsRendered(expectedDeployKey, expectedRowIndex);
@@ -85,8 +87,6 @@ describe('DeployKeysTable', () => {
svgPath: defaultProvide.emptyStateSvgPath,
title: DeployKeysTable.i18n.emptyStateTitle,
description: DeployKeysTable.i18n.emptyStateDescription,
- primaryButtonText: DeployKeysTable.i18n.newDeployKeyButtonText,
- primaryButtonLink: defaultProvide.createPath,
});
});
};
@@ -131,6 +131,16 @@ describe('DeployKeysTable', () => {
createComponent();
});
+ it('renders card with the deploy keys', () => {
+ expect(findCard().exists()).toBe(true);
+ });
+
+ it('shows the correct number of deploy keys', () => {
+ expect(findCardTitle().text()).toMatchInterpolatedText(
+ `Public deploy keys ${responseBody.length}`,
+ );
+ });
+
it('renders deploy keys in table', () => {
expectDeployKeyWithFingerprintIsRendered(deployKey, 0);
expectDeployKeyWithFingerprintIsRendered(deployKey2, 1);
diff --git a/spec/frontend/admin/statistics_panel/components/app_spec.js b/spec/frontend/admin/statistics_panel/components/app_spec.js
index 60e46cddd7e..febc049b812 100644
--- a/spec/frontend/admin/statistics_panel/components/app_spec.js
+++ b/spec/frontend/admin/statistics_panel/components/app_spec.js
@@ -2,6 +2,7 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import StatisticsPanelApp from '~/admin/statistics_panel/components/app.vue';
import statisticsLabels from '~/admin/statistics_panel/constants';
diff --git a/spec/frontend/alerts_settings/components/alerts_integrations_list_spec.js b/spec/frontend/alerts_settings/components/alerts_integrations_list_spec.js
index 76d0c12e434..0453bf0b0f8 100644
--- a/spec/frontend/alerts_settings/components/alerts_integrations_list_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_integrations_list_spec.js
@@ -47,7 +47,6 @@ describe('AlertIntegrationsList', () => {
});
const findTableComponent = () => wrapper.findComponent(GlTable);
- const findTableComponentRows = () => wrapper.findComponent(GlTable).findAll('table tbody tr');
const finsStatusCell = () => wrapper.findAll('[data-testid="integration-activated-status"]');
it('renders a table', () => {
@@ -63,11 +62,6 @@ describe('AlertIntegrationsList', () => {
expect(findTableComponent().findAllComponents(GlButton).length).toBe(4);
});
- it('renders an highlighted row when a current integration is selected to edit', () => {
- mountComponent({ data: { currentIntegration: { id: '1' } } });
- expect(findTableComponentRows().at(0).classes()).toContain('gl-bg-blue-50');
- });
-
describe('integration status', () => {
it('enabled', () => {
const cell = finsStatusCell().at(0);
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
index 8c5df06042c..a16a03a2fc5 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
@@ -1,7 +1,7 @@
import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createHttpIntegrationMutation from 'ee_else_ce/alerts_settings/graphql/mutations/create_http_integration.mutation.graphql';
import updateHttpIntegrationMutation from 'ee_else_ce/alerts_settings/graphql/mutations/update_http_integration.mutation.graphql';
@@ -16,8 +16,6 @@ import createPrometheusIntegrationMutation from '~/alerts_settings/graphql/mutat
import destroyHttpIntegrationMutation from '~/alerts_settings/graphql/mutations/destroy_http_integration.mutation.graphql';
import resetHttpTokenMutation from '~/alerts_settings/graphql/mutations/reset_http_token.mutation.graphql';
import resetPrometheusTokenMutation from '~/alerts_settings/graphql/mutations/reset_prometheus_token.mutation.graphql';
-import updateCurrentHttpIntegrationMutation from '~/alerts_settings/graphql/mutations/update_current_http_integration.mutation.graphql';
-import updateCurrentPrometheusIntegrationMutation from '~/alerts_settings/graphql/mutations/update_current_prometheus_integration.mutation.graphql';
import updatePrometheusIntegrationMutation from '~/alerts_settings/graphql/mutations/update_prometheus_integration.mutation.graphql';
import getHttpIntegrationQuery from '~/alerts_settings/graphql/queries/get_http_integration.query.graphql';
import getIntegrationsQuery from '~/alerts_settings/graphql/queries/get_integrations.query.graphql';
@@ -45,6 +43,7 @@ import {
destroyIntegrationResponse,
integrationToDestroy,
destroyIntegrationResponseWithErrors,
+ prometheusIntegrationsList,
} from './mocks/apollo_mock';
import mockIntegrations from './mocks/integrations.json';
@@ -55,15 +54,9 @@ describe('AlertsSettingsWrapper', () => {
let fakeApollo;
let destroyIntegrationHandler;
- const httpMappingData = {
- payloadExample: '{"test: : "field"}',
- payloadAttributeMappings: [],
- payloadAlertFields: [],
- };
-
- const findLoader = () => wrapper.findComponent(IntegrationsList).findComponent(GlLoadingIcon);
const findIntegrationsList = () => wrapper.findComponent(IntegrationsList);
- const findIntegrations = () => wrapper.findComponent(IntegrationsList).findAll('table tbody tr');
+ const findLoader = () => findIntegrationsList().findComponent(GlLoadingIcon);
+ const findIntegrations = () => findIntegrationsList().findAll('table tbody tr');
const findAddIntegrationBtn = () => wrapper.findByTestId('add-integration-btn');
const findAlertsSettingsForm = () => wrapper.findComponent(AlertsSettingsForm);
const findAlert = () => wrapper.findComponent(GlAlert);
@@ -74,87 +67,160 @@ describe('AlertsSettingsWrapper', () => {
.vm.$emit('delete-integration', { id: integrationToDestroy.id });
}
- const createComponent = ({ data = {}, provide = {}, loading = false } = {}) => {
- wrapper = extendedWrapper(
- mount(AlertsSettingsWrapper, {
- data() {
- return { ...data };
- },
- provide: {
- ...provide,
- alertSettings: {
- templates: [],
- },
- service: {},
- },
- mocks: {
- $apollo: {
- mutate: jest.fn(),
- addSmartQuery: jest.fn((_, options) => {
- options.result.call(wrapper.vm);
- }),
- queries: {
- integrations: {
- loading,
- },
+ const integrationResponse = ({
+ mutation,
+ id = getIntegrationsQueryResponse.data.project.id,
+ extraAttributes = {},
+ }) => ({
+ data: { [mutation]: { integration: { id }, ...extraAttributes } },
+ });
+
+ const createIntegrationResponse = integrationResponse({
+ mutation: 'httpIntegrationCreate',
+ extraAttributes: {
+ errors: [],
+ },
+ });
+
+ const updateIntegrationResponse = integrationResponse({
+ mutation: 'updateHttpIntegrationMutation',
+ });
+
+ const resetHttpTokenResponse = integrationResponse({
+ mutation: 'resetHttpTokenMutation',
+ });
+
+ const createPrometheousIntegrationResponse = integrationResponse({
+ mutation: 'createPrometheusIntegrationMutation',
+ id: '2',
+ });
+
+ const resetPrometheousResponse = integrationResponse({
+ mutation: 'resetPrometheusTokenMutation',
+ });
+
+ const currentHttpIntegrationResponse = {
+ data: {
+ project: {
+ id: '1',
+ alertManagementHttpIntegrations: {
+ nodes: [
+ {
+ __typename: 'AlertManagementIntegration',
+ id: 'gid://gitlab/AlertManagement::HttpIntegration/7',
+ type: 'HTTP',
+ active: true,
+ name: 'test',
+ url:
+ 'http://192.168.1.152:3000/root/autodevops/alerts/notify/test/eddd36969b2d3d6a.json',
+ token: '7eb24af194116411ec8d66b58c6b0d2e',
},
- },
+ ],
},
- }),
- );
+ },
+ },
};
+ const currentIntegration =
+ getIntegrationsQueryResponse.data.project.alertManagementIntegrations.nodes[0];
+
+ const createIntegrationHandler = jest.fn().mockResolvedValue(createIntegrationResponse);
+
+ const updateIntegrationHandler = jest.fn().mockResolvedValue(updateIntegrationResponse);
+
+ const resetTokenHandler = jest.fn().mockResolvedValue(resetHttpTokenResponse);
+
+ const createPrometheousIntegrationHandler = jest
+ .fn()
+ .mockResolvedValue(createPrometheousIntegrationResponse);
+
+ const updatePrometheousIntegrationHandler = jest
+ .fn()
+ .mockResolvedValue(createPrometheousIntegrationResponse);
+
+ const resetPrometheousIntegrationHandler = jest.fn().mockResolvedValue(resetPrometheousResponse);
+
+ const currentHttpIntegrationQueryHandler = jest
+ .fn()
+ .mockResolvedValue(currentHttpIntegrationResponse);
+
+ const mockUpdateCurrentHttpIntegrationMutationHandler = jest.fn();
+
function createComponentWithApollo({
destroyHandler = jest.fn().mockResolvedValue(destroyIntegrationResponse),
+ provide = {},
+ currentIntegrationQueryHandler = jest.fn().mockResolvedValue(currentIntegration),
+ getIntegrationQueryHandler = jest.fn().mockResolvedValue(getIntegrationsQueryResponse),
+ createIntegrationResponseHandler = createIntegrationHandler,
+ updateIntegrationResponseHandler = updateIntegrationHandler,
+ resetTokenResponseHandler = resetTokenHandler,
+ createPrometheousResponseHandler = createPrometheousIntegrationHandler,
+ updatePrometheousResponseIntegrationHandler = updatePrometheousIntegrationHandler,
+ resetPrometheousResponseIntegrationHandler = resetPrometheousIntegrationHandler,
+ currentHttpIntegrationQueryResponseHandler = currentHttpIntegrationQueryHandler,
} = {}) {
Vue.use(VueApollo);
destroyIntegrationHandler = destroyHandler;
const requestHandlers = [
- [getIntegrationsQuery, jest.fn().mockResolvedValue(getIntegrationsQueryResponse)],
+ [getIntegrationsQuery, getIntegrationQueryHandler],
[destroyHttpIntegrationMutation, destroyIntegrationHandler],
+ [createHttpIntegrationMutation, createIntegrationResponseHandler],
+ [updateHttpIntegrationMutation, updateIntegrationResponseHandler],
+ [resetHttpTokenMutation, resetTokenResponseHandler],
+ [createPrometheusIntegrationMutation, createPrometheousResponseHandler],
+ [updatePrometheusIntegrationMutation, updatePrometheousResponseIntegrationHandler],
+ [resetPrometheusTokenMutation, resetPrometheousResponseIntegrationHandler],
+ [getHttpIntegrationQuery, currentHttpIntegrationQueryResponseHandler],
];
- fakeApollo = createMockApollo(requestHandlers);
-
- wrapper = mount(AlertsSettingsWrapper, {
- apolloProvider: fakeApollo,
- provide: {
- alertSettings: {
- templates: [],
- },
- service: {},
+ fakeApollo = createMockApollo(requestHandlers, {
+ Query: {
+ currentIntegration: currentIntegrationQueryHandler,
+ },
+ Mutation: {
+ updateCurrentIntegration: mockUpdateCurrentHttpIntegrationMutationHandler,
},
});
+
+ wrapper = extendedWrapper(
+ mount(AlertsSettingsWrapper, {
+ apolloProvider: fakeApollo,
+ provide: {
+ ...provide,
+ alertSettings: {
+ templates: [],
+ },
+ service: {},
+ },
+ stubs: {
+ AlertSettingsForm: true,
+ },
+ }),
+ );
}
describe('template', () => {
beforeEach(() => {
- createComponent({
- data: {
- integrations: mockIntegrations,
- currentIntegration: mockIntegrations[0],
- },
- loading: false,
- });
+ createComponentWithApollo();
});
- it('renders alerts integrations list', () => {
- expect(findLoader().exists()).toBe(false);
+ it('renders alerts integrations list', async () => {
+ expect(findLoader().exists()).toBe(true);
+
+ await waitForPromises();
expect(findIntegrations()).toHaveLength(mockIntegrations.length);
});
- it('renders `Add new integration` button when multiple integrations are supported', () => {
- createComponent({
- data: {
- integrations: mockIntegrations,
- currentIntegration: mockIntegrations[0],
- },
+ it('renders `Add new integration` button when multiple integrations are supported', async () => {
+ createComponentWithApollo({
provide: {
multiIntegrations: true,
},
- loading: false,
});
+
+ await waitForPromises();
+
expect(findAddIntegrationBtn().exists()).toBe(true);
});
@@ -163,162 +229,136 @@ describe('AlertsSettingsWrapper', () => {
});
it('hides `add new integration` button and displays setting form on btn click', async () => {
- createComponent({
- data: {
- integrations: mockIntegrations,
- currentIntegration: mockIntegrations[0],
- },
+ createComponentWithApollo({
provide: {
multiIntegrations: true,
},
- loading: false,
});
+
+ await waitForPromises();
+
const addNewIntegrationBtn = findAddIntegrationBtn();
expect(addNewIntegrationBtn.exists()).toBe(true);
- await addNewIntegrationBtn.trigger('click');
+ await addNewIntegrationBtn.vm.$emit('click');
expect(findAlertsSettingsForm().exists()).toBe(true);
expect(addNewIntegrationBtn.exists()).toBe(false);
});
it('shows loading indicator inside the IntegrationsList table', () => {
- createComponent({
- data: { integrations: [] },
- loading: true,
- });
- expect(wrapper.findComponent(IntegrationsList).exists()).toBe(true);
+ createComponentWithApollo();
+
+ expect(findIntegrationsList().exists()).toBe(true);
expect(findLoader().exists()).toBe(true);
});
});
describe('Integration updates', () => {
- beforeEach(() => {
- createComponent({
- data: {
- integrations: mockIntegrations,
- currentIntegration: mockIntegrations[0],
- formVisible: true,
+ beforeEach(async () => {
+ createComponentWithApollo({
+ provide: {
+ multiIntegrations: true,
},
- loading: false,
});
+
+ await waitForPromises();
+
+ await findAddIntegrationBtn().vm.$emit('click');
});
describe('Create', () => {
beforeEach(() => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
- data: { httpIntegrationCreate: { integration: { id: '1' }, errors: [] } },
- });
findAlertsSettingsForm().vm.$emit('create-new-integration', {
type: typeSet.http,
variables: createHttpVariables,
});
});
- it('calls `$apollo.mutate` with `createHttpIntegrationMutation`', () => {
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: createHttpIntegrationMutation,
- update: expect.anything(),
- variables: createHttpVariables,
+ it('`createIntegrationHandler` is called when a new integration is created', async () => {
+ expect(createIntegrationHandler).toHaveBeenCalledTimes(1);
+ expect(createIntegrationHandler).toHaveBeenCalledWith({
+ ...createHttpVariables,
});
- });
- it('shows success alert', () => {
+ await waitForPromises();
+
expect(findAlert().exists()).toBe(true);
});
});
- it('calls `$apollo.mutate` with `updateHttpIntegrationMutation`', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
- data: { updateHttpIntegrationMutation: { integration: { id: '1' } } },
- });
+ it('`updateIntegrationHandler` is called when updated', () => {
findAlertsSettingsForm().vm.$emit('update-integration', {
type: typeSet.http,
variables: updateHttpVariables,
});
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: updateHttpIntegrationMutation,
- variables: updateHttpVariables,
+ expect(updateIntegrationHandler).toHaveBeenCalledTimes(1);
+ expect(updateIntegrationHandler).toHaveBeenCalledWith({
+ ...updateHttpVariables,
+ id: currentIntegration.id,
});
});
- it('calls `$apollo.mutate` with `resetHttpTokenMutation`', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
- data: { resetHttpTokenMutation: { integration: { id: '1' } } },
- });
+ it('`resetTokenHandler` is called on reset-token', () => {
findAlertsSettingsForm().vm.$emit('reset-token', {
type: typeSet.http,
variables: { id: HTTP_ID },
});
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: resetHttpTokenMutation,
- variables: {
- id: HTTP_ID,
- },
+ expect(resetTokenHandler).toHaveBeenCalledWith({
+ id: HTTP_ID,
});
});
- it('calls `$apollo.mutate` with `createPrometheusIntegrationMutation`', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
- data: { createPrometheusIntegrationMutation: { integration: { id: '2' } } },
- });
+ it('`createPrometheusIntegrationMutation` is called on creating a prometheus integration', () => {
findAlertsSettingsForm().vm.$emit('create-new-integration', {
type: typeSet.prometheus,
variables: createPrometheusVariables,
});
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: createPrometheusIntegrationMutation,
- update: expect.anything(),
- variables: createPrometheusVariables,
+ expect(createPrometheousIntegrationHandler).toHaveBeenCalledTimes(1);
+ expect(createPrometheousIntegrationHandler).toHaveBeenCalledWith({
+ ...createPrometheusVariables,
});
});
- it('calls `$apollo.mutate` with `updatePrometheusIntegrationMutation`', () => {
- createComponent({
- data: {
- integrations: mockIntegrations,
- currentIntegration: mockIntegrations[3],
- formVisible: true,
- },
- loading: false,
- });
-
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
- data: { updatePrometheusIntegrationMutation: { integration: { id: '2' } } },
- });
+ it('`updatePrometheusIntegrationMutation` is called on prometheus mutation update', () => {
findAlertsSettingsForm().vm.$emit('update-integration', {
type: typeSet.prometheus,
variables: updatePrometheusVariables,
});
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: updatePrometheusIntegrationMutation,
- variables: updatePrometheusVariables,
+ expect(updatePrometheousIntegrationHandler).toHaveBeenCalledTimes(1);
+
+ expect(updatePrometheousIntegrationHandler).toHaveBeenCalledWith({
+ ...updatePrometheusVariables,
+ id: currentIntegration.id,
});
});
- it('calls `$apollo.mutate` with `resetPrometheusTokenMutation`', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
- data: { resetPrometheusTokenMutation: { integration: { id: '1' } } },
- });
+ it('`resetPrometheusTokenMutation` is called on prometheus reset token', () => {
findAlertsSettingsForm().vm.$emit('reset-token', {
type: typeSet.prometheus,
variables: { id: PROMETHEUS_ID },
});
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: resetPrometheusTokenMutation,
- variables: {
- id: PROMETHEUS_ID,
- },
+ expect(resetPrometheousIntegrationHandler).toHaveBeenCalledWith({
+ id: PROMETHEUS_ID,
});
});
it('shows an error alert when integration creation fails', async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue(ADD_INTEGRATION_ERROR);
+ createComponentWithApollo({
+ createIntegrationResponseHandler: jest.fn().mockRejectedValue(ADD_INTEGRATION_ERROR),
+ provide: {
+ multiIntegrations: true,
+ },
+ });
+
+ await waitForPromises();
+
+ await findAddIntegrationBtn().vm.$emit('click');
+
+ await nextTick();
findAlertsSettingsForm().vm.$emit('create-new-integration', {});
await waitForPromises();
@@ -327,7 +367,18 @@ describe('AlertsSettingsWrapper', () => {
});
it('shows an error alert when integration token reset fails', async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue(RESET_INTEGRATION_TOKEN_ERROR);
+ createComponentWithApollo({
+ resetTokenResponseHandler: jest.fn().mockRejectedValue(ADD_INTEGRATION_ERROR),
+ provide: {
+ multiIntegrations: true,
+ },
+ });
+
+ await waitForPromises();
+
+ await findAddIntegrationBtn().vm.$emit('click');
+
+ await nextTick();
findAlertsSettingsForm().vm.$emit('reset-token', {});
@@ -336,7 +387,18 @@ describe('AlertsSettingsWrapper', () => {
});
it('shows an error alert when integration update fails', async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue(errorMsg);
+ createComponentWithApollo({
+ updateIntegrationResponseHandler: jest.fn().mockRejectedValue(errorMsg),
+ provide: {
+ multiIntegrations: true,
+ },
+ });
+
+ await waitForPromises();
+
+ await findAddIntegrationBtn().vm.$emit('click');
+
+ await nextTick();
findAlertsSettingsForm().vm.$emit('update-integration', {});
@@ -372,58 +434,63 @@ describe('AlertsSettingsWrapper', () => {
describe('Edit integration', () => {
describe('HTTP', () => {
- beforeEach(() => {
- createComponent({
- data: {
- integrations: mockIntegrations,
- currentIntegration: mockIntegrations[0],
- currentHttpIntegration: { id: mockIntegrations[0].id, ...httpMappingData },
- },
+ beforeEach(async () => {
+ createComponentWithApollo({
provide: {
multiIntegrations: true,
},
- loading: false,
});
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValueOnce({});
+
+ await waitForPromises();
+
findIntegrationsList().vm.$emit('edit-integration', updateHttpVariables);
+
+ await nextTick();
});
- it('requests `currentHttpIntegration`', () => {
- expect(wrapper.vm.$apollo.addSmartQuery).toHaveBeenCalledWith(
- 'currentHttpIntegration',
- expect.objectContaining({
- query: getHttpIntegrationQuery,
- result: expect.any(Function),
- update: expect.any(Function),
- variables: expect.any(Function),
- }),
- );
+ it('calls `currentHttpIntegration` on editing', () => {
+ expect(currentHttpIntegrationQueryHandler).toHaveBeenCalled();
});
- it('calls `$apollo.mutate` with `updateCurrentHttpIntegrationMutation`', () => {
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: updateCurrentHttpIntegrationMutation,
- variables: { ...mockIntegrations[0], ...httpMappingData },
- });
+ it('`updateCurrentHttpIntegrationMutation` is called after editing', async () => {
+ await waitForPromises();
+
+ expect(mockUpdateCurrentHttpIntegrationMutationHandler).toHaveBeenCalledTimes(1);
});
});
describe('Prometheus', () => {
- it('calls `$apollo.mutate` with `updateCurrentPrometheusIntegrationMutation`', () => {
- createComponent({
- data: {
- integrations: mockIntegrations,
- currentIntegration: mockIntegrations[3],
+ it('`updateCurrentPrometheusIntegrationMutation` is called on editing', async () => {
+ const currentMockIntegration =
+ prometheusIntegrationsList.data.project.alertManagementIntegrations.nodes[3];
+ createComponentWithApollo({
+ provide: {
+ multiIntegrations: true,
},
- loading: false,
+ getIntegrationQueryHandler: jest.fn().mockResolvedValue(prometheusIntegrationsList),
+ currentIntegrationQueryHandler: jest.fn().mockResolvedValue(currentMockIntegration),
+ currentHttpIntegrationQueryResponseHandler: jest
+ .fn()
+ .mockResolvedValue(currentHttpIntegrationResponse),
});
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
- findIntegrationsList().vm.$emit('edit-integration', updatePrometheusVariables);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: updateCurrentPrometheusIntegrationMutation,
- variables: mockIntegrations[3],
+ await waitForPromises();
+
+ findIntegrationsList().vm.$emit('edit-integration', {
+ ...updatePrometheusVariables,
});
+
+ await nextTick();
+
+ expect(mockUpdateCurrentHttpIntegrationMutationHandler).toHaveBeenCalledTimes(1);
+ expect(mockUpdateCurrentHttpIntegrationMutationHandler).toHaveBeenCalledWith(
+ {},
+ // Using expect.objectContaining, because of limitations
+ // Check https://gitlab.com/gitlab-org/gitlab/-/issues/420993
+ expect.objectContaining({ id: mockIntegrations[3].id }),
+ expect.anything(),
+ expect.anything(),
+ );
});
});
});
diff --git a/spec/frontend/alerts_settings/components/mocks/apollo_mock.js b/spec/frontend/alerts_settings/components/mocks/apollo_mock.js
index 170af1b5e0c..903b6e0d9c8 100644
--- a/spec/frontend/alerts_settings/components/mocks/apollo_mock.js
+++ b/spec/frontend/alerts_settings/components/mocks/apollo_mock.js
@@ -139,3 +139,57 @@ export const destroyIntegrationResponseWithErrors = {
},
},
};
+
+export const prometheusIntegrationsList = {
+ data: {
+ project: {
+ id: '1',
+ alertManagementIntegrations: {
+ nodes: [
+ {
+ __typename: 'AlertManagementIntegration',
+ id: 'gid://gitlab/AlertManagement::HttpIntegration/7',
+ type: 'HTTP',
+ active: true,
+ name: 'test',
+ url:
+ 'http://192.168.1.152:3000/root/autodevops/alerts/notify/test/eddd36969b2d3d6a.json',
+ token: '7eb24af194116411ec8d66b58c6b0d2e',
+ apiUrl: null,
+ },
+ {
+ __typename: 'AlertManagementIntegration',
+ id: 'gid://gitlab/AlertManagement::HttpIntegration/6',
+ type: 'HTTP',
+ active: false,
+ name: 'test',
+ url: 'http://192.168.1.152:3000/root/autodevops/alerts/notify/test/abce123.json',
+ token: '8639e0ce06c731b00ee3e8dcdfd14fe0',
+ apiUrl: null,
+ },
+ {
+ __typename: 'AlertManagementIntegration',
+ id: 'gid://gitlab/AlertManagement::HttpIntegration/5',
+ type: 'HTTP',
+ active: false,
+ name: 'test',
+ url:
+ 'http://192.168.1.152:3000/root/autodevops/alerts/notify/test/bcd64c85f918a2e2.json',
+ token: '5c8101533d970a55d5c105f8abff2192',
+ apiUrl: null,
+ },
+ {
+ __typename: 'AlertManagementIntegration',
+ id: 'gid://gitlab/PrometheusService/12',
+ type: 'PROMETHEUS',
+ active: true,
+ name: 'Prometheus',
+ url: 'http://192.168.1.152:3000/root/autodevops/prometheus/alerts/notify.json',
+ token: '0b18c37caa8fe980799b349916fe5ddf',
+ apiUrl: null,
+ },
+ ],
+ },
+ },
+ },
+};
diff --git a/spec/frontend/analytics/cycle_analytics/components/base_spec.js b/spec/frontend/analytics/cycle_analytics/components/base_spec.js
index 87f3117c7f9..653934000b3 100644
--- a/spec/frontend/analytics/cycle_analytics/components/base_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/components/base_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon, GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import ValueStreamMetrics from '~/analytics/shared/components/value_stream_metrics.vue';
diff --git a/spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js b/spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js
index f57d8559ddf..387d0b453ee 100644
--- a/spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import {
filterMilestones,
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 4ef37311e51..3a000a01cdc 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -1509,6 +1509,79 @@ describe('Api', () => {
});
});
+ describe('trackInternalEvent', () => {
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/usage_data/track_event`;
+ const event = 'i_devops_adoption';
+
+ const defaultContext = {
+ data: {
+ project_id: 123,
+ namespace_id: 123,
+ },
+ };
+
+ const postData = {
+ event,
+ project_id: defaultContext.data.project_id,
+ namespace_id: defaultContext.data.namespace_id,
+ };
+
+ const headers = {
+ 'Content-Type': 'application/json',
+ };
+
+ describe('when user is set', () => {
+ beforeEach(() => {
+ window.gon.current_user_id = 1;
+ window.gl = { snowplowStandardContext: { ...defaultContext } };
+ });
+
+ describe('when internal event is called with feature flag disabled', () => {
+ beforeEach(() => {
+ gon.features = { usageDataApi: false };
+ });
+
+ it('returns null and does not call the endpoint', () => {
+ jest.spyOn(axios, 'post');
+ const result = Api.trackInternalEvent(event);
+ expect(result).toEqual(null);
+ expect(axios.post).toHaveBeenCalledTimes(0);
+ });
+ });
+
+ describe('when internal event is called with feature flag enabled', () => {
+ beforeEach(() => {
+ gon.features = { usageDataApi: true };
+ });
+
+ it('resolves the Promise', () => {
+ jest.spyOn(axios, 'post');
+ mock.onPost(expectedUrl, postData).replyOnce(HTTP_STATUS_OK, true);
+
+ return Api.trackInternalEvent(event).then(({ data }) => {
+ expect(data).toEqual(true);
+ expect(axios.post).toHaveBeenCalledWith(expectedUrl, postData, { headers });
+ });
+ });
+ });
+ });
+
+ describe('when user is not set and feature flag enabled', () => {
+ beforeEach(() => {
+ window.gon.current_user_id = '';
+ gon.features = { usageDataApi: true };
+ window.gl = { snowplowStandardContext: { ...defaultContext } };
+ });
+
+ it('returns null and does not call the endpoint', () => {
+ jest.spyOn(axios, 'post');
+ const result = Api.trackInternalEvent(event);
+ expect(result).toEqual(null);
+ expect(axios.post).toHaveBeenCalledTimes(0);
+ });
+ });
+ });
+
describe('deployKeys', () => {
it('fetches deploy keys', async () => {
const deployKeys = [
diff --git a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
index 0bee37dbf15..58aee76e381 100644
--- a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
+++ b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
@@ -5,52 +5,60 @@ exports[`Keep latest artifact toggle when application keep latest artifact setti
<!---->
<div
- class="gl-toggle-wrapper gl-display-flex gl-mb-0 gl-flex-direction-column"
+ class="gl-toggle-wrapper gl-display-flex gl-mb-0 flex-grow-1 gl-flex-direction-column"
data-testid="toggle-wrapper"
>
<span
- class="gl-toggle-label gl-flex-shrink-0 gl-mb-3"
- data-testid="toggle-label"
- id="toggle-label-4"
- >
- Keep artifacts from most recent successful jobs
- </span>
-
- <!---->
-
- <!---->
-
- <button
- aria-checked="true"
- aria-describedby="toggle-help-2"
- aria-labelledby="toggle-label-4"
- class="gl-flex-shrink-0 gl-toggle is-checked"
- role="switch"
- type="button"
+ class="gl-toggle-label-container gl-mb-3"
>
<span
- class="toggle-icon"
+ class="gl-toggle-label"
+ data-testid="toggle-label"
+ id="toggle-label-4"
>
- <gl-icon-stub
- name="mobile-issue-close"
- size="16"
- />
+ Keep artifacts from most recent successful jobs
</span>
- </button>
+
+ <!---->
+ </span>
<span
- class="gl-help-label"
- data-testid="toggle-help"
- id="toggle-help-2"
+ class="gl-toggle-switch-container"
>
-
+ <!---->
+
+ <button
+ aria-checked="true"
+ aria-describedby="toggle-help-2"
+ aria-labelledby="toggle-label-4"
+ class="gl-flex-shrink-0 gl-toggle is-checked"
+ role="switch"
+ type="button"
+ >
+ <span
+ class="toggle-icon"
+ >
+ <gl-icon-stub
+ name="mobile-issue-close"
+ size="16"
+ />
+ </span>
+ </button>
+
+ <span
+ class="gl-help-label"
+ data-testid="toggle-help"
+ id="toggle-help-2"
+ >
+
The latest artifacts created by jobs in the most recent successful pipeline will be stored.
- <gl-link-stub
- href="/help/ci/pipelines/job_artifacts"
- >
- Learn more.
- </gl-link-stub>
+ <gl-link-stub
+ href="/help/ci/pipelines/job_artifacts"
+ >
+ Learn more.
+ </gl-link-stub>
+ </span>
</span>
</div>
</div>
diff --git a/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js b/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
index 0d9196b88ed..aef06a74fdd 100644
--- a/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
+++ b/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
@@ -6,12 +6,10 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import RecoveryCodes, {
i18n,
} from '~/authentication/two_factor_auth/components/recovery_codes.vue';
-import {
- RECOVERY_CODE_DOWNLOAD_FILENAME,
- COPY_KEYBOARD_SHORTCUT,
-} from '~/authentication/two_factor_auth/constants';
+import { RECOVERY_CODE_DOWNLOAD_FILENAME } from '~/authentication/two_factor_auth/constants';
import Tracking from '~/tracking';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import { MOUSETRAP_COPY_KEYBOARD_SHORTCUT } from '~/lib/mousetrap';
import { codes, codesFormattedString, codesDownloadHref, profileAccountPath } from '../mock_data';
describe('RecoveryCodes', () => {
@@ -42,7 +40,7 @@ describe('RecoveryCodes', () => {
const findPrintButton = () => findButtonByText('Print codes');
const findProceedButton = () => findButtonByText('Proceed');
const manuallyCopyRecoveryCodes = () =>
- wrapper.vm.$options.mousetrap.trigger(COPY_KEYBOARD_SHORTCUT);
+ wrapper.vm.$options.mousetrap.trigger(MOUSETRAP_COPY_KEYBOARD_SHORTCUT);
beforeEach(() => {
jest.spyOn(Tracking, 'event');
diff --git a/spec/frontend/badges/components/badge_form_spec.js b/spec/frontend/badges/components/badge_form_spec.js
index d7519f1f80d..d3d2544dc4f 100644
--- a/spec/frontend/badges/components/badge_form_spec.js
+++ b/spec/frontend/badges/components/badge_form_spec.js
@@ -1,5 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mount } from '@vue/test-utils';
import { DUMMY_IMAGE_URL, TEST_HOST } from 'helpers/test_constants';
@@ -49,7 +50,7 @@ describe('BadgeForm component', () => {
it('stops editing when cancel button is clicked', async () => {
createComponent({ isEditing: true });
- const cancelButton = wrapper.find('.row-content-block button');
+ const cancelButton = wrapper.findAll('[data-testid="action-buttons"] button').at(1);
await cancelButton.trigger('click');
@@ -143,13 +144,13 @@ describe('BadgeForm component', () => {
describe('if isEditing is false', () => {
const props = { isEditing: false };
- it('renders one button', () => {
+ it('renders two buttons', () => {
createComponent(props);
expect(wrapper.find('.row-content-block').exists()).toBe(false);
- const buttons = wrapper.findAll('.form-group:last-of-type button');
+ const buttons = wrapper.findAll('[data-testid="action-buttons"] button');
- expect(buttons).toHaveLength(1);
+ expect(buttons).toHaveLength(2);
const buttonAddWrapper = buttons.at(0);
expect(buttonAddWrapper.isVisible()).toBe(true);
@@ -164,15 +165,15 @@ describe('BadgeForm component', () => {
it('renders two buttons', () => {
createComponent(props);
- const buttons = wrapper.findAll('.row-content-block button');
+ const buttons = wrapper.findAll('[data-testid="action-buttons"] button');
expect(buttons).toHaveLength(2);
- const saveButton = buttons.at(1);
+ const saveButton = buttons.at(0);
expect(saveButton.isVisible()).toBe(true);
expect(saveButton.text()).toBe('Save changes');
- const cancelButton = buttons.at(0);
+ const cancelButton = buttons.at(1);
expect(cancelButton.isVisible()).toBe(true);
expect(cancelButton.text()).toBe('Cancel');
});
diff --git a/spec/frontend/badges/components/badge_list_row_spec.js b/spec/frontend/badges/components/badge_list_row_spec.js
deleted file mode 100644
index cbbeb36ff33..00000000000
--- a/spec/frontend/badges/components/badge_list_row_spec.js
+++ /dev/null
@@ -1,119 +0,0 @@
-import Vue from 'vue';
-import Vuex from 'vuex';
-import { mount } from '@vue/test-utils';
-
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import BadgeListRow from '~/badges/components/badge_list_row.vue';
-import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants';
-
-import createState from '~/badges/store/state';
-import mutations from '~/badges/store/mutations';
-import actions from '~/badges/store/actions';
-
-import { createDummyBadge } from '../dummy_badge';
-
-Vue.use(Vuex);
-
-describe('BadgeListRow component', () => {
- let badge;
- let wrapper;
- let mockedActions;
-
- const createComponent = (kind) => {
- setHTMLFixture(`<div id="delete-badge-modal" class="modal"></div>`);
-
- mockedActions = Object.fromEntries(Object.keys(actions).map((name) => [name, jest.fn()]));
-
- const store = new Vuex.Store({
- state: {
- ...createState(),
- kind: PROJECT_BADGE,
- },
- mutations,
- actions: mockedActions,
- });
-
- badge = createDummyBadge();
- badge.kind = kind;
- wrapper = mount(BadgeListRow, {
- attachTo: document.body,
- store,
- propsData: { badge },
- });
- };
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- describe('for a project badge', () => {
- beforeEach(() => {
- createComponent(PROJECT_BADGE);
- });
-
- it('renders the badge', () => {
- const badgeImage = wrapper.find('.project-badge');
-
- expect(badgeImage.exists()).toBe(true);
- expect(badgeImage.attributes('src')).toBe(badge.renderedImageUrl);
- });
-
- it('renders the badge name', () => {
- expect(wrapper.text()).toMatch(badge.name);
- });
-
- it('renders the badge link', () => {
- expect(wrapper.text()).toMatch(badge.linkUrl);
- });
-
- it('renders the badge kind', () => {
- expect(wrapper.text()).toMatch('Project Badge');
- });
-
- it('shows edit and delete buttons', () => {
- const buttons = wrapper.findAll('.table-button-footer button');
-
- expect(buttons).toHaveLength(2);
- const editButton = buttons.at(0);
-
- expect(editButton.isVisible()).toBe(true);
- expect(editButton.element).toHaveSpriteIcon('pencil');
-
- const deleteButton = buttons.at(1);
- expect(deleteButton.isVisible()).toBe(true);
- expect(deleteButton.element).toHaveSpriteIcon('remove');
- });
-
- it('calls editBadge when clicking then edit button', async () => {
- const editButton = wrapper.find('.table-button-footer button:first-of-type');
-
- await editButton.trigger('click');
-
- expect(mockedActions.editBadge).toHaveBeenCalled();
- });
-
- it('calls updateBadgeInModal and shows modal when clicking then delete button', async () => {
- const deleteButton = wrapper.find('.table-button-footer button:last-of-type');
-
- await deleteButton.trigger('click');
-
- expect(mockedActions.updateBadgeInModal).toHaveBeenCalled();
- });
- });
-
- describe('for a group badge', () => {
- beforeEach(() => {
- createComponent(GROUP_BADGE);
- });
-
- it('renders the badge kind', () => {
- expect(wrapper.text()).toMatch('Group Badge');
- });
-
- it('hides edit and delete buttons', () => {
- const buttons = wrapper.findAll('.table-button-footer button');
-
- expect(buttons).toHaveLength(0);
- });
- });
-});
diff --git a/spec/frontend/badges/components/badge_list_spec.js b/spec/frontend/badges/components/badge_list_spec.js
index 374b7b50af4..612e9bdc41f 100644
--- a/spec/frontend/badges/components/badge_list_spec.js
+++ b/spec/frontend/badges/components/badge_list_spec.js
@@ -1,14 +1,13 @@
+import { GlTable, GlButton } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
-import { mount } from '@vue/test-utils';
-
-import BadgeList from '~/badges/components/badge_list.vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants';
-
import createState from '~/badges/store/state';
import mutations from '~/badges/store/mutations';
import actions from '~/badges/store/actions';
-
+import BadgeList from '~/badges/components/badge_list.vue';
import { createDummyBadge } from '../dummy_badge';
Vue.use(Vuex);
@@ -21,9 +20,16 @@ const badges = Array.from({ length: numberOfDummyBadges }).map((_, idx) => ({
describe('BadgeList component', () => {
let wrapper;
+ let mockedActions;
+
+ const findTable = () => wrapper.findComponent(GlTable);
+ const findTableRow = (pos) => findTable().find('tbody').findAll('tr').at(pos);
+ const findButtons = () => wrapper.findByTestId('badge-actions').findAllComponents(GlButton);
+ const findEditButton = () => wrapper.findByTestId('edit-badge-button');
+ const findDeleteButton = () => wrapper.findByTestId('delete-badge');
const createComponent = (customState) => {
- const mockedActions = Object.fromEntries(Object.keys(actions).map((name) => [name, jest.fn()]));
+ mockedActions = Object.fromEntries(Object.keys(actions).map((name) => [name, jest.fn()]));
const store = new Vuex.Store({
state: {
@@ -35,28 +41,23 @@ describe('BadgeList component', () => {
actions: mockedActions,
});
- wrapper = mount(BadgeList, { store });
+ wrapper = mountExtended(BadgeList, {
+ store,
+ stubs: {
+ GlTable,
+ GlButton,
+ },
+ });
};
describe('for project badges', () => {
- it('renders a header with the badge count', () => {
- createComponent({
- kind: PROJECT_BADGE,
- badges,
- });
-
- const header = wrapper.find('.card-header');
-
- expect(header.text()).toMatchInterpolatedText('Your badges 3');
- });
-
it('renders a row for each badge', () => {
createComponent({
kind: PROJECT_BADGE,
badges,
});
- const rows = wrapper.findAll('.gl-responsive-table-row');
+ const rows = findTable().find('tbody').findAll('tr');
expect(rows).toHaveLength(numberOfDummyBadges);
});
@@ -89,4 +90,60 @@ describe('BadgeList component', () => {
expect(wrapper.text()).toMatch('This group has no badges');
});
});
+
+ describe('BadgeList item', () => {
+ beforeEach(() => {
+ createComponent({
+ kind: PROJECT_BADGE,
+ badges,
+ });
+ });
+
+ it('renders the badge', () => {
+ const badgeImage = wrapper.find('.project-badge');
+
+ expect(badgeImage.exists()).toBe(true);
+ expect(badgeImage.attributes('src')).toBe(badges[0].renderedImageUrl);
+ });
+
+ it('renders the badge name', () => {
+ const badgeCell = findTableRow(0).findAll('td').at(0);
+
+ expect(badgeCell.text()).toMatch(badges[0].name);
+ });
+
+ it('renders the badge link', () => {
+ expect(wrapper.text()).toMatch(badges[0].linkUrl);
+ });
+
+ it('renders the badge kind', () => {
+ expect(wrapper.text()).toMatch('Project Badge');
+ });
+
+ it('shows edit and delete buttons', () => {
+ expect(findButtons()).toHaveLength(2);
+
+ const editButton = findEditButton();
+
+ expect(editButton.isVisible()).toBe(true);
+ expect(editButton.element).toHaveSpriteIcon('pencil');
+
+ const deleteButton = findDeleteButton();
+
+ expect(deleteButton.isVisible()).toBe(true);
+ expect(deleteButton.element).toHaveSpriteIcon('remove');
+ });
+
+ it('calls editBadge when clicking then edit button', () => {
+ findEditButton().trigger('click');
+
+ expect(mockedActions.editBadge).toHaveBeenCalled();
+ });
+
+ it('calls updateBadgeInModal and shows modal when clicking then delete button', () => {
+ findDeleteButton().trigger('click');
+
+ expect(mockedActions.updateBadgeInModal).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/badges/components/badge_settings_spec.js b/spec/frontend/badges/components/badge_settings_spec.js
index 7ad2c99869c..af7389b4240 100644
--- a/spec/frontend/badges/components/badge_settings_spec.js
+++ b/spec/frontend/badges/components/badge_settings_spec.js
@@ -1,10 +1,11 @@
-import { GlModal } from '@gitlab/ui';
+import { GlCard, GlTable } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
-import BadgeList from '~/badges/components/badge_list.vue';
-import BadgeListRow from '~/badges/components/badge_list_row.vue';
import BadgeSettings from '~/badges/components/badge_settings.vue';
+import BadgeList from '~/badges/components/badge_list.vue';
+import BadgeForm from '~/badges/components/badge_form.vue';
import store from '~/badges/store';
import { createDummyBadge } from '../dummy_badge';
@@ -22,8 +23,10 @@ describe('BadgeSettings component', () => {
wrapper = shallowMount(BadgeSettings, {
store,
stubs: {
+ GlCard,
+ GlTable,
'badge-list': BadgeList,
- 'badge-list-row': BadgeListRow,
+ 'badge-form': BadgeForm,
},
});
};
@@ -32,35 +35,35 @@ describe('BadgeSettings component', () => {
createComponent();
});
- it('displays modal if button for deleting a badge is clicked', async () => {
- const button = wrapper.find('[data-testid="delete-badge"]');
+ it('renders a header with the badge count', () => {
+ createComponent();
- button.vm.$emit('click');
- await nextTick();
+ const cardTitle = wrapper.find('.gl-new-card-title');
+ const cardCount = wrapper.find('.gl-new-card-count');
- const modal = wrapper.findComponent(GlModal);
- expect(modal.isVisible()).toBe(true);
+ expect(cardTitle.text()).toContain('Your badges');
+ expect(cardCount.text()).toContain('1');
});
- it('displays a form to add a badge', () => {
- expect(wrapper.find('[data-testid="add-new-badge"]').isVisible()).toBe(true);
+ it('displays a table', () => {
+ expect(wrapper.findComponent(GlTable).isVisible()).toBe(true);
});
- it('displays badge list', () => {
+ it('renders badge add form', () => {
+ expect(wrapper.findComponent(BadgeForm).exists()).toBe(true);
+ });
+
+ it('renders badge list', () => {
expect(wrapper.findComponent(BadgeList).isVisible()).toBe(true);
});
describe('when editing', () => {
beforeEach(() => {
- createComponent(true);
+ createComponent({ isEditing: true });
});
it('displays a form to edit a badge', () => {
expect(wrapper.find('[data-testid="edit-badge"]').isVisible()).toBe(true);
});
-
- it('displays no badge list', () => {
- expect(wrapper.findComponent(BadgeList).isVisible()).toBe(false);
- });
});
});
diff --git a/spec/frontend/batch_comments/components/diff_file_drafts_spec.js b/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
index 014e28b7509..e779309e3d8 100644
--- a/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
+++ b/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DiffFileDrafts from '~/batch_comments/components/diff_file_drafts.vue';
import DraftNote from '~/batch_comments/components/draft_note.vue';
diff --git a/spec/frontend/batch_comments/components/preview_dropdown_spec.js b/spec/frontend/batch_comments/components/preview_dropdown_spec.js
index 3a28bf4ade8..608e9c82961 100644
--- a/spec/frontend/batch_comments/components/preview_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/preview_dropdown_spec.js
@@ -1,4 +1,5 @@
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
diff --git a/spec/frontend/batch_comments/components/publish_dropdown_spec.js b/spec/frontend/batch_comments/components/publish_dropdown_spec.js
index 44d7b56c14f..2fe73f30a4c 100644
--- a/spec/frontend/batch_comments/components/publish_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/publish_dropdown_spec.js
@@ -1,6 +1,7 @@
import { GlDisclosureDropdown } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import PreviewDropdown from '~/batch_comments/components/preview_dropdown.vue';
import { createStore } from '~/mr_notes/stores';
diff --git a/spec/frontend/batch_comments/components/submit_dropdown_spec.js b/spec/frontend/batch_comments/components/submit_dropdown_spec.js
index 7e2ff7f786f..19be3fb7d31 100644
--- a/spec/frontend/batch_comments/components/submit_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/submit_dropdown_spec.js
@@ -1,5 +1,6 @@
import { GlDropdown } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import SubmitDropdown from '~/batch_comments/components/submit_dropdown.vue';
@@ -79,10 +80,9 @@ describe('Batch comments submit dropdown', () => {
findForm().vm.$emit('submit', { preventDefault: jest.fn() });
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'editor_type_used', {
- context: 'MergeRequest_review',
- editorType: 'editor_type_plain_text_editor',
- label: 'editor_tracking',
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
+ label: 'markdown_editor',
+ property: 'MergeRequest_review',
});
});
diff --git a/spec/frontend/batch_comments/create_batch_comments_store.js b/spec/frontend/batch_comments/create_batch_comments_store.js
index 10dc6fe196e..2adaae6a9e1 100644
--- a/spec/frontend/batch_comments/create_batch_comments_store.js
+++ b/spec/frontend/batch_comments/create_batch_comments_store.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import batchCommentsModule from '~/batch_comments/stores/modules/batch_comments';
import notesModule from '~/notes/stores/modules';
diff --git a/spec/frontend/behaviors/toasts_spec.js b/spec/frontend/behaviors/toasts_spec.js
new file mode 100644
index 00000000000..bad04758ea1
--- /dev/null
+++ b/spec/frontend/behaviors/toasts_spec.js
@@ -0,0 +1,59 @@
+import { initToastMessages } from '~/behaviors/toasts';
+import { setHTMLFixture } from 'helpers/fixtures';
+import showToast from '~/vue_shared/plugins/global_toast';
+
+jest.mock('~/vue_shared/plugins/global_toast');
+
+describe('initToastMessages', () => {
+ describe('when there are no messages', () => {
+ beforeEach(() => {
+ setHTMLFixture('<div></div>');
+
+ initToastMessages();
+ });
+
+ it('does not display any toasts', () => {
+ expect(showToast).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when there is a message', () => {
+ const expectedMessage = 'toast with jam is great';
+
+ beforeEach(() => {
+ setHTMLFixture(
+ `<div>
+ <div class="js-toast-message" data-message="${expectedMessage}"></div>
+ </div>`,
+ );
+
+ initToastMessages();
+ });
+
+ it('displays the message', () => {
+ expect(showToast).toHaveBeenCalledTimes(1);
+ expect(showToast).toHaveBeenCalledWith(expectedMessage);
+ });
+ });
+
+ describe('when there are multiple messages', () => {
+ beforeEach(() => {
+ setHTMLFixture(
+ `<div>
+ <div class="js-toast-message" data-message="foo"></div>
+ <div class="js-toast-message" data-message="bar"></div>
+ <div class="js-toast-message" data-message="baz"></div>
+ </div>`,
+ );
+
+ initToastMessages();
+ });
+
+ it('displays the messages', () => {
+ expect(showToast).toHaveBeenCalledTimes(3);
+ expect(showToast).toHaveBeenCalledWith('foo');
+ expect(showToast).toHaveBeenCalledWith('bar');
+ expect(showToast).toHaveBeenCalledWith('baz');
+ });
+ });
+});
diff --git a/spec/frontend/blob/file_template_mediator_spec.js b/spec/frontend/blob/file_template_mediator_spec.js
index 907a3c97799..ece9ae27273 100644
--- a/spec/frontend/blob/file_template_mediator_spec.js
+++ b/spec/frontend/blob/file_template_mediator_spec.js
@@ -1,5 +1,5 @@
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import TemplateSelectorMediator from '~/blob/file_template_mediator';
+import FilepathFormMediator from '~/blob/filepath_form_mediator';
describe('Template Selector Mediator', () => {
let mediator;
@@ -14,7 +14,7 @@ describe('Template Selector Mediator', () => {
beforeEach(() => {
setHTMLFixture('<div class="file-editor"><input class="js-file-path-name-input" /></div>');
input = document.querySelector('.js-file-path-name-input');
- mediator = new TemplateSelectorMediator({
+ mediator = new FilepathFormMediator({
editor,
currentAction: jest.fn(),
projectId: jest.fn(),
@@ -44,7 +44,7 @@ describe('Template Selector Mediator', () => {
({ name, newName, shouldDispatch }) => {
input.value = name;
const eventHandler = jest.fn();
- input.addEventListener('change', eventHandler);
+ input.addEventListener('input', eventHandler);
mediator.setFilename(newName);
if (shouldDispatch) {
diff --git a/spec/frontend/blob/file_template_selector_spec.js b/spec/frontend/blob/file_template_selector_spec.js
deleted file mode 100644
index 123475f8d62..00000000000
--- a/spec/frontend/blob/file_template_selector_spec.js
+++ /dev/null
@@ -1,64 +0,0 @@
-import FileTemplateSelector from '~/blob/file_template_selector';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-
-describe('FileTemplateSelector', () => {
- let subject;
-
- const dropdown = '.dropdown';
- const wrapper = '.wrapper';
-
- const createSubject = () => {
- subject = new FileTemplateSelector({});
- subject.config = {
- dropdown,
- wrapper,
- };
- subject.initDropdown = jest.fn();
- };
-
- afterEach(() => {
- subject = null;
- resetHTMLFixture();
- });
-
- describe('show method', () => {
- beforeEach(() => {
- setHTMLFixture(`
- <div class="wrapper hidden">
- <div class="dropdown"></div>
- </div>
- `);
- createSubject();
- });
-
- it('calls init on first call', () => {
- jest.spyOn(subject, 'init');
- subject.show();
-
- expect(subject.init).toHaveBeenCalledTimes(1);
- });
-
- it('does not call init on subsequent calls', () => {
- jest.spyOn(subject, 'init');
- subject.show();
-
- expect(subject.init).toHaveBeenCalledTimes(1);
- });
-
- it('removes hidden class from wrapper', () => {
- subject.init();
- expect(subject.wrapper.classList.contains('hidden')).toBe(true);
-
- subject.show();
- expect(subject.wrapper.classList.contains('hidden')).toBe(false);
- });
-
- it('sets the focus on the dropdown', () => {
- subject.show();
- jest.spyOn(subject.dropdown, 'focus');
- jest.runAllTimers();
-
- expect(subject.dropdown.focus).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/frontend/blob/filepath_form/components/filepath_form_spec.js b/spec/frontend/blob/filepath_form/components/filepath_form_spec.js
new file mode 100644
index 00000000000..8a890cdc75a
--- /dev/null
+++ b/spec/frontend/blob/filepath_form/components/filepath_form_spec.js
@@ -0,0 +1,70 @@
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { GlFormInput } from '@gitlab/ui';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import FilepathForm from '~/blob/filepath_form/components/filepath_form.vue';
+import TemplateSelector from '~/blob/filepath_form/components/template_selector.vue';
+import { Templates as TemplatesMock } from './mock_data';
+
+describe('Filepath Form component', () => {
+ let wrapper;
+
+ const findNavLinks = () => document.querySelector('.nav-links');
+ const findNavLinkWrite = () => findNavLinks().querySelector('#edit');
+ const findNavLinkPreview = () => findNavLinks().querySelector('#preview');
+
+ const findInput = () => wrapper.findComponent(GlFormInput);
+ const findTemplateSelector = () => wrapper.findComponent(TemplateSelector);
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(FilepathForm, {
+ propsData: {
+ templates: TemplatesMock,
+ inputOptions: {},
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ setHTMLFixture(`
+ <div class="file-editor">
+ <ul class="nav-links">
+ <a class="nav-link" id="edit" href="#editor">Write</a>
+ <a class="nav-link" id="preview" href="#preview">Preview</a>
+ </ul>
+ </div>
+ `);
+ createComponent();
+ });
+
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
+ it('renders input with correct attributes', () => {
+ createComponent({ inputOptions: { name: 'foo', value: 'bar' } });
+ expect(findInput().attributes()).toMatchObject({
+ name: 'foo',
+ value: 'bar',
+ });
+ });
+
+ describe('when write button is clicked', () => {
+ it('renders template selector', async () => {
+ findNavLinkWrite().click();
+ await nextTick();
+
+ expect(findTemplateSelector().exists()).toBe(true);
+ });
+ });
+
+ describe('when preview button is clicked', () => {
+ it('hides template selector', async () => {
+ findNavLinkPreview().click();
+ await nextTick();
+
+ expect(findTemplateSelector().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/blob/filepath_form/components/mock_data.js b/spec/frontend/blob/filepath_form/components/mock_data.js
new file mode 100644
index 00000000000..fc19d6f1887
--- /dev/null
+++ b/spec/frontend/blob/filepath_form/components/mock_data.js
@@ -0,0 +1,57 @@
+export const SuggestCiYmlData = {
+ trackLabel: 'suggest_gitlab_ci_yml',
+ dismissKey: '10',
+ mergeRequestPath: 'mr_path',
+ humanAccess: 'owner',
+};
+
+export const Templates = {
+ licenses: {
+ Other: [
+ {
+ name: 'GNU Affero General Public License v3.0',
+ id: 'agpl-3.0',
+ key: 'agpl-3.0',
+ project_id: 10,
+ },
+ ],
+ Popular: [
+ {
+ name: 'Apache License 2.0',
+ id: 'apache-2.0',
+ key: 'apache-2.0',
+ project_id: 10,
+ },
+ ],
+ },
+ gitignore_names: {
+ Languages: [
+ {
+ name: 'Actionscript',
+ id: 'Actionscript',
+ key: 'Actionscript',
+ project_id: 10,
+ },
+ ],
+ },
+ gitlab_ci_ymls: {
+ General: [
+ {
+ name: '5-Minute-Production-App',
+ id: '5-Minute-Production-App',
+ key: '5-Minute-Production-App',
+ project_id: 10,
+ },
+ ],
+ },
+ dockerfile_names: {
+ General: [
+ {
+ name: 'Binary',
+ id: 'Binary',
+ key: 'Binary',
+ project_id: 10,
+ },
+ ],
+ },
+};
diff --git a/spec/frontend/blob/filepath_form/components/template_selector_spec.js b/spec/frontend/blob/filepath_form/components/template_selector_spec.js
new file mode 100644
index 00000000000..b1419320e1e
--- /dev/null
+++ b/spec/frontend/blob/filepath_form/components/template_selector_spec.js
@@ -0,0 +1,167 @@
+import { GlCollapsibleListbox } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import TemplateSelector from '~/blob/filepath_form/components/template_selector.vue';
+import SuggestGitlabCiYml from '~/blob/suggest_gitlab_ci_yml/components/popover.vue';
+import { Templates as TemplatesMock, SuggestCiYmlData as SuggestCiYmlDataMock } from './mock_data';
+
+describe('Template Selector component', () => {
+ let wrapper;
+
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findSuggestCiYmlPopover = () => wrapper.findComponent(SuggestGitlabCiYml);
+ const findDisplayedTemplates = () =>
+ findListbox()
+ .props('items')
+ .reduce((acc, item) => [...acc, ...item.options], [])
+ .map((template) => template.value);
+
+ const getTemplateKeysFromMock = (key) =>
+ Object.values(TemplatesMock[key])
+ .reduce((acc, items) => [...acc, ...items], [])
+ .map((template) => template.key);
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(TemplateSelector, {
+ propsData: {
+ filename: '',
+ templates: TemplatesMock,
+ ...props,
+ },
+ });
+ };
+
+ describe('when filename input is empty', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not render listbox', () => {
+ expect(findListbox().exists()).toBe(false);
+ });
+
+ it('does not render suggest-ci-yml popover', () => {
+ expect(findSuggestCiYmlPopover().exists()).toBe(false);
+ });
+ });
+
+ describe.each`
+ filename | key
+ ${'LICENSE'} | ${'licenses'}
+ ${'Dockerfile'} | ${'dockerfile_names'}
+ ${'.gitignore'} | ${'gitignore_names'}
+ ${'.gitlab-ci.yml'} | ${'gitlab_ci_ymls'}
+ `('when filename is $filename', ({ filename, key }) => {
+ beforeEach(() => {
+ createComponent({ filename });
+ });
+
+ it('renders listbox with correct props', () => {
+ expect(findListbox().exists()).toBe(true);
+ expect(findListbox().props('toggleText')).toBe('Apply a template');
+ expect(findListbox().props('searchPlaceholder')).toBe('Filter');
+ expect(findDisplayedTemplates()).toEqual(getTemplateKeysFromMock(key));
+ });
+
+ it('does not render suggest-ci-yml popover', () => {
+ expect(findSuggestCiYmlPopover().exists()).toBe(false);
+ });
+ });
+
+ describe('when filename input is .gitlab-ci.yml with suggestCiYmlData prop', () => {
+ beforeEach(() => {
+ createComponent({ filename: '.gitlab-ci.yml', suggestCiYmlData: SuggestCiYmlDataMock });
+ });
+
+ it('renders listbox with correct props', () => {
+ expect(findListbox().exists()).toBe(true);
+ expect(findListbox().props('toggleText')).toBe('Apply a template');
+ expect(findListbox().props('searchPlaceholder')).toBe('Filter');
+ });
+
+ it('renders suggest-ci-yml popover', () => {
+ expect(findSuggestCiYmlPopover().exists()).toBe(true);
+ });
+ });
+
+ describe('has filename that matches template pattern', () => {
+ const filename = 'LICENSE';
+ const templates = TemplatesMock.licenses.Other;
+
+ describe('has initial template prop', () => {
+ const initialTemplate = TemplatesMock.licenses.Other[0];
+
+ beforeEach(() => {
+ createComponent({ filename, initialTemplate: initialTemplate.key });
+ });
+
+ it('renders listbox toggle button with selected template name', () => {
+ expect(findListbox().props('toggleText')).toBe(initialTemplate.name);
+ });
+
+ it('selected template is checked', () => {
+ expect(findListbox().props('selected')).toBe(initialTemplate.key);
+ });
+ });
+
+ describe('when template is selected', () => {
+ beforeEach(() => {
+ createComponent({ filename });
+ findListbox().vm.$emit('select', templates[0].key);
+ });
+
+ it('emit `selected` event with selected template', () => {
+ const licenseSelectorType = {
+ key: 'licenses',
+ name: 'LICENSE',
+ pattern: /^(.+\/)?(licen[sc]e|copying)($|\.)/i,
+ type: 'licenses',
+ };
+
+ const { template, type } = wrapper.emitted('selected')[0][0];
+ expect(template).toBe(templates[0]);
+ expect(type).toMatchObject(licenseSelectorType);
+ });
+
+ it('set loading state to true', () => {
+ expect(findListbox().props('loading')).toBe(true);
+ });
+
+ describe('when stopLoading callback from `selected` event is called', () => {
+ it('set loading state to false', async () => {
+ const { stopLoading } = wrapper.emitted('selected')[0][0];
+
+ stopLoading();
+ await nextTick();
+
+ expect(findListbox().props('loading')).toBe(false);
+ });
+ });
+ });
+
+ describe('when searching for filter', () => {
+ const searchTerm = 'GNU';
+
+ beforeEach(() => {
+ createComponent({ filename: 'LICENSE' });
+ findListbox().vm.$emit('search', searchTerm);
+ });
+
+ it('shows matching templates', () => {
+ const displayedTemplates = findDisplayedTemplates();
+ const matchingTemplate = templates.find((template) =>
+ template.name.toLowerCase().includes(searchTerm.toLowerCase()),
+ );
+ expect(displayedTemplates).toContain(matchingTemplate?.key);
+ });
+
+ it('hides non-matching templates', () => {
+ const displayedTemplates = findDisplayedTemplates();
+ const nonMatchingTemplate = templates.find(
+ (template) => !template.name.includes(searchTerm),
+ );
+ expect(displayedTemplates).not.toContain(nonMatchingTemplate?.key);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/blob_edit/blob_bundle_spec.js b/spec/frontend/blob_edit/blob_bundle_spec.js
index 6a7ca3288cb..f72380f211d 100644
--- a/spec/frontend/blob_edit/blob_bundle_spec.js
+++ b/spec/frontend/blob_edit/blob_bundle_spec.js
@@ -96,9 +96,10 @@ describe('BlobBundle', () => {
$('#commit-changes').click();
expect(trackingSpy).toHaveBeenCalledTimes(1);
- expect(trackingSpy).toHaveBeenCalledWith(undefined, undefined, {
- label: 'suggest_gitlab_ci_yml',
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_button', {
+ label: 'suggest_gitlab_ci_yml_commit_changes',
property: 'owner',
+ value: '20',
});
});
});
diff --git a/spec/frontend/blob_edit/edit_blob_spec.js b/spec/frontend/blob_edit/edit_blob_spec.js
index 1bdc54723ce..e58ad4040a9 100644
--- a/spec/frontend/blob_edit/edit_blob_spec.js
+++ b/spec/frontend/blob_edit/edit_blob_spec.js
@@ -45,6 +45,7 @@ describe('Blob Editing', () => {
getValue: jest.fn().mockReturnValue('test value'),
focus: jest.fn(),
onDidChangeModelLanguage: emitter.event,
+ updateModelLanguage: jest.fn(),
};
beforeEach(() => {
mock = new MockAdapter(axios);
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index 36556ba00af..1740676161f 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -2,6 +2,7 @@ import { GlLabel, GlLoadingIcon, GlTooltip } from '@gitlab/ui';
import { range } from 'lodash';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index e3cdec1ab6e..7367b34c4df 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import BoardCard from '~/boards/components/board_card.vue';
diff --git a/spec/frontend/boards/cache_updates_spec.js b/spec/frontend/boards/cache_updates_spec.js
new file mode 100644
index 00000000000..bc661f20451
--- /dev/null
+++ b/spec/frontend/boards/cache_updates_spec.js
@@ -0,0 +1,37 @@
+import * as Sentry from '@sentry/browser';
+import { setError } from '~/boards/graphql/cache_updates';
+import { defaultClient } from '~/graphql_shared/issuable_client';
+import setErrorMutation from '~/boards/graphql/client/set_error.mutation.graphql';
+
+describe('setError', () => {
+ let sentryCaptureExceptionSpy;
+ const errorMessage = 'Error';
+ const error = new Error(errorMessage);
+
+ beforeEach(() => {
+ jest.spyOn(defaultClient, 'mutate').mockResolvedValue();
+ sentryCaptureExceptionSpy = jest.spyOn(Sentry, 'captureException');
+ });
+
+ it('calls setErrorMutation and capture Sentry error', () => {
+ setError({ message: errorMessage, error });
+
+ expect(defaultClient.mutate).toHaveBeenCalledWith({
+ mutation: setErrorMutation,
+ variables: { error: errorMessage },
+ });
+
+ expect(sentryCaptureExceptionSpy).toHaveBeenCalledWith(error);
+ });
+
+ it('does not capture Sentry error when captureError is false', () => {
+ setError({ message: errorMessage, error, captureError: false });
+
+ expect(defaultClient.mutate).toHaveBeenCalledWith({
+ mutation: setErrorMutation,
+ variables: { error: errorMessage },
+ });
+
+ expect(sentryCaptureExceptionSpy).not.toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/boards/components/board_add_new_column_form_spec.js b/spec/frontend/boards/components/board_add_new_column_form_spec.js
index 35296f36b89..719e36629c2 100644
--- a/spec/frontend/boards/components/board_add_new_column_form_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_form_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumnForm from '~/boards/components/board_add_new_column_form.vue';
diff --git a/spec/frontend/boards/components/board_add_new_column_spec.js b/spec/frontend/boards/components/board_add_new_column_spec.js
index 8d6cc9373af..1a847d35900 100644
--- a/spec/frontend/boards/components/board_add_new_column_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_spec.js
@@ -1,14 +1,17 @@
import { GlCollapsibleListbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumn from '~/boards/components/board_add_new_column.vue';
import BoardAddNewColumnForm from '~/boards/components/board_add_new_column_form.vue';
import defaultState from '~/boards/stores/state';
import createBoardListMutation from 'ee_else_ce/boards/graphql/board_list_create.mutation.graphql';
import boardLabelsQuery from '~/boards/graphql/board_labels.query.graphql';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import {
mockLabelList,
createBoardListResponse,
@@ -21,13 +24,14 @@ Vue.use(VueApollo);
describe('BoardAddNewColumn', () => {
let wrapper;
+ let mockApollo;
const createBoardListQueryHandler = jest.fn().mockResolvedValue(createBoardListResponse);
const labelsQueryHandler = jest.fn().mockResolvedValue(labelsQueryResponse);
- const mockApollo = createMockApollo([
- [boardLabelsQuery, labelsQueryHandler],
- [createBoardListMutation, createBoardListQueryHandler],
- ]);
+ const errorMessage = 'Failed to create list';
+ const createBoardListQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
+ const errorMessageLabels = 'Failed to fetch labels';
+ const labelsQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessageLabels));
const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
const findAddNewColumnForm = () => wrapper.findComponent(BoardAddNewColumnForm);
@@ -53,7 +57,14 @@ describe('BoardAddNewColumn', () => {
actions = {},
provide = {},
lists = {},
+ labelsHandler = labelsQueryHandler,
+ createHandler = createBoardListQueryHandler,
} = {}) => {
+ mockApollo = createMockApollo([
+ [boardLabelsQuery, labelsHandler],
+ [createBoardListMutation, createHandler],
+ ]);
+
wrapper = shallowMountExtended(BoardAddNewColumn, {
apolloProvider: mockApollo,
propsData: {
@@ -111,6 +122,10 @@ describe('BoardAddNewColumn', () => {
mockApollo.clients.defaultClient.cache.writeQuery = jest.fn();
};
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
+
describe('Add list button', () => {
it('calls addList', async () => {
const getListByLabelId = jest.fn().mockReturnValue(null);
@@ -208,11 +223,52 @@ describe('BoardAddNewColumn', () => {
findAddNewColumnForm().vm.$emit('add-list');
- await nextTick();
+ await waitForPromises();
expect(wrapper.emitted('highlight-list')).toEqual([[mockLabelList.id]]);
expect(createBoardListQueryHandler).not.toHaveBeenCalledWith();
});
});
+
+ describe('when fetch labels query fails', () => {
+ beforeEach(() => {
+ mountComponent({
+ provide: { isApolloBoard: true },
+ labelsHandler: labelsQueryHandlerFailure,
+ });
+ });
+
+ it('sets error', async () => {
+ findDropdown().vm.$emit('show');
+
+ await waitForPromises();
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
+ });
+
+ describe('when create list mutation fails', () => {
+ beforeEach(() => {
+ mountComponent({
+ selectedId: mockLabelList.label.id,
+ provide: { isApolloBoard: true },
+ createHandler: createBoardListQueryHandlerFailure,
+ });
+ });
+
+ it('sets error', async () => {
+ findDropdown().vm.$emit('show');
+
+ await nextTick();
+ expect(labelsQueryHandler).toHaveBeenCalled();
+
+ selectLabel(mockLabelList.label.id);
+
+ findAddNewColumnForm().vm.$emit('add-list');
+
+ await waitForPromises();
+
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
+ });
});
});
diff --git a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
index 825cfc9453a..396ec7d67cd 100644
--- a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
@@ -1,5 +1,6 @@
import { GlButton } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumnTrigger from '~/boards/components/board_add_new_column_trigger.vue';
diff --git a/spec/frontend/boards/components/board_app_spec.js b/spec/frontend/boards/components/board_app_spec.js
index e7624437ac5..b16f9b26f40 100644
--- a/spec/frontend/boards/components/board_app_spec.js
+++ b/spec/frontend/boards/components/board_app_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -9,13 +10,17 @@ import BoardApp from '~/boards/components/board_app.vue';
import eventHub from '~/boards/eventhub';
import activeBoardItemQuery from 'ee_else_ce/boards/graphql/client/active_board_item.query.graphql';
import boardListsQuery from 'ee_else_ce/boards/graphql/board_lists.query.graphql';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { rawIssue, boardListsQueryResponse } from '../mock_data';
describe('BoardApp', () => {
let wrapper;
let store;
+ let mockApollo;
+
+ const errorMessage = 'Failed to fetch lists';
const boardListQueryHandler = jest.fn().mockResolvedValue(boardListsQueryResponse);
- const mockApollo = createMockApollo([[boardListsQuery, boardListQueryHandler]]);
+ const boardListQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
Vue.use(Vuex);
Vue.use(VueApollo);
@@ -33,7 +38,12 @@ describe('BoardApp', () => {
});
};
- const createComponent = ({ isApolloBoard = false, issue = rawIssue } = {}) => {
+ const createComponent = ({
+ isApolloBoard = false,
+ issue = rawIssue,
+ handler = boardListQueryHandler,
+ } = {}) => {
+ mockApollo = createMockApollo([[boardListsQuery, handler]]);
mockApollo.clients.defaultClient.cache.writeQuery({
query: activeBoardItemQuery,
data: {
@@ -57,6 +67,10 @@ describe('BoardApp', () => {
});
};
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
+
afterEach(() => {
store = null;
});
@@ -104,5 +118,13 @@ describe('BoardApp', () => {
expect(eventHub.$on).toHaveBeenCalledWith('updateBoard', wrapper.vm.refetchLists);
});
+
+ it('sets error on fetch lists failure', async () => {
+ createComponent({ isApolloBoard: true, handler: boardListQueryHandlerFailure });
+
+ await waitForPromises();
+
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/boards/components/board_card_move_to_position_spec.js b/spec/frontend/boards/components/board_card_move_to_position_spec.js
index 5f308be5580..20beaf2e9bd 100644
--- a/spec/frontend/boards/components/board_card_move_to_position_spec.js
+++ b/spec/frontend/boards/components/board_card_move_to_position_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import {
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index 897219303b5..167efb94fcc 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -1,5 +1,6 @@
import { GlLabel } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js
index 9be2696de56..01eea12bf0a 100644
--- a/spec/frontend/boards/components/board_content_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_content_sidebar_spec.js
@@ -2,6 +2,7 @@ import { GlDrawer } from '@gitlab/ui';
import { MountingPortal } from 'portal-vue';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import SidebarDropdownWidget from 'ee_else_ce/sidebar/components/sidebar_dropdown_widget.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index 0a2a78479fb..675b79a8b1a 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -1,8 +1,9 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import Draggable from 'vuedraggable';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -10,6 +11,7 @@ import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import EpicsSwimlanes from 'ee_component/boards/components/epics_swimlanes.vue';
import getters from 'ee_else_ce/boards/stores/getters';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import BoardColumn from '~/boards/components/board_column.vue';
import BoardContent from '~/boards/components/board_content.vue';
import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue';
@@ -36,6 +38,8 @@ describe('BoardContent', () => {
let mockApollo;
const updateListHandler = jest.fn().mockResolvedValue(updateBoardListResponse);
+ const errorMessage = 'Failed to update list';
+ const updateListHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
const defaultState = {
isShowingEpicsSwimlanes: false,
@@ -60,8 +64,9 @@ describe('BoardContent', () => {
issuableType = 'issue',
isIssueBoard = true,
isEpicBoard = false,
+ handler = updateListHandler,
} = {}) => {
- mockApollo = createMockApollo([[updateBoardListMutation, updateListHandler]]);
+ mockApollo = createMockApollo([[updateBoardListMutation, handler]]);
const listQueryVariables = { isProject: true };
mockApollo.clients.defaultClient.writeQuery({
@@ -107,6 +112,11 @@ describe('BoardContent', () => {
const findBoardColumns = () => wrapper.findAllComponents(BoardColumn);
const findBoardAddNewColumn = () => wrapper.findComponent(BoardAddNewColumn);
const findDraggable = () => wrapper.findComponent(Draggable);
+ const findError = () => wrapper.findComponent(GlAlert);
+
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
describe('default', () => {
beforeEach(() => {
@@ -123,7 +133,7 @@ describe('BoardContent', () => {
it('does not display EpicsSwimlanes component', () => {
expect(wrapper.findComponent(EpicsSwimlanes).exists()).toBe(false);
- expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+ expect(findError().exists()).toBe(false);
});
it('sets delay and delayOnTouchOnly attributes on board list', () => {
@@ -169,6 +179,18 @@ describe('BoardContent', () => {
});
describe('when Apollo boards FF is on', () => {
+ const moveList = () => {
+ const movableListsOrder = [mockLists[0].id, mockLists[1].id];
+
+ findDraggable().vm.$emit('end', {
+ item: { dataset: { listId: mockLists[0].id, draggableItemType: DraggableItemTypes.list } },
+ newIndex: 1,
+ to: {
+ children: movableListsOrder.map((listId) => ({ dataset: { listId } })),
+ },
+ });
+ };
+
beforeEach(async () => {
createComponent({ isApolloBoard: true });
await waitForPromises();
@@ -183,19 +205,38 @@ describe('BoardContent', () => {
});
it('reorders lists', async () => {
- const movableListsOrder = [mockLists[0].id, mockLists[1].id];
-
- findDraggable().vm.$emit('end', {
- item: { dataset: { listId: mockLists[0].id, draggableItemType: DraggableItemTypes.list } },
- newIndex: 1,
- to: {
- children: movableListsOrder.map((listId) => ({ dataset: { listId } })),
- },
- });
+ moveList();
await waitForPromises();
expect(updateListHandler).toHaveBeenCalled();
});
+
+ it('sets error on reorder lists failure', async () => {
+ createComponent({ isApolloBoard: true, handler: updateListHandlerFailure });
+
+ moveList();
+ await waitForPromises();
+
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
+
+ describe('when error is passed', () => {
+ beforeEach(async () => {
+ createComponent({ isApolloBoard: true, props: { apolloError: 'Error' } });
+ await waitForPromises();
+ });
+
+ it('displays error banner', () => {
+ expect(findError().exists()).toBe(true);
+ });
+
+ it('dismisses error', async () => {
+ findError().vm.$emit('dismiss');
+ await nextTick();
+
+ expect(cacheUpdates.setError).toHaveBeenCalledWith({ message: null, captureError: false });
+ });
+ });
});
describe('when "add column" form is visible', () => {
diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js
index 5a976816f74..0bd936c9abd 100644
--- a/spec/frontend/boards/components/board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/board_filtered_search_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import BoardFilteredSearch from '~/boards/components/board_filtered_search.vue';
import { updateHistory } from '~/lib/utils/url_utility';
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 5604c589e37..15ee3976fb1 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,5 +1,6 @@
import { GlModal } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
import setWindowLocation from 'helpers/set_window_location_helper';
diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js
index 0c9e1b4646e..76e969f1725 100644
--- a/spec/frontend/boards/components/board_list_header_spec.js
+++ b/spec/frontend/boards/components/board_list_header_spec.js
@@ -1,9 +1,11 @@
import { GlButtonGroup } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import {
boardListQueryResponse,
mockLabelList,
@@ -12,6 +14,7 @@ import {
import BoardListHeader from '~/boards/components/board_list_header.vue';
import updateBoardListMutation from '~/boards/graphql/board_list_update.mutation.graphql';
import { ListType } from '~/boards/constants';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import listQuery from 'ee_else_ce/boards/graphql/board_lists_deferred.query.graphql';
Vue.use(VueApollo);
@@ -25,7 +28,11 @@ describe('Board List Header Component', () => {
const updateListSpy = jest.fn();
const toggleListCollapsedSpy = jest.fn();
const mockClientToggleListCollapsedResolver = jest.fn();
- const updateListHandler = jest.fn().mockResolvedValue(updateBoardListResponse);
+ const updateListHandlerSuccess = jest.fn().mockResolvedValue(updateBoardListResponse);
+
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
afterEach(() => {
fakeApollo = null;
@@ -39,6 +46,7 @@ describe('Board List Header Component', () => {
withLocalStorage = true,
currentUserId = 1,
listQueryHandler = jest.fn().mockResolvedValue(boardListQueryResponse()),
+ updateListHandler = updateListHandlerSuccess,
injectedProps = {},
} = {}) => {
const boardId = 'gid://gitlab/Board/1';
@@ -271,7 +279,7 @@ describe('Board List Header Component', () => {
findCaret().vm.$emit('click');
await nextTick();
- expect(updateListHandler).not.toHaveBeenCalled();
+ expect(updateListHandlerSuccess).not.toHaveBeenCalled();
});
it('calls update list mutation when user is logged in', async () => {
@@ -280,7 +288,50 @@ describe('Board List Header Component', () => {
findCaret().vm.$emit('click');
await nextTick();
- expect(updateListHandler).toHaveBeenCalledWith({ listId: mockLabelList.id, collapsed: true });
+ expect(updateListHandlerSuccess).toHaveBeenCalledWith({
+ listId: mockLabelList.id,
+ collapsed: true,
+ });
+ });
+
+ describe('when fetch list query fails', () => {
+ const errorMessage = 'Failed to fetch list';
+ const listQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
+
+ beforeEach(() => {
+ createComponent({
+ listQueryHandler: listQueryHandlerFailure,
+ injectedProps: { isApolloBoard: true },
+ });
+ });
+
+ it('sets error', async () => {
+ await waitForPromises();
+
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
+ });
+
+ describe('when update list mutation fails', () => {
+ const errorMessage = 'Failed to update list';
+ const updateListHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
+
+ beforeEach(() => {
+ createComponent({
+ currentUserId: 1,
+ updateListHandler: updateListHandlerFailure,
+ injectedProps: { isApolloBoard: true },
+ });
+ });
+
+ it('sets error', async () => {
+ await waitForPromises();
+
+ findCaret().vm.$emit('click');
+ await waitForPromises();
+
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
});
});
});
diff --git a/spec/frontend/boards/components/board_new_issue_spec.js b/spec/frontend/boards/components/board_new_issue_spec.js
index a1088f1e8f7..bf2608d0594 100644
--- a/spec/frontend/boards/components/board_new_issue_spec.js
+++ b/spec/frontend/boards/components/board_new_issue_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
diff --git a/spec/frontend/boards/components/board_settings_sidebar_spec.js b/spec/frontend/boards/components/board_settings_sidebar_spec.js
index affe1260c66..f6ed483dfc5 100644
--- a/spec/frontend/boards/components/board_settings_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_settings_sidebar_spec.js
@@ -3,14 +3,17 @@ import { shallowMount } from '@vue/test-utils';
import { MountingPortal } from 'portal-vue';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import waitForPromises from 'helpers/wait_for_promises';
import { stubComponent } from 'helpers/stub_component';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BoardSettingsSidebar from '~/boards/components/board_settings_sidebar.vue';
import { inactiveId, LIST } from '~/boards/constants';
import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import actions from '~/boards/stores/actions';
import getters from '~/boards/stores/getters';
import mutations from '~/boards/stores/mutations';
@@ -31,12 +34,17 @@ describe('BoardSettingsSidebar', () => {
const destroyBoardListMutationHandlerSuccess = jest
.fn()
.mockResolvedValue(destroyBoardListMutationResponse);
+ const errorMessage = 'Failed to delete list';
+ const destroyBoardListMutationHandlerFailure = jest
+ .fn()
+ .mockRejectedValue(new Error(errorMessage));
const createComponent = ({
canAdminList = false,
list = {},
sidebarType = LIST,
activeId = inactiveId,
+ destroyBoardListMutationHandler = destroyBoardListMutationHandlerSuccess,
isApolloBoard = false,
} = {}) => {
const boardLists = {
@@ -49,9 +57,7 @@ describe('BoardSettingsSidebar', () => {
actions,
});
- mockApollo = createMockApollo([
- [destroyBoardListMutation, destroyBoardListMutationHandlerSuccess],
- ]);
+ mockApollo = createMockApollo([[destroyBoardListMutation, destroyBoardListMutationHandler]]);
wrapper = extendedWrapper(
shallowMount(BoardSettingsSidebar, {
@@ -90,6 +96,10 @@ describe('BoardSettingsSidebar', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findRemoveButton = () => wrapper.findComponent(GlButton);
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
+
it('finds a MountingPortal component', () => {
createComponent();
@@ -214,5 +224,23 @@ describe('BoardSettingsSidebar', () => {
createComponent({ canAdminList: true, activeId: listId, list: mockLabelList });
expect(findModal().props('modalId')).toBe(modalID);
});
+
+ it('sets error when destroy list mutation fails', async () => {
+ createComponent({
+ canAdminList: true,
+ activeId: listId,
+ list: mockLabelList,
+ destroyBoardListMutationHandler: destroyBoardListMutationHandlerFailure,
+ isApolloBoard: true,
+ });
+
+ findRemoveButton().vm.$emit('click');
+
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+
+ await waitForPromises();
+
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/boards/components/board_top_bar_spec.js b/spec/frontend/boards/components/board_top_bar_spec.js
index afc7da97617..87abe630688 100644
--- a/spec/frontend/boards/components/board_top_bar_spec.js
+++ b/spec/frontend/boards/components/board_top_bar_spec.js
@@ -1,8 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import BoardTopBar from '~/boards/components/board_top_bar.vue';
import BoardAddNewColumnTrigger from '~/boards/components/board_add_new_column_trigger.vue';
@@ -11,6 +13,7 @@ import ConfigToggle from '~/boards/components/config_toggle.vue';
import IssueBoardFilteredSearch from '~/boards/components/issue_board_filtered_search.vue';
import NewBoardButton from '~/boards/components/new_board_button.vue';
import ToggleFocus from '~/boards/components/toggle_focus.vue';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import groupBoardQuery from '~/boards/graphql/group_board.query.graphql';
@@ -32,12 +35,18 @@ describe('BoardTopBar', () => {
const projectBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockProjectBoardResponse);
const groupBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockGroupBoardResponse);
-
- const createComponent = ({ provide = {} } = {}) => {
+ const errorMessage = 'Failed to fetch board';
+ const boardQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
+
+ const createComponent = ({
+ provide = {},
+ projectBoardQueryHandler = projectBoardQueryHandlerSuccess,
+ groupBoardQueryHandler = groupBoardQueryHandlerSuccess,
+ } = {}) => {
const store = createStore();
mockApollo = createMockApollo([
- [projectBoardQuery, projectBoardQueryHandlerSuccess],
- [groupBoardQuery, groupBoardQueryHandlerSuccess],
+ [projectBoardQuery, projectBoardQueryHandler],
+ [groupBoardQuery, groupBoardQueryHandler],
]);
wrapper = shallowMount(BoardTopBar, {
@@ -65,6 +74,10 @@ describe('BoardTopBar', () => {
});
};
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
+
afterEach(() => {
mockApollo = null;
});
@@ -134,5 +147,25 @@ describe('BoardTopBar', () => {
expect(queryHandler).toHaveBeenCalled();
expect(notCalledHandler).not.toHaveBeenCalled();
});
+
+ it.each`
+ boardType
+ ${WORKSPACE_GROUP}
+ ${WORKSPACE_PROJECT}
+ `('sets error when $boardType board query fails', async ({ boardType }) => {
+ createComponent({
+ provide: {
+ boardType,
+ isProjectBoard: boardType === WORKSPACE_PROJECT,
+ isGroupBoard: boardType === WORKSPACE_GROUP,
+ isApolloBoard: true,
+ },
+ groupBoardQueryHandler: boardQueryHandlerFailure,
+ projectBoardQueryHandler: boardQueryHandlerFailure,
+ });
+
+ await waitForPromises();
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index 13c017706ef..b17a5589c07 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -1,6 +1,7 @@
import { GlDropdown, GlLoadingIcon, GlDropdownSectionHeader } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
@@ -9,6 +10,7 @@ import groupBoardsQuery from '~/boards/graphql/group_boards.query.graphql';
import projectBoardsQuery from '~/boards/graphql/project_boards.query.graphql';
import groupRecentBoardsQuery from '~/boards/graphql/group_recent_boards.query.graphql';
import projectRecentBoardsQuery from '~/boards/graphql/project_recent_boards.query.graphql';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@@ -37,7 +39,6 @@ describe('BoardsSelector', () => {
const createStore = () => {
store = new Vuex.Store({
actions: {
- setError: jest.fn(),
setBoardConfig: jest.fn(),
},
state: {
@@ -77,16 +78,19 @@ describe('BoardsSelector', () => {
.fn()
.mockResolvedValue(mockEmptyProjectRecentBoardsResponse);
+ const boardsHandlerFailure = jest.fn().mockRejectedValue(new Error('error'));
+
const createComponent = ({
projectBoardsQueryHandler = projectBoardsQueryHandlerSuccess,
projectRecentBoardsQueryHandler = projectRecentBoardsQueryHandlerSuccess,
+ groupBoardsQueryHandler = groupBoardsQueryHandlerSuccess,
isGroupBoard = false,
isProjectBoard = false,
provide = {},
} = {}) => {
fakeApollo = createMockApollo([
[projectBoardsQuery, projectBoardsQueryHandler],
- [groupBoardsQuery, groupBoardsQueryHandlerSuccess],
+ [groupBoardsQuery, groupBoardsQueryHandler],
[projectRecentBoardsQuery, projectRecentBoardsQueryHandler],
[groupRecentBoardsQuery, groupRecentBoardsQueryHandlerSuccess],
]);
@@ -115,6 +119,10 @@ describe('BoardsSelector', () => {
});
};
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
+
afterEach(() => {
fakeApollo = null;
});
@@ -173,8 +181,7 @@ describe('BoardsSelector', () => {
it('shows only matching boards when filtering', async () => {
const filterTerm = 'board1';
- const expectedCount = boards.filter((board) => board.node.name.includes(filterTerm))
- .length;
+ const expectedCount = boards.filter((board) => board.name.includes(filterTerm)).length;
fillSearchBox(filterTerm);
@@ -246,6 +253,29 @@ describe('BoardsSelector', () => {
expect(queryHandler).toHaveBeenCalled();
expect(notCalledHandler).not.toHaveBeenCalled();
});
+
+ it.each`
+ boardType
+ ${WORKSPACE_GROUP}
+ ${WORKSPACE_PROJECT}
+ `('sets error when fetching $boardType boards fails', async ({ boardType }) => {
+ createStore();
+ createComponent({
+ isGroupBoard: boardType === WORKSPACE_GROUP,
+ isProjectBoard: boardType === WORKSPACE_PROJECT,
+ projectBoardsQueryHandler: boardsHandlerFailure,
+ groupBoardsQueryHandler: boardsHandlerFailure,
+ });
+
+ await nextTick();
+
+ // Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
+ findDropdown().vm.$emit('show');
+
+ await waitForPromises();
+
+ expect(cacheUpdates.setError).toHaveBeenCalled();
+ });
});
describe('dropdown visibility', () => {
diff --git a/spec/frontend/boards/components/config_toggle_spec.js b/spec/frontend/boards/components/config_toggle_spec.js
index 5330721451e..3d505038331 100644
--- a/spec/frontend/boards/components/config_toggle_spec.js
+++ b/spec/frontend/boards/components/config_toggle_spec.js
@@ -1,7 +1,9 @@
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
+import { __ } from '~/locale';
import ConfigToggle from '~/boards/components/config_toggle.vue';
import eventHub from '~/boards/eventhub';
import store from '~/boards/stores';
@@ -12,13 +14,14 @@ describe('ConfigToggle', () => {
Vue.use(Vuex);
- const createComponent = (provide = {}) =>
+ const createComponent = (provide = {}, props = {}) =>
shallowMount(ConfigToggle, {
store,
provide: {
canAdminList: true,
...provide,
},
+ propsData: props,
});
const findButton = () => wrapper.findComponent(GlButton);
@@ -52,4 +55,20 @@ describe('ConfigToggle', () => {
label: 'edit_board',
});
});
+
+ it.each`
+ boardHasScope
+ ${true}
+ ${false}
+ `('renders dot highlight and tooltip depending on boardHasScope prop', ({ boardHasScope }) => {
+ wrapper = createComponent({}, { boardHasScope });
+
+ expect(findButton().classes('dot-highlight')).toBe(boardHasScope);
+
+ if (boardHasScope) {
+ expect(findButton().attributes('title')).toBe(__("This board's scope is reduced"));
+ } else {
+ expect(findButton().attributes('title')).toBe('');
+ }
+ });
});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js
deleted file mode 100644
index b01ee01120e..00000000000
--- a/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- To avoid duplicating tests in time_tracker.spec,
- this spec only contains a simple test to check rendering.
-
- A detailed feature spec is used to test time tracking feature
- in swimlanes sidebar.
-*/
-
-import { shallowMount } from '@vue/test-utils';
-import BoardSidebarTimeTracker from '~/boards/components/sidebar/board_sidebar_time_tracker.vue';
-import { createStore } from '~/boards/stores';
-import IssuableTimeTracker from '~/sidebar/components/time_tracking/time_tracker.vue';
-
-describe('BoardSidebarTimeTracker', () => {
- let wrapper;
- let store;
-
- const createComponent = (options) => {
- wrapper = shallowMount(BoardSidebarTimeTracker, {
- store,
- ...options,
- });
- };
-
- beforeEach(() => {
- store = createStore();
- store.state.boardItems = {
- 1: {
- id: 1,
- iid: 1,
- timeEstimate: 3600,
- totalTimeSpent: 1800,
- humanTimeEstimate: '1h',
- humanTotalTimeSpent: '30min',
- },
- };
- store.state.activeId = '1';
- });
-
- it.each`
- timeTrackingLimitToHours | canUpdate
- ${true} | ${false}
- ${true} | ${true}
- ${false} | ${false}
- ${false} | ${true}
- `(
- 'renders IssuableTimeTracker with correct spent and estimated time (timeTrackingLimitToHours=$timeTrackingLimitToHours, canUpdate=$canUpdate)',
- ({ timeTrackingLimitToHours, canUpdate }) => {
- createComponent({ provide: { timeTrackingLimitToHours, canUpdate } });
-
- expect(wrapper.findComponent(IssuableTimeTracker).props()).toEqual({
- limitToHours: timeTrackingLimitToHours,
- canAddTimeEntries: canUpdate,
- showCollapsed: false,
- issuableId: '1',
- issuableIid: '1',
- fullPath: '',
- initialTimeTracking: {
- timeEstimate: 3600,
- totalTimeSpent: 1800,
- humanTimeEstimate: '1h',
- humanTotalTimeSpent: '30min',
- },
- });
- },
- );
-});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 8235c3e4194..8f57a6eb7da 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -110,12 +110,10 @@ function boardGenerator(n) {
const name = `board${id}`;
return {
- node: {
- id,
- name,
- weight: 0,
- __typename: 'Board',
- },
+ id,
+ name,
+ weight: 0,
+ __typename: 'Board',
};
});
}
@@ -127,7 +125,7 @@ export const mockSmallProjectAllBoardsResponse = {
data: {
project: {
id: 'gid://gitlab/Project/114',
- boards: { edges: boardGenerator(3) },
+ boards: { nodes: boardGenerator(3) },
__typename: 'Project',
},
},
@@ -137,7 +135,7 @@ export const mockEmptyProjectRecentBoardsResponse = {
data: {
project: {
id: 'gid://gitlab/Project/114',
- recentIssueBoards: { edges: [] },
+ recentIssueBoards: { nodes: [] },
__typename: 'Project',
},
},
@@ -147,7 +145,7 @@ export const mockGroupAllBoardsResponse = {
data: {
group: {
id: 'gid://gitlab/Group/114',
- boards: { edges: boards },
+ boards: { nodes: boards },
__typename: 'Group',
},
},
@@ -157,7 +155,7 @@ export const mockProjectAllBoardsResponse = {
data: {
project: {
id: 'gid://gitlab/Project/1',
- boards: { edges: boards },
+ boards: { nodes: boards },
__typename: 'Project',
},
},
@@ -167,7 +165,7 @@ export const mockGroupRecentBoardsResponse = {
data: {
group: {
id: 'gid://gitlab/Group/114',
- recentIssueBoards: { edges: recentIssueBoards },
+ recentIssueBoards: { nodes: recentIssueBoards },
__typename: 'Group',
},
},
@@ -177,7 +175,7 @@ export const mockProjectRecentBoardsResponse = {
data: {
project: {
id: 'gid://gitlab/Project/1',
- recentIssueBoards: { edges: recentIssueBoards },
+ recentIssueBoards: { nodes: recentIssueBoards },
__typename: 'Project',
},
},
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index a2961fb1302..5b4b79c650a 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -1,6 +1,7 @@
import * as Sentry from '@sentry/browser';
import { cloneDeep } from 'lodash';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { inactiveId, ISSUABLE, ListType, DraggableItemTypes } from 'ee_else_ce/boards/constants';
import issueMoveListMutation from 'ee_else_ce/boards/graphql/issue_move_list.mutation.graphql';
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js
index b364f098a3a..567a49d663c 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js
@@ -30,7 +30,6 @@ describe('Ci Group Variable wrapper', () => {
provide: {
...mockProvide,
glFeatures: {
- ciGroupEnvScopeGraphql: false,
groupScopedCiVariables: false,
...featureFlags,
},
@@ -61,6 +60,10 @@ describe('Ci Group Variable wrapper', () => {
lookup: expect.any(Function),
query: getGroupVariables,
},
+ environments: {
+ lookup: expect.any(Function),
+ query: getGroupEnvironments,
+ },
},
refetchAfterMutation: false,
});
@@ -88,26 +91,4 @@ describe('Ci Group Variable wrapper', () => {
});
});
});
-
- describe('ciGroupEnvScopeGraphql feature flag', () => {
- describe('When enabled', () => {
- beforeEach(() => {
- createComponent({ featureFlags: { ciGroupEnvScopeGraphql: true } });
- });
-
- it('Passes down environments query to variable shared component', () => {
- expect(findCiShared().props('queryData').environments.query).toBe(getGroupEnvironments);
- });
- });
-
- describe('When disabled', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('Does not pass down environments query to variable shared component', () => {
- expect(findCiShared().props('queryData').environments).toBe(undefined);
- });
- });
- });
});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
new file mode 100644
index 00000000000..762c9611dac
--- /dev/null
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
@@ -0,0 +1,69 @@
+import { GlDrawer, GlFormSelect } from '@gitlab/ui';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CiVariableDrawer from '~/ci/ci_variable_list/components/ci_variable_drawer.vue';
+import {
+ ADD_VARIABLE_ACTION,
+ variableOptions,
+ variableTypes,
+} from '~/ci/ci_variable_list/constants';
+
+describe('CI Variable Drawer', () => {
+ let wrapper;
+
+ const defaultProps = {
+ areEnvironmentsLoading: false,
+ hasEnvScopeQuery: true,
+ mode: ADD_VARIABLE_ACTION,
+ };
+
+ const createComponent = ({ mountFn = shallowMountExtended, props = {} } = {}) => {
+ wrapper = mountFn(CiVariableDrawer, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ provide: {
+ environmentScopeLink: '/help/environments',
+ },
+ });
+ };
+
+ const findDrawer = () => wrapper.findComponent(GlDrawer);
+ const findTypeDropdown = () => wrapper.findComponent(GlFormSelect);
+
+ describe('validations', () => {
+ beforeEach(() => {
+ createComponent({ mountFn: mountExtended });
+ });
+
+ describe('type dropdown', () => {
+ it('adds each type option as a dropdown item', () => {
+ expect(findTypeDropdown().findAll('option')).toHaveLength(variableOptions.length);
+
+ variableOptions.forEach((v) => {
+ expect(findTypeDropdown().text()).toContain(v.text);
+ });
+ });
+
+ it('is set to environment variable by default', () => {
+ expect(findTypeDropdown().findAll('option').at(0).attributes('value')).toBe(
+ variableTypes.envType,
+ );
+ });
+ });
+ });
+
+ describe('drawer events', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('emits `close-form` when closing the drawer', async () => {
+ expect(wrapper.emitted('close-form')).toBeUndefined();
+
+ await findDrawer().vm.$emit('close');
+
+ expect(wrapper.emitted('close-form')).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
index d843646df16..7dce23f72c0 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlFormInput } from '@gitlab/ui';
+import { GlButton, GlFormInput, GlSprintf } from '@gitlab/ui';
import { mockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
@@ -10,6 +10,8 @@ import {
EVENT_LABEL,
EVENT_ACTION,
ENVIRONMENT_SCOPE_LINK_TITLE,
+ AWS_TIP_TITLE,
+ AWS_TIP_MESSAGE,
groupString,
instanceString,
projectString,
@@ -28,10 +30,6 @@ describe('Ci variable modal', () => {
const mockVariables = mockVariablesWithScopes(instanceString);
const defaultProvide = {
- awsLogoSvgPath: '/logo',
- awsTipCommandsLink: '/tips',
- awsTipDeployLink: '/deploy',
- awsTipLearnLink: '/learn-link',
containsVariableReferenceLink: '/reference',
environmentScopeLink: '/help/environments',
glFeatures: {
@@ -122,9 +120,9 @@ describe('Ci variable modal', () => {
expect(wrapper.emitted('add-variable')).toEqual([[currentVariable]]);
});
- it('Dispatches the `hideModal` event when dismissing', () => {
+ it('Dispatches the `close-form` event when dismissing', () => {
findModal().vm.$emit('hidden');
- expect(wrapper.emitted('hideModal')).toEqual([[]]);
+ expect(wrapper.emitted('close-form')).toEqual([[]]);
});
});
});
@@ -171,7 +169,7 @@ describe('Ci variable modal', () => {
it('does not show AWS guidance tip', () => {
const tip = findAWSTip();
- expect(tip.exists()).toBe(true);
+
expect(tip.isVisible()).toBe(false);
});
});
@@ -184,13 +182,18 @@ describe('Ci variable modal', () => {
key: AWS_ACCESS_KEY_ID,
value: 'AKIAIOSFODNN7EXAMPLEjdhy',
};
- createComponent({ mountFn: mountExtended, props: { selectedVariable: AWSKeyVariable } });
+ createComponent({
+ mountFn: shallowMountExtended,
+ props: { selectedVariable: AWSKeyVariable },
+ });
});
it('shows AWS guidance tip', () => {
const tip = findAWSTip();
- expect(tip.exists()).toBe(true);
+
expect(tip.isVisible()).toBe(true);
+ expect(tip.props('title')).toBe(AWS_TIP_TITLE);
+ expect(tip.findComponent(GlSprintf).attributes('message')).toBe(AWS_TIP_MESSAGE);
});
});
@@ -313,9 +316,9 @@ describe('Ci variable modal', () => {
expect(wrapper.emitted('update-variable')).toEqual([[variable]]);
});
- it('Propagates the `hideModal` event', () => {
+ it('Propagates the `close-form` event', () => {
findModal().vm.$emit('hidden');
- expect(wrapper.emitted('hideModal')).toEqual([[]]);
+ expect(wrapper.emitted('close-form')).toEqual([[]]);
});
it('dispatches `delete-variable` with correct variable to delete', () => {
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
index d72cfc5fc14..f5737c61eea 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
@@ -1,7 +1,9 @@
import { shallowMount } from '@vue/test-utils';
import CiVariableSettings from '~/ci/ci_variable_list/components/ci_variable_settings.vue';
-import ciVariableModal from '~/ci/ci_variable_list/components/ci_variable_modal.vue';
-import ciVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
+import CiVariableModal from '~/ci/ci_variable_list/components/ci_variable_modal.vue';
+import CiVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
+import CiVariableDrawer from '~/ci/ci_variable_list/components/ci_variable_drawer.vue';
+
import {
ADD_VARIABLE_ACTION,
EDIT_VARIABLE_ACTION,
@@ -27,15 +29,22 @@ describe('Ci variable table', () => {
variables: mockVariablesWithScopes(projectString),
};
- const findCiVariableTable = () => wrapper.findComponent(ciVariableTable);
- const findCiVariableModal = () => wrapper.findComponent(ciVariableModal);
+ const findCiVariableDrawer = () => wrapper.findComponent(CiVariableDrawer);
+ const findCiVariableTable = () => wrapper.findComponent(CiVariableTable);
+ const findCiVariableModal = () => wrapper.findComponent(CiVariableModal);
- const createComponent = ({ props = {} } = {}) => {
+ const createComponent = ({ props = {}, featureFlags = {} } = {}) => {
wrapper = shallowMount(CiVariableSettings, {
propsData: {
...defaultProps,
...props,
},
+ provide: {
+ glFeatures: {
+ ciVariableDrawer: false,
+ ...featureFlags,
+ },
+ },
});
};
@@ -70,51 +79,51 @@ describe('Ci variable table', () => {
});
});
- describe('modal mode', () => {
+ describe.each`
+ bool | flagStatus | elementName | findElement
+ ${false} | ${'disabled'} | ${'modal'} | ${findCiVariableModal}
+ ${true} | ${'enabled'} | ${'drawer'} | ${findCiVariableDrawer}
+ `('when ciVariableDrawer feature flag is $flagStatus', ({ bool, elementName, findElement }) => {
beforeEach(() => {
- createComponent();
+ createComponent({ featureFlags: { ciVariableDrawer: bool } });
});
- it('passes down ADD mode when receiving an empty variable', async () => {
- await findCiVariableTable().vm.$emit('set-selected-variable');
-
- expect(findCiVariableModal().props('mode')).toBe(ADD_VARIABLE_ACTION);
+ it(`${elementName} is hidden by default`, () => {
+ expect(findElement().exists()).toBe(false);
});
- it('passes down EDIT mode when receiving a variable', async () => {
- await findCiVariableTable().vm.$emit('set-selected-variable', newVariable);
+ it(`shows ${elementName} when adding a new variable`, async () => {
+ await findCiVariableTable().vm.$emit('set-selected-variable');
- expect(findCiVariableModal().props('mode')).toBe(EDIT_VARIABLE_ACTION);
+ expect(findElement().exists()).toBe(true);
});
- });
- describe('variable modal', () => {
- beforeEach(() => {
- createComponent();
- });
+ it(`shows ${elementName} when updating a variable`, async () => {
+ await findCiVariableTable().vm.$emit('set-selected-variable', newVariable);
- it('is hidden by default', () => {
- expect(findCiVariableModal().exists()).toBe(false);
+ expect(findElement().exists()).toBe(true);
});
- it('shows modal when adding a new variable', async () => {
+ it(`hides ${elementName} when closing the form`, async () => {
await findCiVariableTable().vm.$emit('set-selected-variable');
- expect(findCiVariableModal().exists()).toBe(true);
- });
+ expect(findElement().isVisible()).toBe(true);
- it('shows modal when updating a variable', async () => {
- await findCiVariableTable().vm.$emit('set-selected-variable', newVariable);
+ await findElement().vm.$emit('close-form');
- expect(findCiVariableModal().exists()).toBe(true);
+ expect(findElement().exists()).toBe(false);
});
- it('hides modal when receiving the event from the modal', async () => {
+ it(`passes down ADD mode to ${elementName} when receiving an empty variable`, async () => {
await findCiVariableTable().vm.$emit('set-selected-variable');
- await findCiVariableModal().vm.$emit('hideModal');
+ expect(findElement().props('mode')).toBe(ADD_VARIABLE_ACTION);
+ });
+
+ it(`passes down EDIT mode to ${elementName} when receiving a variable`, async () => {
+ await findCiVariableTable().vm.$emit('set-selected-variable', newVariable);
- expect(findCiVariableModal().exists()).toBe(false);
+ expect(findElement().props('mode')).toBe(EDIT_VARIABLE_ACTION);
});
});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
index f3f1c5bd2c5..39c03a41660 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlBadge, GlKeysetPagination } from '@gitlab/ui';
+import { GlAlert, GlBadge, GlKeysetPagination, GlCard, GlIcon } from '@gitlab/ui';
import { sprintf } from '~/locale';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import CiVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
@@ -36,7 +36,7 @@ describe('Ci variable table', () => {
};
const findRevealButton = () => wrapper.findByText('Reveal values');
- const findAddButton = () => wrapper.findByLabelText('Add');
+ const findAddButton = () => wrapper.findByTestId('add-ci-variable-button');
const findEditButton = () => wrapper.findByLabelText('Edit');
const findEmptyVariablesPlaceholder = () => wrapper.findByText('There are no variables yet.');
const findHiddenValues = () => wrapper.findAllByTestId('hiddenValue');
@@ -50,11 +50,30 @@ describe('Ci variable table', () => {
const findGroupCiCdSettingsLink = (rowIndex) =>
wrapper.findAllByTestId('ci-variable-table-row-cicd-path').at(rowIndex).attributes('href');
const findKeysetPagination = () => wrapper.findComponent(GlKeysetPagination);
+ const findCard = () => wrapper.findComponent(GlCard);
const generateExceedsVariableLimitText = (entity, currentVariableCount, maxVariableLimit) => {
return sprintf(EXCEEDS_VARIABLE_LIMIT_TEXT, { entity, currentVariableCount, maxVariableLimit });
};
+ describe('card', () => {
+ it('displays the correct title', () => {
+ createComponent();
+ expect(findCard().text()).toContain('CI/CD Variables');
+ });
+
+ it('displays the correct icon', () => {
+ createComponent();
+ expect(findCard().findComponent(GlIcon).props('name')).toBe('code');
+ });
+
+ it('displays the number of added CI/CD Variables', () => {
+ const variables = [1, 2, 3];
+ createComponent({ props: { variables } });
+ expect(findCard().text()).toContain(String(variables.length));
+ });
+ });
+
describe.each`
isVariablePagesEnabled | text
${true} | ${'enabled'}
@@ -88,7 +107,7 @@ describe('Ci variable table', () => {
${1} | ${'Value'}
${2} | ${'Attributes'}
${3} | ${'Environments'}
- ${4} | ${''}
+ ${4} | ${'Actions'}
`('renders the $text column', ({ index, text }) => {
expect(findTableColumnText(index)).toEqual(text);
});
diff --git a/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js b/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js
index 3a99949413b..4057759b9b9 100644
--- a/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js
@@ -1,3 +1,5 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
import {
GlDropdown,
GlDropdownItem,
@@ -5,8 +7,7 @@ import {
GlLoadingIcon,
GlSearchBoxByType,
} from '@gitlab/ui';
-import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
-import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BranchSwitcher from '~/ci/pipeline_editor/components/file_nav/branch_switcher.vue';
@@ -17,10 +18,10 @@ import getLastCommitBranch from '~/ci/pipeline_editor/graphql/queries/client/las
import { resolvers } from '~/ci/pipeline_editor/graphql/resolvers';
import {
+ generateMockProjectBranches,
mockBranchPaginationLimit,
mockDefaultBranch,
mockEmptySearchBranches,
- mockProjectBranches,
mockProjectFullPath,
mockSearchBranches,
mockTotalBranches,
@@ -28,55 +29,14 @@ import {
mockTotalSearchResults,
} from '../../mock_data';
-const localVue = createLocalVue();
-localVue.use(VueApollo);
-
describe('Pipeline editor branch switcher', () => {
let wrapper;
let mockApollo;
let mockAvailableBranchQuery;
- const createComponent = ({
- currentBranch = mockDefaultBranch,
- availableBranches = ['main'],
- isQueryLoading = false,
- mountFn = shallowMount,
- options = {},
- props = {},
- } = {}) => {
- wrapper = mountFn(BranchSwitcher, {
- propsData: {
- ...props,
- paginationLimit: mockBranchPaginationLimit,
- },
- provide: {
- projectFullPath: mockProjectFullPath,
- totalBranches: mockTotalBranches,
- },
- mocks: {
- $apollo: {
- queries: {
- availableBranches: {
- loading: isQueryLoading,
- },
- },
- },
- },
- data() {
- return {
- availableBranches,
- currentBranch,
- };
- },
- ...options,
- });
- };
+ Vue.use(VueApollo);
- const createComponentWithApollo = ({
- mountFn = shallowMount,
- props = {},
- availableBranches = ['main'],
- } = {}) => {
+ const createComponent = ({ props = {} } = {}) => {
const handlers = [[getAvailableBranchesQuery, mockAvailableBranchQuery]];
mockApollo = createMockApollo(handlers, resolvers);
@@ -106,16 +66,19 @@ describe('Pipeline editor branch switcher', () => {
},
});
- createComponent({
- mountFn,
- props,
- availableBranches,
- options: {
- localVue,
- apolloProvider: mockApollo,
- mocks: {},
+ wrapper = shallowMount(BranchSwitcher, {
+ propsData: {
+ ...props,
+ paginationLimit: mockBranchPaginationLimit,
+ },
+ provide: {
+ projectFullPath: mockProjectFullPath,
+ totalBranches: mockTotalBranches,
},
+ apolloProvider: mockApollo,
});
+
+ return waitForPromises();
};
const findDropdown = () => wrapper.findComponent(GlDropdown);
@@ -137,7 +100,7 @@ describe('Pipeline editor branch switcher', () => {
expect(wrapper.emitted('showError')).toBeDefined();
expect(wrapper.emitted('showError')[0]).toEqual([
{
- reasons: [wrapper.vm.$options.i18n.fetchError],
+ reasons: ['Unable to fetch branch list for this project.'],
type: DEFAULT_FAILURE,
},
]);
@@ -145,19 +108,26 @@ describe('Pipeline editor branch switcher', () => {
describe('when querying for the first time', () => {
beforeEach(() => {
- createComponentWithApollo({ availableBranches: [] });
+ createComponent();
});
it('disables the dropdown', () => {
expect(findDropdown().props('disabled')).toBe(true);
});
+
+ it('shows loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
});
describe('after querying', () => {
beforeEach(async () => {
- setAvailableBranchesMock(mockProjectBranches);
- createComponentWithApollo({ mountFn: mount });
- await waitForPromises();
+ setAvailableBranchesMock(generateMockProjectBranches());
+ await createComponent();
+ });
+
+ it('does not render the loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
});
it('renders search box', () => {
@@ -185,8 +155,7 @@ describe('Pipeline editor branch switcher', () => {
describe('on fetch error', () => {
beforeEach(async () => {
setAvailableBranchesMock(new Error());
- createComponentWithApollo({ availableBranches: [] });
- await waitForPromises();
+ await createComponent();
});
it('does not render dropdown', () => {
@@ -201,9 +170,8 @@ describe('Pipeline editor branch switcher', () => {
describe('when switching branches', () => {
beforeEach(async () => {
jest.spyOn(window.history, 'pushState').mockImplementation(() => {});
- setAvailableBranchesMock(mockProjectBranches);
- createComponentWithApollo({ mountFn: mount });
- await waitForPromises();
+ setAvailableBranchesMock(generateMockProjectBranches());
+ await createComponent();
});
it('updates session history when selecting a different branch', async () => {
@@ -251,7 +219,7 @@ describe('Pipeline editor branch switcher', () => {
describe('with unsaved changes', () => {
beforeEach(async () => {
- createComponentWithApollo({ mountFn: mount, props: { hasUnsavedChanges: true } });
+ createComponent({ props: { hasUnsavedChanges: true } });
await waitForPromises();
});
@@ -269,9 +237,8 @@ describe('Pipeline editor branch switcher', () => {
describe('when searching', () => {
beforeEach(async () => {
- setAvailableBranchesMock(mockProjectBranches);
- createComponentWithApollo({ mountFn: mount });
- await waitForPromises();
+ setAvailableBranchesMock(generateMockProjectBranches());
+ await createComponent();
});
afterEach(() => {
@@ -329,7 +296,7 @@ describe('Pipeline editor branch switcher', () => {
findSearchBox().vm.$emit('input', 'te');
await waitForPromises();
- mockAvailableBranchQuery.mockResolvedValue(mockProjectBranches);
+ mockAvailableBranchQuery.mockResolvedValue(generateMockProjectBranches());
});
it('calls query with correct variables', async () => {
@@ -355,23 +322,10 @@ describe('Pipeline editor branch switcher', () => {
});
});
- describe('loading icon', () => {
- it.each`
- isQueryLoading | isRendered
- ${true} | ${true}
- ${false} | ${false}
- `('checks if query is loading before rendering', ({ isQueryLoading, isRendered }) => {
- createComponent({ isQueryLoading, mountFn: mount });
-
- expect(findLoadingIcon().exists()).toBe(isRendered);
- });
- });
-
describe('when scrolling to the bottom of the list', () => {
beforeEach(async () => {
- setAvailableBranchesMock(mockProjectBranches);
- createComponentWithApollo();
- await waitForPromises();
+ setAvailableBranchesMock(generateMockProjectBranches());
+ await createComponent();
});
afterEach(() => {
@@ -382,6 +336,7 @@ describe('Pipeline editor branch switcher', () => {
it('fetches more branches', async () => {
expect(mockAvailableBranchQuery).toHaveBeenCalledTimes(1);
+ setAvailableBranchesMock(generateMockProjectBranches('new-'));
findInfiniteScroll().vm.$emit('bottomReached');
await waitForPromises();
@@ -389,6 +344,7 @@ describe('Pipeline editor branch switcher', () => {
});
it('calls the query with the correct variables', async () => {
+ setAvailableBranchesMock(generateMockProjectBranches('new-'));
findInfiniteScroll().vm.$emit('bottomReached');
await waitForPromises();
diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
index 29759f828e4..f5e0b65d615 100644
--- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
@@ -4,7 +4,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineEditorMiniGraph from '~/ci/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
-import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
+import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
import { PIPELINE_FAILURE } from '~/ci/pipeline_editor/constants';
import { mockLinkedPipelines, mockProjectFullPath, mockProjectPipeline } from '../../mock_data';
@@ -41,7 +41,7 @@ describe('Pipeline Status', () => {
});
};
- const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
+ const findLegacyPipelineMiniGraph = () => wrapper.findComponent(LegacyPipelineMiniGraph);
beforeEach(() => {
mockLinkedPipelinesQuery = jest.fn();
@@ -53,7 +53,7 @@ describe('Pipeline Status', () => {
});
it('renders pipeline mini graph', () => {
- expect(findPipelineMiniGraph().exists()).toBe(true);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(true);
});
});
@@ -63,7 +63,7 @@ describe('Pipeline Status', () => {
});
it('does not render pipeline mini graph', () => {
- expect(findPipelineMiniGraph().exists()).toBe(false);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(false);
});
});
@@ -85,7 +85,7 @@ describe('Pipeline Status', () => {
});
it('renders only the latest downstream pipelines', () => {
- expect(findPipelineMiniGraph().props('downstreamPipelines')).toHaveLength(1);
+ expect(findLegacyPipelineMiniGraph().props('downstreamPipelines')).toHaveLength(1);
});
});
diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
index 9d93ba332e9..3bbe14adb88 100644
--- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
@@ -6,7 +6,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineStatus, { i18n } from '~/ci/pipeline_editor/components/header/pipeline_status.vue';
import getPipelineQuery from '~/ci/pipeline_editor/graphql/queries/pipeline.query.graphql';
-import GraphqlPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph.vue';
+import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
import PipelineEditorMiniGraph from '~/ci/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
import { mockCommitSha, mockProjectPipeline, mockProjectFullPath } from '../../mock_data';
@@ -38,8 +38,9 @@ describe('Pipeline Status', () => {
const findIcon = () => wrapper.findComponent(GlIcon);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findGraphqlPipelineMiniGraph = () => wrapper.findComponent(GraphqlPipelineMiniGraph);
const findPipelineEditorMiniGraph = () => wrapper.findComponent(PipelineEditorMiniGraph);
+ const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
+
const findPipelineId = () => wrapper.find('[data-testid="pipeline-id"]');
const findPipelineCommit = () => wrapper.find('[data-testid="pipeline-commit"]');
const findPipelineErrorMsg = () => wrapper.find('[data-testid="pipeline-error-msg"]');
@@ -142,18 +143,18 @@ describe('Pipeline Status', () => {
});
it.each`
- state | provide | showPipelineMiniGraph | showGraphqlPipelineMiniGraph
- ${true} | ${{ ciGraphqlPipelineMiniGraph: true }} | ${false} | ${true}
- ${false} | ${{}} | ${true} | ${false}
+ state | showLegacyPipelineMiniGraph | showPipelineMiniGraph
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${false}
`(
'renders the correct component when the feature flag is set to $state',
- async ({ provide, showPipelineMiniGraph, showGraphqlPipelineMiniGraph }) => {
- createComponentWithApollo(provide);
+ async ({ state, showLegacyPipelineMiniGraph, showPipelineMiniGraph }) => {
+ createComponentWithApollo({ ciGraphqlPipelineMiniGraph: state });
await waitForPromises();
- expect(findPipelineEditorMiniGraph().exists()).toBe(showPipelineMiniGraph);
- expect(findGraphqlPipelineMiniGraph().exists()).toBe(showGraphqlPipelineMiniGraph);
+ expect(findPipelineEditorMiniGraph().exists()).toBe(showLegacyPipelineMiniGraph);
+ expect(findPipelineMiniGraph().exists()).toBe(showPipelineMiniGraph);
},
);
});
diff --git a/spec/frontend/ci/pipeline_editor/mock_data.js b/spec/frontend/ci/pipeline_editor/mock_data.js
index a3294cdc269..007abde939f 100644
--- a/spec/frontend/ci/pipeline_editor/mock_data.js
+++ b/spec/frontend/ci/pipeline_editor/mock_data.js
@@ -1,5 +1,6 @@
import { CI_CONFIG_STATUS_INVALID, CI_CONFIG_STATUS_VALID } from '~/ci/pipeline_editor/constants';
import { unwrapStagesWithNeeds } from '~/pipelines/components/unwrapping_utils';
+import { DOCS_URL_IN_EE_DIR } from 'jh_else_ce/lib/utils/url_utility';
export const commonOptions = {
ciConfigPath: '/ci/config',
@@ -295,7 +296,7 @@ export const mockEmptyCommitShaResults = {
},
};
-export const mockProjectBranches = {
+export const generateMockProjectBranches = (prefix = '') => ({
data: {
project: {
id: '1',
@@ -311,14 +312,14 @@ export const mockProjectBranches = {
'mock-feature',
'test-merge-request',
'staging',
- ],
+ ].map((branch) => `${prefix}${branch}`),
},
},
},
-};
+});
-export const mockTotalBranchResults =
- mockProjectBranches.data.project.repository.branchNames.length;
+export const mockTotalBranchResults = generateMockProjectBranches().data.project.repository
+ .branchNames.length;
export const mockSearchBranches = {
data: {
@@ -601,7 +602,7 @@ export const mockErrors = [
];
export const mockWarnings = [
- '"jobs:multi_project_job may allow multiple pipelines to run for a single action due to `rules:when` clause with no `workflow:rules` - read more: https://docs.gitlab.com/ee/ci/troubleshooting.html#pipeline-warnings"',
+ `"jobs:multi_project_job may allow multiple pipelines to run for a single action due to \`rules:when\` clause with no \`workflow:rules\` - read more: ${DOCS_URL_IN_EE_DIR}/ci/troubleshooting.html#pipeline-warnings"`,
];
export const mockCommitCreateResponse = {
diff --git a/spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js
index 576263d5418..ca5f80f331c 100644
--- a/spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js
@@ -1,19 +1,19 @@
import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { GlButton, GlDrawer, GlModal } from '@gitlab/ui';
+import { GlButton, GlModal } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
-import CiEditorHeader from '~/ci/pipeline_editor/components/editor/ci_editor_header.vue';
import CommitSection from '~/ci/pipeline_editor/components/commit/commit_section.vue';
import PipelineEditorDrawer from '~/ci/pipeline_editor/components/drawer/pipeline_editor_drawer.vue';
import JobAssistantDrawer from '~/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer.vue';
import PipelineEditorFileNav from '~/ci/pipeline_editor/components/file_nav/pipeline_editor_file_nav.vue';
import PipelineEditorFileTree from '~/ci/pipeline_editor/components/file_tree/container.vue';
-import BranchSwitcher from '~/ci/pipeline_editor/components/file_nav/branch_switcher.vue';
import PipelineEditorHeader from '~/ci/pipeline_editor/components/header/pipeline_editor_header.vue';
import PipelineEditorTabs from '~/ci/pipeline_editor/components/pipeline_editor_tabs.vue';
import {
CREATE_TAB,
+ EDITOR_APP_DRAWER_HELP,
+ EDITOR_APP_DRAWER_JOB_ASSISTANT,
+ EDITOR_APP_DRAWER_NONE,
FILE_TREE_DISPLAY_KEY,
VALIDATE_TAB,
MERGED_TAB,
@@ -29,10 +29,9 @@ jest.mock('~/lib/utils/common_utils');
describe('Pipeline editor home wrapper', () => {
let wrapper;
- const createComponent = ({ props = {}, glFeatures = {}, data = {}, stubs = {} } = {}) => {
+ const createComponent = ({ props = {}, glFeatures = {}, stubs = {} } = {}) => {
wrapper = extendedWrapper(
shallowMount(PipelineEditorHome, {
- data: () => data,
propsData: {
ciConfigData: mockLintResponse,
ciFileContent: mockCiYml,
@@ -53,7 +52,6 @@ describe('Pipeline editor home wrapper', () => {
);
};
- const findBranchSwitcher = () => wrapper.findComponent(BranchSwitcher);
const findCommitSection = () => wrapper.findComponent(CommitSection);
const findFileNav = () => wrapper.findComponent(PipelineEditorFileNav);
const findModal = () => wrapper.findComponent(GlModal);
@@ -63,8 +61,16 @@ describe('Pipeline editor home wrapper', () => {
const findPipelineEditorHeader = () => wrapper.findComponent(PipelineEditorHeader);
const findPipelineEditorTabs = () => wrapper.findComponent(PipelineEditorTabs);
const findFileTreeBtn = () => wrapper.findByTestId('file-tree-toggle');
- const findHelpBtn = () => wrapper.findByTestId('drawer-toggle');
- const findJobAssistantBtn = () => wrapper.findByTestId('job-assistant-drawer-toggle');
+
+ const clickHelpBtn = async () => {
+ await findPipelineEditorDrawer().vm.$emit('switch-drawer', EDITOR_APP_DRAWER_HELP);
+ };
+ const clickJobAssistantBtn = async () => {
+ await findJobAssistantDrawer().vm.$emit('switch-drawer', EDITOR_APP_DRAWER_JOB_ASSISTANT);
+ };
+ const closeDrawer = async (finder) => {
+ await finder().vm.$emit('switch-drawer', EDITOR_APP_DRAWER_NONE);
+ };
afterEach(() => {
localStorage.clear();
@@ -103,11 +109,9 @@ describe('Pipeline editor home wrapper', () => {
});
});
describe('when `showSwitchBranchModal` value is true', () => {
- beforeEach(() => {
- createComponent({
- data: { showSwitchBranchModal: true },
- stubs: { PipelineEditorFileNav },
- });
+ beforeEach(async () => {
+ createComponent();
+ await findFileNav().vm.$emit('select-branch');
});
it('is visible', () => {
@@ -115,11 +119,11 @@ describe('Pipeline editor home wrapper', () => {
});
it('pass down `shouldLoadNewBranch` to the branch switcher when primary is selected', async () => {
- expect(findBranchSwitcher().props('shouldLoadNewBranch')).toBe(false);
+ expect(findFileNav().props('shouldLoadNewBranch')).toBe(false);
await findModal().vm.$emit('primary');
- expect(findBranchSwitcher().props('shouldLoadNewBranch')).toBe(true);
+ expect(findFileNav().props('shouldLoadNewBranch')).toBe(true);
});
it('closes the modal when secondary action is selected', async () => {
@@ -148,9 +152,7 @@ describe('Pipeline editor home wrapper', () => {
async ({ tab, shouldShow }) => {
expect(findCommitSection().exists()).toBe(true);
- findPipelineEditorTabs().vm.$emit('set-current-tab', tab);
-
- await nextTick();
+ await findPipelineEditorTabs().vm.$emit('set-current-tab', tab);
expect(findCommitSection().isVisible()).toBe(shouldShow);
},
@@ -159,12 +161,10 @@ describe('Pipeline editor home wrapper', () => {
it('shows the commit form again when coming back to the create tab', async () => {
expect(findCommitSection().isVisible()).toBe(true);
- findPipelineEditorTabs().vm.$emit('set-current-tab', MERGED_TAB);
- await nextTick();
+ await findPipelineEditorTabs().vm.$emit('set-current-tab', MERGED_TAB);
expect(findCommitSection().isVisible()).toBe(false);
- findPipelineEditorTabs().vm.$emit('set-current-tab', CREATE_TAB);
- await nextTick();
+ await findPipelineEditorTabs().vm.$emit('set-current-tab', CREATE_TAB);
expect(findCommitSection().isVisible()).toBe(true);
});
@@ -195,7 +195,9 @@ describe('Pipeline editor home wrapper', () => {
describe('when "walkthrough-popover-cta-clicked" is emitted from pipeline editor tabs', () => {
it('passes down `scrollToCommitForm=true` to commit section', async () => {
expect(findCommitSection().props('scrollToCommitForm')).toBe(false);
+
await findPipelineEditorTabs().vm.$emit('walkthrough-popover-cta-clicked');
+
expect(findCommitSection().props('scrollToCommitForm')).toBe(true);
});
});
@@ -204,6 +206,7 @@ describe('Pipeline editor home wrapper', () => {
it('passes down `scrollToCommitForm=false` to commit section', async () => {
await findPipelineEditorTabs().vm.$emit('walkthrough-popover-cta-clicked');
expect(findCommitSection().props('scrollToCommitForm')).toBe(true);
+
await findCommitSection().vm.$emit('scrolled-to-commit-form');
expect(findCommitSection().props('scrollToCommitForm')).toBe(false);
});
@@ -211,133 +214,49 @@ describe('Pipeline editor home wrapper', () => {
});
describe('help drawer', () => {
- const clickHelpBtn = async () => {
- findHelpBtn().vm.$emit('click');
- await nextTick();
- };
-
- it('hides the drawer by default', () => {
+ beforeEach(() => {
createComponent();
+ });
+ it('hides the drawer by default', () => {
expect(findPipelineEditorDrawer().props('isVisible')).toBe(false);
});
it('toggles the drawer on button click', async () => {
- createComponent({
- stubs: {
- CiEditorHeader,
- GlButton,
- GlDrawer,
- PipelineEditorTabs,
- PipelineEditorDrawer,
- },
- });
-
- await clickHelpBtn();
-
- expect(findPipelineEditorDrawer().props('isVisible')).toBe(true);
-
- await clickHelpBtn();
-
expect(findPipelineEditorDrawer().props('isVisible')).toBe(false);
- });
-
- it("closes the drawer through the drawer's close button", async () => {
- createComponent({
- stubs: {
- CiEditorHeader,
- GlButton,
- GlDrawer,
- PipelineEditorTabs,
- PipelineEditorDrawer,
- },
- });
await clickHelpBtn();
-
expect(findPipelineEditorDrawer().props('isVisible')).toBe(true);
- findPipelineEditorDrawer().findComponent(GlDrawer).vm.$emit('close');
- await nextTick();
-
+ await closeDrawer(findPipelineEditorDrawer);
expect(findPipelineEditorDrawer().props('isVisible')).toBe(false);
});
});
describe('job assistant drawer', () => {
- const clickHelpBtn = async () => {
- findHelpBtn().vm.$emit('click');
- await nextTick();
- };
- const clickJobAssistantBtn = async () => {
- findJobAssistantBtn().vm.$emit('click');
- await nextTick();
- };
-
- const stubs = {
- CiEditorHeader,
- GlButton,
- GlDrawer,
- PipelineEditorTabs,
- JobAssistantDrawer,
- };
-
- it('hides the job assistant drawer by default', () => {
+ beforeEach(() => {
createComponent({
glFeatures: {
ciJobAssistantDrawer: true,
},
});
+ });
+ it('hides the job assistant drawer by default', () => {
expect(findJobAssistantDrawer().props('isVisible')).toBe(false);
});
it('toggles the job assistant drawer on button click', async () => {
- createComponent({
- stubs,
- glFeatures: {
- ciJobAssistantDrawer: true,
- },
- });
-
- await clickJobAssistantBtn();
-
- expect(findJobAssistantDrawer().props('isVisible')).toBe(true);
-
- await clickJobAssistantBtn();
-
expect(findJobAssistantDrawer().props('isVisible')).toBe(false);
- });
-
- it("closes the job assistant drawer through the drawer's close button", async () => {
- createComponent({
- stubs,
- glFeatures: {
- ciJobAssistantDrawer: true,
- },
- });
await clickJobAssistantBtn();
-
expect(findJobAssistantDrawer().props('isVisible')).toBe(true);
- findJobAssistantDrawer().findComponent(GlDrawer).vm.$emit('close');
- await nextTick();
-
+ await closeDrawer(findJobAssistantDrawer);
expect(findJobAssistantDrawer().props('isVisible')).toBe(false);
});
it('covers helper drawer when opened last', async () => {
- createComponent({
- stubs: {
- ...stubs,
- PipelineEditorDrawer,
- },
- glFeatures: {
- ciJobAssistantDrawer: true,
- },
- });
-
await clickHelpBtn();
await clickJobAssistantBtn();
@@ -348,16 +267,6 @@ describe('Pipeline editor home wrapper', () => {
});
it('covered by helper drawer when opened first', async () => {
- createComponent({
- stubs: {
- ...stubs,
- PipelineEditorDrawer,
- },
- glFeatures: {
- ciJobAssistantDrawer: true,
- },
- });
-
await clickJobAssistantBtn();
await clickHelpBtn();
@@ -370,8 +279,7 @@ describe('Pipeline editor home wrapper', () => {
describe('file tree', () => {
const toggleFileTree = async () => {
- findFileTreeBtn().vm.$emit('click');
- await nextTick();
+ await findFileTreeBtn().vm.$emit('click');
};
describe('button toggle', () => {
@@ -412,9 +320,7 @@ describe('Pipeline editor home wrapper', () => {
describe('when file tree display state is saved in local storage', () => {
beforeEach(() => {
localStorage.setItem(FILE_TREE_DISPLAY_KEY, 'true');
- createComponent({
- stubs: { PipelineEditorFileNav },
- });
+ createComponent();
});
it('shows the file tree by default', () => {
@@ -424,9 +330,7 @@ describe('Pipeline editor home wrapper', () => {
describe('when file tree display state is not saved in local storage', () => {
beforeEach(() => {
- createComponent({
- stubs: { PipelineEditorFileNav },
- });
+ createComponent();
});
it('hides the file tree by default', () => {
diff --git a/spec/frontend/ci/pipeline_new/mock_data.js b/spec/frontend/ci/pipeline_new/mock_data.js
index 76a88f63298..72a491bb946 100644
--- a/spec/frontend/ci/pipeline_new/mock_data.js
+++ b/spec/frontend/ci/pipeline_new/mock_data.js
@@ -1,3 +1,5 @@
+import { DOCS_URL_IN_EE_DIR } from 'jh_else_ce/lib/utils/url_utility';
+
export const mockFilteredRefs = {
Branches: ['branch-1'],
Tags: ['1.0.0', '1.1.0'],
@@ -28,9 +30,9 @@ export const mockError = {
'test job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post',
],
warnings: [
- 'jobs:build1 may allow multiple pipelines to run for a single action due to `rules:when` clause with no `workflow:rules` - read more: https://docs.gitlab.com/ee/ci/troubleshooting.html#pipeline-warnings',
- 'jobs:build2 may allow multiple pipelines to run for a single action due to `rules:when` clause with no `workflow:rules` - read more: https://docs.gitlab.com/ee/ci/troubleshooting.html#pipeline-warnings',
- 'jobs:build3 may allow multiple pipelines to run for a single action due to `rules:when` clause with no `workflow:rules` - read more: https://docs.gitlab.com/ee/ci/troubleshooting.html#pipeline-warnings',
+ `jobs:build1 may allow multiple pipelines to run for a single action due to \`rules:when\` clause with no \`workflow:rules\` - read more: ${DOCS_URL_IN_EE_DIR}/ci/troubleshooting.html#pipeline-warnings`,
+ `jobs:build2 may allow multiple pipelines to run for a single action due to \`rules:when\` clause with no \`workflow:rules\` - read more: ${DOCS_URL_IN_EE_DIR}/ci/troubleshooting.html#pipeline-warnings`,
+ `jobs:build3 may allow multiple pipelines to run for a single action due to \`rules:when\` clause with no \`workflow:rules\` - read more: ${DOCS_URL_IN_EE_DIR}/ci/troubleshooting.html#pipeline-warnings`,
],
total_warnings: 7,
};
diff --git a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js
index bb48d4dc38d..79a0cfa0dc9 100644
--- a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js
@@ -21,6 +21,7 @@ import {
createScheduleMutationResponse,
updateScheduleMutationResponse,
mockSinglePipelineScheduleNode,
+ mockSinglePipelineScheduleNodeNoVars,
} from '../mock_data';
Vue.use(VueApollo);
@@ -51,6 +52,9 @@ describe('Pipeline schedules form', () => {
const dailyLimit = '';
const querySuccessHandler = jest.fn().mockResolvedValue(mockSinglePipelineScheduleNode);
+ const querySuccessEmptyVarsHandler = jest
+ .fn()
+ .mockResolvedValue(mockSinglePipelineScheduleNodeNoVars);
const queryFailedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
const createMutationHandlerSuccess = jest.fn().mockResolvedValue(createScheduleMutationResponse);
@@ -95,6 +99,10 @@ describe('Pipeline schedules form', () => {
const findVariableRows = () => wrapper.findAllByTestId('ci-variable-row');
const findKeyInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-key');
const findValueInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-value');
+ const findHiddenValueInputs = () =>
+ wrapper.findAllByTestId('pipeline-form-ci-variable-hidden-value');
+ const findVariableSecurityBtn = () => wrapper.findByTestId('variable-security-btn');
+
const findRemoveIcons = () => wrapper.findAllByTestId('remove-ci-variable-row');
const addVariableToForm = () => {
@@ -241,6 +249,12 @@ describe('Pipeline schedules form', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
+ it('does not show variable security button', () => {
+ createComponent();
+
+ expect(findVariableSecurityBtn().exists()).toBe(false);
+ });
+
describe('schedule creation success', () => {
let mock;
@@ -336,6 +350,26 @@ describe('Pipeline schedules form', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
+ it('shows variable security button', async () => {
+ createComponent(shallowMountExtended, true, [
+ [getPipelineSchedulesQuery, querySuccessHandler],
+ ]);
+
+ await waitForPromises();
+
+ expect(findVariableSecurityBtn().exists()).toBe(true);
+ });
+
+ it('does not show variable security button with no present variables', async () => {
+ createComponent(shallowMountExtended, true, [
+ [getPipelineSchedulesQuery, querySuccessEmptyVarsHandler],
+ ]);
+
+ await waitForPromises();
+
+ expect(findVariableSecurityBtn().exists()).toBe(false);
+ });
+
describe('schedule fetch success', () => {
it('fetches schedule and sets form data correctly', async () => {
createComponent(mountExtended, true, [[getPipelineSchedulesQuery, querySuccessHandler]]);
@@ -351,8 +385,13 @@ describe('Pipeline schedules form', () => {
expect(findVariableRows()).toHaveLength(3);
expect(findKeyInputs().at(0).element.value).toBe(variables[0].key);
expect(findKeyInputs().at(1).element.value).toBe(variables[1].key);
- expect(findValueInputs().at(0).element.value).toBe(variables[0].value);
- expect(findValueInputs().at(1).element.value).toBe(variables[1].value);
+ // values are hidden on load when editing a schedule
+ expect(findHiddenValueInputs().at(0).element.value).toBe('*****************');
+ expect(findHiddenValueInputs().at(1).element.value).toBe('*****************');
+ expect(findHiddenValueInputs().at(0).attributes('disabled')).toBe('disabled');
+ expect(findHiddenValueInputs().at(1).attributes('disabled')).toBe('disabled');
+ // empty placeholder to create a new variable
+ expect(findValueInputs()).toHaveLength(1);
});
});
@@ -432,5 +471,23 @@ describe('Pipeline schedules form', () => {
message: 'An error occurred while updating the pipeline schedule.',
});
});
+
+ it('hides/shows variable values', async () => {
+ createComponent(mountExtended, true, [[getPipelineSchedulesQuery, querySuccessHandler]]);
+
+ await waitForPromises();
+
+ // shows two hidden values and one placeholder
+ expect(findHiddenValueInputs()).toHaveLength(2);
+ expect(findValueInputs()).toHaveLength(1);
+
+ findVariableSecurityBtn().vm.$emit('click');
+
+ await nextTick();
+
+ // shows all variable values
+ expect(findHiddenValueInputs()).toHaveLength(0);
+ expect(findValueInputs()).toHaveLength(3);
+ });
});
});
diff --git a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
index 01a19711264..eb76b0bfbb4 100644
--- a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
@@ -3,6 +3,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { trimText } from 'helpers/text_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
+import setWindowLocation from 'helpers/set_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import PipelineSchedules from '~/ci/pipeline_schedules/components/pipeline_schedules.vue';
@@ -354,5 +355,19 @@ describe('Pipeline schedules app', () => {
expect(findLink().exists()).toBe(true);
expect(findLink().text()).toContain('scheduled pipelines documentation.');
});
+
+ describe('inactive tab', () => {
+ beforeEach(() => {
+ setWindowLocation('https://gitlab.com/flightjs/Flight/-/pipeline_schedules?scope=INACTIVE');
+ });
+
+ it('should not show empty state', async () => {
+ createComponent([[getPipelineSchedulesQuery, successEmptyHandler]]);
+
+ await waitForPromises();
+
+ expect(findEmptyState().exists()).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/ci/pipeline_schedules/components/table/pipeline_schedules_table_spec.js b/spec/frontend/ci/pipeline_schedules/components/table/pipeline_schedules_table_spec.js
index e488a36f3dc..8f0e9fca379 100644
--- a/spec/frontend/ci/pipeline_schedules/components/table/pipeline_schedules_table_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/table/pipeline_schedules_table_spec.js
@@ -1,4 +1,4 @@
-import { GlTableLite } from '@gitlab/ui';
+import { GlTable } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import PipelineSchedulesTable from '~/ci/pipeline_schedules/components/table/pipeline_schedules_table.vue';
import { mockPipelineScheduleNodes, mockPipelineScheduleCurrentUser } from '../../mock_data';
@@ -19,7 +19,7 @@ describe('Pipeline schedules table', () => {
});
};
- const findTable = () => wrapper.findComponent(GlTableLite);
+ const findTable = () => wrapper.findComponent(GlTable);
const findScheduleDescription = () => wrapper.findByTestId('pipeline-schedule-description');
beforeEach(() => {
diff --git a/spec/frontend/ci/pipeline_schedules/mock_data.js b/spec/frontend/ci/pipeline_schedules/mock_data.js
index 0a4f233f199..8d4e0f1bea6 100644
--- a/spec/frontend/ci/pipeline_schedules/mock_data.js
+++ b/spec/frontend/ci/pipeline_schedules/mock_data.js
@@ -35,6 +35,19 @@ export const mockPipelineScheduleAsGuestNodes = guestNodes;
export const mockTakeOwnershipNodes = takeOwnershipNodes;
export const mockSinglePipelineScheduleNode = mockGetSinglePipelineScheduleGraphQLResponse;
+export const mockSinglePipelineScheduleNodeNoVars = {
+ data: {
+ currentUser: mockGetPipelineSchedulesGraphQLResponse.data.currentUser,
+ project: {
+ id: mockGetPipelineSchedulesGraphQLResponse.data.project.id,
+ pipelineSchedules: {
+ count: 1,
+ nodes: [mockGetPipelineSchedulesGraphQLResponse.data.project.pipelineSchedules.nodes[1]],
+ },
+ },
+ },
+};
+
export const emptyPipelineSchedulesResponse = {
data: {
currentUser: {
diff --git a/spec/frontend/ci/runner/admin_runners/provide_spec.js b/spec/frontend/ci/runner/admin_runners/provide_spec.js
new file mode 100644
index 00000000000..b24ddabbb66
--- /dev/null
+++ b/spec/frontend/ci/runner/admin_runners/provide_spec.js
@@ -0,0 +1,34 @@
+import { provide } from '~/ci/runner/admin_runners/provide';
+
+import {
+ onlineContactTimeoutSecs,
+ staleTimeoutSecs,
+ runnerInstallHelpPage,
+} from 'jest/ci/runner/mock_data';
+
+const mockDataset = {
+ runnerInstallHelpPage,
+ onlineContactTimeoutSecs,
+ staleTimeoutSecs,
+};
+
+describe('admin runners provide', () => {
+ it('returns provide values', () => {
+ expect(provide(mockDataset)).toMatchObject({
+ runnerInstallHelpPage,
+ onlineContactTimeoutSecs,
+ staleTimeoutSecs,
+ });
+ });
+
+ it('returns only provide values', () => {
+ const dataset = {
+ ...mockDataset,
+ extraEntry: 'ANOTHER_ENTRY',
+ };
+
+ expect(provide(dataset)).not.toMatchObject({
+ extraEntry: 'ANOTHER_ENTRY',
+ });
+ });
+});
diff --git a/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js b/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js
index e4373d1c198..3fb845b186a 100644
--- a/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js
+++ b/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js
@@ -168,9 +168,8 @@ describe('RegistrationDropdown', () => {
expect(findTokenDropdownItem().exists()).toBe(true);
});
- it('Displays masked value by default', () => {
+ it('Displays masked value as password input by default', () => {
const mockToken = '0123456789';
- const maskToken = '**********';
createComponent(
{
@@ -179,7 +178,7 @@ describe('RegistrationDropdown', () => {
mountExtended,
);
- expect(findRegistrationTokenInput().element.value).toBe(maskToken);
+ expect(findRegistrationTokenInput().element.type).toBe('password');
});
});
diff --git a/spec/frontend/ci/runner/components/registration/registration_token_spec.js b/spec/frontend/ci/runner/components/registration/registration_token_spec.js
index fd3896d5500..eccfe43b47f 100644
--- a/spec/frontend/ci/runner/components/registration/registration_token_spec.js
+++ b/spec/frontend/ci/runner/components/registration/registration_token_spec.js
@@ -38,10 +38,15 @@ describe('RegistrationToken', () => {
);
});
+ it('Renders readonly input', () => {
+ createComponent();
+
+ expect(findInputCopyToggleVisibility().props('readonly')).toBe(true);
+ });
+
// Component integration test to ensure secure masking
- it('Displays masked value by default', () => {
+ it('Displays masked value as password input by default', () => {
const mockToken = '0123456789';
- const maskToken = '**********';
createComponent({
props: {
@@ -50,7 +55,7 @@ describe('RegistrationToken', () => {
mountFn: mountExtended,
});
- expect(wrapper.find('input').element.value).toBe(maskToken);
+ expect(wrapper.find('input').element.type).toBe('password');
});
describe('When the copy to clipboard button is clicked', () => {
diff --git a/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js b/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
index e9f2e888b9a..91d2a20ec8a 100644
--- a/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
+++ b/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
@@ -6,7 +6,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import TagToken, { TAG_SUGGESTIONS_PATH } from '~/ci/runner/components/search_tokens/tag_token.vue';
+import TagToken from '~/ci/runner/components/search_tokens/tag_token.vue';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { getRecentlyUsedSuggestions } from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
@@ -45,6 +45,8 @@ const mockTagTokenConfig = {
operators: OPERATORS_IS,
};
+const mockTagSuggestionsPath = '/path/runners/tag_list';
+
describe('TagToken', () => {
let mock;
let wrapper;
@@ -59,7 +61,8 @@ describe('TagToken', () => {
},
provide: {
portalName: 'fake target',
- alignSuggestions: function fakeAlignSuggestions() {},
+ tagSuggestionsPath: mockTagSuggestionsPath,
+ alignSuggestions: function fakeAlignSuggestions() {},
filteredSearchSuggestionListInstance: {
register: jest.fn(),
unregister: jest.fn(),
@@ -80,9 +83,9 @@ describe('TagToken', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(TAG_SUGGESTIONS_PATH, { params: { search: '' } }).reply(HTTP_STATUS_OK, mockTags);
+ mock.onGet(mockTagSuggestionsPath, { params: { search: '' } }).reply(HTTP_STATUS_OK, mockTags);
mock
- .onGet(TAG_SUGGESTIONS_PATH, { params: { search: mockSearchTerm } })
+ .onGet(mockTagSuggestionsPath, { params: { search: mockSearchTerm } })
.reply(HTTP_STATUS_OK, mockTagsFiltered);
getRecentlyUsedSuggestions.mockReturnValue([]);
@@ -163,7 +166,7 @@ describe('TagToken', () => {
describe('when suggestions cannot be loaded', () => {
beforeEach(async () => {
mock
- .onGet(TAG_SUGGESTIONS_PATH, { params: { search: '' } })
+ .onGet(mockTagSuggestionsPath, { params: { search: '' } })
.reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent();
diff --git a/spec/frontend/ci/runner/mock_data.js b/spec/frontend/ci/runner/mock_data.js
index d72f93ad574..b8eb9f0ba1b 100644
--- a/spec/frontend/ci/runner/mock_data.js
+++ b/spec/frontend/ci/runner/mock_data.js
@@ -104,7 +104,7 @@ export const mockSearchExamples = [
},
},
{
- name: 'a two terms text search',
+ name: 'a two words text search',
urlQuery: '?search=something+else',
search: {
runnerType: null,
@@ -112,11 +112,7 @@ export const mockSearchExamples = [
filters: [
{
type: FILTERED_SEARCH_TERM,
- value: { data: 'something' },
- },
- {
- type: FILTERED_SEARCH_TERM,
- value: { data: 'else' },
+ value: { data: 'something else' },
},
],
pagination: {},
@@ -323,6 +319,7 @@ export const mockRegistrationToken = 'MOCK_REGISTRATION_TOKEN';
export const mockAuthenticationToken = 'MOCK_AUTHENTICATION_TOKEN';
export const newRunnerPath = '/runners/new';
+export const runnerInstallHelpPage = 'https://docs.example.com/runner/install/';
export {
allRunnersData,
diff --git a/spec/frontend/ci/runner/runner_search_utils_spec.js b/spec/frontend/ci/runner/runner_search_utils_spec.js
index 9a4a6139198..0623d2a3348 100644
--- a/spec/frontend/ci/runner/runner_search_utils_spec.js
+++ b/spec/frontend/ci/runner/runner_search_utils_spec.js
@@ -50,8 +50,7 @@ describe('search_params.js', () => {
it('When search params appear as array, they are concatenated', () => {
expect(fromUrlQueryToSearch('?search[]=my&search[]=text').filters).toEqual([
- { type: FILTERED_SEARCH_TERM, value: { data: 'my' } },
- { type: FILTERED_SEARCH_TERM, value: { data: 'text' } },
+ { type: FILTERED_SEARCH_TERM, value: { data: 'my text' } },
]);
});
});
diff --git a/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js b/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
index f1df4208fa2..6ce86852095 100644
--- a/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
+++ b/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
@@ -1,6 +1,7 @@
import { GlTable, GlBadge } from '@gitlab/ui';
import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import TriggersList from '~/ci_settings_pipeline_triggers/components/triggers_list.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
@@ -25,17 +26,26 @@ describe('TriggersList', () => {
const findInvalidBadge = (i) => findCell(i, 0).findComponent(GlBadge);
const findEditBtn = (i) => findRowAt(i).find('[data-testid="edit-btn"]');
const findRevokeBtn = (i) => findRowAt(i).find('[data-testid="trigger_revoke_button"]');
- const findRevealHideButton = () => wrapper.findByTestId('reveal-hide-values-button');
+ const findRevealHideButton = () =>
+ document.querySelector('[data-testid="reveal-hide-values-button"]');
describe('With triggers set', () => {
beforeEach(async () => {
+ setHTMLFixture(`
+ <button data-testid="reveal-hide-values-button">Reveal values</button>
+ `);
+
createComponent();
await nextTick();
});
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
it('displays a table with expected headers', () => {
- const headers = ['Token', 'Description', 'Owner', 'Last Used', ''];
+ const headers = ['Token', 'Description', 'Owner', 'Last Used', 'Actions'];
headers.forEach((header, i) => {
expect(findHeaderAt(i).text()).toBe(header);
});
@@ -44,16 +54,16 @@ describe('TriggersList', () => {
it('displays a "Reveal/Hide values" button', async () => {
const revealHideButton = findRevealHideButton();
- expect(revealHideButton.exists()).toBe(true);
- expect(revealHideButton.text()).toBe('Reveal values');
+ expect(Boolean(revealHideButton)).toBe(true);
+ expect(revealHideButton.innerText).toBe('Reveal values');
- await revealHideButton.vm.$emit('click');
+ await revealHideButton.click();
- expect(revealHideButton.text()).toBe('Hide values');
+ expect(revealHideButton.innerText).toBe('Hide values');
});
it('displays a table with rows', async () => {
- await findRevealHideButton().vm.$emit('click');
+ await findRevealHideButton().click();
expect(findRows()).toHaveLength(triggers.length);
diff --git a/spec/frontend/clusters/forms/components/integration_form_spec.js b/spec/frontend/clusters/forms/components/integration_form_spec.js
index 396f8215b9f..9c9e07dee15 100644
--- a/spec/frontend/clusters/forms/components/integration_form_spec.js
+++ b/spec/frontend/clusters/forms/components/integration_form_spec.js
@@ -1,6 +1,7 @@
import { GlToggle, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import IntegrationForm from '~/clusters/forms/components/integration_form.vue';
import { createStore } from '~/clusters/forms/stores/index';
diff --git a/spec/frontend/clusters_list/components/agent_token_spec.js b/spec/frontend/clusters_list/components/agent_token_spec.js
index edb8b22d79e..9be3976fea2 100644
--- a/spec/frontend/clusters_list/components/agent_token_spec.js
+++ b/spec/frontend/clusters_list/components/agent_token_spec.js
@@ -10,7 +10,6 @@ import {
} from '~/clusters_list/constants';
import { generateAgentRegistrationCommand } from '~/clusters_list/clusters_util';
import CodeBlock from '~/vue_shared/components/code_block.vue';
-import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
const kasAddress = 'kas.example.com';
const agentName = 'my-agent';
@@ -23,7 +22,7 @@ describe('InstallAgentModal', () => {
const findAlert = () => wrapper.findComponent(GlAlert);
const findCodeBlock = () => wrapper.findComponent(CodeBlock);
- const findCopyButton = () => wrapper.findComponent(ModalCopyButton);
+ const findCopyButton = () => wrapper.findComponentByTestId('agent-registration-command');
const findInput = () => wrapper.findComponent(GlFormInputGroup);
const findHelmVersionPolicyLink = () => wrapper.findComponent(GlLink);
const findHelmExternalLinkIcon = () => wrapper.findComponent(GlIcon);
diff --git a/spec/frontend/clusters_list/components/clusters_view_all_spec.js b/spec/frontend/clusters_list/components/clusters_view_all_spec.js
index e81b242dd90..e86cd1019a9 100644
--- a/spec/frontend/clusters_list/components/clusters_view_all_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_view_all_spec.js
@@ -1,5 +1,6 @@
import { GlCard, GlLoadingIcon, GlSprintf, GlBadge } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ClustersViewAll from '~/clusters_list/components/clusters_view_all.vue';
diff --git a/spec/frontend/clusters_list/components/install_agent_modal_spec.js b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
index e1306e2738f..0cf6b8fbff9 100644
--- a/spec/frontend/clusters_list/components/install_agent_modal_spec.js
+++ b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
@@ -1,7 +1,7 @@
import { GlAlert, GlButton, GlFormInputGroup, GlSprintf } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { sprintf } from '~/locale';
+import { sprintf, s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking } from 'helpers/tracking_helper';
import AvailableAgentsDropdown from '~/clusters_list/components/available_agents_dropdown.vue';
@@ -48,15 +48,26 @@ describe('InstallAgentModal', () => {
let trackingSpy;
const configurations = [{ agentName: 'agent-name' }];
- const apolloQueryResponse = {
+ const apolloQueryResponse = (configurationsNodes = configurations) => ({
data: {
project: {
__typename: 'Project',
id: 'project-1',
clusterAgents: { nodes: [] },
- agentConfigurations: { nodes: configurations },
+ agentConfigurations: { nodes: configurationsNodes },
},
},
+ });
+
+ const provide = {
+ projectPath,
+ kasAddress,
+ emptyStateImage,
+ };
+
+ const propsData = {
+ defaultBranchName,
+ maxAgents,
};
const findModal = () => wrapper.findComponent(ModalStub);
@@ -80,17 +91,12 @@ describe('InstallAgentModal', () => {
}
};
- const createWrapper = () => {
- const provide = {
- projectPath,
- kasAddress,
- emptyStateImage,
- };
-
- const propsData = {
- defaultBranchName,
- maxAgents,
- };
+ const createWrapper = (mockApolloProvider) => {
+ apolloProvider =
+ mockApolloProvider ||
+ createMockApollo([
+ [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse())],
+ ]);
wrapper = shallowMountExtended(InstallAgentModal, {
attachTo: document.body,
@@ -102,6 +108,8 @@ describe('InstallAgentModal', () => {
provide,
propsData,
});
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
};
const writeQuery = () => {
@@ -117,33 +125,26 @@ describe('InstallAgentModal', () => {
});
};
- const mockSelectedAgentResponse = async () => {
- createWrapper();
+ const mockSelectedAgentResponse = (mockApolloProvider) => {
+ createWrapper(mockApolloProvider);
writeQuery();
- await waitForPromises();
-
wrapper.vm.setAgentName('agent-name');
findActionButton().vm.$emit('click');
return waitForPromises();
};
- beforeEach(async () => {
- apolloProvider = createMockApollo([
- [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse)],
- ]);
- createWrapper();
- await waitForPromises();
- trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- });
-
afterEach(() => {
apolloProvider = null;
});
describe('when KAS is enabled', () => {
describe('initial state', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
it('renders the dropdown for available agents', () => {
expect(findAgentDropdown().isVisible()).toBe(true);
});
@@ -173,8 +174,45 @@ describe('InstallAgentModal', () => {
});
});
+ describe('when there are 10 or more available agent configurations', () => {
+ it('displays an alert with Terraform instructions', async () => {
+ const configurationsNodes = Array(10).fill(configurations);
+ const mockApolloProvider = createMockApollo([
+ [
+ getAgentConfigurations,
+ jest.fn().mockResolvedValue(apolloQueryResponse(configurationsNodes)),
+ ],
+ ]);
+
+ createWrapper(mockApolloProvider);
+ await waitForPromises();
+
+ expect(findAlert().text()).toMatchInterpolatedText(
+ s__('ClusterAgents|To manage more agents, %{linkStart}use Terraform%{linkEnd}.'),
+ );
+ });
+
+ it('displays an alert with a warning when there are 100 or more configurations', async () => {
+ const configurationsNodes = Array(100).fill(configurations);
+ const mockApolloProvider = createMockApollo([
+ [
+ getAgentConfigurations,
+ jest.fn().mockResolvedValue(apolloQueryResponse(configurationsNodes)),
+ ],
+ ]);
+
+ createWrapper(mockApolloProvider);
+ await waitForPromises();
+
+ expect(findAlert().text()).toContain(
+ s__('ClusterAgents|We only support 100 agents on the UI.'),
+ );
+ });
+ });
+
describe('an agent is selected', () => {
beforeEach(() => {
+ createWrapper();
findAgentDropdown().vm.$emit('agentSelected');
});
@@ -195,13 +233,13 @@ describe('InstallAgentModal', () => {
const createAgentTokenHandler = jest.fn().mockResolvedValue(createAgentTokenResponse);
beforeEach(() => {
- apolloProvider = createMockApollo([
- [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse)],
+ const mockApolloProvider = createMockApollo([
+ [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse())],
[createAgentMutation, createAgentHandler],
[createAgentTokenMutation, createAgentTokenHandler],
]);
- return mockSelectedAgentResponse();
+ return mockSelectedAgentResponse(mockApolloProvider);
});
it('creates an agent and token', () => {
@@ -230,12 +268,12 @@ describe('InstallAgentModal', () => {
describe('error creating agent', () => {
beforeEach(() => {
- apolloProvider = createMockApollo([
- [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse)],
+ const mockApolloProvider = createMockApollo([
+ [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse())],
[createAgentMutation, jest.fn().mockResolvedValue(createAgentErrorResponse)],
]);
- return mockSelectedAgentResponse();
+ return mockSelectedAgentResponse(mockApolloProvider);
});
it('displays the error message', () => {
@@ -247,13 +285,13 @@ describe('InstallAgentModal', () => {
describe('error creating token', () => {
beforeEach(() => {
- apolloProvider = createMockApollo([
- [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse)],
+ const mockApolloProvider = createMockApollo([
+ [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse())],
[createAgentMutation, jest.fn().mockResolvedValue(createAgentResponse)],
[createAgentTokenMutation, jest.fn().mockResolvedValue(createAgentTokenErrorResponse)],
]);
- return mockSelectedAgentResponse();
+ return mockSelectedAgentResponse(mockApolloProvider);
});
it('displays the error message', () => {
@@ -267,11 +305,11 @@ describe('InstallAgentModal', () => {
describe('when KAS is disabled', () => {
beforeEach(async () => {
- apolloProvider = createMockApollo([
+ const mockApolloProvider = createMockApollo([
[getAgentConfigurations, jest.fn().mockResolvedValue(kasDisabledErrorResponse)],
]);
- createWrapper();
+ createWrapper(mockApolloProvider);
await waitForPromises();
});
diff --git a/spec/frontend/code_navigation/components/app_spec.js b/spec/frontend/code_navigation/components/app_spec.js
index 88861b0d08a..27e211466b1 100644
--- a/spec/frontend/code_navigation/components/app_spec.js
+++ b/spec/frontend/code_navigation/components/app_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
diff --git a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
index d0bc7a55f8e..60c87aa10eb 100644
--- a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
+++ b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
@@ -2,10 +2,10 @@
exports[`Comment templates list item component renders list item 1`] = `
<li
- class="gl-pt-4 gl-pb-5 gl-border-b"
+ class="gl-px-5! gl-py-4!"
>
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-display-flex"
>
<h6
class="gl-mr-3 gl-my-0"
@@ -26,7 +26,6 @@ exports[`Comment templates list item component renders list item 1`] = `
class="btn btn-default btn-md gl-button btn-default-tertiary gl-new-dropdown-toggle gl-new-dropdown-icon-only gl-new-dropdown-toggle-no-caret"
data-testid="base-dropdown-toggle"
id="actions-toggle-3"
- listeners="[object Object]"
type="button"
>
<!---->
@@ -130,9 +129,11 @@ exports[`Comment templates list item component renders list item 1`] = `
</div>
<div
- class="gl-mt-3 gl-font-monospace gl-white-space-pre-wrap"
+ class="gl-font-monospace gl-white-space-pre-line gl-font-sm gl-mt-n5"
>
+
/assign_reviewer
+
</div>
<!---->
diff --git a/spec/frontend/comment_templates/components/form_spec.js b/spec/frontend/comment_templates/components/form_spec.js
index 053a5099c37..b48feba5290 100644
--- a/spec/frontend/comment_templates/components/form_spec.js
+++ b/spec/frontend/comment_templates/components/form_spec.js
@@ -5,6 +5,7 @@ import VueApollo from 'vue-apollo';
import createdSavedReplyResponse from 'test_fixtures/graphql/comment_templates/create_saved_reply.mutation.graphql.json';
import createdSavedReplyErrorResponse from 'test_fixtures/graphql/comment_templates/create_saved_reply_with_errors.mutation.graphql.json';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { mockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Form from '~/comment_templates/components/form.vue';
import createSavedReplyMutation from '~/comment_templates/queries/create_saved_reply.mutation.graphql';
@@ -52,6 +53,12 @@ const findSubmitBtn = () => wrapper.find('[data-testid="comment-template-form-su
describe('Comment templates form component', () => {
describe('creates comment template', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking(undefined, window.document, jest.spyOn);
+ });
+
it('calls apollo mutation', async () => {
wrapper = createComponent();
@@ -66,6 +73,11 @@ describe('Comment templates form component', () => {
content: 'Test content',
name: 'Test',
});
+ expect(trackingSpy).toHaveBeenCalledWith(
+ expect.any(String),
+ 'i_code_review_saved_replies_create',
+ expect.any(Object),
+ );
});
it('does not submit when form validation fails', async () => {
diff --git a/spec/frontend/comment_templates/components/list_spec.js b/spec/frontend/comment_templates/components/list_spec.js
index 8b0daf2fe2f..8973857c338 100644
--- a/spec/frontend/comment_templates/components/list_spec.js
+++ b/spec/frontend/comment_templates/components/list_spec.js
@@ -25,12 +25,6 @@ describe('Comment templates list component', () => {
expect(wrapper.findAllComponents(ListItem).length).toBe(0);
});
- it('render comment templates count', () => {
- wrapper = createComponent(savedRepliesResponse);
-
- expect(wrapper.find('[data-testid="title"]').text()).toEqual('My comment templates (2)');
- });
-
it('renders list of comment templates', () => {
const savedReplies = savedRepliesResponse.data.currentUser.savedReplies.nodes;
wrapper = createComponent(savedRepliesResponse);
diff --git a/spec/frontend/comment_templates/pages/index_spec.js b/spec/frontend/comment_templates/pages/index_spec.js
index 6dbec3ef4a4..fa195ec8281 100644
--- a/spec/frontend/comment_templates/pages/index_spec.js
+++ b/spec/frontend/comment_templates/pages/index_spec.js
@@ -42,4 +42,13 @@ describe('Comment templates index page component', () => {
expect.objectContaining(savedReplies[1]),
);
});
+
+ it('render comment templates count', async () => {
+ const mockApollo = createMockApolloProvider(savedRepliesResponse);
+ wrapper = createComponent({ mockApollo });
+
+ await waitForPromises();
+
+ expect(wrapper.find('[data-testid="title"]').text()).toContain('2');
+ });
});
diff --git a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
index 7983f8fddf5..3b3e5098857 100644
--- a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
+++ b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
@@ -7,8 +7,8 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import CommitBoxPipelineMiniGraph from '~/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue';
-import GraphqlPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph.vue';
import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
+import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import { COMMIT_BOX_POLL_INTERVAL } from '~/projects/commit_box/info/constants';
import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
import getPipelineStagesQuery from '~/pipelines/graphql/queries/get_pipeline_stages.query.graphql';
@@ -29,8 +29,8 @@ describe('Commit box pipeline mini graph', () => {
let wrapper;
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findGraphqlPipelineMiniGraph = () => wrapper.findComponent(GraphqlPipelineMiniGraph);
const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
+ const findLegacyPipelineMiniGraph = () => wrapper.findComponent(LegacyPipelineMiniGraph);
const downstreamHandler = jest.fn().mockResolvedValue(mockDownstreamQueryResponse);
const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
@@ -79,7 +79,7 @@ describe('Commit box pipeline mini graph', () => {
createComponent();
expect(findLoadingIcon().exists()).toBe(true);
- expect(findPipelineMiniGraph().exists()).toBe(false);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(false);
});
});
@@ -90,11 +90,11 @@ describe('Commit box pipeline mini graph', () => {
it('should not display loading state after the query is resolved', () => {
expect(findLoadingIcon().exists()).toBe(false);
- expect(findPipelineMiniGraph().exists()).toBe(true);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(true);
});
it('should display the pipeline mini graph', () => {
- expect(findPipelineMiniGraph().exists()).toBe(true);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(true);
});
});
@@ -127,7 +127,7 @@ describe('Commit box pipeline mini graph', () => {
await waitForPromises();
- expect(findPipelineMiniGraph().props('stages')).toEqual(expectedStages);
+ expect(findLegacyPipelineMiniGraph().props('stages')).toEqual(expectedStages);
});
it('should render a downstream pipeline only', async () => {
@@ -135,8 +135,8 @@ describe('Commit box pipeline mini graph', () => {
await waitForPromises();
- const downstreamPipelines = findPipelineMiniGraph().props('downstreamPipelines');
- const upstreamPipeline = findPipelineMiniGraph().props('upstreamPipeline');
+ const downstreamPipelines = findLegacyPipelineMiniGraph().props('downstreamPipelines');
+ const upstreamPipeline = findLegacyPipelineMiniGraph().props('upstreamPipeline');
expect(downstreamPipelines).toEqual(expect.any(Array));
expect(upstreamPipeline).toEqual(null);
@@ -147,7 +147,7 @@ describe('Commit box pipeline mini graph', () => {
await waitForPromises();
- const downstreamPipelines = findPipelineMiniGraph().props('downstreamPipelines');
+ const downstreamPipelines = findLegacyPipelineMiniGraph().props('downstreamPipelines');
expect(downstreamPipelines).toHaveLength(1);
});
@@ -158,7 +158,7 @@ describe('Commit box pipeline mini graph', () => {
await waitForPromises();
const expectedPath = mockDownstreamQueryResponse.data.project.pipeline.path;
- const pipelinePath = findPipelineMiniGraph().props('pipelinePath');
+ const pipelinePath = findLegacyPipelineMiniGraph().props('pipelinePath');
expect(pipelinePath).toBe(expectedPath);
});
@@ -168,8 +168,8 @@ describe('Commit box pipeline mini graph', () => {
await waitForPromises();
- const downstreamPipelines = findPipelineMiniGraph().props('downstreamPipelines');
- const upstreamPipeline = findPipelineMiniGraph().props('upstreamPipeline');
+ const downstreamPipelines = findLegacyPipelineMiniGraph().props('downstreamPipelines');
+ const upstreamPipeline = findLegacyPipelineMiniGraph().props('upstreamPipeline');
expect(upstreamPipeline).toEqual(samplePipeline);
expect(downstreamPipelines).toHaveLength(0);
@@ -180,8 +180,8 @@ describe('Commit box pipeline mini graph', () => {
await waitForPromises();
- const downstreamPipelines = findPipelineMiniGraph().props('downstreamPipelines');
- const upstreamPipeline = findPipelineMiniGraph().props('upstreamPipeline');
+ const downstreamPipelines = findLegacyPipelineMiniGraph().props('downstreamPipelines');
+ const upstreamPipeline = findLegacyPipelineMiniGraph().props('upstreamPipeline');
expect(upstreamPipeline).toEqual(samplePipeline);
expect(downstreamPipelines).toEqual(
@@ -263,18 +263,18 @@ describe('Commit box pipeline mini graph', () => {
describe('feature flag behavior', () => {
it.each`
- state | provide | showPipelineMiniGraph | showGraphqlPipelineMiniGraph
- ${true} | ${{ ciGraphqlPipelineMiniGraph: true }} | ${false} | ${true}
- ${false} | ${{}} | ${true} | ${false}
+ state | showLegacyPipelineMiniGraph | showPipelineMiniGraph
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${false}
`(
'renders the correct component when the feature flag is set to $state',
- async ({ provide, showPipelineMiniGraph, showGraphqlPipelineMiniGraph }) => {
- createComponent(provide);
+ async ({ state, showLegacyPipelineMiniGraph, showPipelineMiniGraph }) => {
+ createComponent({ ciGraphqlPipelineMiniGraph: state });
await waitForPromises();
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(showLegacyPipelineMiniGraph);
expect(findPipelineMiniGraph().exists()).toBe(showPipelineMiniGraph);
- expect(findGraphqlPipelineMiniGraph().exists()).toBe(showGraphqlPipelineMiniGraph);
},
);
diff --git a/spec/frontend/content_editor/extensions/copy_paste_spec.js b/spec/frontend/content_editor/extensions/copy_paste_spec.js
index f8faa7869c0..4729b1c1223 100644
--- a/spec/frontend/content_editor/extensions/copy_paste_spec.js
+++ b/spec/frontend/content_editor/extensions/copy_paste_spec.js
@@ -5,6 +5,8 @@ import Diagram from '~/content_editor/extensions/diagram';
import Frontmatter from '~/content_editor/extensions/frontmatter';
import Heading from '~/content_editor/extensions/heading';
import Bold from '~/content_editor/extensions/bold';
+import BulletList from '~/content_editor/extensions/bullet_list';
+import ListItem from '~/content_editor/extensions/list_item';
import Italic from '~/content_editor/extensions/italic';
import { VARIANT_DANGER } from '~/alert';
import eventHubFactory from '~/helpers/event_hub_factory';
@@ -36,6 +38,8 @@ describe('content_editor/extensions/copy_paste', () => {
let loading;
let heading;
let codeBlock;
+ let bulletList;
+ let listItem;
let renderMarkdown;
let resolveRenderMarkdownPromise;
let resolveRenderMarkdownPromiseAndWait;
@@ -65,12 +69,14 @@ describe('content_editor/extensions/copy_paste', () => {
Diagram,
Frontmatter,
Heading,
+ BulletList,
+ ListItem,
CopyPaste.configure({ renderMarkdown, eventHub, serializer: new MarkdownSerializer() }),
],
});
({
- builders: { doc, p, bold, italic, heading, loading, codeBlock },
+ builders: { doc, p, bold, italic, heading, loading, codeBlock, bulletList, listItem },
} = createDocBuilder({
tiptapEditor,
names: {
@@ -78,6 +84,8 @@ describe('content_editor/extensions/copy_paste', () => {
italic: { markType: Italic.name },
loading: { nodeType: Loading.name },
heading: { nodeType: Heading.name },
+ bulletList: { nodeType: BulletList.name },
+ listItem: { nodeType: ListItem.name },
codeBlock: { nodeType: CodeBlockHighlight.name },
},
}));
@@ -303,7 +311,7 @@ describe('content_editor/extensions/copy_paste', () => {
await triggerPasteEventHandlerAndWaitForTransaction(
buildClipboardEvent({
- types: ['text/x-gfm'],
+ types: ['text/x-gfm', 'text/plain', 'text/html'],
data: {
'text/x-gfm': '**bold text**',
'text/plain': 'irrelevant text',
@@ -317,6 +325,28 @@ describe('content_editor/extensions/copy_paste', () => {
});
});
+ describe('when pasting a single code block with lang=markdown', () => {
+ it('process the textContent as markdown, ignoring the htmlContent', async () => {
+ const resolvedValue = '<ul><li>Cat</li><li>Dog</li><li>Turtle</li></ul>';
+ const expectedDoc = doc(
+ bulletList(listItem(p('Cat')), listItem(p('Dog')), listItem(p('Turtle'))),
+ );
+
+ await triggerPasteEventHandlerAndWaitForTransaction(
+ buildClipboardEvent({
+ types: ['text/plain', 'text/html'],
+ data: {
+ 'text/plain': '- Cat\n- Dog\n- Turtle\n',
+ 'text/html': `<meta charset='utf-8'><pre class="code highlight" lang="markdown"><span id="LC1" class="line" lang="markdown"><span class="p">-</span> Cat</span>\n<span id="LC2" class="line" lang="markdown"><span class="p">-</span> Dog</span>\n<span id="LC3" class="line" lang="markdown"><span class="p">-</span> Turtle</span>\n</pre>`,
+ },
+ }),
+ );
+ await resolveRenderMarkdownPromiseAndWait(resolvedValue);
+
+ expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
describe('when pasting vscode-editor-data', () => {
it('pastes the content as a code block', async () => {
const resolvedValue =
diff --git a/spec/frontend/content_editor/markdown_snapshot_spec.js b/spec/frontend/content_editor/markdown_snapshot_spec.js
index 49b466fd7f5..5253b475ea5 100644
--- a/spec/frontend/content_editor/markdown_snapshot_spec.js
+++ b/spec/frontend/content_editor/markdown_snapshot_spec.js
@@ -64,7 +64,6 @@ describe('markdown example snapshots in ContentEditor', () => {
const expectedHtml = expectedHtmlExamples[name].wysiwyg;
const { html: actualHtml } = actualHtmlAndJsonExamples[name];
- // noinspection JSUnresolvedFunction (required to avoid RubyMine type inspection warning, because custom matchers auto-imported via Jest test setup are not automatically resolved - see https://youtrack.jetbrains.com/issue/WEB-42350/matcher-for-jest-is-not-recognized-but-it-is-runable)
expect(actualHtml).toMatchExpectedForMarkdown(
'HTML',
name,
@@ -82,7 +81,6 @@ describe('markdown example snapshots in ContentEditor', () => {
const expectedJson = expectedProseMirrorJsonExamples[name];
const { json: actualJson } = actualHtmlAndJsonExamples[name];
- // noinspection JSUnresolvedFunction
expect(actualJson).toMatchExpectedForMarkdown(
'JSON',
name,
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_closed_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_closed_spec.js
new file mode 100644
index 00000000000..19c78730828
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_closed_spec.js
@@ -0,0 +1,63 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContributionEventClosed from '~/contribution_events/components/contribution_event/contribution_event_closed.vue';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import { TARGET_TYPE_WORK_ITEM } from '~/contribution_events/constants';
+import {
+ eventMilestoneClosed,
+ eventIssueClosed,
+ eventMergeRequestClosed,
+ eventTaskClosed,
+ eventIncidentClosed,
+} from '../../utils';
+
+describe('ContributionEventClosed', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData }) => {
+ wrapper = shallowMountExtended(ContributionEventClosed, {
+ propsData,
+ });
+ };
+
+ describe.each`
+ event | expectedMessage | iconName | iconClass
+ ${eventMilestoneClosed()} | ${'Closed milestone %{targetLink} in %{resourceParentLink}.'} | ${'status_closed'} | ${'gl-text-blue-500'}
+ ${eventIssueClosed()} | ${'Closed issue %{targetLink} in %{resourceParentLink}.'} | ${'issue-closed'} | ${'gl-text-blue-500'}
+ ${eventMergeRequestClosed()} | ${'Closed merge request %{targetLink} in %{resourceParentLink}.'} | ${'merge-request-close'} | ${'gl-text-red-500'}
+ ${{ target: { type: 'unsupported type' } }} | ${'Closed resource.'} | ${'status_closed'} | ${'gl-text-blue-500'}
+ `(
+ 'when event target type is $event.target.type',
+ ({ event, expectedMessage, iconName, iconClass }) => {
+ it('renders `ContributionEventBase` with correct props', () => {
+ createComponent({ propsData: { event } });
+
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ message: expectedMessage,
+ iconName,
+ iconClass,
+ });
+ });
+ },
+ );
+
+ describe(`when event target type is ${TARGET_TYPE_WORK_ITEM}`, () => {
+ describe.each`
+ event | expectedMessage
+ ${eventTaskClosed()} | ${'Closed task %{targetLink} in %{resourceParentLink}.'}
+ ${eventIncidentClosed()} | ${'Closed incident %{targetLink} in %{resourceParentLink}.'}
+ ${{ target: { type: TARGET_TYPE_WORK_ITEM, issue_type: 'unsupported type' } }} | ${'Closed resource.'}
+ `('when issue type is $event.target.issue_type', ({ event, expectedMessage }) => {
+ it('renders `ContributionEventBase` with correct props', () => {
+ createComponent({ propsData: { event } });
+
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ message: expectedMessage,
+ iconName: 'status_closed',
+ iconClass: 'gl-text-blue-500',
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_commented_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_commented_spec.js
new file mode 100644
index 00000000000..0ab1ba5cd8b
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_commented_spec.js
@@ -0,0 +1,103 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ContributionEventCommented from '~/contribution_events/components/contribution_event/contribution_event_commented.vue';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import ResourceParentLink from '~/contribution_events/components/resource_parent_link.vue';
+import {
+ eventCommentedIssue,
+ eventCommentedMergeRequest,
+ eventCommentedProjectSnippet,
+ eventCommentedPersonalSnippet,
+ eventCommentedDesign,
+ eventCommentedCommit,
+} from '../../utils';
+
+describe('ContributionEventCommented', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData }) => {
+ wrapper = mountExtended(ContributionEventCommented, {
+ propsData,
+ });
+ };
+
+ const findNoteableLink = (event) =>
+ wrapper.findByRole('link', { name: event.noteable.reference_link_text });
+ const findResourceParentLink = () => wrapper.findComponent(ResourceParentLink);
+ const findContributionEventBase = () => wrapper.findComponent(ContributionEventBase);
+ const findEventBody = () => wrapper.findByTestId('event-body');
+
+ describe.each`
+ event | expectedMessage
+ ${eventCommentedIssue()} | ${'Commented on issue'}
+ ${eventCommentedMergeRequest()} | ${'Commented on merge request'}
+ ${eventCommentedProjectSnippet()} | ${'Commented on snippet'}
+ ${eventCommentedDesign()} | ${'Commented on design'}
+ ${eventCommentedCommit()} | ${'Commented on commit'}
+ `('when event is $event', ({ event, expectedMessage }) => {
+ beforeEach(() => {
+ createComponent({ propsData: { event } });
+ });
+
+ it('renders `ContributionEventBase` with correct props', () => {
+ expect(findContributionEventBase().props()).toMatchObject({
+ event,
+ iconName: 'comment',
+ });
+ });
+
+ it('renders message', () => {
+ expect(findEventBody().text()).toContain(expectedMessage);
+ });
+
+ it('renders resource parent link', () => {
+ expect(findResourceParentLink().props('event')).toEqual(event);
+ });
+
+ it('renders noteable link', () => {
+ expect(findNoteableLink(event).attributes('href')).toBe(event.noteable.web_url);
+ });
+
+ it('renders first line of comment in markdown', () => {
+ expect(wrapper.html()).toContain(event.noteable.first_line_in_markdown);
+ });
+ });
+
+ describe('when noteable type is a personal snippet', () => {
+ const event = eventCommentedPersonalSnippet();
+
+ beforeEach(() => {
+ createComponent({ propsData: { event } });
+ });
+
+ it('renders `ContributionEventBase` with correct props', () => {
+ expect(findContributionEventBase().props()).toMatchObject({
+ event,
+ iconName: 'comment',
+ });
+ });
+
+ it('renders message', () => {
+ expect(findEventBody().text()).toContain('Commented on snippet');
+ });
+
+ it('does not render resource parent link', () => {
+ expect(findResourceParentLink().exists()).toBe(false);
+ });
+
+ it('does not add `gl-font-monospace` to noteable link', () => {
+ expect(findNoteableLink(event).classes()).not.toContain('gl-font-monospace');
+ });
+ });
+
+ describe('when noteable type is a commit', () => {
+ const event = eventCommentedCommit();
+
+ beforeEach(() => {
+ createComponent({ propsData: { event } });
+ });
+
+ it('adds `gl-font-monospace` to noteable link', () => {
+ expect(findNoteableLink(event).classes()).toContain('gl-font-monospace');
+ });
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js
new file mode 100644
index 00000000000..4be4aa50dfc
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js
@@ -0,0 +1,70 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContributionEventCreated from '~/contribution_events/components/contribution_event/contribution_event_created.vue';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import { TARGET_TYPE_WORK_ITEM } from '~/contribution_events/constants';
+import {
+ eventProjectCreated,
+ eventMilestoneCreated,
+ eventIssueCreated,
+ eventMergeRequestCreated,
+ eventWikiPageCreated,
+ eventDesignCreated,
+ eventTaskCreated,
+ eventIncidentCreated,
+} from '../../utils';
+
+describe('ContributionEventCreated', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData }) => {
+ wrapper = shallowMountExtended(ContributionEventCreated, {
+ propsData,
+ });
+ };
+
+ describe.each`
+ event | expectedMessage | expectedIconName | expectedIconClass
+ ${eventProjectCreated()} | ${'Created project %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventMilestoneCreated()} | ${'Opened milestone %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventIssueCreated()} | ${'Opened issue %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventMergeRequestCreated()} | ${'Opened merge request %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventWikiPageCreated()} | ${'Created wiki page %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventDesignCreated()} | ${'Added design %{targetLink} in %{resourceParentLink}.'} | ${'upload'} | ${null}
+ ${{ resource_parent: { type: 'unsupported type' } }} | ${'Created resource.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${{ target: { type: 'unsupported type' } }} | ${'Created resource.'} | ${'status_open'} | ${'gl-text-green-500'}
+ `(
+ 'when event target type is $event.target.type',
+ ({ event, expectedMessage, expectedIconName, expectedIconClass }) => {
+ it('renders `ContributionEventBase` with correct props', () => {
+ createComponent({ propsData: { event } });
+
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ message: expectedMessage,
+ iconName: expectedIconName,
+ iconClass: expectedIconClass,
+ });
+ });
+ },
+ );
+
+ describe(`when event target type is ${TARGET_TYPE_WORK_ITEM}`, () => {
+ describe.each`
+ event | expectedMessage
+ ${eventTaskCreated()} | ${'Opened task %{targetLink} in %{resourceParentLink}.'}
+ ${eventIncidentCreated()} | ${'Opened incident %{targetLink} in %{resourceParentLink}.'}
+ ${{ target: { type: TARGET_TYPE_WORK_ITEM, issue_type: 'unsupported type' } }} | ${'Created resource.'}
+ `('when issue type is $event.target.issue_type', ({ event, expectedMessage }) => {
+ it('renders `ContributionEventBase` with correct props', () => {
+ createComponent({ propsData: { event } });
+
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ message: expectedMessage,
+ iconName: 'status_open',
+ iconClass: 'gl-text-green-500',
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_reopened_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_reopened_spec.js
new file mode 100644
index 00000000000..87f3080a98f
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_reopened_spec.js
@@ -0,0 +1,60 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContributionEventReopened from '~/contribution_events/components/contribution_event/contribution_event_reopened.vue';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import { TARGET_TYPE_WORK_ITEM } from '~/contribution_events/constants';
+import {
+ eventMilestoneReopened,
+ eventIssueReopened,
+ eventMergeRequestReopened,
+ eventTaskReopened,
+ eventIncidentReopened,
+} from '../../utils';
+
+describe('ContributionEventReopened', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData }) => {
+ wrapper = shallowMountExtended(ContributionEventReopened, {
+ propsData,
+ });
+ };
+
+ describe.each`
+ event | expectedMessage | iconName
+ ${eventMilestoneReopened()} | ${'Reopened milestone %{targetLink} in %{resourceParentLink}.'} | ${'status_open'}
+ ${eventIssueReopened()} | ${'Reopened issue %{targetLink} in %{resourceParentLink}.'} | ${'status_open'}
+ ${eventMergeRequestReopened()} | ${'Reopened merge request %{targetLink} in %{resourceParentLink}.'} | ${'merge-request-open'}
+ ${{ target: { type: 'unsupported type' } }} | ${'Reopened resource.'} | ${'status_open'}
+ `('when event target type is $event.target.type', ({ event, expectedMessage, iconName }) => {
+ it('renders `ContributionEventBase` with correct props', () => {
+ createComponent({ propsData: { event } });
+
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ message: expectedMessage,
+ iconName,
+ iconClass: 'gl-text-green-500',
+ });
+ });
+ });
+
+ describe(`when event target type is ${TARGET_TYPE_WORK_ITEM}`, () => {
+ describe.each`
+ event | expectedMessage
+ ${eventTaskReopened()} | ${'Reopened task %{targetLink} in %{resourceParentLink}.'}
+ ${eventIncidentReopened()} | ${'Reopened incident %{targetLink} in %{resourceParentLink}.'}
+ ${{ target: { type: TARGET_TYPE_WORK_ITEM, issue_type: 'unsupported type' } }} | ${'Reopened resource.'}
+ `('when issue type is $event.target.issue_type', ({ event, expectedMessage }) => {
+ it('renders `ContributionEventBase` with correct props', () => {
+ createComponent({ propsData: { event } });
+
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ message: expectedMessage,
+ iconName: 'status_open',
+ iconClass: 'gl-text-green-500',
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_events_spec.js b/spec/frontend/contribution_events/components/contribution_events_spec.js
index 31e1bc3e569..7493d248e2b 100644
--- a/spec/frontend/contribution_events/components/contribution_events_spec.js
+++ b/spec/frontend/contribution_events/components/contribution_events_spec.js
@@ -7,6 +7,10 @@ import ContributionEventLeft from '~/contribution_events/components/contribution
import ContributionEventPushed from '~/contribution_events/components/contribution_event/contribution_event_pushed.vue';
import ContributionEventPrivate from '~/contribution_events/components/contribution_event/contribution_event_private.vue';
import ContributionEventMerged from '~/contribution_events/components/contribution_event/contribution_event_merged.vue';
+import ContributionEventCreated from '~/contribution_events/components/contribution_event/contribution_event_created.vue';
+import ContributionEventClosed from '~/contribution_events/components/contribution_event/contribution_event_closed.vue';
+import ContributionEventReopened from '~/contribution_events/components/contribution_event/contribution_event_reopened.vue';
+import ContributionEventCommented from '~/contribution_events/components/contribution_event/contribution_event_commented.vue';
import {
eventApproved,
eventExpired,
@@ -15,6 +19,10 @@ import {
eventPushedBranch,
eventPrivate,
eventMerged,
+ eventCreated,
+ eventClosed,
+ eventReopened,
+ eventCommented,
} from '../utils';
describe('ContributionEvents', () => {
@@ -31,20 +39,28 @@ describe('ContributionEvents', () => {
eventPushedBranch(),
eventPrivate(),
eventMerged(),
+ eventCreated(),
+ eventClosed(),
+ eventReopened(),
+ eventCommented(),
],
},
});
};
it.each`
- expectedComponent | expectedEvent
- ${ContributionEventApproved} | ${eventApproved()}
- ${ContributionEventExpired} | ${eventExpired()}
- ${ContributionEventJoined} | ${eventJoined()}
- ${ContributionEventLeft} | ${eventLeft()}
- ${ContributionEventPushed} | ${eventPushedBranch()}
- ${ContributionEventPrivate} | ${eventPrivate()}
- ${ContributionEventMerged} | ${eventMerged()}
+ expectedComponent | expectedEvent
+ ${ContributionEventApproved} | ${eventApproved()}
+ ${ContributionEventExpired} | ${eventExpired()}
+ ${ContributionEventJoined} | ${eventJoined()}
+ ${ContributionEventLeft} | ${eventLeft()}
+ ${ContributionEventPushed} | ${eventPushedBranch()}
+ ${ContributionEventPrivate} | ${eventPrivate()}
+ ${ContributionEventMerged} | ${eventMerged()}
+ ${ContributionEventCreated} | ${eventCreated()}
+ ${ContributionEventClosed} | ${eventClosed()}
+ ${ContributionEventReopened} | ${eventReopened()}
+ ${ContributionEventCommented} | ${eventCommented()}
`(
'renders `$expectedComponent.name` component and passes expected event',
({ expectedComponent, expectedEvent }) => {
diff --git a/spec/frontend/contribution_events/components/target_link_spec.js b/spec/frontend/contribution_events/components/target_link_spec.js
index b71d6eff432..40650b3585c 100644
--- a/spec/frontend/contribution_events/components/target_link_spec.js
+++ b/spec/frontend/contribution_events/components/target_link_spec.js
@@ -1,7 +1,7 @@
import { GlLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TargetLink from '~/contribution_events/components/target_link.vue';
-import { eventApproved, eventJoined } from '../utils';
+import { eventApproved, eventJoined, eventWikiPageCreated } from '../utils';
describe('TargetLink', () => {
let wrapper;
@@ -19,13 +19,15 @@ describe('TargetLink', () => {
});
};
+ const findLink = () => wrapper.findComponent(GlLink);
+
describe('when target is defined', () => {
beforeEach(() => {
createComponent();
});
it('renders link', () => {
- const link = wrapper.findComponent(GlLink);
+ const link = findLink();
const { web_url: webUrl, title, reference_link_text } = defaultPropsData.event.target;
expect(link.attributes()).toMatchObject({
@@ -34,6 +36,17 @@ describe('TargetLink', () => {
});
expect(link.text()).toBe(reference_link_text);
});
+
+ describe('when target does not have `reference_link_text` defined', () => {
+ const event = eventWikiPageCreated();
+ beforeEach(() => {
+ createComponent({ propsData: { event } });
+ });
+
+ it('uses `title` for the link text', () => {
+ expect(findLink().text()).toBe(event.target.title);
+ });
+ });
});
describe('when target is not defined', () => {
diff --git a/spec/frontend/contribution_events/utils.js b/spec/frontend/contribution_events/utils.js
index 6e97455582d..8b34506c6ac 100644
--- a/spec/frontend/contribution_events/utils.js
+++ b/spec/frontend/contribution_events/utils.js
@@ -7,21 +7,46 @@ import {
EVENT_TYPE_PUSHED,
EVENT_TYPE_PRIVATE,
EVENT_TYPE_MERGED,
+ EVENT_TYPE_CLOSED,
+ EVENT_TYPE_REOPENED,
+ EVENT_TYPE_COMMENTED,
PUSH_EVENT_REF_TYPE_BRANCH,
PUSH_EVENT_REF_TYPE_TAG,
+ EVENT_TYPE_CREATED,
+ TARGET_TYPE_ISSUE,
+ TARGET_TYPE_MILESTONE,
+ TARGET_TYPE_MERGE_REQUEST,
+ TARGET_TYPE_WIKI,
+ TARGET_TYPE_DESIGN,
+ WORK_ITEM_ISSUE_TYPE_ISSUE,
+ WORK_ITEM_ISSUE_TYPE_TASK,
+ WORK_ITEM_ISSUE_TYPE_INCIDENT,
+ RESOURCE_PARENT_TYPE_PROJECT,
} from '~/contribution_events/constants';
-const findEventByAction = (action) => events.find((event) => event.action === action);
+import {
+ ISSUE_NOTEABLE_TYPE,
+ MERGE_REQUEST_NOTEABLE_TYPE,
+ SNIPPET_NOTEABLE_TYPE,
+ DESIGN_NOTEABLE_TYPE,
+ COMMIT_NOTEABLE_TYPE,
+} from '~/notes/constants';
+
+const findEventByAction = (action) => () => events.find((event) => event.action === action);
+const findEventByActionAndTargetType = (action, targetType) => () =>
+ events.find((event) => event.action === action && event.target?.type === targetType);
+const findEventByActionAndIssueType = (action, issueType) => () =>
+ events.find((event) => event.action === action && event.target.issue_type === issueType);
-export const eventApproved = () => findEventByAction(EVENT_TYPE_APPROVED);
+export const eventApproved = findEventByAction(EVENT_TYPE_APPROVED);
-export const eventExpired = () => findEventByAction(EVENT_TYPE_EXPIRED);
+export const eventExpired = findEventByAction(EVENT_TYPE_EXPIRED);
-export const eventJoined = () => findEventByAction(EVENT_TYPE_JOINED);
+export const eventJoined = findEventByAction(EVENT_TYPE_JOINED);
-export const eventLeft = () => findEventByAction(EVENT_TYPE_LEFT);
+export const eventLeft = findEventByAction(EVENT_TYPE_LEFT);
-export const eventMerged = () => findEventByAction(EVENT_TYPE_MERGED);
+export const eventMerged = findEventByAction(EVENT_TYPE_MERGED);
const findPushEvent = ({
isNew = false,
@@ -50,3 +75,81 @@ export const eventPushedRemovedTag = findPushEvent({
export const eventBulkPushedBranch = findPushEvent({ commitCount: 5 });
export const eventPrivate = () => ({ ...events[0], action: EVENT_TYPE_PRIVATE });
+
+export const eventCreated = findEventByAction(EVENT_TYPE_CREATED);
+
+export const findCreatedEvent = (targetType) =>
+ findEventByActionAndTargetType(EVENT_TYPE_CREATED, targetType);
+export const findWorkItemCreatedEvent = (issueType) =>
+ findEventByActionAndIssueType(EVENT_TYPE_CREATED, issueType);
+
+export const eventProjectCreated = findCreatedEvent(undefined);
+export const eventMilestoneCreated = findCreatedEvent(TARGET_TYPE_MILESTONE);
+export const eventIssueCreated = findCreatedEvent(TARGET_TYPE_ISSUE);
+export const eventMergeRequestCreated = findCreatedEvent(TARGET_TYPE_MERGE_REQUEST);
+export const eventWikiPageCreated = findCreatedEvent(TARGET_TYPE_WIKI);
+export const eventDesignCreated = findCreatedEvent(TARGET_TYPE_DESIGN);
+export const eventTaskCreated = findWorkItemCreatedEvent(WORK_ITEM_ISSUE_TYPE_TASK);
+export const eventIncidentCreated = findWorkItemCreatedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT);
+
+export const eventClosed = findEventByAction(EVENT_TYPE_CLOSED);
+
+export const findClosedEvent = (targetType) =>
+  findEventByActionAndTargetType(EVENT_TYPE_CLOSED, targetType);
+export const findWorkItemClosedEvent = (issueType) =>
+ findEventByActionAndIssueType(EVENT_TYPE_CLOSED, issueType);
+
+export const eventMilestoneClosed = findClosedEvent(TARGET_TYPE_MILESTONE);
+export const eventIssueClosed = findClosedEvent(TARGET_TYPE_ISSUE);
+export const eventMergeRequestClosed = findClosedEvent(TARGET_TYPE_MERGE_REQUEST);
+export const eventWikiPageClosed = findClosedEvent(TARGET_TYPE_WIKI);
+export const eventDesignClosed = findClosedEvent(TARGET_TYPE_DESIGN);
+export const eventTaskClosed = findWorkItemClosedEvent(WORK_ITEM_ISSUE_TYPE_TASK);
+export const eventIncidentClosed = findWorkItemClosedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT);
+
+export const eventReopened = findEventByAction(EVENT_TYPE_REOPENED);
+
+export const findReopenedEvent = (targetType) =>
+ findEventByActionAndTargetType(EVENT_TYPE_REOPENED, targetType);
+export const findWorkItemReopenedEvent = (issueType) =>
+ findEventByActionAndIssueType(EVENT_TYPE_REOPENED, issueType);
+
+export const eventMilestoneReopened = findReopenedEvent(TARGET_TYPE_MILESTONE);
+export const eventMergeRequestReopened = findReopenedEvent(TARGET_TYPE_MERGE_REQUEST);
+export const eventWikiPageReopened = findReopenedEvent(TARGET_TYPE_WIKI);
+export const eventDesignReopened = findReopenedEvent(TARGET_TYPE_DESIGN);
+export const eventIssueReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_ISSUE);
+export const eventTaskReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_TASK);
+export const eventIncidentReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT);
+
+export const eventCommented = findEventByAction(EVENT_TYPE_COMMENTED);
+
+const findEventByActionAndNoteableType = (action, noteableType) => () =>
+ events.find((event) => event.action === action && event.noteable?.type === noteableType);
+export const findCommentedEvent = (noteableType) =>
+ findEventByActionAndNoteableType(EVENT_TYPE_COMMENTED, noteableType);
+export const findCommentedSnippet = (resourceParentType) => () =>
+ events.find(
+ (event) =>
+ event.action === EVENT_TYPE_COMMENTED &&
+ event.noteable?.type === SNIPPET_NOTEABLE_TYPE &&
+ event.resource_parent?.type === resourceParentType,
+ );
+
+export const eventCommentedIssue = findCommentedEvent(ISSUE_NOTEABLE_TYPE);
+export const eventCommentedMergeRequest = findCommentedEvent(MERGE_REQUEST_NOTEABLE_TYPE);
+export const eventCommentedSnippet = findCommentedEvent(SNIPPET_NOTEABLE_TYPE);
+export const eventCommentedProjectSnippet = findCommentedSnippet(RESOURCE_PARENT_TYPE_PROJECT);
+export const eventCommentedPersonalSnippet = findCommentedSnippet(null);
+export const eventCommentedDesign = findCommentedEvent(DESIGN_NOTEABLE_TYPE);
+// Fixtures do not work for commits because they are not written to the database.
+// Manually creating a commented commit event as a workaround.
+export const eventCommentedCommit = () => ({
+ ...eventCommented(),
+ noteable: {
+ type: COMMIT_NOTEABLE_TYPE,
+ reference_link_text: '83c6aa31',
+ web_url: 'http://localhost/group3/project-1/-/commit/83c6aa31482b9076531ed3a880e75627fd6b335c',
+ first_line_in_markdown: '\u003cp\u003eMy title 9\u003c/p\u003e',
+ },
+});
diff --git a/spec/frontend/contribution_events/utils_spec.js b/spec/frontend/contribution_events/utils_spec.js
new file mode 100644
index 00000000000..298f5ae652a
--- /dev/null
+++ b/spec/frontend/contribution_events/utils_spec.js
@@ -0,0 +1,24 @@
+import { TARGET_TYPE_MILESTONE, WORK_ITEM_ISSUE_TYPE_TASK } from '~/contribution_events/constants';
+import { getValueByEventTarget } from '~/contribution_events/utils';
+import { eventMilestoneCreated, eventTaskCreated } from './utils';
+
+describe('getValueByEventTarget', () => {
+ const milestoneValue = 'milestone';
+ const taskValue = 'task';
+ const fallbackValue = 'fallback';
+
+ const map = {
+ [TARGET_TYPE_MILESTONE]: milestoneValue,
+ [WORK_ITEM_ISSUE_TYPE_TASK]: taskValue,
+ fallback: fallbackValue,
+ };
+
+ it.each`
+ event | expected
+ ${eventMilestoneCreated()} | ${milestoneValue}
+ ${eventTaskCreated()} | ${taskValue}
+ ${{ target: { type: 'unsupported type' } }} | ${fallbackValue}
+ `('returns $expected when event is $event', ({ event, expected }) => {
+ expect(getValueByEventTarget(map, event)).toBe(expected);
+ });
+});
diff --git a/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap b/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
new file mode 100644
index 00000000000..4e87d4d8192
--- /dev/null
+++ b/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
@@ -0,0 +1,220 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Custom emoji settings list component renders table of custom emoji 1`] = `
+<div>
+ <div
+ class="tabs gl-tabs"
+ >
+ <!---->
+ <div
+ class=""
+ >
+ <ul
+ class="nav gl-tabs-nav"
+ role="tablist"
+ >
+ <div
+ class="gl-actions-tabs-start"
+ data-testid="actions-tabs-start"
+ >
+ <a
+ class="btn btn-info btn-md gl-button"
+ data-testid="action-primary"
+ href="/new"
+ to="/new"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+
+ New custom emoji
+
+ </span>
+ </a>
+
+ <!---->
+
+ <!---->
+ </div>
+ <div
+ class="gl-actions-tabs-end"
+ data-testid="actions-tabs-end"
+ >
+ <a
+ class="btn btn-info btn-md gl-button"
+ data-testid="action-primary"
+ href="/new"
+ to="/new"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+
+ New custom emoji
+
+ </span>
+ </a>
+
+ <!---->
+
+ <!---->
+ </div>
+ </ul>
+ </div>
+ <div
+ class="tab-content gl-pt-0 gl-tab-content"
+ >
+ <transition-stub
+ css="true"
+ enteractiveclass=""
+ enterclass=""
+ entertoclass="show"
+ leaveactiveclass=""
+ leaveclass="show"
+ leavetoclass=""
+ mode="out-in"
+ name=""
+ >
+ <div
+ aria-hidden="true"
+ class="tab-pane"
+ role="tabpanel"
+ style="display: none;"
+ >
+
+ <table
+ aria-busy=""
+ aria-colcount="4"
+ class="table b-table gl-table gl-table-layout-fixed"
+ role="table"
+ >
+ <!---->
+ <colgroup>
+ <col
+ style="width: 70px;"
+ />
+ <col />
+ <col
+ style="width: 25%;"
+ />
+ <col
+ style="width: 64px;"
+ />
+ </colgroup>
+ <thead
+ class=""
+ role="rowgroup"
+ >
+ <!---->
+ <tr
+ class=""
+ role="row"
+ >
+ <th
+ aria-colindex="1"
+ class="gl-border-t-0!"
+ role="columnheader"
+ scope="col"
+ >
+ <div>
+ Image
+ </div>
+ </th>
+ <th
+ aria-colindex="2"
+ class="gl-border-t-0!"
+ role="columnheader"
+ scope="col"
+ >
+ <div>
+ Name
+ </div>
+ </th>
+ <th
+ aria-colindex="3"
+ class="gl-border-t-0!"
+ role="columnheader"
+ scope="col"
+ >
+ <div>
+ Created date
+ </div>
+ </th>
+ <th
+ aria-colindex="4"
+ aria-label="Action"
+ class="gl-border-t-0!"
+ role="columnheader"
+ scope="col"
+ >
+ <div />
+ </th>
+ </tr>
+ </thead>
+ <tbody
+ role="rowgroup"
+ >
+ <!---->
+ <tr
+ class=""
+ role="row"
+ >
+ <td
+ aria-colindex="1"
+ class="gl-vertical-align-middle!"
+ role="cell"
+ >
+ <gl-emoji
+ data-fallback-src="https://gitlab.com/custom_emoji/custom_emoji/-/raw/main/img/confused_husky.gif"
+ data-name="confused_husky"
+ data-unicode-version="custom"
+ />
+ </td>
+ <td
+ aria-colindex="2"
+ class="gl-vertical-align-middle! gl-font-monospace"
+ role="cell"
+ >
+ <strong
+ class="gl-str-truncated"
+ >
+ :confused_husky:
+ </strong>
+ </td>
+ <td
+ aria-colindex="3"
+ class="gl-vertical-align-middle!"
+ role="cell"
+ >
+
+ created-at
+
+ </td>
+ <td
+ aria-colindex="4"
+ class=""
+ role="cell"
+ />
+ </tr>
+ <!---->
+ <!---->
+ </tbody>
+ <!---->
+ </table>
+
+ <!---->
+ </div>
+ </transition-stub>
+ <!---->
+ </div>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/custom_emoji/components/delete_item_spec.js b/spec/frontend/custom_emoji/components/delete_item_spec.js
new file mode 100644
index 00000000000..06c4ca8d54b
--- /dev/null
+++ b/spec/frontend/custom_emoji/components/delete_item_spec.js
@@ -0,0 +1,89 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import * as Sentry from '@sentry/browser';
+import { GlModal } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
+import DeleteItem from '~/custom_emoji/components/delete_item.vue';
+import deleteCustomEmojiMutation from '~/custom_emoji/queries/delete_custom_emoji.mutation.graphql';
+import { CUSTOM_EMOJI } from '../mock_data';
+
+jest.mock('~/alert');
+jest.mock('@sentry/browser');
+
+let wrapper;
+let deleteMutationSpy;
+
+Vue.use(VueApollo);
+
+function createSuccessSpy() {
+ deleteMutationSpy = jest.fn().mockResolvedValue({
+ data: { destroyCustomEmoji: { customEmoji: { id: CUSTOM_EMOJI[0].id } } },
+ });
+}
+
+function createErrorSpy() {
+ deleteMutationSpy = jest.fn().mockRejectedValue();
+}
+
+function createMockApolloProvider() {
+ const requestHandlers = [[deleteCustomEmojiMutation, deleteMutationSpy]];
+
+ return createMockApollo(requestHandlers);
+}
+
+function createComponent() {
+ const apolloProvider = createMockApolloProvider();
+
+ wrapper = mountExtended(DeleteItem, {
+ apolloProvider,
+ propsData: {
+ emoji: CUSTOM_EMOJI[0],
+ },
+ });
+}
+
+const findDeleteButton = () => wrapper.findByTestId('delete-button');
+const findModal = () => wrapper.findComponent(GlModal);
+
+describe('Custom emoji delete item component', () => {
+ it('opens modal when clicking button', async () => {
+ createSuccessSpy();
+ createComponent();
+
+ await findDeleteButton().trigger('click');
+
+ expect(document.querySelector('.gl-modal')).not.toBe(null);
+ });
+
+ it('calls GraphQL mutation on modals primary action', () => {
+ createSuccessSpy();
+ createComponent();
+
+ findModal().vm.$emit('primary');
+
+ expect(deleteMutationSpy).toHaveBeenCalledWith({ id: CUSTOM_EMOJI[0].id });
+ });
+
+ it('creates alert when mutation fails', async () => {
+ createErrorSpy();
+ createComponent();
+
+ findModal().vm.$emit('primary');
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith('Failed to delete custom emoji. Please try again.');
+ });
+
+ it('calls sentry when mutation fails', async () => {
+ createErrorSpy();
+ createComponent();
+
+ findModal().vm.$emit('primary');
+ await waitForPromises();
+
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/custom_emoji/components/form_spec.js b/spec/frontend/custom_emoji/components/form_spec.js
new file mode 100644
index 00000000000..c5010d93da4
--- /dev/null
+++ b/spec/frontend/custom_emoji/components/form_spec.js
@@ -0,0 +1,116 @@
+import Vue, { nextTick } from 'vue';
+import { GlAlert } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import Form from '~/custom_emoji/components/form.vue';
+import createCustomEmojiMutation from '~/custom_emoji/queries/create_custom_emoji.mutation.graphql';
+import { CREATED_CUSTOM_EMOJI, CREATED_CUSTOM_EMOJI_WITH_ERROR } from '../mock_data';
+
+let wrapper;
+let createCustomEmojiResponseSpy;
+
+function createMockApolloProvider(response) {
+ Vue.use(VueApollo);
+
+ createCustomEmojiResponseSpy = jest.fn().mockResolvedValue(response);
+
+ const requestHandlers = [[createCustomEmojiMutation, createCustomEmojiResponseSpy]];
+
+ return createMockApollo(requestHandlers);
+}
+
+function createComponent(response = CREATED_CUSTOM_EMOJI) {
+ const mockApollo = createMockApolloProvider(response);
+
+ return mountExtended(Form, {
+ provide: {
+ groupPath: 'gitlab-org',
+ },
+ apolloProvider: mockApollo,
+ });
+}
+
+const findCustomEmojiNameInput = () => wrapper.findByTestId('custom-emoji-name-input');
+const findCustomEmojiNameFormGroup = () => wrapper.findByTestId('custom-emoji-name-form-group');
+const findCustomEmojiUrlInput = () => wrapper.findByTestId('custom-emoji-url-input');
+const findCustomEmojiUrlFormGroup = () => wrapper.findByTestId('custom-emoji-url-form-group');
+const findCustomEmojiFrom = () => wrapper.findByTestId('custom-emoji-form');
+const findAlerts = () => wrapper.findAllComponents(GlAlert);
+const findSubmitBtn = () => wrapper.findByTestId('custom-emoji-form-submit-btn');
+
+function completeForm() {
+ findCustomEmojiNameInput().setValue('Test');
+ findCustomEmojiUrlInput().setValue('https://example.com');
+ findCustomEmojiFrom().trigger('submit');
+}
+
+describe('Custom emoji form component', () => {
+ describe('creates custom emoji', () => {
+ it('calls apollo mutation', async () => {
+ wrapper = createComponent();
+
+ completeForm();
+
+ await waitForPromises();
+
+ expect(createCustomEmojiResponseSpy).toHaveBeenCalledWith({
+ groupPath: 'gitlab-org',
+ url: 'https://example.com',
+ name: 'Test',
+ });
+ });
+
+ it('does not submit when form validation fails', async () => {
+ wrapper = createComponent();
+
+ findCustomEmojiFrom().trigger('submit');
+
+ await waitForPromises();
+
+ expect(createCustomEmojiResponseSpy).not.toHaveBeenCalled();
+ });
+
+ it.each`
+ findFormGroup | findInput | fieldName
+ ${findCustomEmojiNameFormGroup} | ${findCustomEmojiUrlInput} | ${'name'}
+ ${findCustomEmojiUrlFormGroup} | ${findCustomEmojiNameInput} | ${'URL'}
+ `('shows errors for empty $fieldName input', async ({ findFormGroup, findInput }) => {
+ wrapper = createComponent(CREATED_CUSTOM_EMOJI_WITH_ERROR);
+
+ findInput().setValue('Test');
+ findCustomEmojiFrom().trigger('submit');
+
+ await waitForPromises();
+
+ expect(findFormGroup().classes('is-invalid')).toBe(true);
+ });
+
+ it('displays errors when mutation fails', async () => {
+ wrapper = createComponent(CREATED_CUSTOM_EMOJI_WITH_ERROR);
+
+ completeForm();
+
+ await waitForPromises();
+
+ const alertMessages = findAlerts().wrappers.map((x) => x.text());
+
+ expect(alertMessages).toEqual(CREATED_CUSTOM_EMOJI_WITH_ERROR.data.createCustomEmoji.errors);
+ });
+
+ it('shows loading state when saving', async () => {
+ wrapper = createComponent();
+
+ completeForm();
+
+ await nextTick();
+
+ expect(findSubmitBtn().props('loading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findSubmitBtn().props('loading')).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/custom_emoji/components/list_spec.js b/spec/frontend/custom_emoji/components/list_spec.js
new file mode 100644
index 00000000000..b5729d59464
--- /dev/null
+++ b/spec/frontend/custom_emoji/components/list_spec.js
@@ -0,0 +1,79 @@
+import Vue from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import List from '~/custom_emoji/components/list.vue';
+import DeleteItem from '~/custom_emoji/components/delete_item.vue';
+import { CUSTOM_EMOJI } from '../mock_data';
+
+jest.mock('~/lib/utils/datetime/date_format_utility', () => ({
+ formatDate: (date) => date,
+}));
+
+Vue.config.ignoredElements = ['gl-emoji'];
+
+let wrapper;
+
+function createComponent(propsData = {}) {
+ wrapper = mountExtended(List, {
+ propsData: {
+ customEmojis: CUSTOM_EMOJI,
+ pageInfo: {},
+ count: CUSTOM_EMOJI.length,
+ userPermissions: { createCustomEmoji: true },
+ ...propsData,
+ },
+ });
+}
+
+describe('Custom emoji settings list component', () => {
+ it('renders table of custom emoji', () => {
+ createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('user permissions', () => {
+ it.each`
+ createCustomEmoji | visible
+ ${true} | ${true}
+ ${false} | ${false}
+ `(
+ 'renders create new button if createCustomEmoji is $createCustomEmoji',
+ ({ createCustomEmoji, visible }) => {
+ createComponent({ userPermissions: { createCustomEmoji } });
+
+ expect(wrapper.findByTestId('action-primary').exists()).toBe(visible);
+ },
+ );
+ });
+
+ describe('pagination', () => {
+ it.each`
+ emits | button | pageInfo
+ ${{ before: 'startCursor' }} | ${'prevButton'} | ${{ hasPreviousPage: true, startCursor: 'startCursor' }}
+ ${{ after: 'endCursor' }} | ${'nextButton'} | ${{ hasNextPage: true, endCursor: 'endCursor' }}
+ `('emits $emits when $button is clicked', async ({ emits, button, pageInfo }) => {
+ createComponent({ pageInfo });
+
+ await wrapper.findByTestId(button).vm.$emit('click');
+
+ expect(wrapper.emitted('input')[0]).toEqual([emits]);
+ });
+ });
+
+ describe('delete button', () => {
+ it.each`
+ deleteCustomEmoji | rendersText | renders
+ ${true} | ${'renders'} | ${true}
+ ${false} | ${'does not render'} | ${false}
+ `(
+ '$rendersText delete button when deleteCustomEmoji is $deleteCustomEmoji',
+ ({ deleteCustomEmoji, renders }) => {
+ createComponent({
+ customEmojis: [{ ...CUSTOM_EMOJI[0], userPermissions: { deleteCustomEmoji } }],
+ });
+
+ expect(wrapper.findComponent(DeleteItem).exists()).toBe(renders);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/custom_emoji/mock_data.js b/spec/frontend/custom_emoji/mock_data.js
new file mode 100644
index 00000000000..f2b32bf1cfb
--- /dev/null
+++ b/spec/frontend/custom_emoji/mock_data.js
@@ -0,0 +1,27 @@
+export const CUSTOM_EMOJI = [
+ {
+ id: 'gid://gitlab/CustomEmoji/1',
+ name: 'confused_husky',
+ url: 'https://gitlab.com/custom_emoji/custom_emoji/-/raw/main/img/confused_husky.gif',
+ createdAt: 'created-at',
+ userPermissions: {
+ deleteCustomEmoji: false,
+ },
+ },
+];
+
+export const CREATED_CUSTOM_EMOJI = {
+ data: {
+ createCustomEmoji: {
+ errors: [],
+ },
+ },
+};
+
+export const CREATED_CUSTOM_EMOJI_WITH_ERROR = {
+ data: {
+ createCustomEmoji: {
+ errors: ['Test error'],
+ },
+ },
+};
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
index 1cd16e39417..c210880cbdc 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
@@ -1,6 +1,7 @@
import { GlButton, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Api from '~/api';
import DeployFreezeModal from '~/deploy_freeze/components/deploy_freeze_modal.vue';
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js
index 883cc6a344a..4068493a3b7 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DeployFreezeModal from '~/deploy_freeze/components/deploy_freeze_modal.vue';
import DeployFreezeSettings from '~/deploy_freeze/components/deploy_freeze_settings.vue';
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
index 6a9e482a184..e69a486ea78 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
@@ -1,6 +1,7 @@
import { GlModal } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DeployFreezeTable from '~/deploy_freeze/components/deploy_freeze_table.vue';
import createStore from '~/deploy_freeze/store';
@@ -45,7 +46,9 @@ describe('Deploy freeze table', () => {
it('displays empty', () => {
expect(findEmptyFreezePeriods().exists()).toBe(true);
expect(findEmptyFreezePeriods().text()).toBe(
- 'No deploy freezes exist for this project. To add one, select Add deploy freeze',
+ `No deploy freezes exist for this project. To add one, select
+ Add deploy freeze
+ above.`,
);
});
diff --git a/spec/frontend/deploy_keys/components/keys_panel_spec.js b/spec/frontend/deploy_keys/components/keys_panel_spec.js
index e0f86aadad4..e63b269fe23 100644
--- a/spec/frontend/deploy_keys/components/keys_panel_spec.js
+++ b/spec/frontend/deploy_keys/components/keys_panel_spec.js
@@ -41,10 +41,10 @@ describe('Deploy keys panel', () => {
it('renders help box if keys are empty', () => {
mountComponent({ keys: [] });
- expect(wrapper.find('.settings-message').exists()).toBe(true);
+ expect(wrapper.find('.gl-new-card-empty').exists()).toBe(true);
- expect(wrapper.find('.settings-message').text().trim()).toBe(
- 'No deploy keys found. Create one with the form above.',
+ expect(wrapper.find('.gl-new-card-empty').text().trim()).toBe(
+ 'No deploy keys found, start by adding a new one above.',
);
});
diff --git a/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js b/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js
index a3fdab88270..b149a94f3a1 100644
--- a/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js
+++ b/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js
@@ -1,11 +1,18 @@
import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { GlButton, GlFormCheckbox, GlFormInput, GlFormInputGroup, GlDatepicker } from '@gitlab/ui';
+import {
+ GlAlert,
+ GlButton,
+ GlFormCheckbox,
+ GlFormInput,
+ GlFormInputGroup,
+ GlDatepicker,
+} from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { TEST_HOST } from 'helpers/test_constants';
import NewDeployToken from '~/deploy_tokens/components/new_deploy_token.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert, VARIANT_INFO } from '~/alert';
@@ -36,11 +43,72 @@ describe('New Deploy Token', () => {
tokenType,
},
stubs: {
+ GlAlert,
GlFormCheckbox,
},
});
};
+ const findNewTokenAlert = () => wrapper.findComponent(GlAlert);
+ const findClipboardButtons = () => wrapper.findAllComponents(ClipboardButton);
+ const findAllCheckboxes = () => wrapper.findAllComponents(GlFormCheckbox);
+ const findFormInputs = () => wrapper.findAllComponents(GlFormInput);
+
+ const setScopeCheckboxes = ({
+ readRepoValue = true,
+ readRegistryValue = true,
+ writeRegistryValue = true,
+ readPackageRegistryValue = true,
+ writePackageRegistryValue = true,
+ } = {}) => {
+ const [
+ readRepo,
+ readRegistry,
+ writeRegistry,
+ readPackageRegistry,
+ writePackageRegistry,
+ ] = findAllCheckboxes().wrappers;
+
+ readRepo.vm.$emit('input', readRepoValue);
+ readRegistry.vm.$emit('input', readRegistryValue);
+ writeRegistry.vm.$emit('input', writeRegistryValue);
+ readPackageRegistry.vm.$emit('input', readPackageRegistryValue);
+ writePackageRegistry.vm.$emit('input', writePackageRegistryValue);
+ };
+
+ const setTokenName = ({ nameVal = 'test name', usernameVal = 'test username' } = {}) => {
+ const formInputs = findFormInputs();
+ formInputs.at(0).vm.$emit('input', nameVal);
+ formInputs.at(2).vm.$emit('input', usernameVal);
+ };
+
+ const setTokenForm = ({ date }) => {
+ setTokenName();
+
+ const datepicker = wrapper.findAllComponents(GlDatepicker).at(0);
+ datepicker.vm.$emit('input', date);
+
+ setScopeCheckboxes();
+ };
+
+ const submitToken = async () => {
+ wrapper.findAllComponents(GlButton).at(0).vm.$emit('click');
+ await waitForPromises();
+ };
+
+ const checkSubmittedToken = () => {
+ const [tokenUsername, tokenValue] = wrapper.findAllComponents(GlFormInputGroup).wrappers;
+
+ expect(tokenUsername.props('value')).toBe('test token username');
+ expect(tokenValue.props('value')).toBe('test token');
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ variant: VARIANT_INFO,
+ }),
+ );
+ };
+
describe('without a container registry', () => {
beforeEach(() => {
wrapper = factory({ containerRegistryEnabled: false });
@@ -59,163 +127,130 @@ describe('New Deploy Token', () => {
});
it('should show the read registry scope', () => {
- const checkbox = wrapper.findAllComponents(GlFormCheckbox).at(1);
+ const checkbox = findAllCheckboxes().at(1);
expect(checkbox.text()).toContain('read_registry');
});
+ });
- function submitTokenThenCheck() {
- wrapper.findAllComponents(GlButton).at(0).vm.$emit('click');
-
- return waitForPromises()
- .then(() => nextTick())
- .then(() => {
- const [tokenUsername, tokenValue] = wrapper.findAllComponents(GlFormInputGroup).wrappers;
+ describe('token submission', () => {
+ let mockAxios;
+ const defaultTokenPayload = {
+ name: 'test name',
+ username: 'test username',
+ read_repository: true,
+ read_registry: true,
+ write_registry: true,
+ read_package_registry: true,
+ write_package_registry: true,
+ };
- expect(tokenUsername.props('value')).toBe('test token username');
- expect(tokenValue.props('value')).toBe('test token');
+ const mockTokenRequest = ({ payload, status, response }) => {
+ mockAxios.onPost(createNewTokenPath, { deploy_token: payload }).replyOnce(status, response);
+ };
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- variant: VARIANT_INFO,
- }),
- );
- });
- }
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ wrapper = factory();
+ });
it('should alert error message if token creation fails', async () => {
- const mockAxios = new MockAdapter(axios);
+ const message = 'Server error while creating a token';
+ const date = new Date();
+
+ setTokenForm({ date });
+ mockTokenRequest({
+ payload: {
+ ...defaultTokenPayload,
+ expires_at: date.toISOString(),
+ },
+ status: HTTP_STATUS_INTERNAL_SERVER_ERROR,
+ response: { message },
+ });
+
+ await submitToken();
+ expect(createAlert).toHaveBeenCalledWith(expect.objectContaining({ message }));
+ });
+
+ it('should make a request to create a token on submit', async () => {
const date = new Date();
- const formInputs = wrapper.findAllComponents(GlFormInput);
- const name = formInputs.at(0);
- const username = formInputs.at(2);
- name.vm.$emit('input', 'test name');
- username.vm.$emit('input', 'test username');
-
- const datepicker = wrapper.findAllComponents(GlDatepicker).at(0);
- datepicker.vm.$emit('input', date);
-
- const [
- readRepo,
- readRegistry,
- writeRegistry,
- readPackageRegistry,
- writePackageRegistry,
- ] = wrapper.findAllComponents(GlFormCheckbox).wrappers;
- readRepo.vm.$emit('input', true);
- readRegistry.vm.$emit('input', true);
- writeRegistry.vm.$emit('input', true);
- readPackageRegistry.vm.$emit('input', true);
- writePackageRegistry.vm.$emit('input', true);
-
- const expectedErrorMessage = 'Server error while creating a token';
-
- mockAxios
- .onPost(createNewTokenPath, {
- deploy_token: {
- name: 'test name',
- expires_at: date.toISOString(),
- username: 'test username',
- read_repository: true,
- read_registry: true,
- write_registry: true,
- read_package_registry: true,
- write_package_registry: true,
- },
- })
- .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, { message: expectedErrorMessage });
-
- wrapper.findAllComponents(GlButton).at(0).vm.$emit('click');
-
- await waitForPromises().then(() => nextTick());
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: expectedErrorMessage,
- }),
- );
+
+ setTokenForm({ date });
+ mockTokenRequest({
+ payload: {
+ ...defaultTokenPayload,
+ expires_at: date.toISOString(),
+ },
+ status: HTTP_STATUS_OK,
+ response: { username: 'test token username', token: 'test token' },
+ });
+
+ await submitToken();
+
+ checkSubmittedToken();
});
- it('should make a request to create a token on submit', () => {
- const mockAxios = new MockAdapter(axios);
+ it('should request a token without an expiration date', async () => {
+ const nameVal = 'test never expire name';
+ const usernameVal = 'test never expire username';
+ const readRepoValue = false;
+ const readRegistryValue = false;
+ const writeRegistryValue = false;
+
+ setTokenName({ nameVal, usernameVal });
+ setScopeCheckboxes({ readRepoValue, readRegistryValue, writeRegistryValue });
+
+ mockTokenRequest({
+ payload: {
+ ...defaultTokenPayload,
+ expires_at: null,
+ name: nameVal,
+ username: usernameVal,
+ read_repository: readRepoValue,
+ read_registry: readRegistryValue,
+ write_registry: writeRegistryValue,
+ },
+ status: HTTP_STATUS_OK,
+ response: { username: 'test token username', token: 'test token' },
+ });
- const date = new Date();
- const formInputs = wrapper.findAllComponents(GlFormInput);
- const name = formInputs.at(0);
- const username = formInputs.at(2);
- name.vm.$emit('input', 'test name');
- username.vm.$emit('input', 'test username');
-
- const datepicker = wrapper.findAllComponents(GlDatepicker).at(0);
- datepicker.vm.$emit('input', date);
-
- const [
- readRepo,
- readRegistry,
- writeRegistry,
- readPackageRegistry,
- writePackageRegistry,
- ] = wrapper.findAllComponents(GlFormCheckbox).wrappers;
- readRepo.vm.$emit('input', true);
- readRegistry.vm.$emit('input', true);
- writeRegistry.vm.$emit('input', true);
- readPackageRegistry.vm.$emit('input', true);
- writePackageRegistry.vm.$emit('input', true);
-
- mockAxios
- .onPost(createNewTokenPath, {
- deploy_token: {
- name: 'test name',
- expires_at: date.toISOString(),
- username: 'test username',
- read_repository: true,
- read_registry: true,
- write_registry: true,
- read_package_registry: true,
- write_package_registry: true,
- },
- })
- .replyOnce(HTTP_STATUS_OK, { username: 'test token username', token: 'test token' });
-
- return submitTokenThenCheck();
+ await submitToken();
+
+ checkSubmittedToken();
});
- it('should request a token without an expiration date', () => {
- const mockAxios = new MockAdapter(axios);
+ it('should display the created token', async () => {
+ expect(findNewTokenAlert().exists()).toBe(false);
+
+ const date = new Date();
+ setTokenForm({ date });
+
+ mockTokenRequest({
+ payload: {
+ ...defaultTokenPayload,
+ expires_at: date.toISOString(),
+ },
+ status: HTTP_STATUS_OK,
+ response: { username: 'test token username', token: 'test token' },
+ });
- const formInputs = wrapper.findAllComponents(GlFormInput);
- const name = formInputs.at(0);
- const username = formInputs.at(2);
- name.vm.$emit('input', 'test never expire name');
- username.vm.$emit('input', 'test never expire username');
+ await submitToken();
- const [, , , readPackageRegistry, writePackageRegistry] = wrapper.findAllComponents(
- GlFormCheckbox,
- ).wrappers;
- readPackageRegistry.vm.$emit('input', true);
- writePackageRegistry.vm.$emit('input', true);
-
- mockAxios
- .onPost(createNewTokenPath, {
- deploy_token: {
- name: 'test never expire name',
- expires_at: null,
- username: 'test never expire username',
- read_repository: false,
- read_registry: false,
- write_registry: false,
- read_package_registry: true,
- write_package_registry: true,
- },
- })
- .replyOnce(HTTP_STATUS_OK, { username: 'test token username', token: 'test token' });
-
- return submitTokenThenCheck();
+ const tokenAlert = findNewTokenAlert();
+ expect(tokenAlert.exists()).toBe(true);
+ expect(tokenAlert.text()).toContain('Your new deploy token');
+
+ const [usernameBtn, tokenBtn] = findClipboardButtons().wrappers;
+ expect(usernameBtn.props()).toMatchObject({
+ text: 'test token username',
+ title: 'Copy username',
+ });
+ expect(tokenBtn.props()).toMatchObject({ text: 'test token', title: 'Copy deploy token' });
});
});
describe('help text for write_package_registry scope', () => {
- const findWriteRegistryScopeCheckbox = () => wrapper.findAllComponents(GlFormCheckbox).at(4);
+ const findWriteRegistryScopeCheckbox = () => findAllCheckboxes().at(4);
describe('with project tokenType', () => {
beforeEach(() => {
diff --git a/spec/frontend/design_management/components/design_description/description_form_spec.js b/spec/frontend/design_management/components/design_description/description_form_spec.js
index a61cc2af9b6..f7feff98da3 100644
--- a/spec/frontend/design_management/components/design_description/description_form_spec.js
+++ b/spec/frontend/design_management/components/design_description/description_form_spec.js
@@ -188,10 +188,9 @@ describe('Design description form', () => {
});
it('tracks submit action', () => {
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'editor_type_used', {
- context: 'Design',
- editorType: 'editor_type_plain_text_editor',
- label: 'editor_tracking',
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
+ label: 'markdown_editor',
+ property: 'Design',
});
});
});
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index bd37d917faa..f0615f61059 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -7,8 +7,7 @@ exports[`Design management design index page renders design index 1`] = `
<div
class="gl-display-flex gl-overflow-hidden gl-flex-grow-1 gl-flex-direction-column gl-relative"
>
- <design-destroyer-stub
- filenames="test.jpg"
+ <div
iid="1"
project-path="project-path"
/>
@@ -126,8 +125,7 @@ exports[`Design management design index page with error GlAlert is rendered in c
<div
class="gl-display-flex gl-overflow-hidden gl-flex-grow-1 gl-flex-direction-column gl-relative"
>
- <design-destroyer-stub
- filenames="test.jpg"
+ <div
iid="1"
project-path="project-path"
/>
diff --git a/spec/frontend/design_management/pages/design/index_spec.js b/spec/frontend/design_management/pages/design/index_spec.js
index 6cddb0cbbf1..57651043384 100644
--- a/spec/frontend/design_management/pages/design/index_spec.js
+++ b/spec/frontend/design_management/pages/design/index_spec.js
@@ -6,6 +6,7 @@ import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Api from '~/api';
import DesignPresentation from '~/design_management/components/design_presentation.vue';
import DesignSidebar from '~/design_management/components/design_sidebar.vue';
+import DesignDestroyer from '~/design_management/components/design_destroyer.vue';
import { DESIGN_DETAIL_LAYOUT_CLASSLIST } from '~/design_management/constants';
import updateActiveDiscussion from '~/design_management/graphql/mutations/update_active_discussion.mutation.graphql';
import getDesignQuery from '~/design_management/graphql/queries/get_design.query.graphql';
@@ -24,6 +25,8 @@ import {
} from '~/design_management/utils/tracking';
import { createAlert } from '~/alert';
import * as cacheUpdate from '~/design_management/utils/cache_update';
+import { stubComponent } from 'helpers/stub_component';
+
import mockAllVersions from '../../mock_data/all_versions';
import design from '../../mock_data/design';
import mockProject from '../../mock_data/project';
@@ -106,7 +109,10 @@ describe('Design management design index page', () => {
wrapper = shallowMount(DesignIndex, {
propsData: { id: '1' },
mocks: { $apollo },
- stubs,
+ stubs: {
+ ...stubs,
+ DesignDestroyer: stubComponent(DesignDestroyer, { template: '<div></div>' }),
+ },
provide: {
issueIid: '1',
projectPath: 'project-path',
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index fb5cf4dfd0a..c1f0966f9c6 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -2,6 +2,7 @@ import { GlLoadingIcon, GlPagination } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'spec/test_constants';
@@ -11,7 +12,7 @@ import CompareVersions from '~/diffs/components/compare_versions.vue';
import DiffFile from '~/diffs/components/diff_file.vue';
import NoChanges from '~/diffs/components/no_changes.vue';
import findingsDrawer from '~/diffs/components/shared/findings_drawer.vue';
-import TreeList from '~/diffs/components/tree_list.vue';
+import DiffsFileTree from '~/diffs/components/diffs_file_tree.vue';
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
@@ -252,34 +253,6 @@ describe('diffs/components/app', () => {
});
});
- describe('resizable', () => {
- afterEach(() => {
- localStorage.removeItem('mr_tree_list_width');
- });
-
- it('sets initial width when no localStorage has been set', () => {
- createComponent();
-
- expect(wrapper.vm.treeWidth).toEqual(320);
- });
-
- it('sets initial width to localStorage size', () => {
- localStorage.setItem('mr_tree_list_width', '200');
-
- createComponent();
-
- expect(wrapper.vm.treeWidth).toEqual(200);
- });
-
- it('sets width of tree list', () => {
- createComponent({}, ({ state }) => {
- state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
- });
-
- expect(wrapper.find('.js-diff-tree-list').element.style.width).toEqual('320px');
- });
- });
-
it('marks current diff file based on currently highlighted row', async () => {
window.location.hash = 'ABC_123';
@@ -596,18 +569,21 @@ describe('diffs/components/app', () => {
);
});
- it("doesn't render tree list when no changes exist", () => {
+ it('should always render diffs file tree', () => {
createComponent();
-
- expect(wrapper.findComponent(TreeList).exists()).toBe(false);
+ expect(wrapper.findComponent(DiffsFileTree).exists()).toBe(true);
});
- it('should render tree list', () => {
+ it('should pass renderDiffFiles to file tree as true when files are present', () => {
createComponent({}, ({ state }) => {
state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
});
+ expect(wrapper.findComponent(DiffsFileTree).props('renderDiffFiles')).toBe(true);
+ });
- expect(wrapper.findComponent(TreeList).exists()).toBe(true);
+ it('should pass renderDiffFiles to file tree as false without files', () => {
+ createComponent();
+ expect(wrapper.findComponent(DiffsFileTree).props('renderDiffFiles')).toBe(false);
});
});
diff --git a/spec/frontend/diffs/components/collapsed_files_warning_spec.js b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
index ae40f6c898d..4e34691f72b 100644
--- a/spec/frontend/diffs/components/collapsed_files_warning_spec.js
+++ b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
@@ -1,5 +1,6 @@
import { shallowMount, mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
import { EVT_EXPAND_ALL_FILES } from '~/diffs/constants';
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index cbbfd88260b..3601f0cc7b0 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -84,7 +84,7 @@ describe('CompareVersions', () => {
const treeListBtn = wrapper.find('.js-toggle-tree-list');
expect(treeListBtn.exists()).toBe(true);
- expect(treeListBtn.attributes('title')).toBe('Hide file browser');
+ expect(treeListBtn.attributes('title')).toBe('Hide file browser (or press F)');
expect(treeListBtn.props('icon')).toBe('file-tree');
});
diff --git a/spec/frontend/diffs/components/diff_code_quality_spec.js b/spec/frontend/diffs/components/diff_code_quality_spec.js
deleted file mode 100644
index 73976ebd713..00000000000
--- a/spec/frontend/diffs/components/diff_code_quality_spec.js
+++ /dev/null
@@ -1,61 +0,0 @@
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import DiffCodeQuality from '~/diffs/components/diff_code_quality.vue';
-import DiffInlineFindings from '~/diffs/components/diff_inline_findings.vue';
-import { NEW_CODE_QUALITY_FINDINGS, NEW_SAST_FINDINGS } from '~/diffs/i18n';
-import {
- multipleCodeQualityNoSast,
- multipleSastNoCodeQuality,
-} from '../mock_data/diff_code_quality';
-
-let wrapper;
-
-const diffInlineFindings = () => wrapper.findComponent(DiffInlineFindings);
-const allDiffInlineFindings = () => wrapper.findAllComponents(DiffInlineFindings);
-
-describe('DiffCodeQuality', () => {
- const createWrapper = (findings) => {
- return mountExtended(DiffCodeQuality, {
- propsData: {
- expandedLines: [],
- codeQuality: findings.codeQuality,
- sast: findings.sast,
- },
- });
- };
-
- it('hides details and throws hideCodeQualityFindings event on close click', async () => {
- wrapper = createWrapper(multipleCodeQualityNoSast);
- expect(wrapper.findByTestId('diff-codequality').exists()).toBe(true);
-
- await wrapper.findByTestId('diff-codequality-close').trigger('click');
- expect(wrapper.emitted('hideCodeQualityFindings')).toHaveLength(1);
- });
-
- it('renders diff inline findings component with correct props for codequality array', () => {
- wrapper = createWrapper(multipleCodeQualityNoSast);
-
- expect(diffInlineFindings().props('title')).toBe(NEW_CODE_QUALITY_FINDINGS);
- expect(diffInlineFindings().props('findings')).toBe(multipleCodeQualityNoSast.codeQuality);
- });
-
- it('does not render codeQuality section when codeQuality array is empty', () => {
- wrapper = createWrapper(multipleSastNoCodeQuality);
-
- expect(diffInlineFindings().props('title')).toBe(NEW_SAST_FINDINGS);
- expect(allDiffInlineFindings()).toHaveLength(1);
- });
-
- it('renders heading and correct amount of list items for sast array and their description', () => {
- wrapper = createWrapper(multipleSastNoCodeQuality);
-
- expect(diffInlineFindings().props('title')).toBe(NEW_SAST_FINDINGS);
- expect(diffInlineFindings().props('findings')).toBe(multipleSastNoCodeQuality.sast);
- });
-
- it('does not render sast section when sast array is empty', () => {
- wrapper = createWrapper(multipleCodeQualityNoSast);
-
- expect(diffInlineFindings().props('title')).toBe(NEW_CODE_QUALITY_FINDINGS);
- expect(allDiffInlineFindings()).toHaveLength(1);
- });
-});
diff --git a/spec/frontend/diffs/components/diff_content_spec.js b/spec/frontend/diffs/components/diff_content_spec.js
index 3b37edbcb1d..be085ba1525 100644
--- a/spec/frontend/diffs/components/diff_content_spec.js
+++ b/spec/frontend/diffs/components/diff_content_spec.js
@@ -1,11 +1,11 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { sprintf } from '~/locale';
import { createAlert } from '~/alert';
-import * as diffRowUtils from 'ee_else_ce/diffs/components/diff_row_utils';
import DiffContentComponent from '~/diffs/components/diff_content.vue';
import DiffDiscussions from '~/diffs/components/diff_discussions.vue';
import DiffView from '~/diffs/components/diff_view.vue';
@@ -120,32 +120,6 @@ describe('DiffContent', () => {
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
-
- it('should include Sast findings when sastReportsInInlineDiff flag is true', () => {
- const mapParallelSpy = jest.spyOn(diffRowUtils, 'mapParallel');
- const mapParallelNoSastSpy = jest.spyOn(diffRowUtils, 'mapParallelNoSast');
- createComponent({
- provide: {
- glFeatures: {
- sastReportsInInlineDiff: true,
- },
- },
- props: { diffFile: { ...textDiffFile, renderingLines: true } },
- });
-
- expect(mapParallelSpy).toHaveBeenCalled();
- expect(mapParallelNoSastSpy).not.toHaveBeenCalled();
- });
-
- it('should not include Sast findings when sastReportsInInlineDiff flag is false', () => {
- const mapParallelSpy = jest.spyOn(diffRowUtils, 'mapParallel');
- const mapParallelNoSastSpy = jest.spyOn(diffRowUtils, 'mapParallelNoSast');
-
- createComponent({ props: { diffFile: { ...textDiffFile, renderingLines: true } } });
-
- expect(mapParallelNoSastSpy).toHaveBeenCalled();
- expect(mapParallelSpy).not.toHaveBeenCalled();
- });
});
describe('with whitespace only change', () => {
diff --git a/spec/frontend/diffs/components/diff_discussion_reply_spec.js b/spec/frontend/diffs/components/diff_discussion_reply_spec.js
index 348439d6006..1e542c413b2 100644
--- a/spec/frontend/diffs/components/diff_discussion_reply_spec.js
+++ b/spec/frontend/diffs/components/diff_discussion_reply_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DiffDiscussionReply from '~/diffs/components/diff_discussion_reply.vue';
import NoteSignedOutWidget from '~/notes/components/note_signed_out_widget.vue';
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index d3afaab492d..b089825090b 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import { cloneDeep } from 'lodash';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index db6cde883f3..53f135471b7 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -1,5 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -42,6 +43,7 @@ jest.mock('~/notes/mixins/diff_line_note_form', () => ({
Vue.use(Vuex);
const saveDiffDiscussionMock = jest.fn();
+const prefetchFileNeighborsMock = jest.fn();
function changeViewer(store, index, { automaticallyCollapsed, manuallyCollapsed, name }) {
const file = store.state.diffs.diffFiles[index];
@@ -91,6 +93,7 @@ function createComponent({ file, first = false, last = false, options = {}, prop
const diffs = diffsModule();
diffs.actions = {
...diffs.actions,
+ prefetchFileNeighbors: prefetchFileNeighborsMock,
saveDiffDiscussion: saveDiffDiscussionMock,
};
@@ -155,19 +158,44 @@ const triggerSaveDraftNote = (wrapper, note, parent, error) =>
findNoteForm(wrapper).vm.$emit('handleFormUpdateAddToReview', note, false, parent, error);
describe('DiffFile', () => {
+ let readableFile;
let wrapper;
let store;
let axiosMock;
beforeEach(() => {
+ readableFile = getReadableFile();
axiosMock = new MockAdapter(axios);
- ({ wrapper, store } = createComponent({ file: getReadableFile() }));
+ ({ wrapper, store } = createComponent({ file: readableFile }));
});
afterEach(() => {
axiosMock.restore();
});
+ describe('mounted', () => {
+ beforeEach(() => {
+ jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
+ });
+
+ it.each`
+ description | fileByFile
+ ${'does not prefetch if not in file-by-file mode'} | ${false}
+ ${'prefetches when in file-by-file mode'} | ${true}
+ `('$description', ({ fileByFile }) => {
+ createComponent({
+ props: { viewDiffsFileByFile: fileByFile },
+ file: readableFile,
+ });
+
+ if (fileByFile) {
+ expect(prefetchFileNeighborsMock).toHaveBeenCalled();
+ } else {
+ expect(prefetchFileNeighborsMock).not.toHaveBeenCalled();
+ }
+ });
+ });
+
describe('bus events', () => {
beforeEach(() => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
diff --git a/spec/frontend/diffs/components/diff_code_quality_item_spec.js b/spec/frontend/diffs/components/diff_inline_findings_item_spec.js
index 085eb096239..72d96d3435f 100644
--- a/spec/frontend/diffs/components/diff_code_quality_item_spec.js
+++ b/spec/frontend/diffs/components/diff_inline_findings_item_spec.js
@@ -1,8 +1,8 @@
import { GlIcon, GlLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import DiffCodeQualityItem from '~/diffs/components/diff_code_quality_item.vue';
+import DiffInlineFindingsItem from '~/diffs/components/diff_inline_findings_item.vue';
import { SEVERITY_CLASSES, SEVERITY_ICONS } from '~/ci/reports/codequality_report/constants';
-import { multipleFindingsArrCodeQualityScale } from '../mock_data/diff_code_quality';
+import { multipleFindingsArrCodeQualityScale } from '../mock_data/inline_findings';
let wrapper;
@@ -14,7 +14,7 @@ const findDescriptionLinkSection = () => wrapper.findByTestId('description-butto
describe('DiffCodeQuality', () => {
const createWrapper = ({ glFeatures = {}, link = true } = {}) => {
- return shallowMountExtended(DiffCodeQualityItem, {
+ return shallowMountExtended(DiffInlineFindingsItem, {
propsData: {
finding: codeQualityFinding,
link,
@@ -30,7 +30,7 @@ describe('DiffCodeQuality', () => {
expect(findIcon().exists()).toBe(true);
expect(findIcon().attributes()).toMatchObject({
- class: `codequality-severity-icon ${SEVERITY_CLASSES[codeQualityFinding.severity]}`,
+ class: `inline-findings-severity-icon ${SEVERITY_CLASSES[codeQualityFinding.severity]}`,
name: SEVERITY_ICONS[codeQualityFinding.severity],
size: '12',
});
diff --git a/spec/frontend/diffs/components/diff_inline_findings_spec.js b/spec/frontend/diffs/components/diff_inline_findings_spec.js
index 9ccfb2a613d..65b2abe7dd5 100644
--- a/spec/frontend/diffs/components/diff_inline_findings_spec.js
+++ b/spec/frontend/diffs/components/diff_inline_findings_spec.js
@@ -1,12 +1,12 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import DiffInlineFindings from '~/diffs/components/diff_inline_findings.vue';
-import DiffCodeQualityItem from '~/diffs/components/diff_code_quality_item.vue';
+import DiffInlineFindingsItem from '~/diffs/components/diff_inline_findings_item.vue';
import { NEW_CODE_QUALITY_FINDINGS } from '~/diffs/i18n';
-import { multipleCodeQualityNoSast } from '../mock_data/diff_code_quality';
+import { multipleCodeQualityNoSast } from '../mock_data/inline_findings';
let wrapper;
const heading = () => wrapper.findByTestId('diff-inline-findings-heading');
-const diffCodeQualityItems = () => wrapper.findAllComponents(DiffCodeQualityItem);
+const diffInlineFindingsItems = () => wrapper.findAllComponents(DiffInlineFindingsItem);
describe('DiffInlineFindings', () => {
const createWrapper = () => {
@@ -23,10 +23,10 @@ describe('DiffInlineFindings', () => {
expect(heading().text()).toBe(NEW_CODE_QUALITY_FINDINGS);
});
- it('renders the correct number of DiffCodeQualityItem components with correct props', () => {
+ it('renders the correct number of DiffInlineFindingsItem components with correct props', () => {
wrapper = createWrapper();
- expect(diffCodeQualityItems()).toHaveLength(multipleCodeQualityNoSast.codeQuality.length);
- expect(diffCodeQualityItems().wrappers[0].props('finding')).toEqual(
+ expect(diffInlineFindingsItems()).toHaveLength(multipleCodeQualityNoSast.codeQuality.length);
+ expect(diffInlineFindingsItems().wrappers[0].props('finding')).toEqual(
wrapper.props('findings')[0],
);
});
diff --git a/spec/frontend/diffs/components/diff_line_note_form_spec.js b/spec/frontend/diffs/components/diff_line_note_form_spec.js
index 0ca48db2497..dfbf45e1d71 100644
--- a/spec/frontend/diffs/components/diff_line_note_form_spec.js
+++ b/spec/frontend/diffs/components/diff_line_note_form_spec.js
@@ -7,11 +7,13 @@ import DiffLineNoteForm from '~/diffs/components/diff_line_note_form.vue';
import store from '~/mr_notes/stores';
import NoteForm from '~/notes/components/note_form.vue';
import MultilineCommentForm from '~/notes/components/multiline_comment_form.vue';
+import { clearDraft } from '~/lib/utils/autosave';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import { noteableDataMock } from 'jest/notes/mock_data';
import { SOMETHING_WENT_WRONG, SAVING_THE_COMMENT_FAILED } from '~/diffs/i18n';
import { getDiffFileMock } from '../mock_data/diff_file';
+jest.mock('~/lib/utils/autosave');
jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
jest.mock('~/alert');
@@ -116,6 +118,17 @@ describe('DiffLineNoteForm', () => {
fileHash: diffFile.file_hash,
});
});
+
+ it('should clear the autosaved draft', async () => {
+ findNoteForm().vm.$emit('cancelForm', true, true);
+ await nextTick();
+ expect(confirmAction).toHaveBeenCalled();
+ await nextTick();
+
+ expect(clearDraft).toHaveBeenCalledWith(
+ `Note/Issue/${noteableDataMock.id}//DiffNote//${diffLines[1].line_code}`,
+ );
+ });
});
describe('without confirmation', () => {
diff --git a/spec/frontend/diffs/components/diff_line_spec.js b/spec/frontend/diffs/components/diff_line_spec.js
index a552a9d3e7f..501bd0757c8 100644
--- a/spec/frontend/diffs/components/diff_line_spec.js
+++ b/spec/frontend/diffs/components/diff_line_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import DiffLine from '~/diffs/components/diff_line.vue';
-import DiffCodeQuality from '~/diffs/components/diff_code_quality.vue';
+import InlineFindings from '~/diffs/components/inline_findings.vue';
const EXAMPLE_LINE_NUMBER = 3;
const EXAMPLE_DESCRIPTION = 'example description';
@@ -16,13 +16,6 @@ const left = {
severity: EXAMPLE_SEVERITY,
},
],
- sast: [
- {
- line: EXAMPLE_LINE_NUMBER,
- description: EXAMPLE_DESCRIPTION,
- severity: EXAMPLE_SEVERITY,
- },
- ],
},
},
};
@@ -37,13 +30,6 @@ const right = {
severity: EXAMPLE_SEVERITY,
},
],
- sast: [
- {
- line: EXAMPLE_LINE_NUMBER,
- description: EXAMPLE_DESCRIPTION,
- severity: EXAMPLE_SEVERITY,
- },
- ],
},
},
};
@@ -55,26 +41,19 @@ describe('DiffLine', () => {
return shallowMount(DiffLine, { propsData });
};
- it('should emit event when hideCodeQualityFindings is called', () => {
+ it('should emit event when hideInlineFindings is called', () => {
const wrapper = createWrapper(right);
- wrapper.findComponent(DiffCodeQuality).vm.$emit('hideCodeQualityFindings');
+ wrapper.findComponent(InlineFindings).vm.$emit('hideInlineFindings');
expect(wrapper.emitted()).toEqual({
- hideCodeQualityFindings: [[EXAMPLE_LINE_NUMBER]],
+ hideInlineFindings: [[EXAMPLE_LINE_NUMBER]],
});
});
mockData.forEach((element) => {
- it('should set correct props for DiffCodeQuality', () => {
+ it('should set correct props for InlineFindings', () => {
const wrapper = createWrapper(element);
- expect(wrapper.findComponent(DiffCodeQuality).props('codeQuality')).toEqual([
- {
- line: EXAMPLE_LINE_NUMBER,
- description: EXAMPLE_DESCRIPTION,
- severity: EXAMPLE_SEVERITY,
- },
- ]);
- expect(wrapper.findComponent(DiffCodeQuality).props('sast')).toEqual([
+ expect(wrapper.findComponent(InlineFindings).props('codeQuality')).toEqual([
{
line: EXAMPLE_LINE_NUMBER,
description: EXAMPLE_DESCRIPTION,
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index 119b8f9ad7f..8a67d7b152c 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -1,6 +1,7 @@
import { getByTestId, fireEvent } from '@testing-library/dom';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DiffRow from '~/diffs/components/diff_row.vue';
import { mapParallel } from '~/diffs/components/diff_row_utils';
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index 8778683c135..2c8f751804e 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -1,10 +1,11 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { throttle } from 'lodash';
import DiffView from '~/diffs/components/diff_view.vue';
import DiffLine from '~/diffs/components/diff_line.vue';
-import { diffCodeQuality } from '../mock_data/diff_code_quality';
+import { diffCodeQuality } from '../mock_data/inline_findings';
jest.mock('lodash/throttle', () => jest.fn((fn) => fn));
const lodash = jest.requireActual('lodash');
@@ -18,7 +19,7 @@ describe('DiffView', () => {
const setSelectedCommentPosition = jest.fn();
const getDiffRow = (wrapper) => wrapper.findComponent(DiffRow).vm;
- const createWrapper = (props) => {
+ const createWrapper = ({ props, flag = false } = {}) => {
Vue.use(Vuex);
const batchComments = {
@@ -50,9 +51,21 @@ describe('DiffView', () => {
diffFile: { file_hash: '123' },
diffLines: [],
...props,
+ provide: {
+ glFeatures: {
+ sastReportsInInlineDiff: flag,
+ },
+ },
+ };
+
+ const provide = {
+ glFeatures: {
+ sastReportsInInlineDiff: flag,
+ },
};
+
const stubs = { DiffExpansionCell, DiffRow, DiffCommentCell, DraftNote };
- return shallowMount(DiffView, { propsData, store, stubs });
+ return shallowMount(DiffView, { propsData, provide, store, stubs });
};
beforeEach(() => {
@@ -69,12 +82,26 @@ describe('DiffView', () => {
});
it('does render a diff-line component with the correct props when there is a finding', async () => {
- const wrapper = createWrapper(diffCodeQuality);
+ const wrapper = createWrapper({ props: diffCodeQuality });
wrapper.findComponent(DiffRow).vm.$emit('toggleCodeQualityFindings', 2);
await nextTick();
expect(wrapper.findComponent(DiffLine).props('line')).toBe(diffCodeQuality.diffLines[2]);
});
+ it('does not render a diff-line component when there is a finding and sastReportsInInlineDiff flag is true', async () => {
+ const wrapper = createWrapper({ props: diffCodeQuality, flag: true });
+ wrapper.findComponent(DiffRow).vm.$emit('toggleCodeQualityFindings', 2);
+ await nextTick();
+ expect(wrapper.findComponent(DiffLine).exists()).toBe(false);
+ });
+
+ it('does render a diff-line component when there is a finding and sastReportsInInlineDiff flag is false', async () => {
+ const wrapper = createWrapper({ props: diffCodeQuality });
+ wrapper.findComponent(DiffRow).vm.$emit('toggleCodeQualityFindings', 2);
+ await nextTick();
+ expect(wrapper.findComponent(DiffLine).exists()).toBe(true);
+ });
+
it.each`
type | side | container | sides | total
${'parallel'} | ${'left'} | ${'.old'} | ${{ left: { lineDrafts: [], renderDiscussion: true }, right: { lineDrafts: [], renderDiscussion: true } }} | ${2}
@@ -86,8 +113,10 @@ describe('DiffView', () => {
'renders a $type comment row with comment cell on $side',
({ type, container, sides, total }) => {
const wrapper = createWrapper({
- diffLines: [{ renderCommentRow: true, ...sides }],
- inline: type === 'inline',
+ props: {
+ diffLines: [{ renderCommentRow: true, ...sides }],
+ inline: type === 'inline',
+ },
});
expect(wrapper.findAllComponents(DiffCommentCell).length).toBe(total);
expect(wrapper.find(container).findComponent(DiffCommentCell).exists()).toBe(true);
@@ -96,21 +125,20 @@ describe('DiffView', () => {
it('renders a draft row', () => {
const wrapper = createWrapper({
- diffLines: [{ renderCommentRow: true, left: { lineDrafts: [{ isDraft: true }] } }],
+ props: { diffLines: [{ renderCommentRow: true, left: { lineDrafts: [{ isDraft: true }] } }] },
});
expect(wrapper.findComponent(DraftNote).exists()).toBe(true);
});
describe('drag operations', () => {
it('sets `dragStart` onStartDragging', () => {
- const wrapper = createWrapper({ diffLines: [{}] });
-
+ const wrapper = createWrapper({ props: { diffLines: [{}] } });
wrapper.findComponent(DiffRow).vm.$emit('startdragging', { line: { test: true } });
expect(wrapper.vm.idState.dragStart).toEqual({ test: true });
});
it('does not call `setSelectedCommentPosition` on different chunks onDragOver', () => {
- const wrapper = createWrapper({ diffLines: [{}] });
+ const wrapper = createWrapper({ props: { diffLines: [{}] } });
const diffRow = getDiffRow(wrapper);
diffRow.$emit('startdragging', { line: { chunk: 0 } });
@@ -127,7 +155,7 @@ describe('DiffView', () => {
`(
'calls `setSelectedCommentPosition` with correct `updatedLineRange`',
({ start, end, expectation }) => {
- const wrapper = createWrapper({ diffLines: [{}] });
+ const wrapper = createWrapper({ props: { diffLines: [{}] } });
const diffRow = getDiffRow(wrapper);
diffRow.$emit('startdragging', { line: { chunk: 1, index: start } });
@@ -140,7 +168,7 @@ describe('DiffView', () => {
);
it('sets `dragStart` to null onStopDragging', () => {
- const wrapper = createWrapper({ diffLines: [{}] });
+ const wrapper = createWrapper({ props: { diffLines: [{}] } });
const diffRow = getDiffRow(wrapper);
diffRow.$emit('startdragging', { line: { test: true } });
@@ -152,7 +180,8 @@ describe('DiffView', () => {
});
it('throttles multiple calls to enterdragging', () => {
- const wrapper = createWrapper({ diffLines: [{}] });
+ const wrapper = createWrapper({ props: { diffLines: [{}] } });
+
const diffRow = getDiffRow(wrapper);
diffRow.$emit('startdragging', { line: { chunk: 1, index: 1 } });
diff --git a/spec/frontend/diffs/components/diffs_file_tree_spec.js b/spec/frontend/diffs/components/diffs_file_tree_spec.js
new file mode 100644
index 00000000000..a79023a07cb
--- /dev/null
+++ b/spec/frontend/diffs/components/diffs_file_tree_spec.js
@@ -0,0 +1,116 @@
+import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { Mousetrap } from '~/lib/mousetrap';
+import DiffsFileTree from '~/diffs/components/diffs_file_tree.vue';
+import TreeList from '~/diffs/components/tree_list.vue';
+import PanelResizer from '~/vue_shared/components/panel_resizer.vue';
+import { SET_SHOW_TREE_LIST } from '~/diffs/store/mutation_types';
+import createDiffsStore from '../create_diffs_store';
+
+describe('DiffsFileTree', () => {
+ let wrapper;
+ let store;
+
+ const createComponent = ({ renderDiffFiles = true, showTreeList = true } = {}) => {
+ store = createDiffsStore();
+ store.commit(`diffs/${SET_SHOW_TREE_LIST}`, showTreeList);
+ wrapper = shallowMount(DiffsFileTree, {
+ store,
+ propsData: {
+ renderDiffFiles,
+ },
+ });
+ };
+
+ describe('visibility', () => {
+ describe('when renderDiffFiles and showTreeList are true', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('tree list is visible', () => {
+ expect(wrapper.findComponent(TreeList).exists()).toBe(true);
+ });
+ });
+
+ describe('when renderDiffFiles and showTreeList are false', () => {
+ beforeEach(() => {
+ createComponent({ renderDiffFiles: false, showTreeList: false });
+ });
+
+ it('tree list is hidden', () => {
+ expect(wrapper.findComponent(TreeList).exists()).toBe(false);
+ });
+ });
+ });
+
+ it('emits toggled event', async () => {
+ createComponent();
+ store.commit(`diffs/${SET_SHOW_TREE_LIST}`, false);
+ await nextTick();
+ expect(wrapper.emitted('toggled')).toStrictEqual([[]]);
+ });
+
+ it('toggles when "f" hotkey is pressed', async () => {
+ createComponent();
+ Mousetrap.trigger('f');
+ await nextTick();
+ expect(wrapper.findComponent(TreeList).exists()).toBe(false);
+ });
+
+ describe('size', () => {
+ const checkWidth = (width) => {
+ expect(wrapper.element.style.width).toEqual(`${width}px`);
+ expect(wrapper.findComponent(PanelResizer).props('startSize')).toEqual(width);
+ };
+
+ afterEach(() => {
+ localStorage.removeItem('mr_tree_list_width');
+ });
+
+ describe('when no localStorage record is set', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('sets initial width when no localStorage has been set', () => {
+ checkWidth(320);
+ });
+ });
+
+ it('sets initial width to localStorage size', () => {
+ localStorage.setItem('mr_tree_list_width', '200');
+ createComponent();
+ checkWidth(200);
+ });
+
+ it('sets width of tree list', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
+ });
+ checkWidth(320);
+ });
+
+ it('updates width', async () => {
+ const WIDTH = 500;
+ createComponent();
+ wrapper.findComponent(PanelResizer).vm.$emit('update:size', WIDTH);
+ await nextTick();
+ checkWidth(WIDTH);
+ });
+
+ it('passes down hideFileStats as true when width is less than 260', async () => {
+ createComponent();
+ wrapper.findComponent(PanelResizer).vm.$emit('update:size', 200);
+ await nextTick();
+ expect(wrapper.findComponent(TreeList).props('hideFileStats')).toBe(true);
+ });
+
+ it('passes down hideFileStats as false when width is bigger than 260', async () => {
+ createComponent();
+ wrapper.findComponent(PanelResizer).vm.$emit('update:size', 300);
+ await nextTick();
+ expect(wrapper.findComponent(TreeList).props('hideFileStats')).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/diffs/components/inline_findings_spec.js b/spec/frontend/diffs/components/inline_findings_spec.js
new file mode 100644
index 00000000000..71cc6ae49fd
--- /dev/null
+++ b/spec/frontend/diffs/components/inline_findings_spec.js
@@ -0,0 +1,33 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import InlineFindings from '~/diffs/components/inline_findings.vue';
+import DiffInlineFindings from '~/diffs/components/diff_inline_findings.vue';
+import { NEW_CODE_QUALITY_FINDINGS } from '~/diffs/i18n';
+import { threeCodeQualityFindingsRaw } from '../mock_data/inline_findings';
+
+let wrapper;
+
+const diffInlineFindings = () => wrapper.findComponent(DiffInlineFindings);
+
+describe('InlineFindings', () => {
+ const createWrapper = () => {
+ return mountExtended(InlineFindings, {
+ propsData: {
+ codeQuality: threeCodeQualityFindingsRaw,
+ },
+ });
+ };
+
+ it('hides details and throws hideInlineFindings event on close click', async () => {
+ wrapper = createWrapper();
+ expect(wrapper.findByTestId('inline-findings').exists()).toBe(true);
+
+ await wrapper.findByTestId('inline-findings-close').trigger('click');
+ expect(wrapper.emitted('hideInlineFindings')).toHaveLength(1);
+ });
+
+ it('renders diff inline findings component with correct props for codequality array', () => {
+ wrapper = createWrapper();
+ expect(diffInlineFindings().props('title')).toBe(NEW_CODE_QUALITY_FINDINGS);
+ expect(diffInlineFindings().props('findings')).toBe(threeCodeQualityFindingsRaw);
+ });
+});
diff --git a/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap b/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
index e82687aa146..51bd8f380ee 100644
--- a/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
+++ b/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
@@ -30,7 +30,7 @@ exports[`FindingsDrawer matches the snapshot 1`] = `
</span>
<gl-icon-stub
- class="codequality-severity-icon gl-text-orange-300"
+ class="inline-findings-severity-icon gl-text-orange-300"
data-testid="findings-drawer-severity-icon"
name="severity-low"
size="12"
diff --git a/spec/frontend/diffs/components/tree_list_spec.js b/spec/frontend/diffs/components/tree_list_spec.js
index f56dd28ce9c..a54cf9b8bff 100644
--- a/spec/frontend/diffs/components/tree_list_spec.js
+++ b/spec/frontend/diffs/components/tree_list_spec.js
@@ -1,4 +1,5 @@
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import TreeList from '~/diffs/components/tree_list.vue';
import createStore from '~/diffs/store/modules';
diff --git a/spec/frontend/diffs/create_diffs_store.js b/spec/frontend/diffs/create_diffs_store.js
index 92f38858ca5..6ffba1fe035 100644
--- a/spec/frontend/diffs/create_diffs_store.js
+++ b/spec/frontend/diffs/create_diffs_store.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import batchCommentsModule from '~/batch_comments/stores/modules/batch_comments';
import diffsModule from '~/diffs/store/modules';
diff --git a/spec/frontend/diffs/mock_data/diff_code_quality.js b/spec/frontend/diffs/mock_data/inline_findings.js
index 5b9ed538e01..85fb48b86d5 100644
--- a/spec/frontend/diffs/mock_data/diff_code_quality.js
+++ b/spec/frontend/diffs/mock_data/inline_findings.js
@@ -95,6 +95,7 @@ export const threeCodeQualityFindings = {
filePath: 'index.js',
codequality: multipleFindingsArrCodeQualityScale.slice(0, 3),
};
+export const threeCodeQualityFindingsRaw = [multipleFindingsArrCodeQualityScale.slice(0, 3)];
export const singularCodeQualityFinding = {
filePath: 'index.js',
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index bbe748b8e1f..387407a7e4d 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -147,6 +147,175 @@ describe('DiffsStoreActions', () => {
});
});
+ describe('prefetchSingleFile', () => {
+ beforeEach(() => {
+ window.location.hash = 'e334a2a10f036c00151a04cea7938a5d4213a818';
+ });
+
+ it('should do nothing if the tree entry is already loading', () => {
+ return testAction(diffActions.prefetchSingleFile, { diffLoading: true }, {}, [], []);
+ });
+
+ it('should do nothing if the tree entry has already been marked as loaded', () => {
+ return testAction(
+ diffActions.prefetchSingleFile,
+ { diffLoaded: true },
+ {
+ flatBlobsList: [
+ { fileHash: 'e334a2a10f036c00151a04cea7938a5d4213a818', diffLoaded: true },
+ ],
+ },
+ [],
+ [],
+ );
+ });
+
+ describe('when a tree entry exists for the file, but it has not been marked as loaded', () => {
+ let state;
+ let getters;
+ let commit;
+ let hubSpy;
+ const defaultParams = {
+ old_path: 'old/123',
+ new_path: 'new/123',
+ w: '1',
+ view: 'inline',
+ };
+ const endpointDiffForPath = '/diffs/set/endpoint/path';
+ const diffForPath = mergeUrlParams(defaultParams, endpointDiffForPath);
+ const treeEntry = {
+ fileHash: 'e334a2a10f036c00151a04cea7938a5d4213a818',
+ filePaths: { old: 'old/123', new: 'new/123' },
+ };
+ const fileResult = {
+ diff_files: [{ file_hash: 'e334a2a10f036c00151a04cea7938a5d4213a818' }],
+ };
+
+ beforeEach(() => {
+ commit = jest.fn();
+ state = {
+ endpointDiffForPath,
+ diffFiles: [],
+ };
+ getters = {
+ flatBlobsList: [treeEntry],
+ getDiffFileByHash(hash) {
+ return state.diffFiles?.find((entry) => entry.file_hash === hash);
+ },
+ };
+ hubSpy = jest.spyOn(diffsEventHub, '$emit');
+ });
+
+ it('does nothing if the file already exists in the loaded diff files', () => {
+ state.diffFiles = fileResult.diff_files;
+
+ return testAction(diffActions.prefetchSingleFile, treeEntry, getters, [], []);
+ });
+
+ it('does some standard work every time', async () => {
+ mock.onGet(diffForPath).reply(HTTP_STATUS_OK, fileResult);
+
+ await diffActions.prefetchSingleFile({ state, getters, commit }, treeEntry);
+
+ expect(commit).toHaveBeenCalledWith(types.TREE_ENTRY_DIFF_LOADING, {
+ path: treeEntry.filePaths.new,
+ });
+
+ // wait for the mocked network request to return
+ await waitForPromises();
+
+ expect(commit).toHaveBeenCalledWith(types.SET_DIFF_DATA_BATCH, fileResult);
+
+ expect(hubSpy).toHaveBeenCalledWith('diffFilesModified');
+ });
+
+ it('should fetch data without commit ID', async () => {
+ getters.commitId = null;
+ mock.onGet(diffForPath).reply(HTTP_STATUS_OK, fileResult);
+
+ await diffActions.prefetchSingleFile({ state, getters, commit }, treeEntry);
+
+ // wait for the mocked network request to return and start processing the .then
+ await waitForPromises();
+
+ // This tests that commit_id is NOT added, if there isn't one in the store
+ expect(mock.history.get[0].url).toEqual(diffForPath);
+ });
+
+ it('should fetch data with commit ID', async () => {
+ const finalPath = mergeUrlParams(
+ { ...defaultParams, commit_id: '123' },
+ endpointDiffForPath,
+ );
+
+ getters.commitId = '123';
+ mock.onGet(finalPath).reply(HTTP_STATUS_OK, fileResult);
+
+ await diffActions.prefetchSingleFile({ state, getters, commit }, treeEntry);
+
+ // wait for the mocked network request to return and start processing the .then
+ await waitForPromises();
+
+ expect(mock.history.get[0].url).toEqual(finalPath);
+ });
+
+ describe('version parameters', () => {
+ const diffId = '4';
+ const startSha = 'abc';
+ const pathRoot = 'a/a/-/merge_requests/1';
+
+ it('fetches the data when there is no mergeRequestDiff', async () => {
+ diffActions.prefetchSingleFile({ state, getters, commit }, treeEntry);
+
+ // wait for the mocked network request to return and start processing the .then
+ await waitForPromises();
+
+ expect(mock.history.get[0].url).toEqual(diffForPath);
+ });
+
+ it.each`
+ desc | versionPath | start_sha | diff_id
+ ${'no additional version information'} | ${`${pathRoot}?search=terms`} | ${undefined} | ${undefined}
+ ${'the diff_id'} | ${`${pathRoot}?diff_id=${diffId}`} | ${undefined} | ${diffId}
+ ${'the start_sha'} | ${`${pathRoot}?start_sha=${startSha}`} | ${startSha} | ${undefined}
+ ${'all available version information'} | ${`${pathRoot}?diff_id=${diffId}&start_sha=${startSha}`} | ${startSha} | ${diffId}
+ `('fetches the data and includes $desc', async ({ versionPath, start_sha, diff_id }) => {
+ const finalPath = mergeUrlParams(
+ { ...defaultParams, diff_id, start_sha },
+ endpointDiffForPath,
+ );
+ state.mergeRequestDiff = { version_path: versionPath };
+ mock.onGet(finalPath).reply(HTTP_STATUS_OK, fileResult);
+
+ diffActions.prefetchSingleFile({ state, getters, commit }, treeEntry);
+
+ // wait for the mocked network request to return
+ await waitForPromises();
+
+ expect(mock.history.get[0].url).toEqual(finalPath);
+ });
+ });
+
+ describe('when the prefetch fails', () => {
+ beforeEach(() => {
+ mock.onGet(diffForPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+ });
+
+ it('should commit a mutation to set the tree entry diff loading to false', async () => {
+ diffActions.prefetchSingleFile({ state, getters, commit }, treeEntry);
+
+ // wait for the mocked network request to return
+ await waitForPromises();
+
+ expect(commit).toHaveBeenCalledWith(types.TREE_ENTRY_DIFF_LOADING, {
+ path: treeEntry.filePaths.new,
+ loading: false,
+ });
+ });
+ });
+ });
+ });
+
describe('fetchFileByFile', () => {
beforeEach(() => {
window.location.hash = 'e334a2a10f036c00151a04cea7938a5d4213a818';
@@ -460,6 +629,37 @@ describe('DiffsStoreActions', () => {
});
});
+ describe('prefetchFileNeighbors', () => {
+ it('dispatches two requests to prefetch the next/previous files', () => {
+ testAction(
+ diffActions.prefetchFileNeighbors,
+ {},
+ {
+ currentDiffIndex: 0,
+ flatBlobsList: [
+ {
+ type: 'blob',
+ fileHash: 'abc',
+ },
+ {
+ type: 'blob',
+ fileHash: 'def',
+ },
+ {
+ type: 'blob',
+ fileHash: 'ghi',
+ },
+ ],
+ },
+ [],
+ [
+ { type: 'prefetchSingleFile', payload: { type: 'blob', fileHash: 'def' } },
+ { type: 'prefetchSingleFile', payload: { type: 'blob', fileHash: 'abc' } },
+ ],
+ );
+ });
+ });
+
describe('fetchCoverageFiles', () => {
const endpointCoverage = '/fetch';
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index 274cb40dac8..e87c5d0a9b1 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -684,6 +684,36 @@ describe('DiffsStoreMutations', () => {
});
});
+ describe('TREE_ENTRY_DIFF_LOADING', () => {
+ it('sets the entry loading state to true by default', () => {
+ const state = {
+ treeEntries: {
+ path: {
+ diffLoading: false,
+ },
+ },
+ };
+
+ mutations[types.TREE_ENTRY_DIFF_LOADING](state, { path: 'path' });
+
+ expect(state.treeEntries.path.diffLoading).toBe(true);
+ });
+
+ it('sets the entry loading state to the provided value', () => {
+ const state = {
+ treeEntries: {
+ path: {
+ diffLoading: true,
+ },
+ },
+ };
+
+ mutations[types.TREE_ENTRY_DIFF_LOADING](state, { path: 'path', loading: false });
+
+ expect(state.treeEntries.path.diffLoading).toBe(false);
+ });
+ });
+
describe('SET_SHOW_TREE_LIST', () => {
it('sets showTreeList', () => {
const state = createState();
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index 117ed56e347..24cb8158739 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -948,9 +948,9 @@ describe('DiffsStoreUtils', () => {
describe('markTreeEntriesLoaded', () => {
it.each`
desc | entries | loaded | outcome
- ${'marks an existing entry as loaded'} | ${{ abc: {} }} | ${[{ new_path: 'abc' }]} | ${{ abc: { diffLoaded: true } }}
+ ${'marks an existing entry as loaded'} | ${{ abc: {} }} | ${[{ new_path: 'abc' }]} | ${{ abc: { diffLoaded: true, diffLoading: false } }}
${'does nothing if the new file is not found in the tree entries'} | ${{ abc: {} }} | ${[{ new_path: 'def' }]} | ${{ abc: {} }}
- ${'leaves entries unmodified if they are not in the loaded files'} | ${{ abc: {}, def: { diffLoaded: true }, ghi: {} }} | ${[{ new_path: 'ghi' }]} | ${{ abc: {}, def: { diffLoaded: true }, ghi: { diffLoaded: true } }}
+ ${'leaves entries unmodified if they are not in the loaded files'} | ${{ abc: {}, def: { diffLoaded: true }, ghi: {} }} | ${[{ new_path: 'ghi' }]} | ${{ abc: {}, def: { diffLoaded: true }, ghi: { diffLoaded: true, diffLoading: false } }}
`('$desc', ({ entries, loaded, outcome }) => {
expect(utils.markTreeEntriesLoaded({ priorEntries: entries, loadedFiles: loaded })).toEqual(
outcome,
diff --git a/spec/frontend/diffs/utils/tree_worker_utils_spec.js b/spec/frontend/diffs/utils/tree_worker_utils_spec.js
index b8bd4fcd081..b29275f45a6 100644
--- a/spec/frontend/diffs/utils/tree_worker_utils_spec.js
+++ b/spec/frontend/diffs/utils/tree_worker_utils_spec.js
@@ -76,6 +76,7 @@ describe('~/diffs/utils/tree_worker_utils', () => {
addedLines: 0,
changed: true,
diffLoaded: false,
+ diffLoading: false,
deleted: false,
fileHash: 'test',
filePaths: {
@@ -103,6 +104,7 @@ describe('~/diffs/utils/tree_worker_utils', () => {
addedLines: 0,
changed: true,
diffLoaded: false,
+ diffLoading: false,
deleted: false,
fileHash: 'test',
filePaths: {
@@ -123,6 +125,7 @@ describe('~/diffs/utils/tree_worker_utils', () => {
addedLines: 0,
changed: true,
diffLoaded: false,
+ diffLoading: false,
deleted: false,
fileHash: 'test',
filePaths: {
@@ -154,6 +157,7 @@ describe('~/diffs/utils/tree_worker_utils', () => {
addedLines: 42,
changed: true,
diffLoaded: false,
+ diffLoading: false,
deleted: false,
fileHash: 'test',
filePaths: {
@@ -181,6 +185,7 @@ describe('~/diffs/utils/tree_worker_utils', () => {
type: 'blob',
changed: true,
diffLoaded: false,
+ diffLoading: false,
tempFile: true,
submodule: true,
deleted: false,
@@ -201,6 +206,7 @@ describe('~/diffs/utils/tree_worker_utils', () => {
type: 'blob',
changed: true,
diffLoaded: false,
+ diffLoading: false,
tempFile: false,
submodule: undefined,
deleted: true,
@@ -376,7 +382,7 @@ describe('~/diffs/utils/tree_worker_utils', () => {
},
{
type: 'tree',
- name: 'ee/lib/…/…/…/longtreenametomakepath',
+ name: 'ee/lib/ee/gitlab/checks/longtreenametomakepath',
tree: [
{
name: 'diff_check.rb',
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index 51fcf26c39a..77c7f0d49a8 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -35,6 +35,7 @@ import IdTokensYaml from './yaml_tests/positive_tests/id_tokens.yml';
import HooksYaml from './yaml_tests/positive_tests/hooks.yml';
import SecretsYaml from './yaml_tests/positive_tests/secrets.yml';
import ServicesYaml from './yaml_tests/positive_tests/services.yml';
+import NeedsParallelMatrixYaml from './yaml_tests/positive_tests/needs_parallel_matrix.yml';
// YAML NEGATIVE TEST
import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
@@ -56,6 +57,9 @@ import IdTokensNegativeYaml from './yaml_tests/negative_tests/id_tokens.yml';
import HooksNegative from './yaml_tests/negative_tests/hooks.yml';
import SecretsNegativeYaml from './yaml_tests/negative_tests/secrets.yml';
import ServicesNegativeYaml from './yaml_tests/negative_tests/services.yml';
+import NeedsParallelMatrixNumericYaml from './yaml_tests/negative_tests/needs/parallel_matrix/numeric.yml';
+import NeedsParallelMatrixWrongParallelValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml';
+import NeedsParallelMatrixWrongMatrixValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml';
const ajv = new Ajv({
strictTypes: false,
@@ -96,6 +100,7 @@ describe('positive tests', () => {
IdTokensYaml,
ServicesYaml,
SecretsYaml,
+ NeedsParallelMatrixYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
@@ -136,6 +141,9 @@ describe('negative tests', () => {
ProjectPathIncludeTailSlashYaml,
SecretsNegativeYaml,
ServicesNegativeYaml,
+ NeedsParallelMatrixNumericYaml,
+ NeedsParallelMatrixWrongParallelValueYaml,
+ NeedsParallelMatrixWrongMatrixValueYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/numeric.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/numeric.yml
new file mode 100644
index 00000000000..6d666203a8a
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/numeric.yml
@@ -0,0 +1,6 @@
+# invalid needs:parallel:matrix where parallel has a numeric value
+job_with_needs_parallel_matrix:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel: 10
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml
new file mode 100644
index 00000000000..746df9ebd96
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml
@@ -0,0 +1,30 @@
+# invalid needs:parallel:matrix where matrix value is incorrect
+job_with_needs_parallel_matrix:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel:
+ matrix: 10
+
+job_with_needs_parallel_matrix_2:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel:
+ matrix: "string"
+
+job_with_needs_parallel_matrix_3:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel:
+ matrix: [a1, a2]
+
+job_with_needs_parallel_matrix_4:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel:
+ matrix:
+ VAR_1: 1
+ VAR_2: 2
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml
new file mode 100644
index 00000000000..727c97119f1
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml
@@ -0,0 +1,27 @@
+# invalid needs:parallel:matrix where parallel value is incorrect
+job_with_needs_parallel_matrix:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel:
+ not_matrix:
+ - VAR_1: [a1, a2]
+ VAR_2: [b1, b2]
+
+job_with_needs_parallel_matrix_2:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel: [a1, a2]
+
+job_with_needs_parallel_matrix_3:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel: "matrix"
+
+job_with_needs_parallel_matrix_4:
+ script: exit 0
+ needs:
+ - job: some_job_with_parallel_matrix
+ parallel: object
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml
index 14ba930b394..4baf4c6b850 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml
@@ -1,4 +1,4 @@
-job_with_secrets_without_vault:
+job_with_secrets_without_any_vault_provider:
script:
- echo $TEST_DB_PASSWORD
secrets:
@@ -37,3 +37,37 @@ job_with_secrets_with_missing_required_engine_property:
vault:
engine:
path: kv
+
+job_with_azure_key_vault_secrets_with_extra_properties:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ azure_key_vault:
+ name: mypass
+ extra_prop: TEST
+
+job_with_secrets_with_invalid_azure_key_vault_property:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ azure_key_vault:
+ invalid: TEST
+
+job_with_secrets_with_missing_required_azure_key_vault_property:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ azure_key_vault:
+ version: latest
+
+job_with_secrets_with_missing_required_name_property:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ azure_key_vault:
+ name:
+ version: latest
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/needs_parallel_matrix.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/needs_parallel_matrix.yml
new file mode 100644
index 00000000000..22b386d1dc8
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/needs_parallel_matrix.yml
@@ -0,0 +1,32 @@
+# valid needs:parallel:matrix
+job_with_needs_parallel_matrix:
+ script: exit 0
+ needs:
+ - job: job_with_parallel_matrix
+ parallel:
+ matrix:
+ - VAR_1: [a]
+ VAR_2: [d]
+
+job_with_needs_parallel_matrix_2:
+ script: exit 0
+ needs:
+ - job: job_with_parallel_matrix
+ parallel:
+ matrix:
+ - VAR_1: a
+ VAR_2: d
+
+job_with_needs_parallel_matrix_3:
+ script: exit 0
+ needs:
+ - job: job_with_parallel_matrix
+ parallel:
+ matrix:
+ - VAR_1: ["a", b]
+ VAR_2: d
+ - job: job_with_parallel_matrix_2
+ parallel:
+ matrix:
+ - VAR_1: [a, "b", c]
+ VAR_5: [d, "e"]
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml
index 083cb4348ed..af3107974b9 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml
@@ -26,3 +26,20 @@ valid_job_with_secrets_with_every_vault_keyword:
field: password
file: true
token: $TEST_TOKEN
+
+valid_job_with_azure_key_vault_secrets_name:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ azure_key_vault:
+ name: 'test'
+
+valid_job_with_azure_key_vault_secrets_name_and_version:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ azure_key_vault:
+ name: 'test'
+ version: 'version'
diff --git a/spec/frontend/environments/edit_environment_spec.js b/spec/frontend/environments/edit_environment_spec.js
index 93fe9ed9400..b55bbb34c65 100644
--- a/spec/frontend/environments/edit_environment_spec.js
+++ b/spec/frontend/environments/edit_environment_spec.js
@@ -7,7 +7,7 @@ import EditEnvironment from '~/environments/components/edit_environment.vue';
import { createAlert } from '~/alert';
import { visitUrl } from '~/lib/utils/url_utility';
import getEnvironment from '~/environments/graphql/queries/environment.query.graphql';
-import getEnvironmentWithNamespace from '~/environments/graphql/queries/environment_with_namespace.graphql';
+import getEnvironmentWithFluxResource from '~/environments/graphql/queries/environment_with_flux_resource.query.graphql';
import updateEnvironment from '~/environments/graphql/mutations/update_environment.mutation.graphql';
import { __ } from '~/locale';
import createMockApollo from '../__helpers__/mock_apollo_helper';
@@ -21,6 +21,7 @@ const environment = {
externalUrl: 'https://foo.example.com',
clusterAgent: null,
kubernetesNamespace: null,
+ fluxResourcePath: null,
};
const resolvedEnvironment = { project: { id: '1', environment } };
const environmentUpdateSuccess = {
@@ -43,7 +44,7 @@ describe('~/environments/components/edit.vue', () => {
let wrapper;
const getEnvironmentQuery = jest.fn().mockResolvedValue({ data: resolvedEnvironment });
- const getEnvironmentWithNamespaceQuery = jest
+ const getEnvironmentWithFluxResourceQuery = jest
.fn()
.mockResolvedValue({ data: resolvedEnvironment });
@@ -59,7 +60,7 @@ describe('~/environments/components/edit.vue', () => {
const mocks = [
[getEnvironment, getEnvironmentQuery],
- [getEnvironmentWithNamespace, getEnvironmentWithNamespaceQuery],
+ [getEnvironmentWithFluxResource, getEnvironmentWithFluxResourceQuery],
[updateEnvironment, mutationHandler],
];
@@ -68,14 +69,14 @@ describe('~/environments/components/edit.vue', () => {
const createWrapperWithApollo = async ({
mutationHandler = updateEnvironmentSuccess,
- kubernetesNamespaceForEnvironment = false,
+ fluxResourceForEnvironment = false,
} = {}) => {
wrapper = mountExtended(EditEnvironment, {
propsData: { environment: {} },
provide: {
...provide,
glFeatures: {
- kubernetesNamespaceForEnvironment,
+ fluxResourceForEnvironment,
},
},
apolloProvider: createMockApolloProvider(mutationHandler),
@@ -170,10 +171,10 @@ describe('~/environments/components/edit.vue', () => {
});
});
- describe('when `kubernetesNamespaceForEnvironment` is enabled', () => {
- it('calls the `getEnvironmentWithNamespace` query', () => {
- createWrapperWithApollo({ kubernetesNamespaceForEnvironment: true });
- expect(getEnvironmentWithNamespaceQuery).toHaveBeenCalled();
+ describe('when `fluxResourceForEnvironment` is enabled', () => {
+ it('calls the `getEnvironmentWithFluxResource` query', () => {
+ createWrapperWithApollo({ fluxResourceForEnvironment: true });
+ expect(getEnvironmentWithFluxResourceQuery).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/environments/environment_flux_resource_selector_spec.js b/spec/frontend/environments/environment_flux_resource_selector_spec.js
new file mode 100644
index 00000000000..ba3375c731f
--- /dev/null
+++ b/spec/frontend/environments/environment_flux_resource_selector_spec.js
@@ -0,0 +1,178 @@
+import { GlCollapsibleListbox, GlAlert } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import { s__ } from '~/locale';
+import EnvironmentFluxResourceSelector from '~/environments/components/environment_flux_resource_selector.vue';
+import createMockApollo from '../__helpers__/mock_apollo_helper';
+import { mockKasTunnelUrl } from './mock_data';
+
+const configuration = {
+ basePath: mockKasTunnelUrl.replace(/\/$/, ''),
+ baseOptions: {
+ headers: {
+ 'GitLab-Agent-Id': 1,
+ },
+ withCredentials: true,
+ },
+};
+const namespace = 'my-namespace';
+
+const DEFAULT_PROPS = {
+ configuration,
+ namespace,
+ fluxResourcePath: '',
+};
+
+describe('~/environments/components/form.vue', () => {
+ let wrapper;
+
+ const kustomizationItem = {
+ apiVersion: 'kustomize.toolkit.fluxcd.io/v1beta1',
+ metadata: { name: 'kustomization', namespace },
+ };
+ const helmReleaseItem = {
+ apiVersion: 'helm.toolkit.fluxcd.io/v2beta1',
+ metadata: { name: 'helm-release', namespace },
+ };
+
+ const getKustomizationsQueryResult = jest.fn().mockReturnValue([kustomizationItem]);
+
+ const getHelmReleasesQueryResult = jest.fn().mockReturnValue([helmReleaseItem]);
+
+ const createWrapper = ({
+ propsData = {},
+ kustomizationsQueryResult = null,
+ helmReleasesQueryResult = null,
+ } = {}) => {
+ Vue.use(VueApollo);
+
+ const mockResolvers = {
+ Query: {
+ fluxKustomizations: kustomizationsQueryResult || getKustomizationsQueryResult,
+ fluxHelmReleases: helmReleasesQueryResult || getHelmReleasesQueryResult,
+ },
+ };
+
+ return shallowMount(EnvironmentFluxResourceSelector, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...propsData,
+ },
+ apolloProvider: createMockApollo([], mockResolvers),
+ });
+ };
+
+ const findFluxResourceSelector = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
+ describe('default', () => {
+ const kustomizationValue = `${kustomizationItem.apiVersion}/namespaces/${kustomizationItem.metadata.namespace}/kustomizations/${kustomizationItem.metadata.name}`;
+ const helmReleaseValue = `${helmReleaseItem.apiVersion}/namespaces/${helmReleaseItem.metadata.namespace}/helmreleases/${helmReleaseItem.metadata.name}`;
+
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('renders flux resource selector', () => {
+ expect(findFluxResourceSelector().exists()).toBe(true);
+ });
+
+ it('requests the flux resources', async () => {
+ await waitForPromises();
+
+ expect(getKustomizationsQueryResult).toHaveBeenCalled();
+ expect(getHelmReleasesQueryResult).toHaveBeenCalled();
+ });
+
+ it('sets the loading prop while fetching the list', async () => {
+ expect(findFluxResourceSelector().props('loading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findFluxResourceSelector().props('loading')).toBe(false);
+ });
+
+ it('renders a list of available flux resources', async () => {
+ await waitForPromises();
+
+ expect(findFluxResourceSelector().props('items')).toEqual([
+ {
+ text: s__('Environments|Kustomizations'),
+ options: [{ value: kustomizationValue, text: kustomizationItem.metadata.name }],
+ },
+ {
+ text: s__('Environments|HelmReleases'),
+ options: [{ value: helmReleaseValue, text: helmReleaseItem.metadata.name }],
+ },
+ ]);
+ });
+
+ it('filters the flux resources list on user search', async () => {
+ await waitForPromises();
+ findFluxResourceSelector().vm.$emit('search', 'kustomization');
+ await nextTick();
+
+ expect(findFluxResourceSelector().props('items')).toEqual([
+ {
+ text: s__('Environments|Kustomizations'),
+ options: [{ value: kustomizationValue, text: kustomizationItem.metadata.name }],
+ },
+ ]);
+ });
+
+ it('emits changes to the fluxResourcePath', () => {
+ findFluxResourceSelector().vm.$emit('select', kustomizationValue);
+
+ expect(wrapper.emitted('change')).toEqual([[kustomizationValue]]);
+ });
+ });
+
+ describe('when environment has an associated flux resource path', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({
+ propsData: { fluxResourcePath: 'path/to/flux/resource/name/default' },
+ });
+ });
+
+ it('updates flux resource selector with the name of the associated flux resource', () => {
+ expect(findFluxResourceSelector().props('toggleText')).toBe('default');
+ });
+ });
+
+ describe('on error', () => {
+ const error = new Error('Error from the cluster_client API');
+
+ it('renders an alert with both resource types mentioned when both queries failed', async () => {
+ wrapper = createWrapper({
+ kustomizationsQueryResult: jest.fn().mockRejectedValueOnce(error),
+ helmReleasesQueryResult: jest.fn().mockRejectedValueOnce(error),
+ });
+ await waitForPromises();
+
+ expect(findAlert().text()).toContain(
+ s__(
+ 'Environments|Unable to access the following resources from this environment. Check your authorization on the following and try again',
+ ),
+ );
+ expect(findAlert().text()).toContain('Kustomization');
+ expect(findAlert().text()).toContain('HelmRelease');
+ });
+
+ it('renders an alert with only failed resource type mentioned when one query failed', async () => {
+ wrapper = createWrapper({
+ kustomizationsQueryResult: jest.fn().mockRejectedValueOnce(error),
+ });
+ await waitForPromises();
+
+ expect(findAlert().text()).toContain(
+ s__(
+ 'Environments|Unable to access the following resources from this environment. Check your authorization on the following and try again',
+ ),
+ );
+ expect(findAlert().text()).toContain('Kustomization');
+ expect(findAlert().text()).not.toContain('HelmRelease');
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_folder_spec.js b/spec/frontend/environments/environment_folder_spec.js
index 65c16697d44..1973613897d 100644
--- a/spec/frontend/environments/environment_folder_spec.js
+++ b/spec/frontend/environments/environment_folder_spec.js
@@ -74,8 +74,6 @@ describe('~/environments/components/environments_folder.vue', () => {
beforeEach(() => {
collapse = wrapper.findComponent(GlCollapse);
icons = wrapper.findAllComponents(GlIcon);
- jest.spyOn(wrapper.vm.$apollo.queries.folder, 'startPolling');
- jest.spyOn(wrapper.vm.$apollo.queries.folder, 'stopPolling');
});
it('is collapsed by default', () => {
@@ -88,8 +86,12 @@ describe('~/environments/components/environments_folder.vue', () => {
expect(link.exists()).toBe(false);
});
- it('opens on click', async () => {
+ it('opens on click and starts polling', async () => {
+ expect(environmentFolderMock).toHaveBeenCalledTimes(1);
+
await button.trigger('click');
+ jest.advanceTimersByTime(2000);
+ await waitForPromises();
const link = findLink();
@@ -100,7 +102,7 @@ describe('~/environments/components/environments_folder.vue', () => {
expect(folderName.classes('gl-font-weight-bold')).toBe(true);
expect(link.attributes('href')).toBe(nestedEnvironment.latest.folderPath);
- expect(wrapper.vm.$apollo.queries.folder.startPolling).toHaveBeenCalledWith(2000);
+ expect(environmentFolderMock).toHaveBeenCalledTimes(2);
});
it('displays all environments when opened', async () => {
@@ -117,12 +119,15 @@ describe('~/environments/components/environments_folder.vue', () => {
it('stops polling on click', async () => {
await button.trigger('click');
- expect(wrapper.vm.$apollo.queries.folder.startPolling).toHaveBeenCalledWith(2000);
+ jest.advanceTimersByTime(2000);
+ await waitForPromises();
+
+ expect(environmentFolderMock).toHaveBeenCalledTimes(2);
const collapseButton = wrapper.findByRole('button', { name: __('Collapse') });
await collapseButton.trigger('click');
- expect(wrapper.vm.$apollo.queries.folder.stopPolling).toHaveBeenCalled();
+ expect(environmentFolderMock).toHaveBeenCalledTimes(2);
});
});
});
diff --git a/spec/frontend/environments/environment_form_spec.js b/spec/frontend/environments/environment_form_spec.js
index 803207bcce8..1b80b596db7 100644
--- a/spec/frontend/environments/environment_form_spec.js
+++ b/spec/frontend/environments/environment_form_spec.js
@@ -5,6 +5,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import EnvironmentForm from '~/environments/components/environment_form.vue';
import getUserAuthorizedAgents from '~/environments/graphql/queries/user_authorized_agents.query.graphql';
+import EnvironmentFluxResourceSelector from '~/environments/components/environment_flux_resource_selector.vue';
import createMockApollo from '../__helpers__/mock_apollo_helper';
import { mockKasTunnelUrl } from './mock_data';
@@ -25,6 +26,16 @@ const userAccessAuthorizedAgents = [
{ agent: { id: '2', name: 'agent-2' } },
];
+const configuration = {
+ basePath: mockKasTunnelUrl.replace(/\/$/, ''),
+ baseOptions: {
+ headers: {
+ 'GitLab-Agent-Id': 2,
+ },
+ withCredentials: true,
+ },
+};
+
describe('~/environments/components/form.vue', () => {
let wrapper;
@@ -44,7 +55,7 @@ describe('~/environments/components/form.vue', () => {
const createWrapperWithApollo = ({
propsData = {},
- kubernetesNamespaceForEnvironment = false,
+ fluxResourceForEnvironment = false,
queryResult = null,
} = {}) => {
Vue.use(VueApollo);
@@ -73,7 +84,7 @@ describe('~/environments/components/form.vue', () => {
provide: {
...PROVIDE,
glFeatures: {
- kubernetesNamespaceForEnvironment,
+ fluxResourceForEnvironment,
},
},
propsData: {
@@ -87,6 +98,7 @@ describe('~/environments/components/form.vue', () => {
const findAgentSelector = () => wrapper.findByTestId('agent-selector');
const findNamespaceSelector = () => wrapper.findByTestId('namespace-selector');
const findAlert = () => wrapper.findComponent(GlAlert);
+ const findFluxResourceSelector = () => wrapper.findComponent(EnvironmentFluxResourceSelector);
const selectAgent = async () => {
findAgentSelector().vm.$emit('shown');
@@ -290,132 +302,159 @@ describe('~/environments/components/form.vue', () => {
await selectAgent();
expect(wrapper.emitted('change')).toEqual([
- [{ name: '', externalUrl: '', clusterAgentId: '2', kubernetesNamespace: null }],
+ [
+ {
+ name: '',
+ externalUrl: '',
+ clusterAgentId: '2',
+ kubernetesNamespace: null,
+ fluxResourcePath: null,
+ },
+ ],
]);
});
});
describe('namespace selector', () => {
- it("doesn't render namespace selector if `kubernetesNamespaceForEnvironment` feature flag is disabled", () => {
+ beforeEach(() => {
wrapper = createWrapperWithApollo();
+ });
+
+ it("doesn't render namespace selector by default", () => {
expect(findNamespaceSelector().exists()).toBe(false);
});
- describe('when `kubernetesNamespaceForEnvironment` feature flag is enabled', () => {
- beforeEach(() => {
- wrapper = createWrapperWithApollo({
- kubernetesNamespaceForEnvironment: true,
- });
+ describe('when the agent was selected', () => {
+ beforeEach(async () => {
+ await selectAgent();
});
- it("doesn't render namespace selector by default", () => {
- expect(findNamespaceSelector().exists()).toBe(false);
+ it('renders namespace selector', () => {
+ expect(findNamespaceSelector().exists()).toBe(true);
});
- describe('when the agent was selected', () => {
- beforeEach(async () => {
- await selectAgent();
- });
+ it('requests the kubernetes namespaces with the correct configuration', async () => {
+ await waitForPromises();
- it('renders namespace selector', () => {
- expect(findNamespaceSelector().exists()).toBe(true);
- });
+ expect(getNamespacesQueryResult).toHaveBeenCalledWith(
+ {},
+ { configuration },
+ expect.anything(),
+ expect.anything(),
+ );
+ });
- it('requests the kubernetes namespaces with the correct configuration', async () => {
- const configuration = {
- basePath: mockKasTunnelUrl.replace(/\/$/, ''),
- baseOptions: {
- headers: {
- 'GitLab-Agent-Id': 2,
- },
- withCredentials: true,
- },
- };
+ it('sets the loading prop while fetching the list', async () => {
+ expect(findNamespaceSelector().props('loading')).toBe(true);
- await waitForPromises();
+ await waitForPromises();
- expect(getNamespacesQueryResult).toHaveBeenCalledWith(
- {},
- { configuration },
- expect.anything(),
- expect.anything(),
- );
- });
+ expect(findNamespaceSelector().props('loading')).toBe(false);
+ });
- it('sets the loading prop while fetching the list', async () => {
- expect(findNamespaceSelector().props('loading')).toBe(true);
+ it('renders a list of available namespaces', async () => {
+ await waitForPromises();
- await waitForPromises();
+ expect(findNamespaceSelector().props('items')).toEqual([
+ { text: 'default', value: 'default' },
+ { text: 'agent', value: 'agent' },
+ ]);
+ });
- expect(findNamespaceSelector().props('loading')).toBe(false);
- });
+ it('filters the namespaces list on user search', async () => {
+ await waitForPromises();
+ await findNamespaceSelector().vm.$emit('search', 'default');
- it('renders a list of available namespaces', async () => {
- await waitForPromises();
+ expect(findNamespaceSelector().props('items')).toEqual([
+ { value: 'default', text: 'default' },
+ ]);
+ });
- expect(findNamespaceSelector().props('items')).toEqual([
- { text: 'default', value: 'default' },
- { text: 'agent', value: 'agent' },
- ]);
- });
+ it('updates namespace selector field with the name of selected namespace', async () => {
+ await waitForPromises();
+ await findNamespaceSelector().vm.$emit('select', 'agent');
- it('filters the namespaces list on user search', async () => {
- await waitForPromises();
- await findNamespaceSelector().vm.$emit('search', 'default');
+ expect(findNamespaceSelector().props('toggleText')).toBe('agent');
+ });
- expect(findNamespaceSelector().props('items')).toEqual([
- { value: 'default', text: 'default' },
- ]);
- });
+ it('emits changes to the kubernetesNamespace', async () => {
+ await waitForPromises();
+ await findNamespaceSelector().vm.$emit('select', 'agent');
- it('updates namespace selector field with the name of selected namespace', async () => {
- await waitForPromises();
- await findNamespaceSelector().vm.$emit('select', 'agent');
+ expect(wrapper.emitted('change')[1]).toEqual([
+ { name: '', externalUrl: '', kubernetesNamespace: 'agent', fluxResourcePath: null },
+ ]);
+ });
- expect(findNamespaceSelector().props('toggleText')).toBe('agent');
- });
+ it('clears namespace selector when another agent was selected', async () => {
+ await waitForPromises();
+ await findNamespaceSelector().vm.$emit('select', 'agent');
- it('emits changes to the kubernetesNamespace', async () => {
- await waitForPromises();
- await findNamespaceSelector().vm.$emit('select', 'agent');
+ expect(findNamespaceSelector().props('toggleText')).toBe('agent');
+
+ await findAgentSelector().vm.$emit('select', '1');
+ expect(findNamespaceSelector().props('toggleText')).toBe(
+ EnvironmentForm.i18n.namespaceHelpText,
+ );
+ });
+ });
- expect(wrapper.emitted('change')[1]).toEqual([
- { name: '', externalUrl: '', kubernetesNamespace: 'agent' },
- ]);
+ describe('when cannot connect to the cluster', () => {
+ const error = new Error('Error from the cluster_client API');
+
+ beforeEach(async () => {
+ wrapper = createWrapperWithApollo({
+ queryResult: jest.fn().mockRejectedValueOnce(error),
});
- it('clears namespace selector when another agent was selected', async () => {
- await waitForPromises();
- await findNamespaceSelector().vm.$emit('select', 'agent');
+ await selectAgent();
+ await waitForPromises();
+ });
+
+ it("doesn't render the namespace selector", () => {
+ expect(findNamespaceSelector().exists()).toBe(false);
+ });
- expect(findNamespaceSelector().props('toggleText')).toBe('agent');
+ it('renders an alert', () => {
+ expect(findAlert().text()).toBe('Error from the cluster_client API');
+ });
+ });
+ });
- await findAgentSelector().vm.$emit('select', '1');
- expect(findNamespaceSelector().props('toggleText')).toBe(
- EnvironmentForm.i18n.namespaceHelpText,
- );
+ describe('flux resource selector', () => {
+ it("doesn't render if `fluxResourceForEnvironment` feature flag is disabled", () => {
+ wrapper = createWrapperWithApollo();
+ expect(findFluxResourceSelector().exists()).toBe(false);
+ });
+
+ describe('when `fluxResourceForEnvironment` feature flag is enabled', () => {
+ beforeEach(() => {
+ wrapper = createWrapperWithApollo({
+ fluxResourceForEnvironment: true,
});
});
- describe('when cannot connect to the cluster', () => {
- const error = new Error('Error from the cluster_client API');
+ it("doesn't render flux resource selector by default", () => {
+ expect(findFluxResourceSelector().exists()).toBe(false);
+ });
+ describe('when the agent was selected', () => {
beforeEach(async () => {
- wrapper = createWrapperWithApollo({
- kubernetesNamespaceForEnvironment: true,
- queryResult: jest.fn().mockRejectedValueOnce(error),
- });
-
await selectAgent();
- await waitForPromises();
});
- it("doesn't render the namespace selector", () => {
- expect(findNamespaceSelector().exists()).toBe(false);
+ it("doesn't render flux resource selector", () => {
+ expect(findFluxResourceSelector().exists()).toBe(false);
});
- it('renders an alert', () => {
- expect(findAlert().text()).toBe('Error from the cluster_client API');
+ it('renders the flux resource selector when the namespace is selected', async () => {
+ await findNamespaceSelector().vm.$emit('select', 'agent');
+
+ expect(findFluxResourceSelector().props()).toEqual({
+ namespace: 'agent',
+ fluxResourcePath: '',
+ configuration,
+ });
});
});
});
@@ -430,7 +469,6 @@ describe('~/environments/components/form.vue', () => {
beforeEach(() => {
wrapper = createWrapperWithApollo({
propsData: { environment: environmentWithAgent },
- kubernetesNamespaceForEnvironment: true,
});
});
@@ -463,7 +501,6 @@ describe('~/environments/components/form.vue', () => {
beforeEach(() => {
wrapper = createWrapperWithApollo({
propsData: { environment: environmentWithAgentAndNamespace },
- kubernetesNamespaceForEnvironment: true,
});
});
@@ -472,4 +509,25 @@ describe('~/environments/components/form.vue', () => {
expect(findNamespaceSelector().props('toggleText')).toBe('default');
});
});
+
+ describe('when environment has an associated flux resource', () => {
+ const fluxResourcePath = 'path/to/flux/resource';
+ const environmentWithAgentAndNamespace = {
+ ...DEFAULT_PROPS.environment,
+ clusterAgent: { id: '1', name: 'agent-1' },
+ clusterAgentId: '1',
+ kubernetesNamespace: 'default',
+ fluxResourcePath,
+ };
+ beforeEach(() => {
+ wrapper = createWrapperWithApollo({
+ propsData: { environment: environmentWithAgentAndNamespace },
+ fluxResourceForEnvironment: true,
+ });
+ });
+
+ it('provides flux resource path to the flux resource selector component', () => {
+ expect(findFluxResourceSelector().props('fluxResourcePath')).toBe(fluxResourcePath);
+ });
+ });
});
diff --git a/spec/frontend/environments/graphql/mock_data.js b/spec/frontend/environments/graphql/mock_data.js
index c2eafa5f51e..fd97f19a6ab 100644
--- a/spec/frontend/environments/graphql/mock_data.js
+++ b/spec/frontend/environments/graphql/mock_data.js
@@ -914,3 +914,12 @@ export const k8sNamespacesMock = [
{ metadata: { name: 'default' } },
{ metadata: { name: 'agent' } },
];
+
+export const fluxKustomizationsMock = [
+ {
+ status: 'True',
+ type: 'Ready',
+ },
+];
+
+export const fluxResourcePathMock = 'path/to/flux/resource';
diff --git a/spec/frontend/environments/graphql/resolvers_spec.js b/spec/frontend/environments/graphql/resolvers/base_spec.js
index be210ed619e..e01cf18c40d 100644
--- a/spec/frontend/environments/graphql/resolvers_spec.js
+++ b/spec/frontend/environments/graphql/resolvers/base_spec.js
@@ -1,5 +1,4 @@
import MockAdapter from 'axios-mock-adapter';
-import { CoreV1Api, AppsV1Api, BatchV1Api } from '@gitlab/cluster-client';
import { s__ } from '~/locale';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
@@ -12,17 +11,13 @@ import pollIntervalQuery from '~/environments/graphql/queries/poll_interval.quer
import isEnvironmentStoppingQuery from '~/environments/graphql/queries/is_environment_stopping.query.graphql';
import pageInfoQuery from '~/environments/graphql/queries/page_info.query.graphql';
import { TEST_HOST } from 'helpers/test_constants';
-import { CLUSTER_AGENT_ERROR_MESSAGES } from '~/environments/constants';
import {
environmentsApp,
resolvedEnvironmentsApp,
resolvedEnvironment,
folder,
resolvedFolder,
- k8sPodsMock,
- k8sServicesMock,
- k8sNamespacesMock,
-} from './mock_data';
+} from '../mock_data';
const ENDPOINT = `${TEST_HOST}/environments`;
@@ -32,14 +27,6 @@ describe('~/frontend/environments/graphql/resolvers', () => {
let mockApollo;
let localState;
- const configuration = {
- basePath: 'kas-proxy/',
- baseOptions: {
- headers: { 'GitLab-Agent-Id': '1' },
- },
- };
- const namespace = 'default';
-
beforeEach(() => {
mockResolvers = resolvers(ENDPOINT);
mock = new MockAdapter(axios);
@@ -156,215 +143,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
expect(environmentFolder).toEqual(resolvedFolder);
});
});
- describe('k8sPods', () => {
- const mockPodsListFn = jest.fn().mockImplementation(() => {
- return Promise.resolve({
- data: {
- items: k8sPodsMock,
- },
- });
- });
-
- const mockNamespacedPodsListFn = jest.fn().mockImplementation(mockPodsListFn);
- const mockAllPodsListFn = jest.fn().mockImplementation(mockPodsListFn);
-
- beforeEach(() => {
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1NamespacedPod')
- .mockImplementation(mockNamespacedPodsListFn);
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
- .mockImplementation(mockAllPodsListFn);
- });
-
- it('should request namespaced pods from the cluster_client library if namespace is specified', async () => {
- const pods = await mockResolvers.Query.k8sPods(null, { configuration, namespace });
-
- expect(mockNamespacedPodsListFn).toHaveBeenCalledWith(namespace);
- expect(mockAllPodsListFn).not.toHaveBeenCalled();
-
- expect(pods).toEqual(k8sPodsMock);
- });
- it('should request all pods from the cluster_client library if namespace is not specified', async () => {
- const pods = await mockResolvers.Query.k8sPods(null, { configuration, namespace: '' });
-
- expect(mockAllPodsListFn).toHaveBeenCalled();
- expect(mockNamespacedPodsListFn).not.toHaveBeenCalled();
-
- expect(pods).toEqual(k8sPodsMock);
- });
- it('should throw an error if the API call fails', async () => {
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
- .mockRejectedValue(new Error('API error'));
-
- await expect(mockResolvers.Query.k8sPods(null, { configuration })).rejects.toThrow(
- 'API error',
- );
- });
- });
- describe('k8sServices', () => {
- const mockServicesListFn = jest.fn().mockImplementation(() => {
- return Promise.resolve({
- data: {
- items: k8sServicesMock,
- },
- });
- });
-
- beforeEach(() => {
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
- .mockImplementation(mockServicesListFn);
- });
-
- it('should request services from the cluster_client library', async () => {
- const services = await mockResolvers.Query.k8sServices(null, { configuration });
-
- expect(mockServicesListFn).toHaveBeenCalled();
- expect(services).toEqual(k8sServicesMock);
- });
- it('should throw an error if the API call fails', async () => {
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
- .mockRejectedValue(new Error('API error'));
-
- await expect(mockResolvers.Query.k8sServices(null, { configuration })).rejects.toThrow(
- 'API error',
- );
- });
- });
- describe('k8sWorkloads', () => {
- const emptyImplementation = jest.fn().mockImplementation(() => {
- return Promise.resolve({
- data: {
- items: [],
- },
- });
- });
-
- const [
- mockNamespacedDeployment,
- mockNamespacedDaemonSet,
- mockNamespacedStatefulSet,
- mockNamespacedReplicaSet,
- mockNamespacedJob,
- mockNamespacedCronJob,
- mockAllDeployment,
- mockAllDaemonSet,
- mockAllStatefulSet,
- mockAllReplicaSet,
- mockAllJob,
- mockAllCronJob,
- ] = Array(12).fill(emptyImplementation);
-
- const namespacedMocks = [
- { method: 'listAppsV1NamespacedDeployment', api: AppsV1Api, spy: mockNamespacedDeployment },
- { method: 'listAppsV1NamespacedDaemonSet', api: AppsV1Api, spy: mockNamespacedDaemonSet },
- { method: 'listAppsV1NamespacedStatefulSet', api: AppsV1Api, spy: mockNamespacedStatefulSet },
- { method: 'listAppsV1NamespacedReplicaSet', api: AppsV1Api, spy: mockNamespacedReplicaSet },
- { method: 'listBatchV1NamespacedJob', api: BatchV1Api, spy: mockNamespacedJob },
- { method: 'listBatchV1NamespacedCronJob', api: BatchV1Api, spy: mockNamespacedCronJob },
- ];
-
- const allMocks = [
- { method: 'listAppsV1DeploymentForAllNamespaces', api: AppsV1Api, spy: mockAllDeployment },
- { method: 'listAppsV1DaemonSetForAllNamespaces', api: AppsV1Api, spy: mockAllDaemonSet },
- { method: 'listAppsV1StatefulSetForAllNamespaces', api: AppsV1Api, spy: mockAllStatefulSet },
- { method: 'listAppsV1ReplicaSetForAllNamespaces', api: AppsV1Api, spy: mockAllReplicaSet },
- { method: 'listBatchV1JobForAllNamespaces', api: BatchV1Api, spy: mockAllJob },
- { method: 'listBatchV1CronJobForAllNamespaces', api: BatchV1Api, spy: mockAllCronJob },
- ];
-
- beforeEach(() => {
- [...namespacedMocks, ...allMocks].forEach((workloadMock) => {
- jest
- .spyOn(workloadMock.api.prototype, workloadMock.method)
- .mockImplementation(workloadMock.spy);
- });
- });
-
- it('should request namespaced workload types from the cluster_client library if namespace is specified', async () => {
- await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace });
-
- namespacedMocks.forEach((workloadMock) => {
- expect(workloadMock.spy).toHaveBeenCalledWith(namespace);
- });
- });
-
- it('should request all workload types from the cluster_client library if namespace is not specified', async () => {
- await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace: '' });
-
- allMocks.forEach((workloadMock) => {
- expect(workloadMock.spy).toHaveBeenCalled();
- });
- });
- it('should pass fulfilled calls data if one of the API calls fail', async () => {
- jest
- .spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces')
- .mockRejectedValue(new Error('API error'));
-
- await expect(
- mockResolvers.Query.k8sWorkloads(null, { configuration }),
- ).resolves.toBeDefined();
- });
- it('should throw an error if all the API calls fail', async () => {
- [...allMocks].forEach((workloadMock) => {
- jest
- .spyOn(workloadMock.api.prototype, workloadMock.method)
- .mockRejectedValue(new Error('API error'));
- });
-
- await expect(mockResolvers.Query.k8sWorkloads(null, { configuration })).rejects.toThrow(
- 'API error',
- );
- });
- });
- describe('k8sNamespaces', () => {
- const mockNamespacesListFn = jest.fn().mockImplementation(() => {
- return Promise.resolve({
- data: {
- items: k8sNamespacesMock,
- },
- });
- });
-
- beforeEach(() => {
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1Namespace')
- .mockImplementation(mockNamespacesListFn);
- });
-
- it('should request all namespaces from the cluster_client library', async () => {
- const namespaces = await mockResolvers.Query.k8sNamespaces(null, { configuration });
-
- expect(mockNamespacesListFn).toHaveBeenCalled();
-
- expect(namespaces).toEqual(k8sNamespacesMock);
- });
- it.each([
- ['Unauthorized', CLUSTER_AGENT_ERROR_MESSAGES.unauthorized],
- ['Forbidden', CLUSTER_AGENT_ERROR_MESSAGES.forbidden],
- ['Not found', CLUSTER_AGENT_ERROR_MESSAGES['not found']],
- ['Unknown', CLUSTER_AGENT_ERROR_MESSAGES.other],
- ])(
- 'should throw an error if the API call fails with the reason "%s"',
- async (reason, message) => {
- jest.spyOn(CoreV1Api.prototype, 'listCoreV1Namespace').mockRejectedValue({
- response: {
- data: {
- reason,
- },
- },
- });
-
- await expect(mockResolvers.Query.k8sNamespaces(null, { configuration })).rejects.toThrow(
- message,
- );
- },
- );
- });
describe('stopEnvironmentREST', () => {
it('should post to the stop environment path', async () => {
mock.onPost(ENDPOINT).reply(HTTP_STATUS_OK);
diff --git a/spec/frontend/environments/graphql/resolvers/flux_spec.js b/spec/frontend/environments/graphql/resolvers/flux_spec.js
new file mode 100644
index 00000000000..aa6f9e120f0
--- /dev/null
+++ b/spec/frontend/environments/graphql/resolvers/flux_spec.js
@@ -0,0 +1,140 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK, HTTP_STATUS_UNAUTHORIZED } from '~/lib/utils/http_status';
+import { resolvers } from '~/environments/graphql/resolvers';
+import { fluxKustomizationsMock } from '../mock_data';
+
+describe('~/frontend/environments/graphql/resolvers', () => {
+ let mockResolvers;
+ let mock;
+
+ const configuration = {
+ basePath: 'kas-proxy/',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+ const namespace = 'default';
+ const environmentName = 'my-environment';
+
+ beforeEach(() => {
+ mockResolvers = resolvers();
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ describe('fluxKustomizationStatus', () => {
+ const endpoint = `${configuration.basePath}/apis/kustomize.toolkit.fluxcd.io/v1beta1/namespaces/${namespace}/kustomizations/${environmentName}`;
+ const fluxResourcePath =
+ 'kustomize.toolkit.fluxcd.io/v1beta1/namespaces/my-namespace/kustomizations/app';
+ const endpointWithFluxResourcePath = `${configuration.basePath}/apis/${fluxResourcePath}`;
+
+ it('should request Flux Kustomizations for the provided namespace via the Kubernetes API if the fluxResourcePath is not specified', async () => {
+ mock
+ .onGet(endpoint, { withCredentials: true, headers: configuration.baseOptions.headers })
+ .reply(HTTP_STATUS_OK, {
+ status: { conditions: fluxKustomizationsMock },
+ });
+
+ const fluxKustomizationStatus = await mockResolvers.Query.fluxKustomizationStatus(null, {
+ configuration,
+ namespace,
+ environmentName,
+ });
+
+ expect(fluxKustomizationStatus).toEqual(fluxKustomizationsMock);
+ });
+ it('should request Flux Kustomization for the provided fluxResourcePath via the Kubernetes API', async () => {
+ mock
+ .onGet(endpointWithFluxResourcePath, {
+ withCredentials: true,
+ headers: configuration.baseOptions.headers,
+ })
+ .reply(HTTP_STATUS_OK, {
+ status: { conditions: fluxKustomizationsMock },
+ });
+
+ const fluxKustomizationStatus = await mockResolvers.Query.fluxKustomizationStatus(null, {
+ configuration,
+ namespace,
+ environmentName,
+ fluxResourcePath,
+ });
+
+ expect(fluxKustomizationStatus).toEqual(fluxKustomizationsMock);
+ });
+ it('should throw an error if the API call fails', async () => {
+ const apiError = 'Invalid credentials';
+ mock
+      .onGet(endpoint, { withCredentials: true, headers: configuration.baseOptions.headers })
+ .reply(HTTP_STATUS_UNAUTHORIZED, { message: apiError });
+
+ const fluxKustomizationsError = mockResolvers.Query.fluxKustomizationStatus(null, {
+ configuration,
+ namespace,
+ environmentName,
+ });
+
+ await expect(fluxKustomizationsError).rejects.toThrow(apiError);
+ });
+ });
+
+ describe('fluxHelmReleaseStatus', () => {
+ const endpoint = `${configuration.basePath}/apis/helm.toolkit.fluxcd.io/v2beta1/namespaces/${namespace}/helmreleases/${environmentName}`;
+ const fluxResourcePath =
+ 'helm.toolkit.fluxcd.io/v2beta1/namespaces/my-namespace/helmreleases/app';
+ const endpointWithFluxResourcePath = `${configuration.basePath}/apis/${fluxResourcePath}`;
+
+ it('should request Flux Helm Releases via the Kubernetes API', async () => {
+ mock
+ .onGet(endpoint, { withCredentials: true, headers: configuration.baseOptions.headers })
+ .reply(HTTP_STATUS_OK, {
+ status: { conditions: fluxKustomizationsMock },
+ });
+
+ const fluxHelmReleaseStatus = await mockResolvers.Query.fluxHelmReleaseStatus(null, {
+ configuration,
+ namespace,
+ environmentName,
+ });
+
+ expect(fluxHelmReleaseStatus).toEqual(fluxKustomizationsMock);
+ });
+ it('should request Flux HelmRelease for the provided fluxResourcePath via the Kubernetes API', async () => {
+ mock
+ .onGet(endpointWithFluxResourcePath, {
+ withCredentials: true,
+ headers: configuration.baseOptions.headers,
+ })
+ .reply(HTTP_STATUS_OK, {
+ status: { conditions: fluxKustomizationsMock },
+ });
+
+ const fluxHelmReleaseStatus = await mockResolvers.Query.fluxHelmReleaseStatus(null, {
+ configuration,
+ namespace,
+ environmentName,
+ fluxResourcePath,
+ });
+
+ expect(fluxHelmReleaseStatus).toEqual(fluxKustomizationsMock);
+ });
+ it('should throw an error if the API call fails', async () => {
+ const apiError = 'Invalid credentials';
+ mock
+      .onGet(endpoint, { withCredentials: true, headers: configuration.baseOptions.headers })
+ .reply(HTTP_STATUS_UNAUTHORIZED, { message: apiError });
+
+ const fluxHelmReleasesError = mockResolvers.Query.fluxHelmReleaseStatus(null, {
+ configuration,
+ namespace,
+ environmentName,
+ });
+
+ await expect(fluxHelmReleasesError).rejects.toThrow(apiError);
+ });
+ });
+});
diff --git a/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js b/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js
new file mode 100644
index 00000000000..1d41fb11b14
--- /dev/null
+++ b/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js
@@ -0,0 +1,238 @@
+import MockAdapter from 'axios-mock-adapter';
+import { CoreV1Api, AppsV1Api, BatchV1Api } from '@gitlab/cluster-client';
+import axios from '~/lib/utils/axios_utils';
+import { resolvers } from '~/environments/graphql/resolvers';
+import { CLUSTER_AGENT_ERROR_MESSAGES } from '~/environments/constants';
+import { k8sPodsMock, k8sServicesMock, k8sNamespacesMock } from '../mock_data';
+
+describe('~/frontend/environments/graphql/resolvers', () => {
+ let mockResolvers;
+ let mock;
+
+ const configuration = {
+ basePath: 'kas-proxy/',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+ const namespace = 'default';
+
+ beforeEach(() => {
+ mockResolvers = resolvers();
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ describe('k8sPods', () => {
+ const mockPodsListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ data: {
+ items: k8sPodsMock,
+ },
+ });
+ });
+
+ const mockNamespacedPodsListFn = jest.fn().mockImplementation(mockPodsListFn);
+ const mockAllPodsListFn = jest.fn().mockImplementation(mockPodsListFn);
+
+ beforeEach(() => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1NamespacedPod')
+ .mockImplementation(mockNamespacedPodsListFn);
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
+ .mockImplementation(mockAllPodsListFn);
+ });
+
+ it('should request namespaced pods from the cluster_client library if namespace is specified', async () => {
+ const pods = await mockResolvers.Query.k8sPods(null, { configuration, namespace });
+
+ expect(mockNamespacedPodsListFn).toHaveBeenCalledWith(namespace);
+ expect(mockAllPodsListFn).not.toHaveBeenCalled();
+
+ expect(pods).toEqual(k8sPodsMock);
+ });
+ it('should request all pods from the cluster_client library if namespace is not specified', async () => {
+ const pods = await mockResolvers.Query.k8sPods(null, { configuration, namespace: '' });
+
+ expect(mockAllPodsListFn).toHaveBeenCalled();
+ expect(mockNamespacedPodsListFn).not.toHaveBeenCalled();
+
+ expect(pods).toEqual(k8sPodsMock);
+ });
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(mockResolvers.Query.k8sPods(null, { configuration })).rejects.toThrow(
+ 'API error',
+ );
+ });
+ });
+ describe('k8sServices', () => {
+ const mockServicesListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ data: {
+ items: k8sServicesMock,
+ },
+ });
+ });
+
+ beforeEach(() => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
+ .mockImplementation(mockServicesListFn);
+ });
+
+ it('should request services from the cluster_client library', async () => {
+ const services = await mockResolvers.Query.k8sServices(null, { configuration });
+
+ expect(mockServicesListFn).toHaveBeenCalled();
+
+ expect(services).toEqual(k8sServicesMock);
+ });
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(mockResolvers.Query.k8sServices(null, { configuration })).rejects.toThrow(
+ 'API error',
+ );
+ });
+ });
+ describe('k8sWorkloads', () => {
+ const emptyImplementation = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ data: {
+ items: [],
+ },
+ });
+ });
+
+ const [
+ mockNamespacedDeployment,
+ mockNamespacedDaemonSet,
+ mockNamespacedStatefulSet,
+ mockNamespacedReplicaSet,
+ mockNamespacedJob,
+ mockNamespacedCronJob,
+ mockAllDeployment,
+ mockAllDaemonSet,
+ mockAllStatefulSet,
+ mockAllReplicaSet,
+ mockAllJob,
+ mockAllCronJob,
+ ] = Array(12).fill(emptyImplementation);
+
+ const namespacedMocks = [
+ { method: 'listAppsV1NamespacedDeployment', api: AppsV1Api, spy: mockNamespacedDeployment },
+ { method: 'listAppsV1NamespacedDaemonSet', api: AppsV1Api, spy: mockNamespacedDaemonSet },
+ { method: 'listAppsV1NamespacedStatefulSet', api: AppsV1Api, spy: mockNamespacedStatefulSet },
+ { method: 'listAppsV1NamespacedReplicaSet', api: AppsV1Api, spy: mockNamespacedReplicaSet },
+ { method: 'listBatchV1NamespacedJob', api: BatchV1Api, spy: mockNamespacedJob },
+ { method: 'listBatchV1NamespacedCronJob', api: BatchV1Api, spy: mockNamespacedCronJob },
+ ];
+
+ const allMocks = [
+ { method: 'listAppsV1DeploymentForAllNamespaces', api: AppsV1Api, spy: mockAllDeployment },
+ { method: 'listAppsV1DaemonSetForAllNamespaces', api: AppsV1Api, spy: mockAllDaemonSet },
+ { method: 'listAppsV1StatefulSetForAllNamespaces', api: AppsV1Api, spy: mockAllStatefulSet },
+ { method: 'listAppsV1ReplicaSetForAllNamespaces', api: AppsV1Api, spy: mockAllReplicaSet },
+ { method: 'listBatchV1JobForAllNamespaces', api: BatchV1Api, spy: mockAllJob },
+ { method: 'listBatchV1CronJobForAllNamespaces', api: BatchV1Api, spy: mockAllCronJob },
+ ];
+
+ beforeEach(() => {
+ [...namespacedMocks, ...allMocks].forEach((workloadMock) => {
+ jest
+ .spyOn(workloadMock.api.prototype, workloadMock.method)
+ .mockImplementation(workloadMock.spy);
+ });
+ });
+
+ it('should request namespaced workload types from the cluster_client library if namespace is specified', async () => {
+ await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace });
+
+ namespacedMocks.forEach((workloadMock) => {
+ expect(workloadMock.spy).toHaveBeenCalledWith(namespace);
+ });
+ });
+
+ it('should request all workload types from the cluster_client library if namespace is not specified', async () => {
+ await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace: '' });
+
+ allMocks.forEach((workloadMock) => {
+ expect(workloadMock.spy).toHaveBeenCalled();
+ });
+ });
+ it('should pass fulfilled calls data if one of the API calls fail', async () => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sWorkloads(null, { configuration }),
+ ).resolves.toBeDefined();
+ });
+ it('should throw an error if all the API calls fail', async () => {
+ [...allMocks].forEach((workloadMock) => {
+ jest
+ .spyOn(workloadMock.api.prototype, workloadMock.method)
+ .mockRejectedValue(new Error('API error'));
+ });
+
+ await expect(mockResolvers.Query.k8sWorkloads(null, { configuration })).rejects.toThrow(
+ 'API error',
+ );
+ });
+ });
+ describe('k8sNamespaces', () => {
+ const mockNamespacesListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ data: {
+ items: k8sNamespacesMock,
+ },
+ });
+ });
+
+ beforeEach(() => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1Namespace')
+ .mockImplementation(mockNamespacesListFn);
+ });
+
+ it('should request all namespaces from the cluster_client library', async () => {
+ const namespaces = await mockResolvers.Query.k8sNamespaces(null, { configuration });
+
+ expect(mockNamespacesListFn).toHaveBeenCalled();
+
+ expect(namespaces).toEqual(k8sNamespacesMock);
+ });
+ it.each([
+ ['Unauthorized', CLUSTER_AGENT_ERROR_MESSAGES.unauthorized],
+ ['Forbidden', CLUSTER_AGENT_ERROR_MESSAGES.forbidden],
+ ['Not found', CLUSTER_AGENT_ERROR_MESSAGES['not found']],
+ ['Unknown', CLUSTER_AGENT_ERROR_MESSAGES.other],
+ ])(
+ 'should throw an error if the API call fails with the reason "%s"',
+ async (reason, message) => {
+ jest.spyOn(CoreV1Api.prototype, 'listCoreV1Namespace').mockRejectedValue({
+ response: {
+ data: {
+ reason,
+ },
+ },
+ });
+
+ await expect(mockResolvers.Query.k8sNamespaces(null, { configuration })).rejects.toThrow(
+ message,
+ );
+ },
+ );
+ });
+});
diff --git a/spec/frontend/environments/kubernetes_overview_spec.js b/spec/frontend/environments/kubernetes_overview_spec.js
index 1c7ace00f48..aa7e2e9a3b7 100644
--- a/spec/frontend/environments/kubernetes_overview_spec.js
+++ b/spec/frontend/environments/kubernetes_overview_spec.js
@@ -6,12 +6,19 @@ import KubernetesAgentInfo from '~/environments/components/kubernetes_agent_info
import KubernetesPods from '~/environments/components/kubernetes_pods.vue';
import KubernetesTabs from '~/environments/components/kubernetes_tabs.vue';
import KubernetesStatusBar from '~/environments/components/kubernetes_status_bar.vue';
-import { agent, kubernetesNamespace } from './graphql/mock_data';
+import {
+ agent,
+ kubernetesNamespace,
+ resolvedEnvironment,
+ fluxResourcePathMock,
+} from './graphql/mock_data';
import { mockKasTunnelUrl } from './mock_data';
const propsData = {
clusterAgent: agent,
namespace: kubernetesNamespace,
+ environmentName: resolvedEnvironment.name,
+ fluxResourcePath: fluxResourcePathMock,
};
const provide = {
@@ -110,7 +117,13 @@ describe('~/environments/components/kubernetes_overview.vue', () => {
});
it('renders kubernetes status bar', () => {
- expect(findKubernetesStatusBar().exists()).toBe(true);
+ expect(findKubernetesStatusBar().props()).toEqual({
+ clusterHealthStatus: 'success',
+ configuration,
+ namespace: kubernetesNamespace,
+ environmentName: resolvedEnvironment.name,
+ fluxResourcePath: fluxResourcePathMock,
+ });
});
});
diff --git a/spec/frontend/environments/kubernetes_status_bar_spec.js b/spec/frontend/environments/kubernetes_status_bar_spec.js
index 2ebb30e2766..5dec7ca5aac 100644
--- a/spec/frontend/environments/kubernetes_status_bar_spec.js
+++ b/spec/frontend/environments/kubernetes_status_bar_spec.js
@@ -1,20 +1,67 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon, GlBadge } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon, GlPopover, GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import KubernetesStatusBar from '~/environments/components/kubernetes_status_bar.vue';
import {
CLUSTER_STATUS_HEALTHY_TEXT,
CLUSTER_STATUS_UNHEALTHY_TEXT,
+ SYNC_STATUS_BADGES,
} from '~/environments/constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { s__ } from '~/locale';
+import { mockKasTunnelUrl } from './mock_data';
+
+Vue.use(VueApollo);
+
+const configuration = {
+ basePath: mockKasTunnelUrl.replace(/\/$/, ''),
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ withCredentials: true,
+ },
+};
+const environmentName = 'environment_name';
describe('~/environments/components/kubernetes_status_bar.vue', () => {
let wrapper;
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findHealthBadge = () => wrapper.findComponent(GlBadge);
+ const findHealthBadge = () => wrapper.findByTestId('health-badge');
+ const findSyncBadge = () => wrapper.findByTestId('sync-badge');
+ const findPopover = () => wrapper.findComponent(GlPopover);
+
+ const fluxKustomizationStatusQuery = jest.fn().mockReturnValue([]);
+ const fluxHelmReleaseStatusQuery = jest.fn().mockReturnValue([]);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ fluxKustomizationStatus: fluxKustomizationStatusQuery,
+ fluxHelmReleaseStatus: fluxHelmReleaseStatusQuery,
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
- const createWrapper = ({ clusterHealthStatus = '' } = {}) => {
- wrapper = shallowMount(KubernetesStatusBar, {
- propsData: { clusterHealthStatus },
+ const createWrapper = ({
+ apolloProvider = createApolloProvider(),
+ clusterHealthStatus = '',
+ namespace = '',
+ fluxResourcePath = '',
+ } = {}) => {
+ wrapper = shallowMountExtended(KubernetesStatusBar, {
+ propsData: {
+ clusterHealthStatus,
+ configuration,
+ environmentName,
+ namespace,
+ fluxResourcePath,
+ },
+ apolloProvider,
+ stubs: { GlSprintf },
});
};
@@ -39,4 +86,219 @@ describe('~/environments/components/kubernetes_status_bar.vue', () => {
},
);
});
+
+ describe('sync badge', () => {
+ describe('when no namespace is provided', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it("doesn't request Kustomizations and HelmReleases", () => {
+ expect(fluxKustomizationStatusQuery).not.toHaveBeenCalled();
+ expect(fluxHelmReleaseStatusQuery).not.toHaveBeenCalled();
+ });
+
+ it('renders sync status as Unavailable', () => {
+ expect(findSyncBadge().text()).toBe(s__('Deployment|Unavailable'));
+ });
+ });
+
+ describe('when flux resource path is provided', () => {
+ const namespace = 'my-namespace';
+ let fluxResourcePath;
+
+ describe('if the provided resource is a Kustomization', () => {
+ beforeEach(() => {
+ fluxResourcePath =
+ 'kustomize.toolkit.fluxcd.io/v1beta1/namespaces/my-namespace/kustomizations/app';
+
+ createWrapper({ namespace, fluxResourcePath });
+ });
+
+ it('requests the Kustomization resource status', () => {
+ expect(fluxKustomizationStatusQuery).toHaveBeenCalledWith(
+ {},
+ expect.objectContaining({
+ configuration,
+ namespace,
+ environmentName,
+ fluxResourcePath,
+ }),
+ expect.any(Object),
+ expect.any(Object),
+ );
+ });
+
+ it("doesn't request HelmRelease resource status", () => {
+ expect(fluxHelmReleaseStatusQuery).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('if the provided resource is a helmRelease', () => {
+ beforeEach(() => {
+ fluxResourcePath =
+ 'helm.toolkit.fluxcd.io/v2beta1/namespaces/my-namespace/helmreleases/app';
+
+ createWrapper({ namespace, fluxResourcePath });
+ });
+
+ it('requests the HelmRelease resource status', () => {
+ expect(fluxHelmReleaseStatusQuery).toHaveBeenCalledWith(
+ {},
+ expect.objectContaining({
+ configuration,
+ namespace,
+ environmentName,
+ fluxResourcePath,
+ }),
+ expect.any(Object),
+ expect.any(Object),
+ );
+ });
+
+ it("doesn't request Kustomization resource status", () => {
+ expect(fluxKustomizationStatusQuery).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('when namespace is provided', () => {
+ describe('with no Flux resources found', () => {
+ beforeEach(() => {
+ createWrapper({ namespace: 'my-namespace' });
+ });
+
+ it('requests Kustomizations', () => {
+ expect(fluxKustomizationStatusQuery).toHaveBeenCalled();
+ });
+
+ it('requests HelmReleases when there were no Kustomizations found', async () => {
+ await waitForPromises();
+
+ expect(fluxHelmReleaseStatusQuery).toHaveBeenCalled();
+ });
+
+ it('renders sync status as Unavailable when no Kustomizations and HelmReleases found', async () => {
+ await waitForPromises();
+
+ expect(findSyncBadge().text()).toBe(s__('Deployment|Unavailable'));
+ });
+ });
+
+ describe('with Flux Kustomizations available', () => {
+ const createApolloProviderWithKustomizations = ({
+ result = { status: 'True', type: 'Ready', message: '' },
+ } = {}) => {
+ const mockResolvers = {
+ Query: {
+ fluxKustomizationStatus: jest.fn().mockReturnValue([result]),
+ fluxHelmReleaseStatus: fluxHelmReleaseStatusQuery,
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ it("doesn't request HelmReleases when the Kustomizations were found", async () => {
+ createWrapper({
+ apolloProvider: createApolloProviderWithKustomizations(),
+ namespace: 'my-namespace',
+ });
+ await waitForPromises();
+
+ expect(fluxHelmReleaseStatusQuery).not.toHaveBeenCalled();
+ });
+
+ it.each`
+ status | type | badgeType
+ ${'True'} | ${'Stalled'} | ${'stalled'}
+ ${'True'} | ${'Reconciling'} | ${'reconciling'}
+ ${'True'} | ${'Ready'} | ${'reconciled'}
+ ${'False'} | ${'Ready'} | ${'failed'}
+ ${'True'} | ${'Unknown'} | ${'unknown'}
+ `(
+ 'renders $badgeType when status is $status and type is $type',
+ async ({ status, type, badgeType }) => {
+ createWrapper({
+ apolloProvider: createApolloProviderWithKustomizations({
+ result: { status, type, message: '' },
+ }),
+ namespace: 'my-namespace',
+ });
+ await waitForPromises();
+
+ const badge = SYNC_STATUS_BADGES[badgeType];
+
+ expect(findSyncBadge().text()).toBe(badge.text);
+ expect(findSyncBadge().props()).toMatchObject({
+ icon: badge.icon,
+ variant: badge.variant,
+ });
+ },
+ );
+
+ it.each`
+ status | type | message | popoverTitle | popoverText
+ ${'True'} | ${'Stalled'} | ${'stalled reason'} | ${s__('Deployment|Flux sync stalled')} | ${'stalled reason'}
+ ${'True'} | ${'Reconciling'} | ${''} | ${undefined} | ${s__('Deployment|Flux sync reconciling')}
+ ${'True'} | ${'Ready'} | ${''} | ${undefined} | ${s__('Deployment|Flux sync reconciled successfully')}
+ ${'False'} | ${'Ready'} | ${'failed reason'} | ${s__('Deployment|Flux sync failed')} | ${'failed reason'}
+ ${'True'} | ${'Unknown'} | ${''} | ${s__('Deployment|Flux sync status is unknown')} | ${s__('Deployment|Unable to detect state. %{linkStart}How are states detected?%{linkEnd}')}
+ `(
+ 'renders correct popover text when status is $status and type is $type',
+ async ({ status, type, message, popoverTitle, popoverText }) => {
+ createWrapper({
+ apolloProvider: createApolloProviderWithKustomizations({
+ result: { status, type, message },
+ }),
+ namespace: 'my-namespace',
+ });
+ await waitForPromises();
+
+ expect(findPopover().text()).toMatchInterpolatedText(popoverText);
+ expect(findPopover().props('title')).toBe(popoverTitle);
+ },
+ );
+ });
+
+ describe('when Flux API errored', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createApolloProviderWithErrors = () => {
+ const mockResolvers = {
+ Query: {
+ fluxKustomizationStatus: jest.fn().mockRejectedValueOnce(error),
+ fluxHelmReleaseStatus: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper({
+ apolloProvider: createApolloProviderWithErrors(),
+ namespace: 'my-namespace',
+ });
+ await waitForPromises();
+ });
+
+ it('renders sync badge as unavailable', () => {
+ const badge = SYNC_STATUS_BADGES.unavailable;
+
+ expect(findSyncBadge().text()).toBe(badge.text);
+ expect(findSyncBadge().props()).toMatchObject({
+ icon: badge.icon,
+ variant: badge.variant,
+ });
+ });
+
+ it('renders popover with an API error message', () => {
+ expect(findPopover().text()).toBe(error.message);
+ expect(findPopover().props('title')).toBe(
+ s__('Deployment|Flux sync status is unavailable'),
+ );
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/environments/new_environment_item_spec.js b/spec/frontend/environments/new_environment_item_spec.js
index 387bc31c9aa..bfcc4f4ebb6 100644
--- a/spec/frontend/environments/new_environment_item_spec.js
+++ b/spec/frontend/environments/new_environment_item_spec.js
@@ -13,8 +13,13 @@ import Deployment from '~/environments/components/deployment.vue';
import DeployBoardWrapper from '~/environments/components/deploy_board_wrapper.vue';
import KubernetesOverview from '~/environments/components/kubernetes_overview.vue';
import getEnvironmentClusterAgent from '~/environments/graphql/queries/environment_cluster_agent.query.graphql';
-import getEnvironmentClusterAgentWithNamespace from '~/environments/graphql/queries/environment_cluster_agent_with_namespace.query.graphql';
-import { resolvedEnvironment, rolloutStatus, agent } from './graphql/mock_data';
+import getEnvironmentClusterAgentWithFluxResource from '~/environments/graphql/queries/environment_cluster_agent_with_flux_resource.query.graphql';
+import {
+ resolvedEnvironment,
+ rolloutStatus,
+ agent,
+ fluxResourcePathMock,
+} from './graphql/mock_data';
import { mockKasTunnelUrl } from './mock_data';
Vue.use(VueApollo);
@@ -22,7 +27,7 @@ Vue.use(VueApollo);
describe('~/environments/components/new_environment_item.vue', () => {
let wrapper;
let queryResponseHandler;
- let queryWithNamespaceResponseHandler;
+ let queryWithFluxResourceResponseHandler;
const projectPath = '/1';
@@ -33,26 +38,27 @@ describe('~/environments/components/new_environment_item.vue', () => {
id: '1',
environment: {
id: '1',
+ kubernetesNamespace: 'default',
clusterAgent,
},
},
},
};
queryResponseHandler = jest.fn().mockResolvedValue(response);
- queryWithNamespaceResponseHandler = jest.fn().mockResolvedValue({
+ queryWithFluxResourceResponseHandler = jest.fn().mockResolvedValue({
data: {
project: {
id: response.data.project.id,
environment: {
...response.data.project.environment,
- kubernetesNamespace: 'default',
+ fluxResourcePath: fluxResourcePathMock,
},
},
},
});
return createMockApollo([
[getEnvironmentClusterAgent, queryResponseHandler],
- [getEnvironmentClusterAgentWithNamespace, queryWithNamespaceResponseHandler],
+ [getEnvironmentClusterAgentWithFluxResource, queryWithFluxResourceResponseHandler],
]);
};
@@ -534,7 +540,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
describe('kubernetes overview', () => {
- it('should request agent data when the environment is visible if the feature flag is enabled', async () => {
+ it('should request agent data when the environment is visible', async () => {
wrapper = createWrapper({
propsData: { environment: resolvedEnvironment },
apolloProvider: createApolloProvider(agent),
@@ -548,12 +554,12 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
});
- it('should request agent data with kubernetes namespace when `kubernetesNamespaceForEnvironment` feature flag is enabled', async () => {
+ it('should request agent data with Flux resource when `fluxResourceForEnvironment` feature flag is enabled', async () => {
wrapper = createWrapper({
propsData: { environment: resolvedEnvironment },
provideData: {
glFeatures: {
- kubernetesNamespaceForEnvironment: true,
+ fluxResourceForEnvironment: true,
},
},
apolloProvider: createApolloProvider(agent),
@@ -561,7 +567,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
await expandCollapsedSection();
- expect(queryWithNamespaceResponseHandler).toHaveBeenCalledWith({
+ expect(queryWithFluxResourceResponseHandler).toHaveBeenCalledWith({
environmentName: resolvedEnvironment.name,
projectFullPath: projectPath,
});
@@ -578,15 +584,16 @@ describe('~/environments/components/new_environment_item.vue', () => {
expect(findKubernetesOverview().props()).toMatchObject({
clusterAgent: agent,
+ environmentName: resolvedEnvironment.name,
});
});
- it('should render with the namespace if `kubernetesNamespaceForEnvironment` feature flag is enabled and the environment has an agent associated', async () => {
+ it('should render with the namespace if `fluxResourceForEnvironment` feature flag is enabled and the environment has an agent associated', async () => {
wrapper = createWrapper({
propsData: { environment: resolvedEnvironment },
provideData: {
glFeatures: {
- kubernetesNamespaceForEnvironment: true,
+ fluxResourceForEnvironment: true,
},
},
apolloProvider: createApolloProvider(agent),
@@ -595,9 +602,11 @@ describe('~/environments/components/new_environment_item.vue', () => {
await expandCollapsedSection();
await waitForPromises();
- expect(findKubernetesOverview().props()).toMatchObject({
+ expect(findKubernetesOverview().props()).toEqual({
clusterAgent: agent,
+ environmentName: resolvedEnvironment.name,
namespace: 'default',
+ fluxResourcePath: fluxResourcePathMock,
});
});
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 6ef34504da7..d6cf12587b9 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -12,6 +12,7 @@ import {
} from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { severityLevel, severityLevelVariant, errorStatus } from '~/error_tracking/constants';
import ErrorDetails from '~/error_tracking/components/error_details.vue';
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 49f365e8c60..a9cd407f758 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -8,6 +8,7 @@ import {
} from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import stubChildren from 'helpers/stub_children';
import ErrorTrackingActions from '~/error_tracking/components/error_tracking_actions.vue';
diff --git a/spec/frontend/error_tracking_settings/components/app_spec.js b/spec/frontend/error_tracking_settings/components/app_spec.js
index 9b7701d46bc..2e50e3aa90a 100644
--- a/spec/frontend/error_tracking_settings/components/app_spec.js
+++ b/spec/frontend/error_tracking_settings/components/app_spec.js
@@ -1,6 +1,7 @@
import { GlFormRadioGroup, GlFormRadio, GlFormInputGroup } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { TEST_HOST } from 'helpers/test_constants';
diff --git a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
index b1cf5d673f1..57c06eae2aa 100644
--- a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
+++ b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
@@ -1,6 +1,7 @@
import { GlFormInput, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import ErrorTrackingForm from '~/error_tracking_settings/components/error_tracking_form.vue';
import createStore from '~/error_tracking_settings/store';
diff --git a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
index 03d090c5314..6449fc8643c 100644
--- a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
+++ b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
@@ -2,6 +2,7 @@ import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import { pick, clone } from 'lodash';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import ProjectDropdown from '~/error_tracking_settings/components/project_dropdown.vue';
import { defaultProps, projectList, staleProject } from '../mock';
diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
index b8d058e7bc5..813316ef437 100644
--- a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
@@ -2,6 +2,7 @@ import { GlToggle, GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
diff --git a/spec/frontend/feature_flags/components/feature_flags_spec.js b/spec/frontend/feature_flags/components/feature_flags_spec.js
index c0cfec384f0..de07e9b4588 100644
--- a/spec/frontend/feature_flags/components/feature_flags_spec.js
+++ b/spec/frontend/feature_flags/components/feature_flags_spec.js
@@ -1,6 +1,7 @@
import { GlAlert, GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@@ -30,7 +31,7 @@ describe('Feature flags', () => {
userListPath: '/user-list',
unleashApiUrl: `${TEST_HOST}/api/unleash`,
projectName: 'fakeProjectName',
- errorStateSvgPath: '/assets/illustrations/feature_flag.svg',
+ errorStateSvgPath: '/assets/illustrations/empty-state/empty-feature-flag-md.svg',
};
const mockState = {
diff --git a/spec/frontend/feature_flags/components/new_feature_flag_spec.js b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
index c5418477661..7663110893b 100644
--- a/spec/frontend/feature_flags/components/new_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
@@ -1,6 +1,7 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { TEST_HOST } from 'spec/test_constants';
import Form from '~/feature_flags/components/form.vue';
diff --git a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
index 133796df3e4..45440d7568b 100644
--- a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
@@ -1,6 +1,7 @@
import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Api from '~/api';
import GitlabUserList from '~/feature_flags/components/strategies/gitlab_user_list.vue';
diff --git a/spec/frontend/feature_flags/components/strategy_spec.js b/spec/frontend/feature_flags/components/strategy_spec.js
index 1428d99aa76..ca6e338ac6c 100644
--- a/spec/frontend/feature_flags/components/strategy_spec.js
+++ b/spec/frontend/feature_flags/components/strategy_spec.js
@@ -2,6 +2,7 @@ import { GlAlert, GlFormSelect, GlLink, GlToken, GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import { last } from 'lodash';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Api from '~/api';
import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
diff --git a/spec/frontend/fixtures/time_estimates.rb b/spec/frontend/fixtures/time_estimates.rb
new file mode 100644
index 00000000000..c9591bd63ac
--- /dev/null
+++ b/spec/frontend/fixtures/time_estimates.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Time estimates (GraphQL fixtures)', feature_category: :team_planning do
+ describe GraphQL::Query, type: :request do
+ include ApiHelpers
+ include GraphqlHelpers
+ include JavaScriptFixturesHelpers
+
+ let_it_be(:developer) { create(:user) }
+
+ context 'for issues time estimate' do
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ let(:query_path) { 'sidebar/queries/issue_set_time_estimate.mutation.graphql' }
+ let(:query) { get_graphql_query_as_string(query_path) }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ context 'when there are no errors while changing the time estimate' do
+ it "graphql/issue_set_time_estimate_without_errors.json" do
+ post_graphql(
+ query,
+ current_user: developer,
+ variables: {
+ input: {
+ projectPath: project.full_path,
+ iid: issue.iid.to_s,
+ timeEstimate: '1d 2h'
+ }
+ }
+ )
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+
+ context 'when there are errors while changing the time estimate' do
+ it "graphql/issue_set_time_estimate_with_errors.json" do
+ post_graphql(
+ query,
+ current_user: developer,
+ variables: {
+ input: {
+ projectPath: project.full_path,
+ iid: issue.iid.to_s,
+ timeEstimate: '1egh'
+ }
+ }
+ )
+
+ expect_graphql_errors_to_include("timeEstimate must be formatted correctly, for example `1h 30m`")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/users.rb b/spec/frontend/fixtures/users.rb
index 800a9af194e..4bdf9160083 100644
--- a/spec/frontend/fixtures/users.rb
+++ b/spec/frontend/fixtures/users.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Users (JavaScript fixtures)', feature_category: :user_profile do
include JavaScriptFixturesHelpers
include ApiHelpers
+ include DesignManagementTestHelpers
let_it_be(:followers) { create_list(:user, 5) }
let_it_be(:followees) { create_list(:user, 5) }
@@ -28,16 +29,22 @@ RSpec.describe 'Users (JavaScript fixtures)', feature_category: :user_profile do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project_empty_repo, group: group) }
- include_context 'with user contribution events'
+ if Gitlab.ee?
+ include_context '[EE] with user contribution events'
+ else
+ include_context 'with user contribution events'
+ end
before do
+ enable_design_management
+ stub_licensed_features(epics: true)
group.add_owner(user)
project.add_maintainer(user)
sign_in(user)
end
it 'controller/users/activity.json' do
- get :activity, params: { username: user.username, limit: 50 }, format: :json
+ get :activity, params: { username: user.username, limit: 100 }, format: :json
expect(response).to be_successful
end
diff --git a/spec/frontend/forks/component/forks_button_spec.js b/spec/frontend/forks/component/forks_button_spec.js
new file mode 100644
index 00000000000..a743f015ef4
--- /dev/null
+++ b/spec/frontend/forks/component/forks_button_spec.js
@@ -0,0 +1,74 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ForksButton from '~/forks/components/forks_button.vue';
+
+describe('ForksButton', () => {
+ let wrapper;
+
+ const findForkButton = () => wrapper.findByTestId('fork-button');
+ const findForksCountButton = () => wrapper.findByTestId('forks-count');
+
+ const mountComponent = ({ injections } = {}) => {
+ wrapper = mountExtended(ForksButton, {
+ provide: {
+ forksCount: 10,
+ projectForksUrl: '/project/forks',
+ userForkUrl: '/user/fork',
+ newForkUrl: '/new/fork',
+ canReadCode: true,
+ canCreateFork: true,
+ canForkProject: true,
+ ...injections,
+ },
+ });
+ };
+
+ describe('forks count button', () => {
+ it('renders the correct number of forks', () => {
+ mountComponent();
+
+ expect(findForksCountButton().text()).toBe('10');
+ });
+
+ it('is disabled when the user cannot read code', () => {
+ mountComponent({ injections: { canReadCode: false } });
+
+ expect(findForksCountButton().props('disabled')).toBe(true);
+ });
+
+ it('is enabled when the user can read code and has the correct link', () => {
+ mountComponent();
+
+ expect(findForksCountButton().props('disabled')).toBe(false);
+ expect(findForksCountButton().attributes('href')).toBe('/project/forks');
+ });
+ });
+
+ describe('fork button', () => {
+ const userForkUrlPath = '/user/fork';
+ const newForkPath = '/new/fork';
+
+ const goToYourForkTitle = 'Go to your fork';
+ const createNewForkTitle = 'Create new fork';
+ const reachedLimitTitle = 'You have reached your project limit';
+ const noPermissionsTitle = "You don't have permission to fork this project";
+
+ it.each`
+ userForkUrl | canReadCode | canCreateFork | canForkProject | isDisabled | title | href
+ ${userForkUrlPath} | ${true} | ${true} | ${true} | ${false} | ${goToYourForkTitle} | ${userForkUrlPath}
+ ${userForkUrlPath} | ${false} | ${true} | ${true} | ${true} | ${createNewForkTitle} | ${userForkUrlPath}
+ ${null} | ${true} | ${true} | ${true} | ${false} | ${createNewForkTitle} | ${newForkPath}
+ ${null} | ${false} | ${true} | ${true} | ${true} | ${createNewForkTitle} | ${newForkPath}
+ ${null} | ${true} | ${false} | ${true} | ${true} | ${reachedLimitTitle} | ${newForkPath}
+ ${null} | ${true} | ${true} | ${false} | ${true} | ${noPermissionsTitle} | ${newForkPath}
+ `(
+ 'has the right enabled state, title, and link',
+ ({ userForkUrl, canReadCode, canCreateFork, canForkProject, isDisabled, title, href }) => {
+ mountComponent({ injections: { userForkUrl, canReadCode, canCreateFork, canForkProject } });
+
+ expect(findForkButton().props('disabled')).toBe(isDisabled);
+ expect(findForkButton().attributes('title')).toBe(title);
+ expect(findForkButton().attributes('href')).toBe(href);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/frequent_items/components/app_spec.js b/spec/frontend/frequent_items/components/app_spec.js
index a8ae72eb4b3..122155a5d3f 100644
--- a/spec/frontend/frequent_items/components/app_spec.js
+++ b/spec/frontend/frequent_items/components/app_spec.js
@@ -1,6 +1,7 @@
import { GlButton, GlIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
index 7c8592fdf0c..55d20ad603c 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
@@ -1,5 +1,6 @@
import { GlIcon } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
index dd6dd80af4f..8055b7a9c13 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
@@ -1,4 +1,5 @@
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import frequentItemsListComponent from '~/frequent_items/components/frequent_items_list.vue';
diff --git a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
index dfce88ca0a8..d6aa0f4e221 100644
--- a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
@@ -1,6 +1,7 @@
import { GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import searchComponent from '~/frequent_items/components/frequent_items_search_input.vue';
diff --git a/spec/frontend/group_settings/components/shared_runners_form_spec.js b/spec/frontend/group_settings/components/shared_runners_form_spec.js
index 5daa21fd618..b39b9a62661 100644
--- a/spec/frontend/group_settings/components/shared_runners_form_spec.js
+++ b/spec/frontend/group_settings/components/shared_runners_form_spec.js
@@ -1,5 +1,6 @@
-import { GlAlert } from '@gitlab/ui';
+import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
import { nextTick } from 'vue';
+import { s__, sprintf } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
@@ -17,6 +18,9 @@ const RUNNER_ENABLED_VALUE = 'enabled';
const RUNNER_DISABLED_VALUE = 'disabled_and_unoverridable';
const RUNNER_ALLOW_OVERRIDE_VALUE = 'disabled_and_overridable';
+const mockParentName = 'My group';
+const mockParentSettingsPath = '/groups/my-group/-/settings/ci_cd';
+
describe('group_settings/components/shared_runners_form', () => {
let wrapper;
@@ -27,20 +31,19 @@ describe('group_settings/components/shared_runners_form', () => {
groupName: GROUP_NAME,
groupIsEmpty: false,
sharedRunnersSetting: RUNNER_ENABLED_VALUE,
- parentSharedRunnersSetting: null,
+
runnerEnabledValue: RUNNER_ENABLED_VALUE,
runnerDisabledValue: RUNNER_DISABLED_VALUE,
runnerAllowOverrideValue: RUNNER_ALLOW_OVERRIDE_VALUE,
...provide,
},
+ stubs: {
+ GlSprintf,
+ },
});
};
- const findAlert = (variant) =>
- wrapper
- .findAllComponents(GlAlert)
- .filter((w) => w.props('variant') === variant)
- .at(0);
+ const findAlert = () => wrapper.findComponent(GlAlert);
const findSharedRunnersToggle = () => wrapper.findByTestId('shared-runners-toggle');
const findOverrideToggle = () => wrapper.findByTestId('override-runners-toggle');
const getSharedRunnersSetting = () => {
@@ -86,17 +89,37 @@ describe('group_settings/components/shared_runners_form', () => {
});
});
- describe('When parent group disabled shared runners', () => {
- it('toggles are disabled', () => {
+ describe.each`
+ provide | case | isParentLinkExpected
+ ${{ parentName: mockParentName, parentSettingsPath: mockParentSettingsPath }} | ${'can configure parent'} | ${true}
+ ${{}} | ${'cannot configure parent'} | ${false}
+ `('When parent group disabled shared runners and $case', ({ provide, isParentLinkExpected }) => {
+ beforeEach(() => {
createComponent({
sharedRunnersSetting: RUNNER_DISABLED_VALUE,
parentSharedRunnersSetting: RUNNER_DISABLED_VALUE,
+ ...provide,
});
-
- expect(findSharedRunnersToggle().props('disabled')).toBe(true);
- expect(findOverrideToggle().props('disabled')).toBe(true);
- expect(findAlert('warning').exists()).toBe(true);
});
+
+ it.each([findSharedRunnersToggle, findOverrideToggle])(
+ 'toggle %# is disabled',
+ (findToggle) => {
+ expect(findToggle().props('disabled')).toBe(true);
+ expect(findToggle().text()).toContain(s__('Runners|Shared runners are disabled.'));
+
+ if (isParentLinkExpected) {
+ expect(findToggle().text()).toContain(
+ sprintf(s__('Runners|Go to %{groupLink} to enable them.'), {
+ groupLink: mockParentName,
+ }),
+ );
+ const link = findToggle().findComponent(GlLink);
+ expect(link.text()).toBe(mockParentName);
+ expect(link.attributes('href')).toBe(mockParentSettingsPath);
+ }
+ },
+ );
});
describe('loading state', () => {
@@ -240,7 +263,7 @@ describe('group_settings/components/shared_runners_form', () => {
});
it('error should be shown', () => {
- expect(findAlert('danger').text()).toBe(message);
+ expect(findAlert().text()).toBe(message);
});
});
});
diff --git a/spec/frontend/groups/service/archived_projects_service_spec.js b/spec/frontend/groups/service/archived_projects_service_spec.js
index 3aec9d57ee1..6bc46e4799c 100644
--- a/spec/frontend/groups/service/archived_projects_service_spec.js
+++ b/spec/frontend/groups/service/archived_projects_service_spec.js
@@ -18,11 +18,9 @@ describe('ArchivedProjectsService', () => {
const query = 'git';
const sort = 'created_asc';
- beforeEach(() => {
+ it('returns promise the resolves with formatted project', async () => {
Api.groupProjects.mockResolvedValueOnce({ data: projects, headers });
- });
- it('returns promise the resolves with formatted project', async () => {
await expect(service.getGroups(undefined, page, query, sort)).resolves.toEqual({
data: projects.map((project) => {
return {
@@ -47,7 +45,7 @@ describe('ArchivedProjectsService', () => {
number_users_with_delimiter: 0,
star_count: project.star_count,
updated_at: project.updated_at,
- marked_for_deletion: project.marked_for_deletion_at !== null,
+ marked_for_deletion: false,
last_activity_at: project.last_activity_at,
};
}),
@@ -56,6 +54,7 @@ describe('ArchivedProjectsService', () => {
expect(Api.groupProjects).toHaveBeenCalledWith(groupId, query, {
archived: true,
+ include_subgroups: true,
page,
order_by: 'created_at',
sort: 'asc',
@@ -63,6 +62,35 @@ describe('ArchivedProjectsService', () => {
});
describe.each`
+ markedForDeletionAt | expected
+ ${null} | ${false}
+ ${undefined} | ${false}
+ ${'2023-07-21'} | ${true}
+ `(
+ 'when `marked_for_deletion_at` is $markedForDeletionAt',
+ ({ markedForDeletionAt, expected }) => {
+ it(`sets marked_for_deletion to ${expected}`, async () => {
+ Api.groupProjects.mockResolvedValueOnce({
+ data: projects.map((project) => ({
+ ...project,
+ marked_for_deletion_at: markedForDeletionAt,
+ })),
+ headers,
+ });
+
+ await expect(service.getGroups(undefined, page, query, sort)).resolves.toMatchObject({
+ data: projects.map(() => {
+ return {
+ marked_for_deletion: expected,
+ };
+ }),
+ headers,
+ });
+ });
+ },
+ );
+
+ describe.each`
sortArgument | expectedOrderByParameter | expectedSortParameter
${'name_asc'} | ${'name'} | ${'asc'}
${'name_desc'} | ${'name'} | ${'desc'}
@@ -75,10 +103,13 @@ describe('ArchivedProjectsService', () => {
'when the sort argument is $sortArgument',
({ sortArgument, expectedSortParameter, expectedOrderByParameter }) => {
it(`calls the API with sort parameter set to ${expectedSortParameter} and order_by parameter set to ${expectedOrderByParameter}`, () => {
+ Api.groupProjects.mockResolvedValueOnce({ data: projects, headers });
+
service.getGroups(undefined, page, query, sortArgument);
expect(Api.groupProjects).toHaveBeenCalledWith(groupId, query, {
archived: true,
+ include_subgroups: true,
page,
order_by: expectedOrderByParameter,
sort: expectedSortParameter,
diff --git a/spec/frontend/header_search/components/app_spec.js b/spec/frontend/header_search/components/app_spec.js
index ad56b2dde24..0d0b6628bdf 100644
--- a/spec/frontend/header_search/components/app_spec.js
+++ b/spec/frontend/header_search/components/app_spec.js
@@ -1,5 +1,6 @@
import { GlSearchBoxByType, GlToken, GlIcon } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking } from 'helpers/tracking_helper';
diff --git a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
index e77a9231b7a..868edb3e651 100644
--- a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
+++ b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
@@ -1,6 +1,7 @@
import { GlDropdownItem, GlLoadingIcon, GlAvatar, GlAlert, GlDropdownDivider } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import HeaderSearchAutocompleteItems from '~/header_search/components/header_search_autocomplete_items.vue';
import { LARGE_AVATAR_PX, SMALL_AVATAR_PX } from '~/header_search/constants';
diff --git a/spec/frontend/header_search/components/header_search_default_items_spec.js b/spec/frontend/header_search/components/header_search_default_items_spec.js
index 3768862d83e..acaad251bec 100644
--- a/spec/frontend/header_search/components/header_search_default_items_spec.js
+++ b/spec/frontend/header_search/components/header_search_default_items_spec.js
@@ -1,6 +1,7 @@
import { GlDropdownItem, GlDropdownSectionHeader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import HeaderSearchDefaultItems from '~/header_search/components/header_search_default_items.vue';
import { MOCK_SEARCH_CONTEXT, MOCK_DEFAULT_SEARCH_OPTIONS } from '../mock_data';
diff --git a/spec/frontend/header_search/components/header_search_scoped_items_spec.js b/spec/frontend/header_search/components/header_search_scoped_items_spec.js
index 51d67198f04..78ea148caac 100644
--- a/spec/frontend/header_search/components/header_search_scoped_items_spec.js
+++ b/spec/frontend/header_search/components/header_search_scoped_items_spec.js
@@ -1,6 +1,7 @@
import { GlDropdownItem, GlToken, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { trimText } from 'helpers/text_helper';
import HeaderSearchScopedItems from '~/header_search/components/header_search_scoped_items.vue';
diff --git a/spec/frontend/ide/components/branches/search_list_spec.js b/spec/frontend/ide/components/branches/search_list_spec.js
index eeab26f7559..a2c3044a291 100644
--- a/spec/frontend/ide/components/branches/search_list_spec.js
+++ b/spec/frontend/ide/components/branches/search_list_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Item from '~/ide/components/branches/item.vue';
import List from '~/ide/components/branches/search_list.vue';
diff --git a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
index ce43e648b43..2313c177bb6 100644
--- a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
@@ -1,6 +1,7 @@
import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import EditorHeader from '~/ide/components/commit_sidebar/editor_header.vue';
import { stubComponent } from 'helpers/stub_component';
diff --git a/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js b/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
index ce26519abc9..c5e540c3ea7 100644
--- a/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { GlFormCheckbox } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/ide/components/error_message_spec.js b/spec/frontend/ide/components/error_message_spec.js
index 5f6579654bc..0ffcce8e834 100644
--- a/spec/frontend/ide/components/error_message_spec.js
+++ b/spec/frontend/ide/components/error_message_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import ErrorMessage from '~/ide/components/error_message.vue';
diff --git a/spec/frontend/ide/components/file_row_extra_spec.js b/spec/frontend/ide/components/file_row_extra_spec.js
index f5a6e7222f9..2ce4ea8a808 100644
--- a/spec/frontend/ide/components/file_row_extra_spec.js
+++ b/spec/frontend/ide/components/file_row_extra_spec.js
@@ -1,3 +1,4 @@
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mount } from '@vue/test-utils';
import FileRowExtra from '~/ide/components/file_row_extra.vue';
diff --git a/spec/frontend/ide/components/file_templates/dropdown_spec.js b/spec/frontend/ide/components/file_templates/dropdown_spec.js
index 72fdd05eb2c..9ccdaf8b916 100644
--- a/spec/frontend/ide/components/file_templates/dropdown_spec.js
+++ b/spec/frontend/ide/components/file_templates/dropdown_spec.js
@@ -2,6 +2,7 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import $ from 'jquery';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Dropdown from '~/ide/components/file_templates/dropdown.vue';
diff --git a/spec/frontend/ide/components/ide_file_row_spec.js b/spec/frontend/ide/components/ide_file_row_spec.js
index 331877ff112..c9d4c23b475 100644
--- a/spec/frontend/ide/components/ide_file_row_spec.js
+++ b/spec/frontend/ide/components/ide_file_row_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import FileRowExtra from '~/ide/components/file_row_extra.vue';
import IdeFileRow from '~/ide/components/ide_file_row.vue';
diff --git a/spec/frontend/ide/components/ide_review_spec.js b/spec/frontend/ide/components/ide_review_spec.js
index 7ae8cfac935..01d5ef763ad 100644
--- a/spec/frontend/ide/components/ide_review_spec.js
+++ b/spec/frontend/ide/components/ide_review_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { keepAlive } from 'helpers/keep_alive_component_helper';
import { trimText } from 'helpers/text_helper';
@@ -14,6 +15,7 @@ Vue.use(Vuex);
describe('IDE review mode', () => {
let wrapper;
let store;
+ let dispatch;
beforeEach(() => {
store = createStore();
@@ -25,27 +27,28 @@ describe('IDE review mode', () => {
loading: false,
});
+ dispatch = jest.spyOn(store, 'dispatch');
+
wrapper = mount(keepAlive(IdeReview), {
store,
});
});
+ const findEditorModeDropdown = () => wrapper.findComponent(EditorModeDropdown);
+
it('renders list of files', () => {
expect(wrapper.text()).toContain('fileName');
});
describe('activated', () => {
- let inititializeSpy;
-
beforeEach(async () => {
- inititializeSpy = jest.spyOn(wrapper.findComponent(IdeReview).vm, 'initialize');
store.state.viewer = 'editor';
await wrapper.vm.reactivate();
});
it('re initializes the component', () => {
- expect(inititializeSpy).toHaveBeenCalled();
+ expect(dispatch).toHaveBeenCalledWith('updateViewer', 'diff');
});
it('updates viewer to "diff" by default', () => {
@@ -81,7 +84,7 @@ describe('IDE review mode', () => {
});
it('renders edit dropdown', () => {
- expect(wrapper.findComponent(EditorModeDropdown).exists()).toBe(true);
+ expect(findEditorModeDropdown().exists()).toBe(true);
});
it('renders merge request link & IID', async () => {
diff --git a/spec/frontend/ide/components/ide_side_bar_spec.js b/spec/frontend/ide/components/ide_side_bar_spec.js
index c258c5312d8..fe92bd2c51e 100644
--- a/spec/frontend/ide/components/ide_side_bar_spec.js
+++ b/spec/frontend/ide/components/ide_side_bar_spec.js
@@ -1,6 +1,7 @@
import { GlSkeletonLoader } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import IdeReview from '~/ide/components/ide_review.vue';
diff --git a/spec/frontend/ide/components/ide_spec.js b/spec/frontend/ide/components/ide_spec.js
index eb8f2a5e4ac..26bd6d468e7 100644
--- a/spec/frontend/ide/components/ide_spec.js
+++ b/spec/frontend/ide/components/ide_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { stubPerformanceWebAPI } from 'helpers/performance';
diff --git a/spec/frontend/ide/components/ide_status_list_spec.js b/spec/frontend/ide/components/ide_status_list_spec.js
index 344a1fbc4f6..e353a4de054 100644
--- a/spec/frontend/ide/components/ide_status_list_spec.js
+++ b/spec/frontend/ide/components/ide_status_list_spec.js
@@ -1,6 +1,7 @@
import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import IdeStatusList from '~/ide/components/ide_status_list.vue';
import TerminalSyncStatusSafe from '~/ide/components/terminal_sync/terminal_sync_status_safe.vue';
diff --git a/spec/frontend/ide/components/ide_tree_spec.js b/spec/frontend/ide/components/ide_tree_spec.js
index bcfa6809eca..1d6158d1e96 100644
--- a/spec/frontend/ide/components/ide_tree_spec.js
+++ b/spec/frontend/ide/components/ide_tree_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { viewerTypes } from '~/ide/constants';
import IdeTree from '~/ide/components/ide_tree.vue';
diff --git a/spec/frontend/ide/components/jobs/list_spec.js b/spec/frontend/ide/components/jobs/list_spec.js
index 0ece42bce51..c466ef8e96f 100644
--- a/spec/frontend/ide/components/jobs/list_spec.js
+++ b/spec/frontend/ide/components/jobs/list_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import StageList from '~/ide/components/jobs/list.vue';
import Stage from '~/ide/components/jobs/stage.vue';
diff --git a/spec/frontend/ide/components/merge_requests/item_spec.js b/spec/frontend/ide/components/merge_requests/item_spec.js
index 2fbb6919b8b..4bfcdebb8f6 100644
--- a/spec/frontend/ide/components/merge_requests/item_spec.js
+++ b/spec/frontend/ide/components/merge_requests/item_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Item from '~/ide/components/merge_requests/item.vue';
import { createRouter } from '~/ide/ide_router';
diff --git a/spec/frontend/ide/components/merge_requests/list_spec.js b/spec/frontend/ide/components/merge_requests/list_spec.js
index 3b0e8c632fb..b123ea13966 100644
--- a/spec/frontend/ide/components/merge_requests/list_spec.js
+++ b/spec/frontend/ide/components/merge_requests/list_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Item from '~/ide/components/merge_requests/item.vue';
import List from '~/ide/components/merge_requests/list.vue';
diff --git a/spec/frontend/ide/components/new_dropdown/index_spec.js b/spec/frontend/ide/components/new_dropdown/index_spec.js
index a2371abe955..a26ebec6824 100644
--- a/spec/frontend/ide/components/new_dropdown/index_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/index_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import NewDropdown from '~/ide/components/new_dropdown/index.vue';
diff --git a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
index 42eb5b3fc7a..56e62829971 100644
--- a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
+++ b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import IdeSidebarNav from '~/ide/components/ide_sidebar_nav.vue';
import CollapsibleSidebar from '~/ide/components/panes/collapsible_sidebar.vue';
diff --git a/spec/frontend/ide/components/panes/right_spec.js b/spec/frontend/ide/components/panes/right_spec.js
index 832983edf21..fc75eadbfe0 100644
--- a/spec/frontend/ide/components/panes/right_spec.js
+++ b/spec/frontend/ide/components/panes/right_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import CollapsibleSidebar from '~/ide/components/panes/collapsible_sidebar.vue';
import RightPane from '~/ide/components/panes/right.vue';
diff --git a/spec/frontend/ide/components/pipelines/list_spec.js b/spec/frontend/ide/components/pipelines/list_spec.js
index e913fa84d56..9c11ae9334b 100644
--- a/spec/frontend/ide/components/pipelines/list_spec.js
+++ b/spec/frontend/ide/components/pipelines/list_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon, GlTab } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { pipelines } from 'jest/ide/mock_data';
import JobsList from '~/ide/components/jobs/list.vue';
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index aa99b1cacef..33fa5bc799f 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -2,6 +2,7 @@ import { GlTab } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { editor as monacoEditor, Range } from 'monaco-editor';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
diff --git a/spec/frontend/ide/components/repo_tab_spec.js b/spec/frontend/ide/components/repo_tab_spec.js
index d4f29b16a88..08e8062a45a 100644
--- a/spec/frontend/ide/components/repo_tab_spec.js
+++ b/spec/frontend/ide/components/repo_tab_spec.js
@@ -1,5 +1,6 @@
import { GlTab } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { stubComponent } from 'helpers/stub_component';
import RepoTab from '~/ide/components/repo_tab.vue';
diff --git a/spec/frontend/ide/components/repo_tabs_spec.js b/spec/frontend/ide/components/repo_tabs_spec.js
index 06ad162d398..9ced4babf1d 100644
--- a/spec/frontend/ide/components/repo_tabs_spec.js
+++ b/spec/frontend/ide/components/repo_tabs_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import RepoTabs from '~/ide/components/repo_tabs.vue';
import { createStore } from '~/ide/stores';
diff --git a/spec/frontend/ide/components/resizable_panel_spec.js b/spec/frontend/ide/components/resizable_panel_spec.js
index 240e675a38e..0529ea1918d 100644
--- a/spec/frontend/ide/components/resizable_panel_spec.js
+++ b/spec/frontend/ide/components/resizable_panel_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import ResizablePanel from '~/ide/components/resizable_panel.vue';
import { SIDE_LEFT, SIDE_RIGHT } from '~/ide/constants';
diff --git a/spec/frontend/ide/components/terminal/session_spec.js b/spec/frontend/ide/components/terminal/session_spec.js
index 7e4a56b0610..c7d9663834d 100644
--- a/spec/frontend/ide/components/terminal/session_spec.js
+++ b/spec/frontend/ide/components/terminal/session_spec.js
@@ -1,6 +1,7 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import TerminalSession from '~/ide/components/terminal/session.vue';
import Terminal from '~/ide/components/terminal/terminal.vue';
diff --git a/spec/frontend/ide/components/terminal/terminal_spec.js b/spec/frontend/ide/components/terminal/terminal_spec.js
index 0500c116d23..cc11c1f2f32 100644
--- a/spec/frontend/ide/components/terminal/terminal_spec.js
+++ b/spec/frontend/ide/components/terminal/terminal_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Terminal from '~/ide/components/terminal/terminal.vue';
import TerminalControls from '~/ide/components/terminal/terminal_controls.vue';
diff --git a/spec/frontend/ide/components/terminal/view_spec.js b/spec/frontend/ide/components/terminal/view_spec.js
index b8ffaa89047..2db3a163e3d 100644
--- a/spec/frontend/ide/components/terminal/view_spec.js
+++ b/spec/frontend/ide/components/terminal/view_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
diff --git a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js
index e420e28c7b6..c5ec64ba6b2 100644
--- a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js
+++ b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import TerminalSyncStatus from '~/ide/components/terminal_sync/terminal_sync_status.vue';
import TerminalSyncStatusSafe from '~/ide/components/terminal_sync/terminal_sync_status_safe.vue';
diff --git a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
index 4541c3b5ec8..a39ceefdd3e 100644
--- a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
+++ b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import TerminalSyncStatus from '~/ide/components/terminal_sync/terminal_sync_status.vue';
import {
diff --git a/spec/frontend/ide/stores/modules/editor/setup_spec.js b/spec/frontend/ide/stores/modules/editor/setup_spec.js
index 659bfb2742f..df0dfb6f260 100644
--- a/spec/frontend/ide/stores/modules/editor/setup_spec.js
+++ b/spec/frontend/ide/stores/modules/editor/setup_spec.js
@@ -1,4 +1,5 @@
import { cloneDeep } from 'lodash';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import eventHub from '~/ide/eventhub';
import { createStoreOptions } from '~/ide/stores';
diff --git a/spec/frontend/ide/stores/plugins/terminal_spec.js b/spec/frontend/ide/stores/plugins/terminal_spec.js
index 193300540fd..b2d5d85e005 100644
--- a/spec/frontend/ide/stores/plugins/terminal_spec.js
+++ b/spec/frontend/ide/stores/plugins/terminal_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { TEST_HOST } from 'helpers/test_constants';
import terminalModule from '~/ide/stores/modules/terminal';
diff --git a/spec/frontend/ide/utils_spec.js b/spec/frontend/ide/utils_spec.js
index 4efc0ac6028..dd3c6862ea4 100644
--- a/spec/frontend/ide/utils_spec.js
+++ b/spec/frontend/ide/utils_spec.js
@@ -8,7 +8,6 @@ import {
trimTrailingWhitespace,
getPathParents,
getPathParent,
- readFileAsDataURL,
addNumericSuffix,
} from '~/ide/utils';
@@ -267,16 +266,6 @@ describe('WebIDE utils', () => {
});
});
- describe('readFileAsDataURL', () => {
- it('reads a file and returns its output as a data url', () => {
- const file = new File(['foo'], 'foo.png', { type: 'image/png' });
-
- return readFileAsDataURL(file).then((contents) => {
- expect(contents).toBe('data:image/png;base64,Zm9v');
- });
- });
- });
-
/*
* hello-2425 -> hello-2425
* hello.md -> hello-1.md
diff --git a/spec/frontend/import_entities/components/import_status_spec.js b/spec/frontend/import_entities/components/import_status_spec.js
index 103a3e4ddd1..37c58b9cdf8 100644
--- a/spec/frontend/import_entities/components/import_status_spec.js
+++ b/spec/frontend/import_entities/components/import_status_spec.js
@@ -168,18 +168,14 @@ describe('Import entities status component', () => {
};
describe.each`
- detailsPath | importDetailsPage | partialImport | expectLink
- ${undefined} | ${false} | ${false} | ${false}
- ${undefined} | ${false} | ${true} | ${false}
- ${undefined} | ${true} | ${false} | ${false}
- ${undefined} | ${true} | ${true} | ${false}
- ${mockDetailsPath} | ${false} | ${false} | ${false}
- ${mockDetailsPath} | ${false} | ${true} | ${false}
- ${mockDetailsPath} | ${true} | ${false} | ${false}
- ${mockDetailsPath} | ${true} | ${true} | ${true}
+ detailsPath | partialImport | expectLink
+ ${undefined} | ${false} | ${false}
+ ${undefined} | ${true} | ${false}
+ ${mockDetailsPath} | ${false} | ${false}
+ ${mockDetailsPath} | ${true} | ${true}
`(
- 'when detailsPath is $detailsPath, feature flag importDetailsPage is $importDetailsPage, partial import is $partialImport',
- ({ detailsPath, importDetailsPage, partialImport, expectLink }) => {
+ 'when detailsPath is $detailsPath, partial import is $partialImport',
+ ({ detailsPath, partialImport, expectLink }) => {
beforeEach(() => {
createComponent(
{
@@ -190,7 +186,6 @@ describe('Import entities status component', () => {
{
provide: {
detailsPath,
- glFeatures: { importDetailsPage },
},
},
);
diff --git a/spec/frontend/import_entities/components/import_target_dropdown_spec.js b/spec/frontend/import_entities/components/import_target_dropdown_spec.js
new file mode 100644
index 00000000000..c12baed2374
--- /dev/null
+++ b/spec/frontend/import_entities/components/import_target_dropdown_spec.js
@@ -0,0 +1,99 @@
+import { GlCollapsibleListbox } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import ImportTargetDropdown from '~/import_entities/components/import_target_dropdown.vue';
+import { DEBOUNCE_DELAY } from '~/vue_shared/components/filtered_search_bar/constants';
+import searchNamespacesWhereUserCanImportProjectsQuery from '~/import_entities/import_projects/graphql/queries/search_namespaces_where_user_can_import_projects.query.graphql';
+
+import { mockAvailableNamespaces, mockNamespacesResponse, mockUserNamespace } from '../mock_data';
+
+Vue.use(VueApollo);
+
+describe('ImportTargetDropdown', () => {
+ let wrapper;
+
+ const defaultProps = {
+ selected: mockUserNamespace,
+ userNamespace: mockUserNamespace,
+ };
+
+ const createComponent = ({ props = {} } = {}) => {
+ const apolloProvider = createMockApollo([
+ [
+ searchNamespacesWhereUserCanImportProjectsQuery,
+ jest.fn().mockResolvedValue(mockNamespacesResponse),
+ ],
+ ]);
+
+ wrapper = shallowMount(ImportTargetDropdown, {
+ apolloProvider,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findListboxUsersItems = () => findListbox().props('items')[0].options;
+ const findListboxGroupsItems = () => findListbox().props('items')[1].options;
+
+ const waitForQuery = async () => {
+ jest.advanceTimersByTime(DEBOUNCE_DELAY);
+ await waitForPromises();
+ };
+
+ it('renders listbox', () => {
+ createComponent();
+
+ expect(findListbox().exists()).toBe(true);
+ });
+
+ it('truncates "toggle-text" when "selected" is too long', () => {
+ const mockSelected = 'a-group-path-that-is-longer-than-24-characters';
+
+ createComponent({
+ props: { selected: mockSelected },
+ });
+
+ expect(findListbox().props('toggleText')).toBe('a-group-path-that-is-lo…');
+ });
+
+ it('passes userNamespace as "Users" group item', () => {
+ createComponent();
+
+ expect(findListboxUsersItems()).toEqual([
+ { text: mockUserNamespace, value: mockUserNamespace },
+ ]);
+ });
+
+ it('passes namespaces from GraphQL as "Groups" group item', async () => {
+ createComponent();
+
+ await waitForQuery();
+
+ expect(findListboxGroupsItems()).toEqual(
+ mockAvailableNamespaces.map((namespace) => ({
+ text: namespace.fullPath,
+ value: namespace.fullPath,
+ })),
+ );
+ });
+
+ it('filters namespaces based on user input', async () => {
+ createComponent();
+
+ findListbox().vm.$emit('search', 'match');
+
+ await waitForQuery();
+
+ expect(findListboxGroupsItems()).toEqual([
+ { text: 'match1', value: 'match1' },
+ { text: 'match2', value: 'match2' },
+ ]);
+ });
+});
diff --git a/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js b/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js
index 4c13ec555c2..87bee6afd62 100644
--- a/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js
@@ -1,4 +1,10 @@
-import { GlDropdown, GlIcon, GlDropdownItem } from '@gitlab/ui';
+import {
+ GlDisclosureDropdown,
+ GlDisclosureDropdownItem,
+ GlButtonGroup,
+ GlButton,
+ GlIcon,
+} from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ImportActionsCell from '~/import_entities/import_groups/components/import_actions_cell.vue';
@@ -13,6 +19,11 @@ describe('import actions cell', () => {
isInvalid: false,
...props,
},
+ stubs: {
+ GlButtonGroup,
+ GlDisclosureDropdown,
+ GlDisclosureDropdownItem,
+ },
});
};
@@ -22,9 +33,9 @@ describe('import actions cell', () => {
});
it('renders import dropdown', () => {
- const dropdown = wrapper.findComponent(GlDropdown);
- expect(dropdown.exists()).toBe(true);
- expect(dropdown.props('text')).toBe('Import with projects');
+ const button = wrapper.findComponent(GlButton);
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Import with projects');
});
it('does not render icon with a hint', () => {
@@ -38,9 +49,9 @@ describe('import actions cell', () => {
});
it('renders re-import dropdown', () => {
- const dropdown = wrapper.findComponent(GlDropdown);
- expect(dropdown.exists()).toBe(true);
- expect(dropdown.props('text')).toBe('Re-import with projects');
+ const button = wrapper.findComponent(GlButton);
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Re-import with projects');
});
it('renders icon with a hint', () => {
@@ -55,22 +66,22 @@ describe('import actions cell', () => {
it('does not render import dropdown when group is not available for import', () => {
createComponent({ isAvailableForImport: false });
- const dropdown = wrapper.findComponent(GlDropdown);
+ const dropdown = wrapper.findComponent(GlDisclosureDropdown);
expect(dropdown.exists()).toBe(false);
});
it('renders import dropdown as disabled when group is invalid', () => {
createComponent({ isInvalid: true, isAvailableForImport: true });
- const dropdown = wrapper.findComponent(GlDropdown);
+ const dropdown = wrapper.findComponent(GlDisclosureDropdown);
expect(dropdown.props().disabled).toBe(true);
});
it('emits import-group event when import button is clicked', () => {
createComponent({ isAvailableForImport: true });
- const dropdown = wrapper.findComponent(GlDropdown);
- dropdown.vm.$emit('click');
+ const button = wrapper.findComponent(GlButton);
+ button.vm.$emit('click');
expect(wrapper.emitted('import-group')).toHaveLength(1);
});
@@ -87,23 +98,24 @@ describe('import actions cell', () => {
});
it('render import dropdown', () => {
- const dropdown = wrapper.findComponent(GlDropdown);
- expect(dropdown.props('text')).toBe(`${expectedAction} with projects`);
- expect(dropdown.findComponent(GlDropdownItem).text()).toBe(
+ const button = wrapper.findComponent(GlButton);
+ const dropdown = wrapper.findComponent(GlDisclosureDropdown);
+ expect(button.element).toHaveText(`${expectedAction} with projects`);
+ expect(dropdown.findComponent(GlDisclosureDropdownItem).text()).toBe(
`${expectedAction} without projects`,
);
});
it('request migrate projects by default', () => {
- const dropdown = wrapper.findComponent(GlDropdown);
- dropdown.vm.$emit('click');
+ const button = wrapper.findComponent(GlButton);
+ button.vm.$emit('click');
expect(wrapper.emitted('import-group')[0]).toStrictEqual([{ migrateProjects: true }]);
});
it('request not to migrate projects via dropdown option', () => {
- const dropdown = wrapper.findComponent(GlDropdown);
- dropdown.findComponent(GlDropdownItem).vm.$emit('click');
+ const dropdown = wrapper.findComponent(GlDisclosureDropdown);
+ dropdown.findComponent(GlDisclosureDropdownItem).vm.$emit('action');
expect(wrapper.emitted('import-group')[0]).toStrictEqual([{ migrateProjects: false }]);
});
diff --git a/spec/frontend/import_entities/import_projects/components/github_status_table_spec.js b/spec/frontend/import_entities/import_projects/components/github_status_table_spec.js
index 7eebff7364c..0b9e7a8cb73 100644
--- a/spec/frontend/import_entities/import_projects/components/github_status_table_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/github_status_table_spec.js
@@ -1,6 +1,7 @@
import { GlTabs, GlSearchBoxByClick } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { stubComponent } from 'helpers/stub_component';
diff --git a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
index 351bbe5ea28..92d064846bd 100644
--- a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon, GlButton, GlIntersectionObserver, GlSearchBoxByClick } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { STATUSES } from '~/import_entities/constants';
import ImportProjectsTable from '~/import_entities/import_projects/components/import_projects_table.vue';
diff --git a/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js b/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
index 57e232a4c46..b4890c09fe2 100644
--- a/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
@@ -1,9 +1,11 @@
import { GlBadge, GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
-import { STATUSES } from '~/import_entities//constants';
-import ImportGroupDropdown from '~/import_entities/components/group_dropdown.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import { STATUSES } from '~/import_entities/constants';
+import ImportTargetDropdown from '~/import_entities/components/import_target_dropdown.vue';
import ImportStatus from '~/import_entities/components/import_status.vue';
import ProviderRepoTableRow from '~/import_entities/import_projects/components/provider_repo_table_row.vue';
@@ -39,8 +41,9 @@ describe('ProviderRepoTableRow', () => {
const findImportButton = () => findButton('Import');
const findReimportButton = () => findButton('Re-import');
- const findGroupDropdown = () => wrapper.findComponent(ImportGroupDropdown);
+ const findImportTargetDropdown = () => wrapper.findComponent(ImportTargetDropdown);
const findImportStatus = () => wrapper.findComponent(ImportStatus);
+ const findProviderLink = () => wrapper.findByTestId('providerLink');
const findCancelButton = () => {
const buttons = wrapper
@@ -55,7 +58,7 @@ describe('ProviderRepoTableRow', () => {
const store = initStore();
- wrapper = shallowMount(ProviderRepoTableRow, {
+ wrapper = shallowMountExtended(ProviderRepoTableRow, {
store,
propsData: { userNamespace, optionalStages: {}, ...props },
});
@@ -75,7 +78,7 @@ describe('ProviderRepoTableRow', () => {
});
it('renders project information', () => {
- const providerLink = wrapper.find('[data-testid=providerLink]');
+ const providerLink = findProviderLink();
expect(providerLink.attributes().href).toMatch(repo.importSource.providerLink);
expect(providerLink.text()).toMatch(repo.importSource.fullName);
@@ -86,7 +89,7 @@ describe('ProviderRepoTableRow', () => {
});
it('renders a group namespace select', () => {
- expect(wrapper.findComponent(ImportGroupDropdown).exists()).toBe(true);
+ expect(findImportTargetDropdown().exists()).toBe(true);
});
it('renders import button', () => {
@@ -106,7 +109,11 @@ describe('ProviderRepoTableRow', () => {
it('includes optionalStages to import', async () => {
const OPTIONAL_STAGES = { stage1: true, stage2: false };
- await wrapper.setProps({ optionalStages: OPTIONAL_STAGES });
+
+ mountComponent({
+ repo,
+ optionalStages: OPTIONAL_STAGES,
+ });
findImportButton().vm.$emit('click');
@@ -192,7 +199,7 @@ describe('ProviderRepoTableRow', () => {
});
it('renders project information', () => {
- const providerLink = wrapper.find('[data-testid=providerLink]');
+ const providerLink = findProviderLink();
expect(providerLink.attributes().href).toMatch(repo.importSource.providerLink);
expect(providerLink.text()).toMatch(repo.importSource.fullName);
@@ -203,7 +210,7 @@ describe('ProviderRepoTableRow', () => {
});
it('does not render a namespace select', () => {
- expect(findGroupDropdown().exists()).toBe(false);
+ expect(findImportTargetDropdown().exists()).toBe(false);
});
it('does not render import button', () => {
@@ -219,7 +226,7 @@ describe('ProviderRepoTableRow', () => {
await nextTick();
- expect(findGroupDropdown().exists()).toBe(true);
+ expect(findImportTargetDropdown().exists()).toBe(true);
});
it('imports repo when clicking re-import button', async () => {
@@ -282,7 +289,7 @@ describe('ProviderRepoTableRow', () => {
});
it('renders project information', () => {
- const providerLink = wrapper.find('[data-testid=providerLink]');
+ const providerLink = findProviderLink();
expect(providerLink.attributes().href).toMatch(repo.importSource.providerLink);
expect(providerLink.text()).toMatch(repo.importSource.fullName);
diff --git a/spec/frontend/import_entities/mock_data.js b/spec/frontend/import_entities/mock_data.js
new file mode 100644
index 00000000000..9208f99651f
--- /dev/null
+++ b/spec/frontend/import_entities/mock_data.js
@@ -0,0 +1,34 @@
+const mockGroupFactory = (fullPath) => ({
+ id: `gid://gitlab/Group/${fullPath}`,
+ fullPath,
+ name: fullPath,
+ visibility: 'public',
+ webUrl: `http://gdk.test:3000/groups/${fullPath}`,
+ __typename: 'Group',
+});
+
+export const mockAvailableNamespaces = [
+ mockGroupFactory('match1'),
+ mockGroupFactory('unrelated'),
+ mockGroupFactory('match2'),
+];
+
+export const mockNamespacesResponse = {
+ data: {
+ currentUser: {
+ id: 'gid://gitlab/User/1',
+ groups: {
+ nodes: mockAvailableNamespaces,
+ __typename: 'GroupConnection',
+ },
+ namespace: {
+ id: 'gid://gitlab/Namespaces::UserNamespace/1',
+ fullPath: 'root',
+ __typename: 'Namespace',
+ },
+ __typename: 'UserCore',
+ },
+ },
+};
+
+export const mockUserNamespace = 'user1';
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
index fae93196d2c..b5f8f0023f9 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
@@ -38,7 +38,16 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
predefinedoptions="[object Object]"
readonly=""
value="pagerduty.webhook.com"
- />
+ >
+ <clipboard-button-stub
+ category="secondary"
+ size="medium"
+ text="pagerduty.webhook.com"
+ title="Copy"
+ tooltipplacement="top"
+ variant="default"
+ />
+ </gl-form-input-group-stub>
<gl-button-stub
buttontextclasses=""
diff --git a/spec/frontend/integrations/edit/components/sections/google_play_spec.js b/spec/frontend/integrations/edit/components/sections/google_play_spec.js
index c0d6d17f639..a0fb41dad26 100644
--- a/spec/frontend/integrations/edit/components/sections/google_play_spec.js
+++ b/spec/frontend/integrations/edit/components/sections/google_play_spec.js
@@ -34,7 +34,7 @@ describe('IntegrationSectionGooglePlay', () => {
expect(field.exists()).toBe(true);
expect(field.props()).toMatchObject({
- label: 'Service account key (.json)',
+ label: 'Service account key (.JSON)',
helpText: '',
});
});
diff --git a/spec/frontend/integrations/index/components/integrations_list_spec.js b/spec/frontend/integrations/index/components/integrations_list_spec.js
index 155a3d1c6be..2d2a9b845a3 100644
--- a/spec/frontend/integrations/index/components/integrations_list_spec.js
+++ b/spec/frontend/integrations/index/components/integrations_list_spec.js
@@ -18,5 +18,6 @@ describe('IntegrationsList', () => {
expect(findActiveIntegrationsTable().props('integrations')).toEqual(mockActiveIntegrations);
expect(findInactiveIntegrationsTable().props('integrations')).toEqual(mockInactiveIntegrations);
+ expect(findInactiveIntegrationsTable().props('inactive')).toBe(true);
});
});
diff --git a/spec/frontend/integrations/index/components/integrations_table_spec.js b/spec/frontend/integrations/index/components/integrations_table_spec.js
index 54e5b45a5d8..330fa2ac860 100644
--- a/spec/frontend/integrations/index/components/integrations_table_spec.js
+++ b/spec/frontend/integrations/index/components/integrations_table_spec.js
@@ -62,4 +62,24 @@ describe('IntegrationsTable', () => {
});
},
);
+
+ describe('when no integrations are received', () => {
+ beforeEach(() => {
+ createComponent({ integrations: [] });
+ });
+
+ it('does not display fields in the table', () => {
+ expect(findTable().findAll('th')).toHaveLength(0);
+ });
+ });
+
+ describe.each([true, false])('when integrations inactive property is %p', (inactive) => {
+ beforeEach(() => {
+ createComponent({ integrations: [mockInactiveIntegrations], inactive });
+ });
+
+ it(`${inactive ? 'does not render' : 'render'} updated_at field`, () => {
+ expect(findTable().find('[aria-label="Updated At"]').exists()).toBe(!inactive);
+ });
+ });
});
diff --git a/spec/frontend/invite_members/components/group_select_spec.js b/spec/frontend/invite_members/components/group_select_spec.js
index bd90832f497..60501bfbd6a 100644
--- a/spec/frontend/invite_members/components/group_select_spec.js
+++ b/spec/frontend/invite_members/components/group_select_spec.js
@@ -1,6 +1,7 @@
import { nextTick } from 'vue';
import { GlAvatarLabeled, GlCollapsibleListbox } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
+import axios from 'axios';
import waitForPromises from 'helpers/wait_for_promises';
import { getGroups } from '~/api/groups_api';
import GroupSelect from '~/invite_members/components/group_select.vue';
@@ -49,16 +50,23 @@ describe('GroupSelect', () => {
await waitForPromises();
getGroups.mockClear();
getGroups.mockReturnValueOnce(new Promise(() => {}));
- findListbox().vm.$emit('search', group1.name);
+ findListbox().vm.$emit('search', group1.full_name);
await nextTick();
});
it('calls the API', () => {
- expect(getGroups).toHaveBeenCalledWith(group1.name, {
- exclude_internal: true,
- active: true,
- order_by: 'similarity',
- });
+ expect(getGroups).toHaveBeenCalledWith(
+ group1.full_name,
+ {
+ exclude_internal: true,
+ active: true,
+ order_by: 'similarity',
+ },
+ undefined,
+ {
+ signal: expect.any(AbortSignal),
+ },
+ );
});
it('displays loading icon while waiting for API call to resolve', () => {
@@ -182,7 +190,33 @@ describe('GroupSelect', () => {
expect(wrapper.emitted('error')).toEqual([[GroupSelect.i18n.errorMessage]]);
});
+
+ it('does not emit `error` event if error is from request cancellation', async () => {
+ createComponent();
+ await waitForPromises();
+
+ getGroups.mockRejectedValueOnce(new axios.Cancel());
+
+ findListbox().vm.$emit('bottom-reached');
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual(undefined);
+ });
});
});
});
+
+ describe('when multiple API calls are in-flight', () => {
+ it('aborts the first API call and resolves second API call', async () => {
+ const abortSpy = jest.spyOn(AbortController.prototype, 'abort');
+
+ createComponent();
+ await waitForPromises();
+
+ findListbox().vm.$emit('search', group1.full_name);
+
+ expect(abortSpy).toHaveBeenCalledTimes(1);
+ expect(wrapper.emitted('error')).toEqual(undefined);
+ });
+ });
});
diff --git a/spec/frontend/invite_members/components/import_project_members_modal_spec.js b/spec/frontend/invite_members/components/import_project_members_modal_spec.js
index 224ebe18e2a..4fac21f7a57 100644
--- a/spec/frontend/invite_members/components/import_project_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/import_project_members_modal_spec.js
@@ -1,4 +1,4 @@
-import { GlFormGroup, GlSprintf, GlModal } from '@gitlab/ui';
+import { GlFormGroup, GlSprintf, GlModal, GlCollapse, GlIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { createWrapper } from '@vue/test-utils';
@@ -12,6 +12,7 @@ import eventHub from '~/invite_members/event_hub';
import ImportProjectMembersModal from '~/invite_members/components/import_project_members_modal.vue';
import ProjectSelect from '~/invite_members/components/project_select.vue';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_CREATED } from '~/lib/utils/http_status';
import {
displaySuccessfulInvitationAlert,
@@ -19,9 +20,14 @@ import {
} from '~/invite_members/utils/trigger_successful_invite_alert';
import {
+ EXPANDED_ERRORS,
IMPORT_PROJECT_MEMBERS_MODAL_TRACKING_CATEGORY,
IMPORT_PROJECT_MEMBERS_MODAL_TRACKING_LABEL,
} from '~/invite_members/constants';
+import {
+ IMPORT_PROJECT_MEMBERS_PATH,
+ importProjectMembersApiResponse,
+} from '../mock_data/api_responses';
jest.mock('~/invite_members/utils/trigger_successful_invite_alert');
@@ -92,6 +98,14 @@ describe('ImportProjectMembersModal', () => {
const formGroupInvalidFeedback = () => findFormGroup().props('invalidFeedback');
const formGroupErrorState = () => findFormGroup().props('state');
const findProjectSelect = () => wrapper.findComponent(ProjectSelect);
+ const findMemberErrorAlert = () => wrapper.findByTestId('alert-member-error');
+ const findMoreInviteErrorsButton = () => wrapper.findByTestId('accordion-button');
+ const findAccordion = () => wrapper.findComponent(GlCollapse);
+ const findErrorsIcon = () => wrapper.findComponent(GlIcon);
+ const findMemberErrorMessage = (element) =>
+ `@${Object.keys(importProjectMembersApiResponse.EXPANDED_IMPORT_ERRORS.message)[element]}: ${
+ Object.values(importProjectMembersApiResponse.EXPANDED_IMPORT_ERRORS.message)[element]
+ }`;
describe('rendering the modal', () => {
beforeEach(() => {
@@ -241,7 +255,7 @@ describe('ImportProjectMembersModal', () => {
});
});
- describe('when the import fails', () => {
+ describe('when the import fails due to generic api error', () => {
beforeEach(async () => {
createComponent();
@@ -276,5 +290,82 @@ describe('ImportProjectMembersModal', () => {
expect(formGroupErrorState()).not.toBe(false);
});
});
+
+ describe('when the import fails with member import errors', () => {
+ const mockInvitationsApi = (code, data) => {
+ mock.onPost(IMPORT_PROJECT_MEMBERS_PATH).reply(code, data);
+ };
+
+ beforeEach(() => {
+ createComponent();
+ findProjectSelect().vm.$emit('input', projectToBeImported);
+ });
+
+ it('displays the error alert', async () => {
+ mockInvitationsApi(
+ HTTP_STATUS_CREATED,
+ importProjectMembersApiResponse.NO_COLLAPSE_IMPORT_ERRORS,
+ );
+
+ clickImportButton();
+ await waitForPromises();
+
+ expect(findMemberErrorAlert().props('title')).toContain(
+ 'The following 2 out of 2 members could not be added',
+ );
+ expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(0));
+ expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(1));
+ });
+
+ it('displays collapse when there are more than 2 errors', async () => {
+ mockInvitationsApi(
+ HTTP_STATUS_CREATED,
+ importProjectMembersApiResponse.EXPANDED_IMPORT_ERRORS,
+ );
+
+ clickImportButton();
+ await waitForPromises();
+
+ expect(findAccordion().exists()).toBe(true);
+ expect(findMoreInviteErrorsButton().text()).toContain('Show more (2)');
+ });
+
+ it('toggles the collapse on click', async () => {
+ mockInvitationsApi(
+ HTTP_STATUS_CREATED,
+ importProjectMembersApiResponse.EXPANDED_IMPORT_ERRORS,
+ );
+
+ clickImportButton();
+ await waitForPromises();
+
+ expect(findMoreInviteErrorsButton().text()).toContain('Show more (2)');
+ expect(findErrorsIcon().attributes('class')).not.toContain('gl-rotate-180');
+ expect(findAccordion().attributes('visible')).toBeUndefined();
+
+ await findMoreInviteErrorsButton().vm.$emit('click');
+
+ expect(findMoreInviteErrorsButton().text()).toContain(EXPANDED_ERRORS);
+ expect(findErrorsIcon().attributes('class')).toContain('gl-rotate-180');
+ expect(findAccordion().attributes('visible')).toBeDefined();
+
+ await findMoreInviteErrorsButton().vm.$emit('click');
+
+ expect(findMoreInviteErrorsButton().text()).toContain('Show more (2)');
+ });
+
+ it("doesn't display collapse when there are 2 or less errors", async () => {
+ mockInvitationsApi(
+ HTTP_STATUS_CREATED,
+ importProjectMembersApiResponse.NO_COLLAPSE_IMPORT_ERRORS,
+ );
+
+ clickImportButton();
+ await waitForPromises();
+
+ expect(findAccordion().exists()).toBe(false);
+ expect(findMoreInviteErrorsButton().exists()).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/invite_members/components/user_limit_notification_spec.js b/spec/frontend/invite_members/components/user_limit_notification_spec.js
index 490b2e8bc7c..cb43ab506ac 100644
--- a/spec/frontend/invite_members/components/user_limit_notification_spec.js
+++ b/spec/frontend/invite_members/components/user_limit_notification_spec.js
@@ -1,14 +1,9 @@
import { GlAlert, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import UserLimitNotification from '~/invite_members/components/user_limit_notification.vue';
-import {
- NOTIFICATION_LIMIT_VARIANT,
- REACHED_LIMIT_VARIANT,
- CLOSE_TO_LIMIT_VARIANT,
-} from '~/invite_members/constants';
+import { REACHED_LIMIT_VARIANT, CLOSE_TO_LIMIT_VARIANT } from '~/invite_members/constants';
import { freeUsersLimit, remainingSeats } from '../mock_data/member_modal';
-const INFO_ALERT_TITLE = 'Your top-level group name is over the 5 user limit.';
const WARNING_ALERT_TITLE = 'You only have space for 2 more members in name';
describe('UserLimitNotification', () => {
@@ -36,17 +31,6 @@ describe('UserLimitNotification', () => {
});
};
- describe('when previewing free user cap', () => {
- it("renders user's preview limit notification", () => {
- createComponent(NOTIFICATION_LIMIT_VARIANT);
-
- const alert = findAlert();
-
- expect(alert.attributes('title')).toEqual(INFO_ALERT_TITLE);
- expect(alert.text()).toContain('GitLab will enforce this limit in the future.');
- });
- });
-
describe('when close to limit within a group', () => {
it("renders user's limit notification", () => {
createComponent(CLOSE_TO_LIMIT_VARIANT);
diff --git a/spec/frontend/invite_members/mock_data/api_responses.js b/spec/frontend/invite_members/mock_data/api_responses.js
index 6fe06decb6b..e3e2426fcfc 100644
--- a/spec/frontend/invite_members/mock_data/api_responses.js
+++ b/spec/frontend/invite_members/mock_data/api_responses.js
@@ -57,3 +57,27 @@ export const invitationsApiResponse = {
EMAIL_TAKEN,
EXPANDED_RESTRICTED,
};
+
+export const IMPORT_PROJECT_MEMBERS_PATH = '/api/v4/projects/1/import_project_members/2';
+const EXPANDED_IMPORT_ERRORS = {
+ message: {
+ bob_smith: 'Something is wrong for this member.',
+ john_smith: 'Something is wrong for this member.',
+ doug_logan: 'Something is wrong for this member.',
+ root: 'Something is wrong for this member.',
+ },
+ total_members_count: '4',
+ status: 'error',
+};
+const NO_COLLAPSE_IMPORT_ERRORS = {
+ message: {
+ bob_smith: 'Something is wrong for this member.',
+ john_smith: 'Something is wrong for this member.',
+ },
+ total_members_count: '2',
+ status: 'error',
+};
+export const importProjectMembersApiResponse = {
+ EXPANDED_IMPORT_ERRORS,
+ NO_COLLAPSE_IMPORT_ERRORS,
+};
diff --git a/spec/frontend/issuable/issuable_form_spec.js b/spec/frontend/issuable/issuable_form_spec.js
index b9652327e3d..d0e2e9adf4b 100644
--- a/spec/frontend/issuable/issuable_form_spec.js
+++ b/spec/frontend/issuable/issuable_form_spec.js
@@ -274,32 +274,28 @@ describe('IssuableForm', () => {
});
it.each`
- windowLocation | context | localStorageValue | editorType
- ${'/gitlab-org/gitlab/-/issues/412699'} | ${'Issue'} | ${'contentEditor'} | ${'editor_type_rich_text_editor'}
- ${'/gitlab-org/gitlab/-/merge_requests/125979/diffs'} | ${'MergeRequest'} | ${'contentEditor'} | ${'editor_type_rich_text_editor'}
- ${'/groups/gitlab-org/-/milestones/8/edit'} | ${'Other'} | ${'contentEditor'} | ${'editor_type_rich_text_editor'}
- ${'/gitlab-org/gitlab/-/issues/412699'} | ${'Issue'} | ${'markdownField'} | ${'editor_type_plain_text_editor'}
- ${'/gitlab-org/gitlab/-/merge_requests/125979/diffs'} | ${'MergeRequest'} | ${'markdownField'} | ${'editor_type_plain_text_editor'}
- ${'/groups/gitlab-org/-/milestones/8/edit'} | ${'Other'} | ${'markdownField'} | ${'editor_type_plain_text_editor'}
- `(
- 'tracks event on form submit',
- ({ windowLocation, context, localStorageValue, editorType }) => {
- setWindowLocation(`${TEST_HOST}/${windowLocation}`);
- localStorage.setItem('gl-markdown-editor-mode', localStorageValue);
-
- issueDescription.value = 'sample message';
+ windowLocation | property | localStorageValue | value
+ ${'/gitlab-org/gitlab/-/issues/412699'} | ${'Issue'} | ${'contentEditor'} | ${1}
+ ${'/gitlab-org/gitlab/-/merge_requests/125979/diffs'} | ${'MergeRequest'} | ${'contentEditor'} | ${1}
+ ${'/groups/gitlab-org/-/milestones/8/edit'} | ${'Other'} | ${'contentEditor'} | ${1}
+ ${'/gitlab-org/gitlab/-/issues/412699'} | ${'Issue'} | ${'markdownField'} | ${0}
+ ${'/gitlab-org/gitlab/-/merge_requests/125979/diffs'} | ${'MergeRequest'} | ${'markdownField'} | ${0}
+ ${'/groups/gitlab-org/-/milestones/8/edit'} | ${'Other'} | ${'markdownField'} | ${0}
+ `('tracks event on form submit', ({ windowLocation, property, localStorageValue, value }) => {
+ setWindowLocation(`${TEST_HOST}/${windowLocation}`);
+ localStorage.setItem('gl-markdown-editor-mode', localStorageValue);
- createIssuable($form);
+ issueDescription.value = 'sample message';
- $form.submit();
+ createIssuable($form);
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'editor_type_used', {
- context,
- editorType,
- label: 'editor_tracking',
- });
- },
- );
+ $form.submit();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
+ label: value ? 'rich_text_editor' : 'markdown_editor',
+ property,
+ });
+ });
it('prevents form submission when token is present', () => {
issueDescription.value = sensitiveMessage;
diff --git a/spec/frontend/issues/create_merge_request_dropdown_spec.js b/spec/frontend/issues/create_merge_request_dropdown_spec.js
index 21ae844e2dd..54dc07688df 100644
--- a/spec/frontend/issues/create_merge_request_dropdown_spec.js
+++ b/spec/frontend/issues/create_merge_request_dropdown_spec.js
@@ -51,31 +51,6 @@ describe('CreateMergeRequestDropdown', () => {
});
});
- describe('updateCreatePaths', () => {
- it('escapes branch names correctly', () => {
- dropdown.createBranchPath = `${TEST_HOST}/branches?branch_name=some-branch&issue=42`;
- dropdown.createMrPath = `${TEST_HOST}/create_merge_request?merge_request%5Bsource_branch%5D=test&merge_request%5Btarget_branch%5D=master&merge_request%5Bissue_iid%5D=42`;
-
- dropdown.updateCreatePaths('branch', 'contains#hash');
-
- expect(dropdown.createBranchPath).toBe(
- `${TEST_HOST}/branches?branch_name=contains%23hash&issue=42`,
- );
-
- expect(dropdown.createMrPath).toBe(
- `${TEST_HOST}/create_merge_request?merge_request%5Bsource_branch%5D=contains%23hash&merge_request%5Btarget_branch%5D=master&merge_request%5Bissue_iid%5D=42`,
- );
-
- expect(dropdown.wrapperEl.dataset.createBranchPath).toBe(
- `${TEST_HOST}/branches?branch_name=contains%23hash&issue=42`,
- );
-
- expect(dropdown.wrapperEl.dataset.createMrPath).toBe(
- `${TEST_HOST}/create_merge_request?merge_request%5Bsource_branch%5D=contains%23hash&merge_request%5Btarget_branch%5D=master&merge_request%5Bissue_iid%5D=42`,
- );
- });
- });
-
describe('enable', () => {
beforeEach(() => {
dropdown.createMergeRequestButton.classList.add('disabled');
diff --git a/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js b/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
index 148c6230b9f..4686a4fe0c4 100644
--- a/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
+++ b/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
@@ -35,6 +35,8 @@ import {
TOKEN_TYPE_MY_REACTION,
TOKEN_TYPE_SEARCH_WITHIN,
TOKEN_TYPE_TYPE,
+ TOKEN_TYPE_CREATED,
+ TOKEN_TYPE_CLOSED,
} from '~/vue_shared/components/filtered_search_bar/constants';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import {
@@ -61,6 +63,7 @@ describe('IssuesDashboardApp component', () => {
emptyStateWithFilterSvgPath: 'empty/state/with/filter/svg/path.svg',
emptyStateWithoutFilterSvgPath: 'empty/state/with/filter/svg/path.svg',
hasBlockedIssuesFeature: true,
+ hasIssueDateFilterFeature: true,
hasIssuableHealthStatusFeature: true,
hasIssueWeightsFeature: true,
hasScopedLabelsFeature: true,
@@ -365,7 +368,9 @@ describe('IssuesDashboardApp component', () => {
expect(findIssuableList().props('searchTokens')).toMatchObject([
{ type: TOKEN_TYPE_ASSIGNEE, preloadedUsers },
{ type: TOKEN_TYPE_AUTHOR, preloadedUsers },
+ { type: TOKEN_TYPE_CLOSED },
{ type: TOKEN_TYPE_CONFIDENTIAL },
+ { type: TOKEN_TYPE_CREATED },
{ type: TOKEN_TYPE_LABEL },
{ type: TOKEN_TYPE_MILESTONE },
{ type: TOKEN_TYPE_MY_REACTION },
diff --git a/spec/frontend/issues/issue_spec.js b/spec/frontend/issues/issue_spec.js
index 3b8a09714a7..bf2ca42f71f 100644
--- a/spec/frontend/issues/issue_spec.js
+++ b/spec/frontend/issues/issue_spec.js
@@ -1,7 +1,6 @@
-import { getByText } from '@testing-library/dom';
+import MockAdapter from 'axios-mock-adapter';
import htmlOpenIssue from 'test_fixtures/issues/open-issue.html';
import htmlClosedIssue from 'test_fixtures/issues/closed-issue.html';
-import MockAdapter from 'axios-mock-adapter';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { EVENT_ISSUABLE_VUE_APP_CHANGE } from '~/issuable/constants';
import Issue from '~/issues/issue';
@@ -26,14 +25,6 @@ describe('Issue', () => {
});
const getIssueCounter = () => document.querySelector('.issue_counter');
- const getOpenStatusBox = () =>
- getByText(document, (_, el) => el.textContent.match(/Open/), {
- selector: '.issuable-status-badge-open',
- });
- const getClosedStatusBox = () =>
- getByText(document, (_, el) => el.textContent.match(/Closed/), {
- selector: '.issuable-status-badge-closed',
- });
describe.each`
desc | isIssueInitiallyOpen | expectedCounterText
@@ -48,9 +39,6 @@ describe('Issue', () => {
}
testContext.issueCounter = getIssueCounter();
- testContext.statusBoxClosed = getClosedStatusBox();
- testContext.statusBoxOpen = getOpenStatusBox();
-
testContext.issueCounter.textContent = '1,001';
});
@@ -58,16 +46,6 @@ describe('Issue', () => {
resetHTMLFixture();
});
- it(`has the proper visible status box when ${isIssueInitiallyOpen ? 'open' : 'closed'}`, () => {
- if (isIssueInitiallyOpen) {
- expect(testContext.statusBoxClosed).toHaveClass('hidden');
- expect(testContext.statusBoxOpen).not.toHaveClass('hidden');
- } else {
- expect(testContext.statusBoxClosed).not.toHaveClass('hidden');
- expect(testContext.statusBoxOpen).toHaveClass('hidden');
- }
- });
-
describe('when vue app triggers change', () => {
beforeEach(() => {
document.dispatchEvent(
@@ -80,16 +58,6 @@ describe('Issue', () => {
);
});
- it('displays correct status box', () => {
- if (isIssueInitiallyOpen) {
- expect(testContext.statusBoxClosed).not.toHaveClass('hidden');
- expect(testContext.statusBoxOpen).toHaveClass('hidden');
- } else {
- expect(testContext.statusBoxClosed).toHaveClass('hidden');
- expect(testContext.statusBoxOpen).not.toHaveClass('hidden');
- }
- });
-
it('updates issueCounter text', () => {
expect(testContext.issueCounter).toBeVisible();
expect(testContext.issueCounter).toHaveText(expectedCounterText);
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index 72bf4826056..de027a21c8f 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlDisclosureDropdown } from '@gitlab/ui';
+import { GlButton, GlDisclosureDropdown, GlDrawer } from '@gitlab/ui';
import * as Sentry from '@sentry/browser';
import { mount, shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
@@ -69,7 +69,20 @@ import {
TOKEN_TYPE_RELEASE,
TOKEN_TYPE_SEARCH_WITHIN,
TOKEN_TYPE_TYPE,
+ TOKEN_TYPE_CREATED,
+ TOKEN_TYPE_CLOSED,
} from '~/vue_shared/components/filtered_search_bar/constants';
+import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
+import deleteWorkItemMutation from '~/work_items/graphql/delete_work_item.mutation.graphql';
+import {
+ workItemResponseFactory,
+ workItemByIidResponseFactory,
+ mockAwardEmojiThumbsUp,
+ mockAwardsWidget,
+ mockAssignees,
+ mockLabels,
+ mockMilestone,
+} from 'jest/work_items/mock_data';
import('~/issuable');
import('~/users_select');
@@ -98,6 +111,7 @@ describe('CE IssuesListApp component', () => {
hasAnyIssues: true,
hasAnyProjects: true,
hasBlockedIssuesFeature: true,
+ hasIssueDateFilterFeature: true,
hasIssuableHealthStatusFeature: true,
hasIssueWeightsFeature: true,
hasIterationsFeature: true,
@@ -130,6 +144,10 @@ describe('CE IssuesListApp component', () => {
const mockIssuesQueryResponse = jest.fn().mockResolvedValue(defaultQueryResponse);
const mockIssuesCountsQueryResponse = jest.fn().mockResolvedValue(getIssuesCountsQueryResponse);
+ const deleteWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue({ data: { workItemDelete: { errors: [] } } });
+
const findCsvImportExportButtons = () => wrapper.findComponent(CsvImportExportButtons);
const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findIssuableByEmail = () => wrapper.findComponent(IssuableByEmail);
@@ -142,6 +160,8 @@ describe('CE IssuesListApp component', () => {
const findNewResourceDropdown = () => wrapper.findComponent(NewResourceDropdown);
const findCalendarButton = () => wrapper.findByTestId('subscribe-calendar');
const findRssButton = () => wrapper.findByTestId('subscribe-rss');
+ const findIssuableDrawer = () => wrapper.findComponent(GlDrawer);
+ const findDrawerWorkItem = () => wrapper.findComponent(WorkItemDetail);
const findLabelsToken = () =>
findIssuableList()
@@ -156,11 +176,13 @@ describe('CE IssuesListApp component', () => {
sortPreferenceMutationResponse = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse),
stubs = {},
mountFn = shallowMount,
+ deleteMutationHandler = deleteWorkItemMutationHandler,
} = {}) => {
const requestHandlers = [
[getIssuesQuery, issuesQueryResponse],
[getIssuesCountsQuery, issuesCountsQueryResponse],
[setSortPreferenceMutation, sortPreferenceMutationResponse],
+ [deleteWorkItemMutation, deleteMutationHandler],
];
router = new VueRouter({ mode: 'history' });
@@ -634,8 +656,10 @@ describe('CE IssuesListApp component', () => {
expect(findIssuableList().props('searchTokens')).toMatchObject([
{ type: TOKEN_TYPE_ASSIGNEE, preloadedUsers },
{ type: TOKEN_TYPE_AUTHOR, preloadedUsers },
+ { type: TOKEN_TYPE_CLOSED },
{ type: TOKEN_TYPE_CONFIDENTIAL },
{ type: TOKEN_TYPE_CONTACT },
+ { type: TOKEN_TYPE_CREATED },
{ type: TOKEN_TYPE_LABEL },
{ type: TOKEN_TYPE_MILESTONE },
{ type: TOKEN_TYPE_MY_REACTION },
@@ -1017,4 +1041,225 @@ describe('CE IssuesListApp component', () => {
expect(findLabelsToken().fetchLatestLabels).toBe(null);
});
});
+
+ describe('when issue drawer is enabled', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ provide: {
+ glFeatures: {
+ issuesListDrawer: true,
+ },
+ },
+ stubs: {
+ GlDrawer,
+ },
+ });
+ });
+
+ it('renders issuable drawer component', () => {
+ expect(findIssuableDrawer().exists()).toBe(true);
+ });
+
+ it('renders issuable drawer closed by default', () => {
+ expect(findIssuableDrawer().props('open')).toBe(false);
+ });
+
+ describe('on selecting an issuable', () => {
+ beforeEach(() => {
+ findIssuableList().vm.$emit(
+ 'select-issuable',
+ getIssuesQueryResponse.data.project.issues.nodes[0],
+ );
+ return nextTick();
+ });
+
+ it('opens issuable drawer', () => {
+ expect(findIssuableDrawer().props('open')).toBe(true);
+ });
+
+ it('selects active issuable', () => {
+ expect(findIssuableList().props('activeIssuable')).toEqual(
+ getIssuesQueryResponse.data.project.issues.nodes[0],
+ );
+ });
+
+ describe('when closing the drawer', () => {
+ it('closes the drawer on drawer `close` event', async () => {
+ findIssuableDrawer().vm.$emit('close');
+ await nextTick();
+
+ expect(findIssuableDrawer().props('open')).toBe(false);
+ });
+
+ it('removes active issuable', async () => {
+ findIssuableDrawer().vm.$emit('close');
+ await nextTick();
+
+ expect(findIssuableList().props('activeIssuable')).toBe(null);
+ });
+ });
+
+ describe('when updating an issuable', () => {
+ it('refetches the list if the issuable changed state', async () => {
+ const {
+ data: { workItem },
+ } = workItemResponseFactory({ iid: '789', state: 'CLOSED' });
+ findDrawerWorkItem().vm.$emit('work-item-updated', workItem);
+
+ await waitForPromises();
+
+ expect(mockIssuesQueryResponse).toHaveBeenCalledTimes(2);
+ expect(mockIssuesCountsQueryResponse).toHaveBeenCalledTimes(2);
+ });
+
+ it('updates the assignees field of active issuable', async () => {
+ const {
+ data: { workItem },
+ } = workItemResponseFactory({ iid: '789' });
+ findDrawerWorkItem().vm.$emit('work-item-updated', workItem);
+
+ await waitForPromises();
+
+ expect(findIssuableList().props('issuables')[0].assignees.nodes).toEqual(
+ mockAssignees.map((assignee) => ({
+ ...assignee,
+ __persist: true,
+ })),
+ );
+ });
+
+ it('updates the labels field of active issuable', async () => {
+ const {
+ data: { workItem },
+ } = workItemResponseFactory({ iid: '789' });
+ findDrawerWorkItem().vm.$emit('work-item-updated', workItem);
+
+ await waitForPromises();
+
+ expect(findIssuableList().props('issuables')[0].labels.nodes).toEqual(
+ mockLabels.map((label) => ({
+ ...label,
+ __persist: true,
+ textColor: undefined,
+ })),
+ );
+ });
+
+ it('updates the upvotes count of active issuable', async () => {
+ const workItem = workItemByIidResponseFactory({
+ iid: '789',
+ awardEmoji: {
+ ...mockAwardsWidget,
+ nodes: [mockAwardEmojiThumbsUp],
+ },
+ }).data.workspace.workItems.nodes[0];
+
+ findDrawerWorkItem().vm.$emit('work-item-emoji-updated', workItem);
+
+ await waitForPromises();
+
+ expect(findIssuableList().props('issuables')[0].upvotes).toBe(1);
+ });
+
+ it('updates the milestone field of active issuable', async () => {
+ const {
+ data: { workItem },
+ } = workItemResponseFactory({ iid: '789' });
+ findDrawerWorkItem().vm.$emit('work-item-updated', workItem);
+
+ await waitForPromises();
+
+ expect(findIssuableList().props('issuables')[0].milestone).toEqual({
+ ...mockMilestone,
+ __persist: true,
+ expired: undefined,
+ state: undefined,
+ });
+ });
+
+ it('updates the title and confidential state of active issuable', async () => {
+ const {
+ data: { workItem },
+ } = workItemResponseFactory({ iid: '789', confidential: true });
+ findDrawerWorkItem().vm.$emit('work-item-updated', workItem);
+
+ await waitForPromises();
+
+ expect(findIssuableList().props('issuables')[0].title).toBe('Updated title');
+ expect(findIssuableList().props('issuables')[0].confidential).toBe(true);
+ });
+
+ it('refetches the list if new child was added to active issuable', async () => {
+ findDrawerWorkItem().vm.$emit('addChild');
+
+ await waitForPromises();
+
+ expect(mockIssuesQueryResponse).toHaveBeenCalledTimes(2);
+ expect(mockIssuesCountsQueryResponse).toHaveBeenCalledTimes(2);
+ });
+
+ it('updates issuable type to objective if promoted to objective', async () => {
+ findDrawerWorkItem().vm.$emit('promotedToObjective', '789');
+
+ await waitForPromises();
+ // required for cache updates
+ jest.runOnlyPendingTimers();
+ await nextTick();
+
+ expect(findIssuableList().props('issuables')[0].type).toBe('OBJECTIVE');
+ });
+ });
+
+ describe('when deleting an issuable from the drawer', () => {
+ beforeEach(async () => {
+ const {
+ data: { workItem },
+ } = workItemResponseFactory({ iid: '789' });
+ findDrawerWorkItem().vm.$emit('deleteWorkItem', workItem);
+
+ await waitForPromises();
+ });
+
+ it('should refetch issues and issues count', () => {
+ expect(mockIssuesQueryResponse).toHaveBeenCalledTimes(2);
+ expect(mockIssuesCountsQueryResponse).toHaveBeenCalledTimes(2);
+ });
+
+ it('should close the issue drawer', () => {
+ expect(findIssuableDrawer().props('open')).toBe(false);
+ });
+ });
+ });
+ });
+
+ it('shows an error when deleting from the drawer fails', async () => {
+ const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
+ const {
+ data: { workItem },
+ } = workItemResponseFactory({ iid: '789' });
+
+ wrapper = mountComponent({
+ provide: {
+ glFeatures: {
+ issuesListDrawer: true,
+ },
+ },
+ stubs: {
+ GlDrawer,
+ },
+ deleteMutationHandler: errorHandler,
+ });
+
+ findIssuableList().vm.$emit(
+ 'select-issuable',
+ getIssuesQueryResponse.data.project.issues.nodes[0],
+ );
+ await nextTick();
+
+ findDrawerWorkItem().vm.$emit('deleteWorkItem', workItem);
+ await waitForPromises();
+
+ expect(Sentry.captureException).toHaveBeenCalled();
+ expect(findIssuableList().props('error')).toBe('An error occurred while deleting an issuable.');
+ });
});
diff --git a/spec/frontend/issues/show/components/app_spec.js b/spec/frontend/issues/show/components/app_spec.js
index ecca3e69ef6..de183f94277 100644
--- a/spec/frontend/issues/show/components/app_spec.js
+++ b/spec/frontend/issues/show/components/app_spec.js
@@ -1,5 +1,6 @@
import { GlIcon, GlIntersectionObserver } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
+import { nextTick } from 'vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -10,6 +11,7 @@ import {
STATUS_OPEN,
STATUS_REOPENED,
TYPE_EPIC,
+ TYPE_INCIDENT,
TYPE_ISSUE,
} from '~/issues/constants';
import IssuableApp from '~/issues/show/components/app.vue';
@@ -79,6 +81,13 @@ describe('Issuable output', () => {
return waitForPromises();
};
+ const createComponentAndScroll = async (props) => {
+ await createComponent({ props });
+ global.pageYOffset = 100;
+ wrapper.findComponent(GlIntersectionObserver).vm.$emit('disappear');
+ await nextTick();
+ };
+
const emitHubEvent = (event) => {
eventHub.$emit(event);
return waitForPromises();
@@ -320,57 +329,51 @@ describe('Issuable output', () => {
});
describe('sticky header', () => {
- beforeEach(async () => {
- await createComponent();
- });
-
describe('when title is in view', () => {
- it('is not shown', () => {
+ it('is not shown', async () => {
+ await createComponent();
wrapper.findComponent(GlIntersectionObserver).vm.$emit('disappear');
+
expect(findStickyHeader().exists()).toBe(false);
});
});
describe('when title is not in view', () => {
- beforeEach(() => {
- global.pageYOffset = 100;
- wrapper.findComponent(GlIntersectionObserver).vm.$emit('disappear');
- });
-
- it('shows with title', () => {
- expect(findStickyHeader().text()).toContain(initialRequest.title_text);
- });
-
- it('shows with title for an epic', async () => {
- await wrapper.setProps({ issuableType: 'epic' });
+ it.each([TYPE_INCIDENT, TYPE_ISSUE, TYPE_EPIC])(
+ 'shows with title when issuableType="%s"',
+ async (issuableType) => {
+ await createComponentAndScroll({ issuableType });
- expect(findStickyHeader().text()).toContain(' this is a title');
- });
+ expect(findStickyHeader().text()).toContain('this is a title');
+ },
+ );
it.each`
- issuableType | issuableStatus | statusIcon
- ${TYPE_ISSUE} | ${STATUS_OPEN} | ${'issues'}
- ${TYPE_ISSUE} | ${STATUS_CLOSED} | ${'issue-closed'}
- ${TYPE_EPIC} | ${STATUS_OPEN} | ${'epic'}
- ${TYPE_EPIC} | ${STATUS_CLOSED} | ${'epic-closed'}
+ issuableType | issuableStatus | statusIcon
+ ${TYPE_INCIDENT} | ${STATUS_OPEN} | ${'issues'}
+ ${TYPE_INCIDENT} | ${STATUS_CLOSED} | ${'issue-closed'}
+ ${TYPE_ISSUE} | ${STATUS_OPEN} | ${'issues'}
+ ${TYPE_ISSUE} | ${STATUS_CLOSED} | ${'issue-closed'}
+ ${TYPE_EPIC} | ${STATUS_OPEN} | ${'epic'}
+ ${TYPE_EPIC} | ${STATUS_CLOSED} | ${'epic-closed'}
`(
'shows with state icon "$statusIcon" for $issuableType when status is $issuableStatus',
async ({ issuableType, issuableStatus, statusIcon }) => {
- await wrapper.setProps({ issuableType, issuableStatus });
+ await createComponentAndScroll({ issuableType, issuableStatus });
expect(findStickyHeader().findComponent(GlIcon).props('name')).toBe(statusIcon);
},
);
it.each`
- title | state
+ title | issuableStatus
${'shows with Open when status is opened'} | ${STATUS_OPEN}
${'shows with Closed when status is closed'} | ${STATUS_CLOSED}
${'shows with Open when status is reopened'} | ${STATUS_REOPENED}
- `('$title', async ({ state }) => {
- await wrapper.setProps({ issuableStatus: state });
+ `('$title', async ({ issuableStatus }) => {
+ await createComponentAndScroll({ issuableStatus });
- expect(findStickyHeader().text()).toContain(issuableStatusText[state]);
+ expect(findStickyHeader().text()).toContain(issuableStatusText[issuableStatus]);
});
it.each`
@@ -378,10 +381,11 @@ describe('Issuable output', () => {
${'does not show confidential badge when issue is not confidential'} | ${false}
${'shows confidential badge when issue is confidential'} | ${true}
`('$title', async ({ isConfidential }) => {
- await wrapper.setProps({ isConfidential });
-
+ await createComponentAndScroll({ isConfidential });
const confidentialEl = findConfidentialBadge();
+
expect(confidentialEl.exists()).toBe(isConfidential);
+
if (isConfidential) {
expect(confidentialEl.props()).toMatchObject({
workspaceType: 'project',
@@ -395,8 +399,7 @@ describe('Issuable output', () => {
${'does not show locked badge when issue is not locked'} | ${false}
${'shows locked badge when issue is locked'} | ${true}
`('$title', async ({ isLocked }) => {
- await wrapper.setProps({ isLocked });
-
+ await createComponentAndScroll({ isLocked });
const lockedBadge = findLockedBadge();
expect(lockedBadge.exists()).toBe(isLocked);
@@ -414,8 +417,7 @@ describe('Issuable output', () => {
${'does not show hidden badge when issue is not hidden'} | ${false}
${'shows hidden badge when issue is hidden'} | ${true}
`('$title', async ({ isHidden }) => {
- await wrapper.setProps({ isHidden });
-
+ await createComponentAndScroll({ isHidden });
const hiddenBadge = findHiddenBadge();
expect(hiddenBadge.exists()).toBe(isHidden);
diff --git a/spec/frontend/issues/show/components/fields/description_spec.js b/spec/frontend/issues/show/components/fields/description_spec.js
index 5e329d44acb..83b927d3699 100644
--- a/spec/frontend/issues/show/components/fields/description_spec.js
+++ b/spec/frontend/issues/show/components/fields/description_spec.js
@@ -74,10 +74,9 @@ describe('Description field component', () => {
});
it('tracks event', () => {
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'editor_type_used', {
- context: 'Issue',
- editorType: 'editor_type_plain_text_editor',
- label: 'editor_tracking',
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
+ label: 'markdown_editor',
+ property: 'Issue',
});
});
});
diff --git a/spec/frontend/issues/show/components/header_actions_spec.js b/spec/frontend/issues/show/components/header_actions_spec.js
index 8a98b2b702a..ce2161f4670 100644
--- a/spec/frontend/issues/show/components/header_actions_spec.js
+++ b/spec/frontend/issues/show/components/header_actions_spec.js
@@ -1,6 +1,7 @@
import Vue, { nextTick } from 'vue';
import { GlDropdown, GlDropdownItem, GlLink, GlModal, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
diff --git a/spec/frontend/issues/show/components/issue_header_spec.js b/spec/frontend/issues/show/components/issue_header_spec.js
new file mode 100644
index 00000000000..6acc7004576
--- /dev/null
+++ b/spec/frontend/issues/show/components/issue_header_spec.js
@@ -0,0 +1,129 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';
+import IssueHeader from '~/issues/show/components/issue_header.vue';
+import { __, s__ } from '~/locale';
+import IssuableHeader from '~/vue_shared/issuable/show/components/issuable_header.vue';
+
+describe('IssueHeader component', () => {
+ let wrapper;
+
+ const findGlLink = () => wrapper.findComponent(GlLink);
+ const findIssuableHeader = () => wrapper.findComponent(IssuableHeader);
+
+ const mountComponent = (props = {}) => {
+ wrapper = shallowMount(IssueHeader, {
+ propsData: {
+ author: { id: 48 },
+ confidential: false,
+ createdAt: '2020-01-23T12:34:56.789Z',
+ duplicatedToIssueUrl: '',
+ isFirstContribution: false,
+ isHidden: false,
+ isLocked: false,
+ issuableState: 'opened',
+ issuableType: 'issue',
+ movedToIssueUrl: '',
+ promotedToEpicUrl: '',
+ ...props,
+ },
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ it('renders IssuableHeader component', () => {
+ mountComponent();
+
+ expect(findIssuableHeader().props()).toMatchObject({
+ author: { id: 48 },
+ blocked: false,
+ confidential: false,
+ createdAt: '2020-01-23T12:34:56.789Z',
+ isFirstContribution: false,
+ isHidden: false,
+ issuableState: 'opened',
+ issuableType: 'issue',
+ serviceDeskReplyTo: '',
+ showWorkItemTypeIcon: true,
+ statusIcon: 'issues',
+ workspaceType: 'project',
+ });
+ });
+
+ describe('status badge slot', () => {
+ describe('when status is open', () => {
+ beforeEach(() => {
+ mountComponent({ issuableState: STATUS_OPEN });
+ });
+
+ it('renders Open text', () => {
+ expect(findIssuableHeader().text()).toBe(__('Open'));
+ });
+
+ it('renders correct icon', () => {
+ expect(findIssuableHeader().props('statusIcon')).toBe('issues');
+ });
+ });
+
+ describe('when status is closed', () => {
+ beforeEach(() => {
+ mountComponent({ issuableState: STATUS_CLOSED });
+ });
+
+ it('renders Closed text', () => {
+ expect(findIssuableHeader().text()).toBe(s__('IssuableStatus|Closed'));
+ });
+
+ it('renders correct icon', () => {
+ expect(findIssuableHeader().props('statusIcon')).toBe('issue-closed');
+ });
+
+ describe('when issue is marked as duplicate', () => {
+ beforeEach(() => {
+ mountComponent({
+ issuableState: STATUS_CLOSED,
+ duplicatedToIssueUrl: 'project/-/issue/5',
+ });
+ });
+
+ it('renders `Closed (duplicated)`', () => {
+ expect(findIssuableHeader().text()).toMatchInterpolatedText('Closed (duplicated)');
+ });
+
+ it('links to the duplicated issue', () => {
+ expect(findGlLink().attributes('href')).toBe('project/-/issue/5');
+ });
+ });
+
+ describe('when issue is marked as moved', () => {
+ beforeEach(() => {
+ mountComponent({ issuableState: STATUS_CLOSED, movedToIssueUrl: 'project/-/issue/6' });
+ });
+
+ it('renders `Closed (moved)`', () => {
+ expect(findIssuableHeader().text()).toMatchInterpolatedText('Closed (moved)');
+ });
+
+ it('links to the moved issue', () => {
+ expect(findGlLink().attributes('href')).toBe('project/-/issue/6');
+ });
+ });
+
+ describe('when issue is marked as promoted', () => {
+ beforeEach(() => {
+ mountComponent({ issuableState: STATUS_CLOSED, promotedToEpicUrl: 'group/-/epic/7' });
+ });
+
+ it('renders `Closed (promoted)`', () => {
+ expect(findIssuableHeader().text()).toMatchInterpolatedText('Closed (promoted)');
+ });
+
+ it('links to the promoted epic', () => {
+ expect(findGlLink().attributes('href')).toBe('group/-/epic/7');
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issues/show/components/sentry_error_stack_trace_spec.js b/spec/frontend/issues/show/components/sentry_error_stack_trace_spec.js
index 02b20b9e7b7..6fa84432032 100644
--- a/spec/frontend/issues/show/components/sentry_error_stack_trace_spec.js
+++ b/spec/frontend/issues/show/components/sentry_error_stack_trace_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import SentryErrorStackTrace from '~/issues/show/components/sentry_error_stack_trace.vue';
diff --git a/spec/frontend/jira_connect/subscriptions/api_spec.js b/spec/frontend/jira_connect/subscriptions/api_spec.js
index 36e2c7bbab2..0ba50cbcff1 100644
--- a/spec/frontend/jira_connect/subscriptions/api_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/api_spec.js
@@ -55,11 +55,13 @@ describe('JiraConnect API', () => {
describe('fetchGroups', () => {
const mockGroupsPath = 'groupsPath';
+ const mockMinAccessLevel = 30;
const mockPage = 1;
const mockPerPage = 10;
const makeRequest = () =>
fetchGroups(mockGroupsPath, {
+ minAccessLevel: mockMinAccessLevel,
page: mockPage,
perPage: mockPerPage,
});
@@ -68,6 +70,7 @@ describe('JiraConnect API', () => {
jest.spyOn(axiosInstance, 'get');
axiosMock
.onGet(mockGroupsPath, {
+ min_access_level: mockMinAccessLevel,
page: mockPage,
per_page: mockPerPage,
})
@@ -78,6 +81,7 @@ describe('JiraConnect API', () => {
expect(axiosInstance.get).toHaveBeenCalledWith(mockGroupsPath, {
headers: {},
params: {
+ min_access_level: mockMinAccessLevel,
page: mockPage,
per_page: mockPerPage,
search: undefined,
diff --git a/spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js b/spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js
index 845ada187ef..bf85e2b5cd1 100644
--- a/spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js
@@ -3,10 +3,12 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+
import { fetchGroups } from '~/jira_connect/subscriptions/api';
import GroupsList from '~/jira_connect/subscriptions/components/add_namespace_modal/groups_list.vue';
import GroupsListItem from '~/jira_connect/subscriptions/components/add_namespace_modal/groups_list_item.vue';
import { DEFAULT_GROUPS_PER_PAGE } from '~/jira_connect/subscriptions/constants';
+import createStore from '~/jira_connect/subscriptions/store';
import { mockGroup1, mockGroup2 } from '../../mock_data';
const createMockGroup = (groupId) => {
@@ -26,24 +28,30 @@ jest.mock('~/jira_connect/subscriptions/api', () => {
};
});
-const mockGroupsPath = '/groups';
-const mockAccessToken = '123';
-
describe('GroupsList', () => {
let wrapper;
+ let store;
- const mockEmptyResponse = { data: [] };
+ const mockGroupsPath = '/groups';
+ const mockAccessToken = '123';
+ const mockUser = {
+ name: 'test user',
+ is_admin: false,
+ };
+
+ const createComponent = ({ initialState } = {}) => {
+ store = createStore({
+ accessToken: mockAccessToken,
+ currentUser: mockUser,
+ ...initialState,
+ });
- const createComponent = (options = {}) => {
wrapper = extendedWrapper(
shallowMount(GroupsList, {
+ store,
provide: {
groupsPath: mockGroupsPath,
},
- computed: {
- accessToken: () => mockAccessToken,
- },
- ...options,
}),
);
};
@@ -82,6 +90,8 @@ describe('GroupsList', () => {
});
describe('with no groups returned', () => {
+ const mockEmptyResponse = { data: [] };
+
it('renders empty state', async () => {
fetchGroups.mockResolvedValue(mockEmptyResponse);
createComponent();
@@ -151,6 +161,7 @@ describe('GroupsList', () => {
expect(fetchGroups).toHaveBeenLastCalledWith(
mockGroupsPath,
{
+ minAccessLevel: 40,
page: 1,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: mockSearchTeam,
@@ -229,6 +240,7 @@ describe('GroupsList', () => {
expect(fetchGroups).toHaveBeenCalledWith(
mockGroupsPath,
{
+ minAccessLevel: 40,
page: 1,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: expectedSearchValue,
@@ -268,6 +280,7 @@ describe('GroupsList', () => {
expect(fetchGroups).toHaveBeenLastCalledWith(
mockGroupsPath,
{
+ minAccessLevel: 40,
page: 2,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: '',
@@ -289,6 +302,7 @@ describe('GroupsList', () => {
expect(fetchGroups).toHaveBeenLastCalledWith(
mockGroupsPath,
{
+ minAccessLevel: 40,
page: expectedPage,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: expectedSearchTerm,
@@ -300,6 +314,36 @@ describe('GroupsList', () => {
});
});
+ describe('when user is admin', () => {
+ const mockAdmin = {
+ name: 'test admin',
+ is_admin: true,
+ };
+
+ beforeEach(async () => {
+ fetchGroups.mockResolvedValue();
+ createComponent({
+ initialState: {
+ currentUser: mockAdmin,
+ },
+ });
+
+ await waitForPromises();
+ });
+
+ it('calls `fetchGroups` without `min_access_level`', () => {
+ expect(fetchGroups).toHaveBeenLastCalledWith(
+ mockGroupsPath,
+ {
+ page: 1,
+ perPage: DEFAULT_GROUPS_PER_PAGE,
+ search: '',
+ },
+ mockAccessToken,
+ );
+ });
+ });
+
describe('pagination', () => {
it.each`
scenario | totalItems | shouldShowPagination
@@ -336,13 +380,14 @@ describe('GroupsList', () => {
await waitForPromises();
});
- it('executes `fetchGroups` with correct arguments', () => {
+ it('calls `fetchGroups` with correct arguments', () => {
const paginationEl = findPagination();
paginationEl.vm.$emit('input', 2);
expect(fetchGroups).toHaveBeenLastCalledWith(
mockGroupsPath,
{
+ minAccessLevel: 40,
page: 2,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: '',
diff --git a/spec/frontend/jobs/components/job/artifacts_block_spec.js b/spec/frontend/jobs/components/job/artifacts_block_spec.js
index ea5d727bd08..f9e52a5ae43 100644
--- a/spec/frontend/jobs/components/job/artifacts_block_spec.js
+++ b/spec/frontend/jobs/components/job/artifacts_block_spec.js
@@ -1,4 +1,5 @@
-import { mount } from '@vue/test-utils';
+import { GlPopover } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
import ArtifactsBlock from '~/jobs/components/job/sidebar/artifacts_block.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
@@ -7,18 +8,20 @@ describe('Artifacts block', () => {
let wrapper;
const createWrapper = (propsData) =>
- mount(ArtifactsBlock, {
+ mountExtended(ArtifactsBlock, {
propsData: {
helpUrl: 'help-url',
...propsData,
},
});
- const findArtifactRemoveElt = () => wrapper.find('[data-testid="artifacts-remove-timeline"]');
- const findJobLockedElt = () => wrapper.find('[data-testid="job-locked-message"]');
- const findKeepBtn = () => wrapper.find('[data-testid="keep-artifacts"]');
- const findDownloadBtn = () => wrapper.find('[data-testid="download-artifacts"]');
- const findBrowseBtn = () => wrapper.find('[data-testid="browse-artifacts"]');
+ const findArtifactRemoveElt = () => wrapper.findByTestId('artifacts-remove-timeline');
+ const findJobLockedElt = () => wrapper.findByTestId('job-locked-message');
+ const findKeepBtn = () => wrapper.findByTestId('keep-artifacts');
+ const findDownloadBtn = () => wrapper.findByTestId('download-artifacts');
+ const findBrowseBtn = () => wrapper.findByTestId('browse-artifacts');
+ const findArtifactsHelpLink = () => wrapper.findByTestId('artifacts-help-link');
+ const findPopover = () => wrapper.findComponent(GlPopover);
const expireAt = '2018-08-14T09:38:49.157Z';
const timeago = getTimeago();
@@ -168,4 +171,23 @@ describe('Artifacts block', () => {
expect(findBrowseBtn().exists()).toBe(true);
});
});
+
+ describe('artifacts help text', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({
+ artifact: lockedNonExpiredArtifact,
+ });
+ });
+
+ it('displays help text', () => {
+ const expectedHelpText =
+ 'Job artifacts are files that are configured to be uploaded when a job finishes execution. Artifacts could be compiled files, unit tests or scanning reports, or any other files generated by a job.';
+
+ expect(findPopover().text()).toBe(expectedHelpText);
+ });
+
+ it('links to artifacts help page', () => {
+ expect(findArtifactsHelpLink().attributes('href')).toBe('/help/ci/jobs/job_artifacts');
+ });
+ });
});
diff --git a/spec/frontend/jobs/components/job/job_app_spec.js b/spec/frontend/jobs/components/job/job_app_spec.js
index c925131dd9c..8f5700ee22d 100644
--- a/spec/frontend/jobs/components/job/job_app_spec.js
+++ b/spec/frontend/jobs/components/job/job_app_spec.js
@@ -1,4 +1,5 @@
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
diff --git a/spec/frontend/jobs/components/job/job_log_controllers_spec.js b/spec/frontend/jobs/components/job/job_log_controllers_spec.js
index 218096b9745..7b6d58f63d1 100644
--- a/spec/frontend/jobs/components/job/job_log_controllers_spec.js
+++ b/spec/frontend/jobs/components/job/job_log_controllers_spec.js
@@ -21,7 +21,6 @@ describe('Job log controllers', () => {
const defaultProps = {
rawPath: '/raw',
- erasePath: '/erase',
size: 511952,
isScrollTopDisabled: false,
isScrollBottomDisabled: false,
diff --git a/spec/frontend/jobs/components/job/sidebar_detail_row_spec.js b/spec/frontend/jobs/components/job/sidebar_detail_row_spec.js
index fd27004816a..546f5392caf 100644
--- a/spec/frontend/jobs/components/job/sidebar_detail_row_spec.js
+++ b/spec/frontend/jobs/components/job/sidebar_detail_row_spec.js
@@ -1,12 +1,13 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SidebarDetailRow from '~/jobs/components/job/sidebar/sidebar_detail_row.vue';
+import { DOCS_URL } from 'jh_else_ce/lib/utils/url_utility';
describe('Sidebar detail row', () => {
let wrapper;
const title = 'this is the title';
const value = 'this is the value';
- const helpUrl = 'https://docs.gitlab.com/runner/register/index.html';
+ const helpUrl = `${DOCS_URL}/runner/register/index.html`;
const path = 'path/to/value';
const findHelpLink = () => wrapper.findByTestId('job-sidebar-help-link');
diff --git a/spec/frontend/jobs/components/log/line_header_spec.js b/spec/frontend/jobs/components/log/line_header_spec.js
index 16fe753e08a..c02d8c22655 100644
--- a/spec/frontend/jobs/components/log/line_header_spec.js
+++ b/spec/frontend/jobs/components/log/line_header_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import setWindowLocation from 'helpers/set_window_location_helper';
import DurationBadge from '~/jobs/components/log/duration_badge.vue';
import LineHeader from '~/jobs/components/log/line_header.vue';
import LineNumber from '~/jobs/components/log/line_number.vue';
@@ -15,7 +16,7 @@ describe('Job Log Header Line', () => {
style: 'term-fg-l-green',
},
],
- lineNumber: 0,
+ lineNumber: 76,
},
isClosed: true,
path: '/jashkenas/underscore/-/jobs/335',
@@ -89,4 +90,30 @@ describe('Job Log Header Line', () => {
expect(wrapper.findComponent(DurationBadge).exists()).toBe(true);
});
});
+
+ describe('line highlighting', () => {
+ describe('with hash', () => {
+ beforeEach(() => {
+ setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353#L77`);
+
+ createComponent(data);
+ });
+
+ it('highlights line', () => {
+ expect(wrapper.classes()).toContain('gl-bg-gray-700');
+ });
+ });
+
+ describe('without hash', () => {
+ beforeEach(() => {
+ setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353`);
+
+ createComponent(data);
+ });
+
+ it('does not highlight line', () => {
+ expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
+ });
+ });
+ });
});
diff --git a/spec/frontend/jobs/components/log/line_spec.js b/spec/frontend/jobs/components/log/line_spec.js
index 50ebd1610d2..fad7a03beef 100644
--- a/spec/frontend/jobs/components/log/line_spec.js
+++ b/spec/frontend/jobs/components/log/line_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Line from '~/jobs/components/log/line.vue';
import LineNumber from '~/jobs/components/log/line_number.vue';
+import setWindowLocation from 'helpers/set_window_location_helper';
const httpUrl = 'http://example.com';
const httpsUrl = 'https://example.com';
@@ -203,7 +204,7 @@ describe('Job Log Line', () => {
searchResults: mockSearchResults,
});
- expect(wrapper.classes()).toContain('gl-bg-gray-500');
+ expect(wrapper.classes()).toContain('gl-bg-gray-700');
});
it('does not apply highlight class to search result elements', () => {
@@ -218,7 +219,49 @@ describe('Job Log Line', () => {
searchResults: mockSearchResults,
});
- expect(wrapper.classes()).not.toContain('gl-bg-gray-500');
+ expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
+ });
+ });
+
+ describe('job log hash highlighting', () => {
+ describe('with hash', () => {
+ beforeEach(() => {
+ setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353#L77`);
+ });
+
+ it('applies highlight class to job log line', () => {
+ createComponent({
+ line: {
+ offset: 24526,
+ content: [{ text: 'job log content' }],
+ section: 'custom-section',
+ lineNumber: 76,
+ },
+ path: '/root/ci-project/-/jobs/6353',
+ });
+
+ expect(wrapper.classes()).toContain('gl-bg-gray-700');
+ });
+ });
+
+ describe('without hash', () => {
+ beforeEach(() => {
+ setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353`);
+ });
+
+ it('does not apply highlight class to job log line', () => {
+ createComponent({
+ line: {
+ offset: 24500,
+ content: [{ text: 'line' }],
+ section: 'custom-section',
+ lineNumber: 10,
+ },
+ path: '/root/ci-project/-/jobs/6353',
+ });
+
+ expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
+ });
});
});
});
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/jobs/components/log/log_spec.js
index 20638b13169..9407b340950 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/jobs/components/log/log_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { scrollToElement } from '~/lib/utils/common_utils';
diff --git a/spec/frontend/lib/print_markdown_dom_spec.js b/spec/frontend/lib/print_markdown_dom_spec.js
new file mode 100644
index 00000000000..7f28417228e
--- /dev/null
+++ b/spec/frontend/lib/print_markdown_dom_spec.js
@@ -0,0 +1,102 @@
+import printJS from 'print-js';
+import printMarkdownDom from '~/lib/print_markdown_dom';
+
+jest.mock('print-js', () => jest.fn());
+
+describe('print util', () => {
+ describe('print markdown dom', () => {
+ beforeEach(() => {
+ document.body.innerHTML = `<div id='target'></div>`;
+ });
+
+ const getTarget = () => document.getElementById('target');
+
+ const contentValues = [
+ {
+ title: 'test title',
+ expectedTitle: '<h2 class="gl-mt-0 gl-mb-5">test title</h2>',
+ content: '',
+ expectedContent: '<div class="md"></div>',
+ },
+ {
+ title: 'test title',
+ expectedTitle: '<h2 class="gl-mt-0 gl-mb-5">test title</h2>',
+ content: '<p>test content</p>',
+ expectedContent: '<div class="md"><p>test content</p></div>',
+ },
+ {
+ title: 'test title',
+ expectedTitle: '<h2 class="gl-mt-0 gl-mb-5">test title</h2>',
+ content: '<details><summary>test detail</summary><p>test detail content</p></details>',
+ expectedContent:
+ '<div class="md"><details open=""><summary>test detail</summary><p>test detail content</p></details></div>',
+ },
+ {
+ title: undefined,
+ expectedTitle: '',
+ content: '',
+ expectedContent: '<div class="md"></div>',
+ },
+ {
+ title: undefined,
+ expectedTitle: '',
+ content: '<p>test content</p>',
+ expectedContent: '<div class="md"><p>test content</p></div>',
+ },
+ {
+ title: undefined,
+ expectedTitle: '',
+ content: '<details><summary>test detail</summary><p>test detail content</p></details>',
+ expectedContent:
+ '<div class="md"><details open=""><summary>test detail</summary><p>test detail content</p></details></div>',
+ },
+ ];
+
+ it.each(contentValues)(
+ 'should print with title ($title) and content ($content)',
+ async ({ title, expectedTitle, content, expectedContent }) => {
+ const target = getTarget();
+ target.innerHTML = content;
+ const stylesheet = 'test stylesheet';
+
+ await printMarkdownDom({
+ target,
+ title,
+ stylesheet,
+ });
+
+ expect(printJS).toHaveBeenCalledWith({
+ printable: expectedTitle + expectedContent,
+ type: 'raw-html',
+ documentTitle: title,
+ scanStyles: false,
+ css: stylesheet,
+ });
+ },
+ );
+ });
+
+ describe('ignore selectors', () => {
+ beforeEach(() => {
+ document.body.innerHTML = `<div id='target'><div><div class='ignore-me'></div></div></div>`;
+ });
+
+ it('should ignore dom if ignoreSelectors', async () => {
+ const target = document.getElementById('target');
+ const ignoreSelectors = ['.ignore-me'];
+
+ await printMarkdownDom({
+ target,
+ ignoreSelectors,
+ });
+
+ expect(printJS).toHaveBeenCalledWith({
+ printable: '<div class="md"><div></div></div>',
+ type: 'raw-html',
+ documentTitle: undefined,
+ scanStyles: false,
+ css: [],
+ });
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/error_util_spec.js b/spec/frontend/lib/utils/error_util_spec.js
new file mode 100644
index 00000000000..72dcf550428
--- /dev/null
+++ b/spec/frontend/lib/utils/error_util_spec.js
@@ -0,0 +1,194 @@
+import {
+ ActiveModelError,
+ generateHelpTextWithLinks,
+ mapSystemToFriendlyError,
+} from '~/lib/utils/error_utils';
+import { convertObjectPropsToLowerCase } from '~/lib/utils/common_utils';
+
+describe('Error Alert Utils', () => {
+ const unfriendlyErrorOneKey = 'Unfriendly error 1';
+ const emailTakenAttributeMap = 'email:taken';
+ const emailTakenError = 'Email has already been taken';
+ const emailTakenFriendlyError = {
+ message: 'This is a friendly error message for the given attribute map',
+ links: {},
+ };
+
+ const mockErrorDictionary = convertObjectPropsToLowerCase({
+ [unfriendlyErrorOneKey]: {
+ message:
+ 'This is a friendly error with %{linkOneStart}link 1%{linkOneEnd} and %{linkTwoStart}link 2%{linkTwoEnd}',
+ links: {
+ linkOne: '/sample/link/1',
+ linkTwo: '/sample/link/2',
+ },
+ },
+ 'Unfriendly error 2': {
+ message: 'This is a friendly error with only %{linkStart} one link %{linkEnd}',
+ links: {
+ link: '/sample/link/1',
+ },
+ },
+ 'Unfriendly error 3': {
+ message: 'This is a friendly error with no links',
+ links: {},
+ },
+ [emailTakenAttributeMap]: emailTakenFriendlyError,
+ [emailTakenError]: emailTakenFriendlyError,
+ });
+
+ const mockGeneralError = {
+ message: 'Something went wrong',
+ link: {},
+ };
+
+ describe('mapSystemToFriendlyError', () => {
+ describe.each(Object.keys(mockErrorDictionary))('when system error is %s', (systemError) => {
+ const friendlyError = mockErrorDictionary[systemError];
+
+ it('maps the system error to the friendly one', () => {
+ expect(mapSystemToFriendlyError(new Error(systemError), mockErrorDictionary)).toEqual(
+ friendlyError,
+ );
+ });
+
+ it('maps the system error to the friendly one from uppercase', () => {
+ expect(
+ mapSystemToFriendlyError(new Error(systemError.toUpperCase()), mockErrorDictionary),
+ ).toEqual(friendlyError);
+ });
+ });
+
+ describe.each(['', {}, [], undefined, null, new Error()])(
+ 'when system error is %s',
+ (systemError) => {
+ it('defaults to the given general error message when provided', () => {
+ expect(
+ mapSystemToFriendlyError(systemError, mockErrorDictionary, mockGeneralError),
+ ).toEqual(mockGeneralError);
+ });
+
+ it('defaults to the default error message when general error message is not provided', () => {
+ expect(mapSystemToFriendlyError(systemError, mockErrorDictionary)).toEqual({
+ message: 'Something went wrong. Please try again.',
+ links: {},
+ });
+ });
+ },
+ );
+
+ describe('when system error is a non-existent key', () => {
+ const message = 'a non-existent key';
+ const nonExistentKeyError = { message, links: {} };
+
+ it('maps the system error to the friendly one', () => {
+ expect(mapSystemToFriendlyError(new Error(message), mockErrorDictionary)).toEqual(
+ nonExistentKeyError,
+ );
+ });
+ });
+
+ describe('when system error consists of multiple non-existent keys', () => {
+ const message = 'a non-existent key, another non-existent key';
+ const nonExistentKeyError = { message, links: {} };
+
+ it('maps the system error to the friendly one', () => {
+ expect(mapSystemToFriendlyError(new Error(message), mockErrorDictionary)).toEqual(
+ nonExistentKeyError,
+ );
+ });
+ });
+
+ describe('when system error consists of multiple messages with one matching key', () => {
+ const message = `a non-existent key, ${unfriendlyErrorOneKey}`;
+
+ it('maps the system error to the friendly one', () => {
+ expect(mapSystemToFriendlyError(new Error(message), mockErrorDictionary)).toEqual(
+ mockErrorDictionary[unfriendlyErrorOneKey.toLowerCase()],
+ );
+ });
+ });
+
+ describe('when error is email:taken error_attribute_map', () => {
+ const errorAttributeMap = { email: ['taken'] };
+
+ it('maps the email friendly error', () => {
+ expect(
+ mapSystemToFriendlyError(
+ new ActiveModelError(errorAttributeMap, emailTakenError),
+ mockErrorDictionary,
+ ),
+ ).toEqual(mockErrorDictionary[emailTakenAttributeMap.toLowerCase()]);
+ });
+ });
+
+ describe('when there are multiple errors in the error_attribute_map', () => {
+ const errorAttributeMap = { email: ['taken', 'invalid'] };
+
+ it('maps the email friendly error', () => {
+ expect(
+ mapSystemToFriendlyError(
+ new ActiveModelError(errorAttributeMap, `${emailTakenError}, Email is invalid`),
+ mockErrorDictionary,
+ ),
+ ).toEqual(mockErrorDictionary[emailTakenAttributeMap.toLowerCase()]);
+ });
+ });
+ });
+
+ describe('generateHelpTextWithLinks', () => {
+ describe('when the error is present in the dictionary', () => {
+ describe.each(Object.values(mockErrorDictionary))(
+ 'when system error is %s',
+ (friendlyError) => {
+ it('generates the proper link', () => {
+ const errorHtmlString = generateHelpTextWithLinks(friendlyError);
+ const expected = Array.from(friendlyError.message.matchAll(/%{/g)).length / 2;
+ const newNode = document.createElement('div');
+ newNode.innerHTML = errorHtmlString;
+ const links = Array.from(newNode.querySelectorAll('a'));
+
+ expect(links).toHaveLength(expected);
+ });
+ },
+ );
+ });
+
+ describe('when the error contains no links', () => {
+ it('generates the proper link/s', () => {
+ const anError = { message: 'An error', links: {} };
+ const errorHtmlString = generateHelpTextWithLinks(anError);
+ const expected = Object.keys(anError.links).length;
+ const newNode = document.createElement('div');
+ newNode.innerHTML = errorHtmlString;
+ const links = Array.from(newNode.querySelectorAll('a'));
+
+ expect(links).toHaveLength(expected);
+ });
+ });
+
+ describe('when the error is invalid', () => {
+ it('returns the error', () => {
+ expect(() => generateHelpTextWithLinks([])).toThrow(
+ new Error('The error cannot be empty.'),
+ );
+ });
+ });
+
+ describe('when the error is not an object', () => {
+ it('returns the error', () => {
+ const errorHtmlString = generateHelpTextWithLinks('An error');
+
+ expect(errorHtmlString).toBe('An error');
+ });
+ });
+
+ describe('when the error is falsy', () => {
+ it('throws an error', () => {
+ expect(() => generateHelpTextWithLinks(null)).toThrow(
+ new Error('The error cannot be empty.'),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/file_utility_spec.js b/spec/frontend/lib/utils/file_utility_spec.js
new file mode 100644
index 00000000000..386deafe712
--- /dev/null
+++ b/spec/frontend/lib/utils/file_utility_spec.js
@@ -0,0 +1,13 @@
+import { readFileAsDataURL } from '~/lib/utils/file_utility';
+
+describe('File utilities', () => {
+ describe('readFileAsDataURL', () => {
+ it('reads a file and returns its output as a data url', () => {
+ const file = new File(['foo'], 'foo.png', { type: 'image/png' });
+
+ return readFileAsDataURL(file).then((contents) => {
+ expect(contents).toBe('data:image/png;base64,Zm9v');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 8f1f6899935..b7d6bbd3991 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -238,36 +238,6 @@ describe('text_utility', () => {
});
});
- describe('truncatePathMiddleToLength', () => {
- it('does not truncate text', () => {
- expect(textUtils.truncatePathMiddleToLength('app/test', 50)).toEqual('app/test');
- });
-
- it('truncates middle of the path', () => {
- expect(textUtils.truncatePathMiddleToLength('app/test/diff', 13)).toEqual('app/…/diff');
- });
-
- it('truncates multiple times in the middle of the path', () => {
- expect(textUtils.truncatePathMiddleToLength('app/test/merge_request/diff', 13)).toEqual(
- 'app/…/…/diff',
- );
- });
-
- describe('given a path too long for the maxWidth', () => {
- it.each`
- path | maxWidth | result
- ${'aa/bb/cc'} | ${1} | ${'…'}
- ${'aa/bb/cc'} | ${2} | ${'…'}
- ${'aa/bb/cc'} | ${3} | ${'…/…'}
- ${'aa/bb/cc'} | ${4} | ${'…/…'}
- ${'aa/bb/cc'} | ${5} | ${'…/…/…'}
- `('truncates ($path, $maxWidth) to $result', ({ path, maxWidth, result }) => {
- expect(result.length).toBeLessThanOrEqual(maxWidth);
- expect(textUtils.truncatePathMiddleToLength(path, maxWidth)).toEqual(result);
- });
- });
- });
-
describe('slugifyWithUnderscore', () => {
it('should replaces whitespaces with underscore and convert to lower case', () => {
expect(textUtils.slugifyWithUnderscore('My Input String')).toEqual('my_input_string');
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 0f32eaa4ca6..450eeefd898 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -421,6 +421,16 @@ describe('URL utility', () => {
window.location = originalLocation;
});
+ it.each`
+ inputQuery | expectedQuery
+ ${'?scope=all&state=merged'} | ${'?scope=all&state=merged'}
+ ${'?'} | ${'?'}
+ `('handles query string: $inputQuery', ({ inputQuery, expectedQuery }) => {
+ window.location.href = mockUrl;
+ urlUtils.visitUrl(inputQuery);
+ expect(window.location.assign).toHaveBeenCalledWith(`${mockUrl}${expectedQuery}`);
+ });
+
it('does not navigate to unsafe urls', () => {
// eslint-disable-next-line no-script-url
const url = 'javascript:alert(document.domain)';
@@ -1107,6 +1117,7 @@ describe('URL utility', () => {
describe('defaultPromoUrl', () => {
it('Gitlab about page url', () => {
+ // eslint-disable-next-line no-restricted-syntax
const url = 'https://about.gitlab.com';
expect(urlUtils.PROMO_URL).toBe(url);
@@ -1136,4 +1147,18 @@ describe('URL utility', () => {
expect(urlUtils.removeLastSlashInUrlPath(input)).toBe(output);
});
});
+
+ describe('buildURLwithRefType', () => {
+ const base = 'http://gitlab.com/';
+
+ it.each`
+ path | refType | output
+ ${'foo/bar'} | ${'heads'} | ${'/foo/bar?ref_type=heads'}
+ ${'/foo/bar/'} | ${'HEADS'} | ${'/foo/bar/?ref_type=heads'}
+ ${'/foo/bar/'} | ${''} | ${'/foo/bar/'}
+ ${'/'} | ${''} | ${'/'}
+ `('path $path with ref $refType becomes $output', ({ path, refType, output }) => {
+ expect(urlUtils.buildURLwithRefType({ base, path, refType })).toBe(output);
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/vuex_module_mappers_spec.js b/spec/frontend/lib/utils/vuex_module_mappers_spec.js
index abd5095c1d2..9070903728b 100644
--- a/spec/frontend/lib/utils/vuex_module_mappers_spec.js
+++ b/spec/frontend/lib/utils/vuex_module_mappers_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import {
mapVuexModuleActions,
diff --git a/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js b/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
index 7a4cd844425..3e88246ada0 100644
--- a/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
@@ -1,6 +1,7 @@
import { GlButton, GlForm } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ApproveAccessRequestButton from '~/members/components/action_buttons/approve_access_request_button.vue';
diff --git a/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js b/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
index 1d83a2e0e71..24a936abc99 100644
--- a/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
@@ -1,6 +1,7 @@
import { GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import RemoveGroupLinkButton from '~/members/components/action_buttons/remove_group_link_button.vue';
diff --git a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
index 3879279b559..84393cb64ea 100644
--- a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
@@ -1,6 +1,7 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { modalData } from 'jest/members/mock_data';
diff --git a/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js b/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
index a6b5978b566..50facbf7f5d 100644
--- a/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
@@ -1,6 +1,7 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ResendInviteButton from '~/members/components/action_buttons/resend_invite_button.vue';
diff --git a/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js b/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js
index 2f0d4b8e655..be53c48c9fd 100644
--- a/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js
+++ b/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js
@@ -1,6 +1,7 @@
import { GlDisclosureDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { modalData } from 'jest/members/mock_data';
import RemoveMemberDropdownItem from '~/members/components/action_dropdowns/remove_member_dropdown_item.vue';
diff --git a/spec/frontend/members/components/app_spec.js b/spec/frontend/members/components/app_spec.js
index b2147163233..929a5a054e6 100644
--- a/spec/frontend/members/components/app_spec.js
+++ b/spec/frontend/members/components/app_spec.js
@@ -1,6 +1,7 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import * as commonUtils from '~/lib/utils/common_utils';
import MembersApp from '~/members/components/app.vue';
diff --git a/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js b/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
index de2f6e6dd47..6bb51b4633c 100644
--- a/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
+++ b/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import FilterSortContainer from '~/members/components/filter_sort/filter_sort_container.vue';
import MembersFilteredSearchBar from '~/members/components/filter_sort/members_filtered_search_bar.vue';
diff --git a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
index 29b7ceae0e3..107bd2f0985 100644
--- a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
+++ b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
diff --git a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
index 526f839ece8..849a84b1a6f 100644
--- a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
+++ b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
@@ -1,6 +1,7 @@
import { GlSorting, GlSortingItem } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import * as urlUtilities from '~/lib/utils/url_utility';
diff --git a/spec/frontend/members/components/members_tabs_spec.js b/spec/frontend/members/components/members_tabs_spec.js
index 9078bd87d62..de2f8c9f4c6 100644
--- a/spec/frontend/members/components/members_tabs_spec.js
+++ b/spec/frontend/members/components/members_tabs_spec.js
@@ -1,5 +1,6 @@
import { GlTabs, GlButton } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/members/components/modals/leave_modal_spec.js b/spec/frontend/members/components/modals/leave_modal_spec.js
index cec5f192e59..95a4fb07853 100644
--- a/spec/frontend/members/components/modals/leave_modal_spec.js
+++ b/spec/frontend/members/components/modals/leave_modal_spec.js
@@ -1,6 +1,7 @@
import { GlModal, GlForm } from '@gitlab/ui';
import { cloneDeep } from 'lodash';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import LeaveModal from '~/members/components/modals/leave_modal.vue';
diff --git a/spec/frontend/members/components/modals/remove_group_link_modal_spec.js b/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
index e4782ac7f2e..bde33528eb8 100644
--- a/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
@@ -2,6 +2,7 @@ import { GlModal, GlForm } from '@gitlab/ui';
import { within } from '@testing-library/dom';
import { mount, createWrapper } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import RemoveGroupLinkModal from '~/members/components/modals/remove_group_link_modal.vue';
import { REMOVE_GROUP_LINK_MODAL_ID, MEMBER_TYPES } from '~/members/constants';
diff --git a/spec/frontend/members/components/modals/remove_member_modal_spec.js b/spec/frontend/members/components/modals/remove_member_modal_spec.js
index baef0b30b02..01138ff845d 100644
--- a/spec/frontend/members/components/modals/remove_member_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_member_modal_spec.js
@@ -1,6 +1,7 @@
import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import RemoveMemberModal from '~/members/components/modals/remove_member_modal.vue';
import {
diff --git a/spec/frontend/members/components/table/expiration_datepicker_spec.js b/spec/frontend/members/components/table/expiration_datepicker_spec.js
index 9176a02a447..d9847abda52 100644
--- a/spec/frontend/members/components/table/expiration_datepicker_spec.js
+++ b/spec/frontend/members/components/table/expiration_datepicker_spec.js
@@ -1,6 +1,7 @@
import { GlDatepicker } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { useFakeDate } from 'helpers/fake_date';
import waitForPromises from 'helpers/wait_for_promises';
diff --git a/spec/frontend/members/components/table/members_table_cell_spec.js b/spec/frontend/members/components/table/members_table_cell_spec.js
index 1c6f1b086cf..099fe7b4b8a 100644
--- a/spec/frontend/members/components/table/members_table_cell_spec.js
+++ b/spec/frontend/members/components/table/members_table_cell_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import MembersTableCell from '~/members/components/table/members_table_cell.vue';
import { MEMBER_TYPES } from '~/members/constants';
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index efc8c9b4459..4539478bf9a 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -1,5 +1,6 @@
import { GlBadge, GlPagination, GlTable } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/members/components/table/role_dropdown_spec.js b/spec/frontend/members/components/table/role_dropdown_spec.js
index fa188f50d54..5204ac2fdbe 100644
--- a/spec/frontend/members/components/table/role_dropdown_spec.js
+++ b/spec/frontend/members/components/table/role_dropdown_spec.js
@@ -3,6 +3,7 @@ import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import * as Sentry from '@sentry/browser';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
diff --git a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
index ab913b30f3c..edd18c57f43 100644
--- a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
+++ b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
@@ -1,5 +1,6 @@
import { GlSprintf } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import InlineConflictLines from '~/merge_conflicts/components/inline_conflict_lines.vue';
diff --git a/spec/frontend/milestones/components/milestone_combobox_spec.js b/spec/frontend/milestones/components/milestone_combobox_spec.js
index 748e01d4291..53abf6dc544 100644
--- a/spec/frontend/milestones/components/milestone_combobox_spec.js
+++ b/spec/frontend/milestones/components/milestone_combobox_spec.js
@@ -3,6 +3,7 @@ import { mount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { ENTER_KEY } from '~/lib/utils/keys';
diff --git a/spec/frontend/milestones/index_spec.js b/spec/frontend/milestones/index_spec.js
index 477217fc10f..1f65cfc556e 100644
--- a/spec/frontend/milestones/index_spec.js
+++ b/spec/frontend/milestones/index_spec.js
@@ -6,6 +6,7 @@ jest.mock('~/behaviors/markdown/render_gfm');
jest.mock('~/milestones/milestone');
jest.mock('~/right_sidebar');
jest.mock('~/sidebar/mount_milestone_sidebar');
+jest.mock('~/lib/graphql');
describe('#initShow', () => {
beforeEach(() => {
diff --git a/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js b/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
index 08d6650b5bb..1d516240306 100644
--- a/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
+++ b/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
@@ -5,6 +5,7 @@ import TopNavMenuItem from '~/nav/components/top_nav_menu_item.vue';
import TopNavMenuSections from '~/nav/components/top_nav_menu_sections.vue';
import KeepAliveSlots from '~/vue_shared/components/keep_alive_slots.vue';
import { TEST_NAV_DATA } from '../mock_data';
+import { stubComponent } from '../../__helpers__/stub_component';
describe('~/nav/components/top_nav_dropdown_menu.vue', () => {
let wrapper;
@@ -19,7 +20,7 @@ describe('~/nav/components/top_nav_dropdown_menu.vue', () => {
},
stubs: {
// Stub the keep-alive-slots so we don't render frequent items which uses a store
- KeepAliveSlots: true,
+ KeepAliveSlots: stubComponent(KeepAliveSlots),
},
});
};
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index a6d88bdd310..0728646246d 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -3,6 +3,7 @@ import { mount, shallowMount } from '@vue/test-utils';
import Autosize from 'autosize';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
@@ -170,10 +171,9 @@ describe('issue_comment_form component', () => {
findCloseReopenButton().trigger('click');
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'editor_type_used', {
- context: 'Issue_comment',
- editorType: 'editor_type_plain_text_editor',
- label: 'editor_tracking',
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
+ label: 'markdown_editor',
+ property: 'Issue_comment',
});
});
diff --git a/spec/frontend/notes/components/discussion_counter_spec.js b/spec/frontend/notes/components/discussion_counter_spec.js
index e52dd87f784..64cb42af316 100644
--- a/spec/frontend/notes/components/discussion_counter_spec.js
+++ b/spec/frontend/notes/components/discussion_counter_spec.js
@@ -1,6 +1,7 @@
import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DiscussionCounter from '~/notes/components/discussion_counter.vue';
import notesModule from '~/notes/stores/modules';
diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js
index 7d8347b20d4..87ccb5b7394 100644
--- a/spec/frontend/notes/components/discussion_filter_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_spec.js
@@ -2,6 +2,7 @@ import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import AxiosMockAdapter from 'axios-mock-adapter';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { TEST_HOST } from 'helpers/test_constants';
import createEventHub from '~/helpers/event_hub_factory';
diff --git a/spec/frontend/notes/components/mr_discussion_filter_spec.js b/spec/frontend/notes/components/mr_discussion_filter_spec.js
index 2bb47fd3c9e..05576d2ccc6 100644
--- a/spec/frontend/notes/components/mr_discussion_filter_spec.js
+++ b/spec/frontend/notes/components/mr_discussion_filter_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { GlCollapsibleListbox, GlListboxItem, GlButton } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DiscussionFilter from '~/notes/components/mr_discussion_filter.vue';
import { MR_FILTER_OPTIONS } from '~/notes/constants';
diff --git a/spec/frontend/notes/components/multiline_comment_form_spec.js b/spec/frontend/notes/components/multiline_comment_form_spec.js
index 8446bba340f..feba016e427 100644
--- a/spec/frontend/notes/components/multiline_comment_form_spec.js
+++ b/spec/frontend/notes/components/multiline_comment_form_spec.js
@@ -1,6 +1,7 @@
import { GlFormSelect } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import MultilineCommentForm from '~/notes/components/multiline_comment_form.vue';
import notesModule from '~/notes/stores/modules';
diff --git a/spec/frontend/notes/components/note_awards_list_spec.js b/spec/frontend/notes/components/note_awards_list_spec.js
index 0107b27f980..fd4d04129ea 100644
--- a/spec/frontend/notes/components/note_awards_list_spec.js
+++ b/spec/frontend/notes/components/note_awards_list_spec.js
@@ -1,5 +1,6 @@
import AxiosMockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { TEST_HOST } from 'helpers/test_constants';
import { mountExtended } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/notes/components/note_body_spec.js b/spec/frontend/notes/components/note_body_spec.js
index c4f8e50b969..622038171b3 100644
--- a/spec/frontend/notes/components/note_body_spec.js
+++ b/spec/frontend/notes/components/note_body_spec.js
@@ -1,3 +1,4 @@
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/notes/components/note_form_spec.js b/spec/frontend/notes/components/note_form_spec.js
index 645aef21e38..3c461f2b382 100644
--- a/spec/frontend/notes/components/note_form_spec.js
+++ b/spec/frontend/notes/components/note_form_spec.js
@@ -234,53 +234,65 @@ describe('issue_note_form component', () => {
const saveButton = wrapper.find('.js-vue-issue-save');
saveButton.vm.$emit('click');
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'editor_type_used', {
- context: 'Issue_note',
- editorType: 'editor_type_plain_text_editor',
- label: 'editor_tracking',
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
+ label: 'markdown_editor',
+ property: 'Issue_note',
});
});
});
});
- describe('with batch comments', () => {
- beforeEach(() => {
- store.registerModule('batchComments', batchComments());
-
+ describe('resolve checkbox', () => {
+ it('hides resolve checkbox when discussion is not resolvable', () => {
createComponentWrapper({
- isDraft: true,
- noteId: '',
- discussion: { ...discussionMock, for_commit: false },
+ discussion: {
+ ...discussionMock,
+ notes: [
+ ...discussionMock.notes.map((n) => ({
+ ...n,
+ resolvable: false,
+ current_user: { ...n.current_user, can_resolve_discussion: false },
+ })),
+ ],
+ },
});
- });
-
- it('should be possible to cancel', () => {
- findCancelCommentButton().vm.$emit('click');
-
- expect(wrapper.emitted('cancelForm')).toEqual([[true, false]]);
- });
- it('shows resolve checkbox', () => {
- expect(wrapper.findComponent(GlFormCheckbox).exists()).toBe(true);
+ expect(wrapper.findComponent(GlFormCheckbox).exists()).toBe(false);
});
- it('hides resolve checkbox', () => {
+ it('shows resolve checkbox when discussion is resolvable', () => {
createComponentWrapper({
- isDraft: false,
discussion: {
...discussionMock,
notes: [
...discussionMock.notes.map((n) => ({
...n,
resolvable: true,
- current_user: { ...n.current_user, can_resolve_discussion: false },
+ current_user: { ...n.current_user, can_resolve_discussion: true },
})),
],
- for_commit: false,
},
});
- expect(wrapper.findComponent(GlFormCheckbox).exists()).toBe(false);
+ expect(wrapper.findComponent(GlFormCheckbox).exists()).toBe(true);
+ });
+ });
+
+ describe('with batch comments', () => {
+ beforeEach(() => {
+ store.registerModule('batchComments', batchComments());
+
+ createComponentWrapper({
+ isDraft: true,
+ noteId: '',
+ discussion: { ...discussionMock, for_commit: false },
+ });
+ });
+
+ it('should be possible to cancel', () => {
+ findCancelCommentButton().vm.$emit('click');
+
+ expect(wrapper.emitted('cancelForm')).toEqual([[true, false]]);
});
it('hides actions for commits', () => {
diff --git a/spec/frontend/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index 60ad9e3344a..54f2bcd09ed 100644
--- a/spec/frontend/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -1,4 +1,5 @@
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import NoteHeader from '~/notes/components/note_header.vue';
@@ -22,6 +23,7 @@ describe('NoteHeader component', () => {
const findAuthorName = () => wrapper.findByTestId('author-name');
const findSpinner = () => wrapper.findComponent({ ref: 'spinner' });
const authorUsernameLink = () => wrapper.findComponent({ ref: 'authorUsernameLink' });
+ const findAuthorNameLink = () => wrapper.findComponent({ ref: 'authorNameLink' });
const statusHtml =
'"<span class="user-status-emoji has-tooltip" title="foo bar" data-html="true" data-placement="top"><gl-emoji title="basketball and hoop" data-name="basketball" data-unicode-version="6.0">🏀</gl-emoji></span>"';
@@ -205,7 +207,7 @@ describe('NoteHeader component', () => {
it('proxies `mouseenter` event to author name link', () => {
createComponent({ author });
- const dispatchEvent = jest.spyOn(wrapper.vm.$refs.authorNameLink, 'dispatchEvent');
+ const dispatchEvent = jest.spyOn(findAuthorNameLink().element, 'dispatchEvent');
wrapper.findComponent({ ref: 'authorUsernameLink' }).trigger('mouseenter');
@@ -215,7 +217,7 @@ describe('NoteHeader component', () => {
it('proxies `mouseleave` event to author name link', () => {
createComponent({ author });
- const dispatchEvent = jest.spyOn(wrapper.vm.$refs.authorNameLink, 'dispatchEvent');
+ const dispatchEvent = jest.spyOn(findAuthorNameLink().element, 'dispatchEvent');
wrapper.findComponent({ ref: 'authorUsernameLink' }).trigger('mouseleave');
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index 36f89e479e6..1b1b64f34d7 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import MockAdapter from 'axios-mock-adapter';
import discussionWithTwoUnresolvedNotes from 'test_fixtures/merge_requests/resolved_diff_discussion.json';
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index 059972df56b..825a856e5fa 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -1,4 +1,5 @@
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { GlAvatarLink, GlAvatar } from '@gitlab/ui';
import { clone } from 'lodash';
diff --git a/spec/frontend/notes/components/timeline_toggle_spec.js b/spec/frontend/notes/components/timeline_toggle_spec.js
index caa6f95d5da..a8411584c8d 100644
--- a/spec/frontend/notes/components/timeline_toggle_spec.js
+++ b/spec/frontend/notes/components/timeline_toggle_spec.js
@@ -1,6 +1,7 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import TimelineToggle, {
timelineEnabledTooltip,
diff --git a/spec/frontend/notes/mixins/discussion_navigation_spec.js b/spec/frontend/notes/mixins/discussion_navigation_spec.js
index bef8ed8e659..128fa0979f7 100644
--- a/spec/frontend/notes/mixins/discussion_navigation_spec.js
+++ b/spec/frontend/notes/mixins/discussion_navigation_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import createEventHub from '~/helpers/event_hub_factory';
diff --git a/spec/frontend/notes/mock_data.js b/spec/frontend/notes/mock_data.js
index 94549c4a73b..b291eba61f5 100644
--- a/spec/frontend/notes/mock_data.js
+++ b/spec/frontend/notes/mock_data.js
@@ -15,6 +15,10 @@ export const notesDataMock = {
closePath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=close',
reopenPath: '/twitter/flight/issues/9.json?issue%5Bstate_event%5D=reopen',
canAwardEmoji: true,
+ noteableType: 'issue',
+ noteableId: 1,
+ projectId: 2,
+ groupId: null,
};
export const userDataMock = {
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 50df63d06af..0205f606297 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -2,6 +2,7 @@ import AxiosMockAdapter from 'axios-mock-adapter';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import testAction from 'helpers/vuex_action_helper';
import { TEST_HOST } from 'spec/test_constants';
+import actionCable from '~/actioncable_consumer';
import Api from '~/api';
import { createAlert } from '~/alert';
import toast from '~/vue_shared/plugins/global_toast';
@@ -44,6 +45,15 @@ jest.mock('~/alert', () => ({
jest.mock('~/vue_shared/plugins/global_toast');
+jest.mock('@rails/actioncable', () => {
+ const mockConsumer = {
+ subscriptions: { create: jest.fn().mockReturnValue({ unsubscribe: jest.fn() }) },
+ };
+ return {
+ createConsumer: jest.fn().mockReturnValue(mockConsumer),
+ };
+});
+
describe('Actions Notes Store', () => {
let commit;
let dispatch;
@@ -251,6 +261,59 @@ describe('Actions Notes Store', () => {
});
});
+ describe('initPolling', () => {
+ afterEach(() => {
+ gon.features = {};
+ });
+
+ it('creates the Action Cable subscription', () => {
+ gon.features = { actionCableNotes: true };
+
+ store.dispatch('setNotesData', notesDataMock);
+ store.dispatch('initPolling');
+
+ expect(actionCable.subscriptions.create).toHaveBeenCalledTimes(1);
+ expect(actionCable.subscriptions.create).toHaveBeenCalledWith(
+ {
+ channel: 'Noteable::NotesChannel',
+ project_id: store.state.notesData.projectId,
+ group_id: store.state.notesData.groupId,
+ noteable_type: store.state.notesData.noteableType,
+ noteable_id: store.state.notesData.noteableId,
+ },
+ expect.any(Object),
+ );
+ });
+ });
+
+ describe('fetchUpdatedNotes', () => {
+ const response = { notes: [], last_fetched_at: '123456' };
+ const successMock = () =>
+ axiosMock.onGet(notesDataMock.notesPath).reply(HTTP_STATUS_OK, response);
+ const failureMock = () =>
+ axiosMock.onGet(notesDataMock.notesPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+
+ beforeEach(() => {
+ return store.dispatch('setNotesData', notesDataMock);
+ });
+
+ it('calls the endpoint and stores last fetched state', async () => {
+ successMock();
+
+ await store.dispatch('fetchUpdatedNotes');
+
+ expect(store.state.lastFetchedAt).toBe('123456');
+ });
+
+ it('shows an alert when fetching fails', async () => {
+ failureMock();
+
+ await store.dispatch('fetchUpdatedNotes');
+
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ });
+ });
+
describe('poll', () => {
const pollInterval = 6000;
const pollResponse = { notes: [], last_fetched_at: '123456' };
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index 385aee2c1aa..cbbce493251 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -452,11 +452,36 @@ describe('Notes Store mutations', () => {
discussions: [individualNote],
};
- const transformedNote = { ...individualNote.notes[0], type: DISCUSSION_NOTE };
+ const transformedNote = {
+ ...individualNote.notes[0],
+ type: DISCUSSION_NOTE,
+ resolvable: true,
+ };
mutations.UPDATE_NOTE(state, transformedNote);
expect(state.discussions[0].individual_note).toEqual(false);
+ expect(state.discussions[0].resolvable).toEqual(true);
+ });
+
+ it('copies resolve state to discussion', () => {
+ const state = { discussions: [{ ...discussionMock }] };
+
+ const resolvedNote = {
+ ...discussionMock.notes[0],
+ resolvable: true,
+ resolved: true,
+ resolved_at: '2017-08-02T10:51:58.559Z',
+ resolved_by: discussionMock.notes[0].author,
+ resolved_by_push: false,
+ };
+
+ mutations.UPDATE_NOTE(state, resolvedNote);
+
+ expect(state.discussions[0].resolved).toEqual(resolvedNote.resolved);
+ expect(state.discussions[0].resolved_at).toEqual(resolvedNote.resolved_at);
+ expect(state.discussions[0].resolved_by).toEqual(resolvedNote.resolved_by);
+ expect(state.discussions[0].resolved_by_push).toEqual(resolvedNote.resolved_by_push);
});
});
diff --git a/spec/frontend/notifications/components/notifications_dropdown_spec.js b/spec/frontend/notifications/components/notifications_dropdown_spec.js
index bae9b028cf7..1b6a1d2898d 100644
--- a/spec/frontend/notifications/components/notifications_dropdown_spec.js
+++ b/spec/frontend/notifications/components/notifications_dropdown_spec.js
@@ -200,7 +200,7 @@ describe('NotificationsDropdown', () => {
noFlip: true,
});
- expect(findDropdown().attributes('no-flip')).toBe('true');
+ expect(findDropdown().props('noFlip')).toBe(true);
});
});
diff --git a/spec/frontend/oauth_application/components/oauth_secret_spec.js b/spec/frontend/oauth_application/components/oauth_secret_spec.js
index c38bd066da8..5ad55c1e81b 100644
--- a/spec/frontend/oauth_application/components/oauth_secret_spec.js
+++ b/spec/frontend/oauth_application/components/oauth_secret_spec.js
@@ -47,6 +47,10 @@ describe('OAuthSecret', () => {
it('shows the renew secret button', () => {
expect(findRenewSecretButton().exists()).toBe(true);
});
+
+ it('renders secret in readonly input', () => {
+ expect(findInputCopyToggleVisibility().props('readonly')).toBe(true);
+ });
});
describe('when secret is not provided', () => {
diff --git a/spec/frontend/observability/client_spec.js b/spec/frontend/observability/client_spec.js
index 239d7adf986..10fdc8c33c4 100644
--- a/spec/frontend/observability/client_spec.js
+++ b/spec/frontend/observability/client_spec.js
@@ -9,6 +9,7 @@ describe('buildClient', () => {
let axiosMock;
const tracingUrl = 'https://example.com/tracing';
+ const EXPECTED_ERROR_MESSAGE = 'traces are missing/invalid in the response';
beforeEach(() => {
axiosMock = new MockAdapter(axios);
@@ -24,11 +25,51 @@ describe('buildClient', () => {
axiosMock.restore();
});
+ describe('fetchTrace', () => {
+ it('fetches the trace from the tracing URL', async () => {
+ const mockTraces = [
+ { trace_id: 'trace-1', spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }] },
+ ];
+
+ axiosMock.onGet(tracingUrl).reply(200, {
+ traces: mockTraces,
+ });
+
+ const result = await client.fetchTrace('trace-1');
+
+ expect(axios.get).toHaveBeenCalledTimes(1);
+ expect(axios.get).toHaveBeenCalledWith(tracingUrl, {
+ withCredentials: true,
+ params: { trace_id: 'trace-1' },
+ });
+ expect(result).toEqual({
+ ...mockTraces[0],
+ duration: 1,
+ });
+ });
+
+ it('rejects if trace id is missing', () => {
+ return expect(client.fetchTrace()).rejects.toThrow('traceId is required.');
+ });
+
+ it('rejects if traces are empty', () => {
+ axiosMock.onGet(tracingUrl).reply(200, { traces: [] });
+
+ return expect(client.fetchTrace('trace-1')).rejects.toThrow(EXPECTED_ERROR_MESSAGE);
+ });
+
+ it('rejects if traces are invalid', () => {
+ axiosMock.onGet(tracingUrl).reply(200, { traces: 'invalid' });
+
+ return expect(client.fetchTraces()).rejects.toThrow(EXPECTED_ERROR_MESSAGE);
+ });
+ });
+
describe('fetchTraces', () => {
- it('should fetch traces from the tracing URL', async () => {
+ it('fetches traces from the tracing URL', async () => {
const mockTraces = [
- { id: 1, spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }] },
- { id: 2, spans: [{ duration_nano: 2000 }] },
+ { trace_id: 'trace-1', spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }] },
+ { trace_id: 'trace-2', spans: [{ duration_nano: 2000 }] },
];
axiosMock.onGet(tracingUrl).reply(200, {
@@ -40,27 +81,127 @@ describe('buildClient', () => {
expect(axios.get).toHaveBeenCalledTimes(1);
expect(axios.get).toHaveBeenCalledWith(tracingUrl, {
withCredentials: true,
+ params: new URLSearchParams(),
});
expect(result).toEqual([
- { id: 1, spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }], duration: 3 },
- { id: 2, spans: [{ duration_nano: 2000 }], duration: 2 },
+ {
+ ...mockTraces[0],
+ duration: 1,
+ },
+ {
+ ...mockTraces[1],
+ duration: 2,
+ },
]);
});
it('rejects if traces are missing', () => {
axiosMock.onGet(tracingUrl).reply(200, {});
- return expect(client.fetchTraces()).rejects.toThrow(
- 'traces are missing/invalid in the response',
- );
+ return expect(client.fetchTraces()).rejects.toThrow(EXPECTED_ERROR_MESSAGE);
});
it('rejects if traces are invalid', () => {
axiosMock.onGet(tracingUrl).reply(200, { traces: 'invalid' });
- return expect(client.fetchTraces()).rejects.toThrow(
- 'traces are missing/invalid in the response',
- );
+ return expect(client.fetchTraces()).rejects.toThrow(EXPECTED_ERROR_MESSAGE);
+ });
+
+ describe('query filter', () => {
+ beforeEach(() => {
+ axiosMock.onGet(tracingUrl).reply(200, {
+ traces: [],
+ });
+ });
+
+ const getQueryParam = () => decodeURIComponent(axios.get.mock.calls[0][1].params.toString());
+
+ it('does not set any query param without filters', async () => {
+ await client.fetchTraces();
+
+ expect(getQueryParam()).toBe('');
+ });
+
+ it('converts filter to proper query params', async () => {
+ await client.fetchTraces({
+ durationMs: [
+ { operator: '>', value: '100' },
+ { operator: '<', value: '1000' },
+ ],
+ operation: [
+ { operator: '=', value: 'op' },
+ { operator: '!=', value: 'not-op' },
+ ],
+ serviceName: [
+ { operator: '=', value: 'service' },
+ { operator: '!=', value: 'not-service' },
+ ],
+ period: [{ operator: '=', value: '5m' }],
+ traceId: [
+ { operator: '=', value: 'trace-id' },
+ { operator: '!=', value: 'not-trace-id' },
+ ],
+ });
+ expect(getQueryParam()).toBe(
+ 'gt[duration_nano]=100000&lt[duration_nano]=1000000' +
+ '&operation=op&not[operation]=not-op' +
+ '&service_name=service&not[service_name]=not-service' +
+ '&period=5m' +
+ '&trace_id=trace-id&not[trace_id]=not-trace-id',
+ );
+ });
+
+ it('handles repeated params', async () => {
+ await client.fetchTraces({
+ operation: [
+ { operator: '=', value: 'op' },
+ { operator: '=', value: 'op2' },
+ ],
+ });
+ expect(getQueryParam()).toBe('operation=op&operation=op2');
+ });
+
+ it('ignores unsupported filters', async () => {
+ await client.fetchTraces({
+ unsupportedFilter: [{ operator: '=', value: 'foo' }],
+ });
+
+ expect(getQueryParam()).toBe('');
+ });
+
+ it('ignores empty filters', async () => {
+ await client.fetchTraces({
+ durationMs: null,
+ traceId: undefined,
+ });
+
+ expect(getQueryParam()).toBe('');
+ });
+
+ it('ignores unsupported operators', async () => {
+ await client.fetchTraces({
+ durationMs: [
+ { operator: '*', value: 'foo' },
+ { operator: '=', value: 'foo' },
+ { operator: '!=', value: 'foo' },
+ ],
+ operation: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ serviceName: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ period: [{ operator: '!=', value: 'foo' }],
+ traceId: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ });
+
+ expect(getQueryParam()).toBe('');
+ });
});
});
});
diff --git a/spec/frontend/organizations/groups_and_projects/components/app_spec.js b/spec/frontend/organizations/groups_and_projects/components/app_spec.js
index 24e1a26336c..64182b74e4f 100644
--- a/spec/frontend/organizations/groups_and_projects/components/app_spec.js
+++ b/spec/frontend/organizations/groups_and_projects/components/app_spec.js
@@ -1,99 +1,172 @@
-import VueApollo from 'vue-apollo';
-import Vue from 'vue';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlCollapsibleListbox, GlSorting, GlSortingItem } from '@gitlab/ui';
import App from '~/organizations/groups_and_projects/components/app.vue';
-import resolvers from '~/organizations/groups_and_projects/graphql/resolvers';
-import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
-import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import { createAlert } from '~/alert';
+import GroupsPage from '~/organizations/groups_and_projects/components/groups_page.vue';
+import ProjectsPage from '~/organizations/groups_and_projects/components/projects_page.vue';
+import {
+ DISPLAY_QUERY_GROUPS,
+ DISPLAY_QUERY_PROJECTS,
+ SORT_ITEM_CREATED,
+ SORT_DIRECTION_DESC,
+} from '~/organizations/groups_and_projects/constants';
+import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
+import {
+ FILTERED_SEARCH_TERM,
+ TOKEN_EMPTY_SEARCH_TERM,
+} from '~/vue_shared/components/filtered_search_bar/constants';
+import { createRouter } from '~/organizations/groups_and_projects';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import { organizationProjects } from './mock_data';
-
-jest.mock('~/alert');
-
-Vue.use(VueApollo);
-jest.useFakeTimers();
describe('GroupsAndProjectsApp', () => {
+ const router = createRouter();
+ const routerMock = {
+ push: jest.fn(),
+ };
let wrapper;
- let mockApollo;
- const createComponent = ({ mockResolvers = resolvers } = {}) => {
- mockApollo = createMockApollo([], mockResolvers);
-
- wrapper = shallowMountExtended(App, { apolloProvider: mockApollo });
+ const createComponent = ({ routeQuery = { search: 'foo' } } = {}) => {
+ wrapper = shallowMountExtended(App, {
+ router,
+ mocks: { $route: { path: '/', query: routeQuery }, $router: routerMock },
+ });
};
- afterEach(() => {
- mockApollo = null;
+ const findFilteredSearchBar = () => wrapper.findComponent(FilteredSearchBar);
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findSort = () => wrapper.findComponent(GlSorting);
+
+ describe.each`
+ display | expectedComponent | expectedDisplayListboxSelectedProp
+ ${null} | ${GroupsPage} | ${DISPLAY_QUERY_GROUPS}
+ ${'unsupported_value'} | ${GroupsPage} | ${DISPLAY_QUERY_GROUPS}
+ ${DISPLAY_QUERY_GROUPS} | ${GroupsPage} | ${DISPLAY_QUERY_GROUPS}
+ ${DISPLAY_QUERY_PROJECTS} | ${ProjectsPage} | ${DISPLAY_QUERY_PROJECTS}
+ `(
+ 'when `display` query string is $display',
+ ({ display, expectedComponent, expectedDisplayListboxSelectedProp }) => {
+ beforeEach(() => {
+ createComponent({ routeQuery: { display } });
+ });
+
+ it('renders expected component', () => {
+ expect(wrapper.findComponent(expectedComponent).exists()).toBe(true);
+ });
+
+ it('renders display listbox with correct props', () => {
+ expect(findListbox().props()).toMatchObject({
+ selected: expectedDisplayListboxSelectedProp,
+ items: App.displayListboxItems,
+ headerText: App.i18n.displayListboxHeaderText,
+ });
+ });
+ },
+ );
+
+ it('renders filtered search bar with correct props', () => {
+ createComponent();
+
+ expect(findFilteredSearchBar().props()).toMatchObject({
+ namespace: App.filteredSearch.namespace,
+ tokens: App.filteredSearch.tokens,
+ initialFilterValue: [
+ {
+ type: FILTERED_SEARCH_TERM,
+ value: {
+ data: 'foo',
+ operator: undefined,
+ },
+ },
+ ],
+ syncFilterAndSort: true,
+ recentSearchesStorageKey: App.filteredSearch.recentSearchesStorageKey,
+ searchInputPlaceholder: App.i18n.searchInputPlaceholder,
+ });
+ });
+
+ it('renders sort dropdown with sort items and correct props', () => {
+ createComponent();
+
+ const sortItems = wrapper.findAllComponents(GlSortingItem).wrappers.map((sortItemWrapper) => ({
+ active: sortItemWrapper.attributes('active'),
+ text: sortItemWrapper.text(),
+ }));
+
+ expect(findSort().props()).toMatchObject({
+ isAscending: true,
+ text: SORT_ITEM_CREATED.text,
+ });
+ expect(sortItems).toEqual([
+ {
+ active: 'true',
+ text: SORT_ITEM_CREATED.text,
+ },
+ ]);
});
- describe('when API call is loading', () => {
+ describe('when filtered search bar is submitted', () => {
+ const searchTerm = 'foo bar';
+
beforeEach(() => {
- const mockResolvers = {
- Query: {
- organization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
- },
- };
+ createComponent();
- createComponent({ mockResolvers });
+ findFilteredSearchBar().vm.$emit('onFilter', [
+ { id: 'token-0', type: FILTERED_SEARCH_TERM, value: { data: searchTerm } },
+ ]);
});
- it('renders loading icon', () => {
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ it('updates `search` query string', () => {
+ expect(routerMock.push).toHaveBeenCalledWith({ query: { search: searchTerm } });
});
});
- describe('when API call is successful', () => {
+ describe('when display listbox is changed', () => {
beforeEach(() => {
createComponent();
+
+ findListbox().vm.$emit('select', DISPLAY_QUERY_PROJECTS);
});
- it('renders `ProjectsList` component and passes correct props', async () => {
- jest.runAllTimers();
- await waitForPromises();
-
- expect(wrapper.findComponent(ProjectsList).props()).toEqual({
- projects: organizationProjects.projects.nodes.map(
- ({ id, nameWithNamespace, accessLevel, ...project }) => ({
- ...project,
- id: getIdFromGraphQLId(id),
- name: nameWithNamespace,
- permissions: {
- projectAccess: {
- accessLevel: accessLevel.integerValue,
- },
- },
- }),
- ),
- showProjectIcon: true,
- });
+ it('updates `display` query string', () => {
+ expect(routerMock.push).toHaveBeenCalledWith({ query: { display: DISPLAY_QUERY_PROJECTS } });
});
});
- describe('when API call is not successful', () => {
- const error = new Error();
-
+ describe('when sort item is changed', () => {
beforeEach(() => {
- const mockResolvers = {
- Query: {
- organization: jest.fn().mockRejectedValueOnce(error),
- },
- };
+ createComponent();
- createComponent({ mockResolvers });
+ wrapper.findComponent(GlSortingItem).trigger('click', SORT_ITEM_CREATED);
});
- it('displays error alert', async () => {
- await waitForPromises();
+ it('updates `sort_name` query string', () => {
+ expect(routerMock.push).toHaveBeenCalledWith({
+ query: { sort_name: SORT_ITEM_CREATED.name, search: 'foo' },
+ });
+ });
+ });
- expect(createAlert).toHaveBeenCalledWith({
- message: App.i18n.errorMessage,
- error,
- captureError: true,
+ describe('when sort direction is changed', () => {
+ beforeEach(() => {
+ createComponent();
+
+ findSort().vm.$emit('sortDirectionChange', false);
+ });
+
+ it('updates `sort_direction` query string', () => {
+ expect(routerMock.push).toHaveBeenCalledWith({
+ query: { sort_direction: SORT_DIRECTION_DESC, search: 'foo' },
});
});
});
+
+ describe('when `search` query string is not set', () => {
+ beforeEach(() => {
+ createComponent({ routeQuery: {} });
+ });
+
+ it('passes empty search term token to filtered search', () => {
+ expect(findFilteredSearchBar().props('initialFilterValue')).toEqual([
+ TOKEN_EMPTY_SEARCH_TERM,
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/organizations/groups_and_projects/components/groups_page_spec.js b/spec/frontend/organizations/groups_and_projects/components/groups_page_spec.js
new file mode 100644
index 00000000000..537f8114fcf
--- /dev/null
+++ b/spec/frontend/organizations/groups_and_projects/components/groups_page_spec.js
@@ -0,0 +1,88 @@
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { GlLoadingIcon } from '@gitlab/ui';
+import GroupsPage from '~/organizations/groups_and_projects/components/groups_page.vue';
+import { formatGroups } from '~/organizations/groups_and_projects/utils';
+import resolvers from '~/organizations/groups_and_projects/graphql/resolvers';
+import GroupsList from '~/vue_shared/components/groups_list/groups_list.vue';
+import { createAlert } from '~/alert';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { organizationGroups } from '../mock_data';
+
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+jest.useFakeTimers();
+
+describe('GroupsPage', () => {
+ let wrapper;
+ let mockApollo;
+
+ const createComponent = ({ mockResolvers = resolvers } = {}) => {
+ mockApollo = createMockApollo([], mockResolvers);
+
+ wrapper = shallowMountExtended(GroupsPage, { apolloProvider: mockApollo });
+ };
+
+ afterEach(() => {
+ mockApollo = null;
+ });
+
+ describe('when API call is loading', () => {
+ beforeEach(() => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ });
+
+ it('renders loading icon', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('when API call is successful', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders `GroupsList` component and passes correct props', async () => {
+ jest.runAllTimers();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(GroupsList).props()).toEqual({
+ groups: formatGroups(organizationGroups.nodes),
+ showGroupIcon: true,
+ });
+ });
+ });
+
+ describe('when API call is not successful', () => {
+ const error = new Error();
+
+ beforeEach(() => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ });
+
+ it('displays error alert', async () => {
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: GroupsPage.i18n.errorMessage,
+ error,
+ captureError: true,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/organizations/groups_and_projects/components/mock_data.js b/spec/frontend/organizations/groups_and_projects/components/mock_data.js
deleted file mode 100644
index c3276450745..00000000000
--- a/spec/frontend/organizations/groups_and_projects/components/mock_data.js
+++ /dev/null
@@ -1,98 +0,0 @@
-export const organizationProjects = {
- id: 'gid://gitlab/Organization/1',
- __typename: 'Organization',
- projects: {
- nodes: [
- {
- id: 'gid://gitlab/Project/8',
- nameWithNamespace: 'Twitter / Typeahead.Js',
- webUrl: 'http://127.0.0.1:3000/twitter/Typeahead.Js',
- topics: ['JavaScript', 'Vue.js'],
- forksCount: 4,
- avatarUrl: null,
- starCount: 0,
- visibility: 'public',
- openIssuesCount: 48,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:59" dir="auto">Optio et reprehenderit enim doloremque deserunt et commodi.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- accessLevel: {
- integerValue: 30,
- },
- },
- {
- id: 'gid://gitlab/Project/7',
- nameWithNamespace: 'Flightjs / Flight',
- webUrl: 'http://127.0.0.1:3000/flightjs/Flight',
- topics: [],
- forksCount: 0,
- avatarUrl: null,
- starCount: 0,
- visibility: 'private',
- openIssuesCount: 37,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:49" dir="auto">Dolor dicta rerum et ut eius voluptate earum qui.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- accessLevel: {
- integerValue: 20,
- },
- },
- {
- id: 'gid://gitlab/Project/6',
- nameWithNamespace: 'Jashkenas / Underscore',
- webUrl: 'http://127.0.0.1:3000/jashkenas/Underscore',
- topics: [],
- forksCount: 0,
- avatarUrl: null,
- starCount: 0,
- visibility: 'private',
- openIssuesCount: 34,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:52" dir="auto">Incidunt est aliquam autem nihil eveniet quis autem.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- accessLevel: {
- integerValue: 40,
- },
- },
- {
- id: 'gid://gitlab/Project/5',
- nameWithNamespace: 'Commit451 / Lab Coat',
- webUrl: 'http://127.0.0.1:3000/Commit451/lab-coat',
- topics: [],
- forksCount: 0,
- avatarUrl: null,
- starCount: 0,
- visibility: 'internal',
- openIssuesCount: 49,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:34" dir="auto">Sint eos dolorem impedit rerum et.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- accessLevel: {
- integerValue: 10,
- },
- },
- {
- id: 'gid://gitlab/Project/1',
- nameWithNamespace: 'Toolbox / Gitlab Smoke Tests',
- webUrl: 'http://127.0.0.1:3000/toolbox/gitlab-smoke-tests',
- topics: [],
- forksCount: 0,
- avatarUrl: null,
- starCount: 0,
- visibility: 'internal',
- openIssuesCount: 34,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:40" dir="auto">Veritatis error laboriosam libero autem.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- accessLevel: {
- integerValue: 30,
- },
- },
- ],
- },
-};
diff --git a/spec/frontend/organizations/groups_and_projects/components/projects_page_spec.js b/spec/frontend/organizations/groups_and_projects/components/projects_page_spec.js
new file mode 100644
index 00000000000..7cadcab5021
--- /dev/null
+++ b/spec/frontend/organizations/groups_and_projects/components/projects_page_spec.js
@@ -0,0 +1,88 @@
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { GlLoadingIcon } from '@gitlab/ui';
+import ProjectsPage from '~/organizations/groups_and_projects/components/projects_page.vue';
+import { formatProjects } from '~/organizations/groups_and_projects/utils';
+import resolvers from '~/organizations/groups_and_projects/graphql/resolvers';
+import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
+import { createAlert } from '~/alert';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { organizationProjects } from '../mock_data';
+
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+jest.useFakeTimers();
+
+describe('ProjectsPage', () => {
+ let wrapper;
+ let mockApollo;
+
+ const createComponent = ({ mockResolvers = resolvers } = {}) => {
+ mockApollo = createMockApollo([], mockResolvers);
+
+ wrapper = shallowMountExtended(ProjectsPage, { apolloProvider: mockApollo });
+ };
+
+ afterEach(() => {
+ mockApollo = null;
+ });
+
+ describe('when API call is loading', () => {
+ beforeEach(() => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ });
+
+ it('renders loading icon', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('when API call is successful', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders `ProjectsList` component and passes correct props', async () => {
+ jest.runAllTimers();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(ProjectsList).props()).toEqual({
+ projects: formatProjects(organizationProjects.nodes),
+ showProjectIcon: true,
+ });
+ });
+ });
+
+ describe('when API call is not successful', () => {
+ const error = new Error();
+
+ beforeEach(() => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ });
+
+ it('displays error alert', async () => {
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: ProjectsPage.i18n.errorMessage,
+ error,
+ captureError: true,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/organizations/groups_and_projects/mock_data.js b/spec/frontend/organizations/groups_and_projects/mock_data.js
new file mode 100644
index 00000000000..eb829a24f50
--- /dev/null
+++ b/spec/frontend/organizations/groups_and_projects/mock_data.js
@@ -0,0 +1,252 @@
+export const organization = {
+ id: 'gid://gitlab/Organization/1',
+ __typename: 'Organization',
+};
+
+export const organizationProjects = {
+ nodes: [
+ {
+ id: 'gid://gitlab/Project/8',
+ nameWithNamespace: 'Twitter / Typeahead.Js',
+ webUrl: 'http://127.0.0.1:3000/twitter/Typeahead.Js',
+ topics: ['JavaScript', 'Vue.js'],
+ forksCount: 4,
+ avatarUrl: null,
+ starCount: 0,
+ visibility: 'public',
+ openIssuesCount: 48,
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:59" dir="auto">Optio et reprehenderit enim doloremque deserunt et commodi.</p>',
+ issuesAccessLevel: 'enabled',
+ forkingAccessLevel: 'enabled',
+ isForked: true,
+ accessLevel: {
+ integerValue: 30,
+ },
+ },
+ {
+ id: 'gid://gitlab/Project/7',
+ nameWithNamespace: 'Flightjs / Flight',
+ webUrl: 'http://127.0.0.1:3000/flightjs/Flight',
+ topics: [],
+ forksCount: 0,
+ avatarUrl: null,
+ starCount: 0,
+ visibility: 'private',
+ openIssuesCount: 37,
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:49" dir="auto">Dolor dicta rerum et ut eius voluptate earum qui.</p>',
+ issuesAccessLevel: 'enabled',
+ forkingAccessLevel: 'enabled',
+ isForked: false,
+ accessLevel: {
+ integerValue: 20,
+ },
+ },
+ {
+ id: 'gid://gitlab/Project/6',
+ nameWithNamespace: 'Jashkenas / Underscore',
+ webUrl: 'http://127.0.0.1:3000/jashkenas/Underscore',
+ topics: [],
+ forksCount: 0,
+ avatarUrl: null,
+ starCount: 0,
+ visibility: 'private',
+ openIssuesCount: 34,
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:52" dir="auto">Incidunt est aliquam autem nihil eveniet quis autem.</p>',
+ issuesAccessLevel: 'enabled',
+ forkingAccessLevel: 'enabled',
+ isForked: false,
+ accessLevel: {
+ integerValue: 40,
+ },
+ },
+ {
+ id: 'gid://gitlab/Project/5',
+ nameWithNamespace: 'Commit451 / Lab Coat',
+ webUrl: 'http://127.0.0.1:3000/Commit451/lab-coat',
+ topics: [],
+ forksCount: 0,
+ avatarUrl: null,
+ starCount: 0,
+ visibility: 'internal',
+ openIssuesCount: 49,
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:34" dir="auto">Sint eos dolorem impedit rerum et.</p>',
+ issuesAccessLevel: 'enabled',
+ forkingAccessLevel: 'enabled',
+ isForked: false,
+ accessLevel: {
+ integerValue: 10,
+ },
+ },
+ {
+ id: 'gid://gitlab/Project/1',
+ nameWithNamespace: 'Toolbox / Gitlab Smoke Tests',
+ webUrl: 'http://127.0.0.1:3000/toolbox/gitlab-smoke-tests',
+ topics: [],
+ forksCount: 0,
+ avatarUrl: null,
+ starCount: 0,
+ visibility: 'internal',
+ openIssuesCount: 34,
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:40" dir="auto">Veritatis error laboriosam libero autem.</p>',
+ issuesAccessLevel: 'enabled',
+ forkingAccessLevel: 'enabled',
+ isForked: false,
+ accessLevel: {
+ integerValue: 30,
+ },
+ },
+ ],
+};
+
+export const organizationGroups = {
+ nodes: [
+ {
+ id: 'gid://gitlab/Group/29',
+ fullName: 'Commit451',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/Commit451',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:52" dir="auto">Autem praesentium vel ut ratione itaque ullam culpa.</p>',
+ avatarUrl: null,
+ descendantGroupsCount: 0,
+ projectsCount: 3,
+ groupMembersCount: 2,
+ visibility: 'public',
+ accessLevel: {
+ integerValue: 30,
+ },
+ },
+ {
+ id: 'gid://gitlab/Group/33',
+ fullName: 'Flightjs',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/flightjs',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:60" dir="auto">Ipsa reiciendis deleniti officiis illum nostrum quo aliquam.</p>',
+ avatarUrl: null,
+ descendantGroupsCount: 4,
+ projectsCount: 3,
+ groupMembersCount: 1,
+ visibility: 'private',
+ accessLevel: {
+ integerValue: 20,
+ },
+ },
+ {
+ id: 'gid://gitlab/Group/24',
+ fullName: 'Gitlab Org',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/gitlab-org',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:64" dir="auto">Dolorem dolorem omnis impedit cupiditate pariatur officia velit.</p>',
+ avatarUrl: null,
+ descendantGroupsCount: 1,
+ projectsCount: 1,
+ groupMembersCount: 2,
+ visibility: 'internal',
+ accessLevel: {
+ integerValue: 10,
+ },
+ },
+ {
+ id: 'gid://gitlab/Group/27',
+ fullName: 'Gnuwget',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/gnuwgetf',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:47" dir="auto">Culpa soluta aut eius dolores est vel sapiente.</p>',
+ avatarUrl: null,
+ descendantGroupsCount: 4,
+ projectsCount: 2,
+ groupMembersCount: 3,
+ visibility: 'public',
+ accessLevel: {
+ integerValue: 40,
+ },
+ },
+ {
+ id: 'gid://gitlab/Group/31',
+ fullName: 'Jashkenas',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/jashkenas',
+ descriptionHtml: '<p data-sourcepos="1:1-1:25" dir="auto">Ut ut id aliquid nostrum.</p>',
+ avatarUrl: null,
+ descendantGroupsCount: 3,
+ projectsCount: 3,
+ groupMembersCount: 10,
+ visibility: 'private',
+ accessLevel: {
+ integerValue: 10,
+ },
+ },
+ {
+ id: 'gid://gitlab/Group/22',
+ fullName: 'Toolbox',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/toolbox',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:46" dir="auto">Quo voluptatem magnam facere voluptates alias.</p>',
+ avatarUrl: null,
+ descendantGroupsCount: 2,
+ projectsCount: 3,
+ groupMembersCount: 40,
+ visibility: 'internal',
+ accessLevel: {
+ integerValue: 30,
+ },
+ },
+ {
+ id: 'gid://gitlab/Group/35',
+ fullName: 'Twitter',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/twitter',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:40" dir="auto">Quae nulla consequatur assumenda id quo.</p>',
+ avatarUrl: null,
+ descendantGroupsCount: 20,
+ projectsCount: 30,
+ groupMembersCount: 100,
+ visibility: 'public',
+ accessLevel: {
+ integerValue: 40,
+ },
+ },
+ {
+ id: 'gid://gitlab/Group/73',
+ fullName: 'test',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/test',
+ descriptionHtml: '',
+ avatarUrl: null,
+ descendantGroupsCount: 1,
+ projectsCount: 1,
+ groupMembersCount: 1,
+ visibility: 'private',
+ accessLevel: {
+ integerValue: 30,
+ },
+ },
+ {
+ id: 'gid://gitlab/Group/74',
+ fullName: 'Twitter / test subgroup',
+ parent: {
+ id: 'gid://gitlab/Group/35',
+ },
+ webUrl: 'http://127.0.0.1:3000/groups/twitter/test-subgroup',
+ descriptionHtml: '',
+ avatarUrl: null,
+ descendantGroupsCount: 4,
+ projectsCount: 4,
+ groupMembersCount: 4,
+ visibility: 'internal',
+ accessLevel: {
+ integerValue: 20,
+ },
+ },
+ ],
+};
diff --git a/spec/frontend/organizations/groups_and_projects/utils_spec.js b/spec/frontend/organizations/groups_and_projects/utils_spec.js
new file mode 100644
index 00000000000..2cb1ee02061
--- /dev/null
+++ b/spec/frontend/organizations/groups_and_projects/utils_spec.js
@@ -0,0 +1,35 @@
+import { formatProjects, formatGroups } from '~/organizations/groups_and_projects/utils';
+import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/projects_list/constants';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { organizationProjects, organizationGroups } from './mock_data';
+
+describe('formatProjects', () => {
+ it('correctly formats the projects', () => {
+ const [firstMockProject] = organizationProjects.nodes;
+ const formattedProjects = formatProjects(organizationProjects.nodes);
+ const [firstFormattedProject] = formattedProjects;
+
+ expect(firstFormattedProject).toMatchObject({
+ id: getIdFromGraphQLId(firstMockProject.id),
+ name: firstMockProject.nameWithNamespace,
+ permissions: {
+ projectAccess: {
+ accessLevel: firstMockProject.accessLevel.integerValue,
+ },
+ },
+ actions: [ACTION_EDIT, ACTION_DELETE],
+ });
+ expect(formattedProjects.length).toBe(organizationProjects.nodes.length);
+ });
+});
+
+describe('formatGroups', () => {
+ it('correctly formats the groups', () => {
+ const [firstMockGroup] = organizationGroups.nodes;
+ const formattedGroups = formatGroups(organizationGroups.nodes);
+ const [firstFormattedGroup] = formattedGroups;
+
+ expect(firstFormattedGroup.id).toBe(getIdFromGraphQLId(firstMockGroup.id));
+ expect(formattedGroups.length).toBe(organizationGroups.nodes.length);
+ });
+});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
index 01089422376..500fb0d7598 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
@@ -1,6 +1,6 @@
-import { GlDropdownItem, GlIcon, GlDropdown } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlDisclosureDropdownItem, GlIcon } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
import { numberToHumanSize } from '~/lib/utils/number_utils';
import { useFakeDate } from 'helpers/fake_date';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -21,7 +21,6 @@ import {
ROOT_IMAGE_TOOLTIP,
} from '~/packages_and_registries/container_registry/explorer/constants';
import getContainerRepositoryMetadata from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_metadata.query.graphql';
-import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import { containerRepositoryMock, imageTagsCountMock } from '../../mock_data';
describe('Details Header', () => {
@@ -39,17 +38,11 @@ describe('Details Header', () => {
const findTitle = () => wrapper.findByTestId('title');
const findTagsCount = () => wrapper.findByTestId('tags-count');
const findCleanup = () => wrapper.findByTestId('cleanup');
- const findDeleteButton = () => wrapper.findComponent(GlDropdownItem);
+ const findDeleteButton = () => wrapper.findComponent(GlDisclosureDropdownItem);
const findInfoIcon = () => wrapper.findComponent(GlIcon);
- const findMenu = () => wrapper.findComponent(GlDropdown);
+ const findMenu = () => wrapper.findComponent(GlDisclosureDropdown);
const findSize = () => wrapper.findByTestId('image-size');
- const waitForMetadataItems = async () => {
- // Metadata items are printed by a loop in the title-area and it takes two ticks for them to be available
- await nextTick();
- await nextTick();
- };
-
const mountComponent = ({
propsData = { image: defaultImage },
resolver = jest.fn().mockResolvedValue(imageTagsCountMock()),
@@ -65,11 +58,6 @@ describe('Details Header', () => {
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
},
- stubs: {
- TitleArea,
- GlDropdown,
- GlDropdownItem,
- },
});
};
@@ -134,46 +122,45 @@ describe('Details Header', () => {
},
);
- describe('delete button', () => {
- it('exists', () => {
- mountComponent();
+ it('has the correct props', () => {
+ mountComponent();
- expect(findDeleteButton().exists()).toBe(true);
+ expect(findMenu().props()).toMatchObject({
+ category: 'tertiary',
+ icon: 'ellipsis_v',
+ placement: 'right',
+ textSrOnly: true,
+ noCaret: true,
+ toggleText: 'More actions',
});
+ });
- it('has the correct text', () => {
+ describe('delete item', () => {
+ beforeEach(() => {
mountComponent();
-
- expect(findDeleteButton().text()).toBe('Delete image repository');
});
- it('has the correct props', () => {
- mountComponent();
+ it('exists', () => {
+ expect(findDeleteButton().exists()).toBe(true);
+ });
- expect(findDeleteButton().attributes()).toMatchObject(
- expect.objectContaining({
- variant: 'danger',
- }),
- );
+ it('has the correct text', () => {
+ expect(findDeleteButton().text()).toBe('Delete image repository');
});
it('emits the correct event', () => {
- mountComponent();
+ findDeleteButton().vm.$emit('action');
- findDeleteButton().vm.$emit('click');
-
- expect(wrapper.emitted('delete')).toEqual([[]]);
+ expect(wrapper.emitted('delete')).toHaveLength(1);
});
});
});
describe('metadata items', () => {
describe('tags count', () => {
- it('displays "-- tags" while loading', async () => {
+ it('displays "-- tags" while loading', () => {
mountComponent();
- await waitForMetadataItems();
-
expect(findTagsCount().props('text')).toBe('-- tags');
});
@@ -181,7 +168,6 @@ describe('Details Header', () => {
mountComponent();
await waitForPromises();
- await waitForMetadataItems();
expect(findTagsCount().props('text')).toBe('13 tags');
});
@@ -192,23 +178,20 @@ describe('Details Header', () => {
});
await waitForPromises();
- await waitForMetadataItems();
expect(findTagsCount().props('text')).toBe('1 tag');
});
- it('has the correct icon', async () => {
+ it('has the correct icon', () => {
mountComponent();
- await waitForMetadataItems();
expect(findTagsCount().props('icon')).toBe('tag');
});
});
describe('size metadata item', () => {
- it('when size is not returned, it hides the item', async () => {
+ it('when size is not returned, it hides the item', () => {
mountComponent();
- await waitForMetadataItems();
expect(findSize().exists()).toBe(false);
});
@@ -220,7 +203,6 @@ describe('Details Header', () => {
});
await waitForPromises();
- await waitForMetadataItems();
expect(findSize().props()).toMatchObject({
icon: 'disk',
@@ -230,18 +212,11 @@ describe('Details Header', () => {
});
describe('cleanup metadata item', () => {
- it('has the correct icon', async () => {
- mountComponent();
- await waitForMetadataItems();
-
- expect(findCleanup().props('icon')).toBe('expire');
- });
-
- it('when cleanup is not scheduled', async () => {
+ it('when cleanup is not scheduled has the right icon and props', () => {
mountComponent();
- await waitForMetadataItems();
expect(findCleanup().props()).toMatchObject({
+ icon: 'expire',
text: CLEANUP_DISABLED_TEXT,
textTooltip: CLEANUP_DISABLED_TOOLTIP,
});
@@ -255,7 +230,7 @@ describe('Details Header', () => {
${UNFINISHED_STATUS} | ${'Cleanup incomplete'} | ${CLEANUP_UNFINISHED_TOOLTIP}
`(
'when the status is $status the text is $text and the tooltip is $tooltip',
- async ({ status, text, tooltip }) => {
+ ({ status, text, tooltip }) => {
mountComponent({
propsData: {
image: {
@@ -267,7 +242,6 @@ describe('Details Header', () => {
},
},
});
- await waitForMetadataItems();
expect(findCleanup().props()).toMatchObject({
text,
@@ -278,25 +252,22 @@ describe('Details Header', () => {
});
describe('visibility and created at', () => {
- it('has created text', async () => {
+ it('has created text', () => {
mountComponent();
- await waitForMetadataItems();
expect(findCreatedAndVisibility().props('text')).toBe('Created Nov 3, 2020 13:29');
});
describe('visibility icon', () => {
- it('shows an eye when the project is public', async () => {
+ it('shows an eye when the project is public', () => {
mountComponent();
- await waitForMetadataItems();
expect(findCreatedAndVisibility().props('icon')).toBe('eye');
});
- it('shows an eye slashed when the project is not public', async () => {
+ it('shows an eye slashed when the project is not public', () => {
mountComponent({
propsData: { image: { ...defaultImage, project: { visibility: 'private' } } },
});
- await waitForMetadataItems();
expect(findCreatedAndVisibility().props('icon')).toBe('eye-slash');
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
index d4b69d3e8e8..09e2c35d449 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
@@ -318,7 +318,6 @@ describe('tags list row', () => {
expect(findDeleteButton().props('item').extraAttrs).toMatchObject({
class: 'gl-text-red-500!',
'data-testid': 'single-delete-button',
- 'data-qa-selector': 'tag_delete_button',
});
expect(findDeleteButton().text()).toBe(REMOVE_TAG_BUTTON_TITLE);
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js
index 0cbb9eab018..8b8241f395f 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js
@@ -9,6 +9,7 @@ import component from '~/packages_and_registries/container_registry/explorer/com
import TagsListRow from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue';
import TagsLoader from '~/packages_and_registries/shared/components/tags_loader.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
+import PersistedPagination from '~/packages_and_registries/shared/components/persisted_pagination.vue';
import PersistedSearch from '~/packages_and_registries/shared/components/persisted_search.vue';
import getContainerRepositoryTagsQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql';
import deleteContainerRepositoryTagsMutation from '~/packages_and_registries/container_registry/explorer/graphql/mutations/delete_container_repository_tags.mutation.graphql';
@@ -40,6 +41,7 @@ describe('Tags List', () => {
};
const findDeleteModal = () => wrapper.findComponent(DeleteModal);
+ const findPersistedPagination = () => wrapper.findComponent(PersistedPagination);
const findPersistedSearch = () => wrapper.findComponent(PersistedSearch);
const findTagsListRow = () => wrapper.findAllComponents(TagsListRow);
const findRegistryList = () => wrapper.findComponent(RegistryList);
@@ -47,7 +49,7 @@ describe('Tags List', () => {
const findTagsLoader = () => wrapper.findComponent(TagsLoader);
const fireFirstSortUpdate = () => {
- findPersistedSearch().vm.$emit('update', { sort: 'NAME_ASC', filters: [] });
+ findPersistedSearch().vm.$emit('update', { sort: 'NAME_ASC', filters: [], pageInfo: {} });
};
const waitForApolloRequestRender = async () => {
@@ -103,18 +105,24 @@ describe('Tags List', () => {
it('binds the correct props', () => {
expect(findRegistryList().props()).toMatchObject({
title: '2 tags',
- pagination: tagsPageInfo,
items: tags,
idProperty: 'name',
hiddenDelete: false,
});
});
+ it('has persisted pagination', () => {
+ expect(findPersistedPagination().props('pagination')).toEqual(tagsPageInfo);
+ });
+
describe('events', () => {
- it('prev-page fetch the previous page', async () => {
- findRegistryList().vm.$emit('prev-page');
+ it('prev-page fetches the previous page', async () => {
+ findPersistedPagination().vm.$emit('prev');
await waitForPromises();
+ // we are fetching previous page after load,
+ // so we expect the resolver to have been called twice
+ expect(resolver).toHaveBeenCalledTimes(2);
expect(resolver).toHaveBeenCalledWith({
first: null,
name: '',
@@ -125,10 +133,13 @@ describe('Tags List', () => {
});
});
- it('next-page fetch the previous page', async () => {
- findRegistryList().vm.$emit('next-page');
+ it('next-page fetches the next page', async () => {
+ findPersistedPagination().vm.$emit('next');
await waitForPromises();
+ // we are fetching next page after load,
+ // so we expect the resolver to have been called twice
+ expect(resolver).toHaveBeenCalledTimes(2);
expect(resolver).toHaveBeenCalledWith({
after: tagsPageInfo.endCursor,
first: GRAPHQL_PAGE_SIZE,
@@ -182,6 +193,49 @@ describe('Tags List', () => {
});
});
+ describe('when persisted search emits update', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('with before calls resolver with pagination params', async () => {
+ findPersistedSearch().vm.$emit('update', {
+ sort: 'NAME_ASC',
+ filters: [],
+ pageInfo: { before: tagsPageInfo.startCursor },
+ });
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenCalledTimes(1);
+ expect(resolver).toHaveBeenCalledWith({
+ first: null,
+ name: '',
+ sort: 'NAME_ASC',
+ before: tagsPageInfo.startCursor,
+ last: GRAPHQL_PAGE_SIZE,
+ id: '1',
+ });
+ });
+
+ it('with after calls resolver with pagination params', async () => {
+ findPersistedSearch().vm.$emit('update', {
+ sort: 'NAME_ASC',
+ filters: [],
+ pageInfo: { after: tagsPageInfo.endCursor },
+ });
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenCalledTimes(1);
+ expect(resolver).toHaveBeenCalledWith({
+ after: tagsPageInfo.endCursor,
+ first: GRAPHQL_PAGE_SIZE,
+ name: '',
+ sort: 'NAME_ASC',
+ id: '1',
+ });
+ });
+ });
+
describe('list rows', () => {
it('one row exist for each tag', async () => {
mountComponent();
@@ -334,31 +388,44 @@ describe('Tags List', () => {
let mutationResolver;
describe('when mutation', () => {
- beforeEach(() => {
+ beforeEach(async () => {
mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock);
mountComponent({ mutationResolver });
- return waitForApolloRequestRender();
- });
-
- it('is started renders loader', async () => {
+ await waitForApolloRequestRender();
findRegistryList().vm.$emit('delete', [tags[0]]);
findDeleteModal().vm.$emit('confirmDelete');
- await nextTick();
+ });
+
+ describe('starts', () => {
+ beforeEach(async () => {
+ await nextTick();
+ });
+
+ it('renders loader', () => {
+ expect(findTagsLoader().exists()).toBe(true);
+ expect(findTagsListRow().exists()).toBe(false);
+ });
- expect(findTagsLoader().exists()).toBe(true);
- expect(findTagsListRow().exists()).toBe(false);
+ it('hides pagination', () => {
+ expect(findPersistedPagination().exists()).toEqual(false);
+ });
});
- it('ends, loader is hidden', async () => {
- findRegistryList().vm.$emit('delete', [tags[0]]);
+ describe('is resolved', () => {
+ beforeEach(async () => {
+ await waitForPromises();
+ });
- findDeleteModal().vm.$emit('confirmDelete');
- await waitForPromises();
+ it('loader is hidden', () => {
+ expect(findTagsLoader().exists()).toBe(false);
+ expect(findTagsListRow().exists()).toBe(true);
+ });
- expect(findTagsLoader().exists()).toBe(false);
- expect(findTagsListRow().exists()).toBe(true);
+ it('pagination is shown', () => {
+ expect(findPersistedPagination().props('pagination')).toEqual(tagsPageInfo);
+ });
});
});
@@ -495,6 +562,11 @@ describe('Tags List', () => {
expect(findTagsLoader().exists()).toBe(loadingVisible);
expect(findTagsListRow().exists()).toBe(!loadingVisible);
+ if (queryExecuting) {
+ expect(findPersistedPagination().props('pagination')).toEqual({});
+ } else {
+ expect(findPersistedPagination().props('pagination')).toEqual(tagsPageInfo);
+ }
},
);
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/group_empty_state_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/group_empty_state_spec.js
index 900ea61e4ea..e512edea554 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/group_empty_state_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/group_empty_state_spec.js
@@ -1,6 +1,7 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import groupEmptyState from '~/packages_and_registries/container_registry/explorer/components/list_page/group_empty_state.vue';
import { GlEmptyState } from '../../stubs';
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
index 5d8df45415e..b7a995c96ee 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
@@ -82,7 +82,7 @@ describe('Image List Row', () => {
const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
const mockFocusFn = jest.fn();
- wrapper.vm.$refs.imageName.$el.focus = mockFocusFn;
+ wrapper.findComponent(RouterLink).element.focus = mockFocusFn;
await findShowFullPathButton().trigger('click');
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_spec.js
index 6c771887b88..bc0102055ff 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_spec.js
@@ -1,21 +1,18 @@
-import { GlKeysetPagination } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Component from '~/packages_and_registries/container_registry/explorer/components/list_page/image_list.vue';
import ImageListRow from '~/packages_and_registries/container_registry/explorer/components/list_page/image_list_row.vue';
-import { imagesListResponse, pageInfo as defaultPageInfo } from '../../mock_data';
+import { imagesListResponse } from '../../mock_data';
describe('Image List', () => {
let wrapper;
const findRow = () => wrapper.findAllComponents(ImageListRow);
- const findPagination = () => wrapper.findComponent(GlKeysetPagination);
const mountComponent = (props) => {
wrapper = shallowMount(Component, {
propsData: {
images: imagesListResponse,
- pageInfo: defaultPageInfo,
...props,
},
});
@@ -40,44 +37,4 @@ describe('Image List', () => {
expect(findRow().at(0).props('metadataLoading')).toBe(true);
});
});
-
- describe('pagination', () => {
- it('exists', () => {
- mountComponent();
-
- expect(findPagination().exists()).toBe(true);
- });
-
- it.each`
- hasNextPage | hasPreviousPage | isVisible
- ${true} | ${true} | ${true}
- ${true} | ${false} | ${true}
- ${false} | ${true} | ${true}
- `(
- 'when hasNextPage is $hasNextPage and hasPreviousPage is $hasPreviousPage: is $isVisible that the component is visible',
- ({ hasNextPage, hasPreviousPage, isVisible }) => {
- mountComponent({ pageInfo: { ...defaultPageInfo, hasNextPage, hasPreviousPage } });
-
- expect(findPagination().exists()).toBe(isVisible);
- expect(findPagination().props('hasPreviousPage')).toBe(hasPreviousPage);
- expect(findPagination().props('hasNextPage')).toBe(hasNextPage);
- },
- );
-
- it('emits "prev-page" when the user clicks the back page button', () => {
- mountComponent();
-
- findPagination().vm.$emit('prev');
-
- expect(wrapper.emitted('prev-page')).toEqual([[]]);
- });
-
- it('emits "next-page" when the user clicks the forward page button', () => {
- mountComponent();
-
- findPagination().vm.$emit('next');
-
- expect(wrapper.emitted('next-page')).toEqual([[]]);
- });
- });
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/project_empty_state_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/project_empty_state_spec.js
index e4d13143484..23e7a9e1982 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/project_empty_state_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/project_empty_state_spec.js
@@ -1,6 +1,7 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import projectEmptyState from '~/packages_and_registries/container_registry/explorer/components/list_page/project_empty_state.vue';
import { dockerCommands } from '../../mock_data';
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
index 8ca74f5077e..5ee1b4315ff 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
@@ -40,7 +40,7 @@ export const pageInfo = {
hasPreviousPage: true,
startCursor: 'eyJpZCI6IjI2In0',
endCursor: 'eyJpZCI6IjgifQ',
- __typename: 'ContainerRepositoryConnection',
+ __typename: 'PageInfo',
};
export const graphQLImageListMock = {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
index 7fed81acead..9dbdf57b587 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
@@ -1,4 +1,4 @@
-import { GlKeysetPagination, GlEmptyState } from '@gitlab/ui';
+import { GlKeysetPagination, GlEmptyState, GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -12,7 +12,6 @@ import DetailsHeader from '~/packages_and_registries/container_registry/explorer
import PartialCleanupAlert from '~/packages_and_registries/container_registry/explorer/components/details_page/partial_cleanup_alert.vue';
import StatusAlert from '~/packages_and_registries/container_registry/explorer/components/details_page/status_alert.vue';
import TagsList from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list.vue';
-import TagsLoader from '~/packages_and_registries/shared/components/tags_loader.vue';
import {
UNFINISHED_STATUS,
@@ -40,7 +39,7 @@ describe('Details Page', () => {
const findDeleteModal = () => wrapper.findComponent(DeleteModal);
const findPagination = () => wrapper.findComponent(GlKeysetPagination);
- const findTagsLoader = () => wrapper.findComponent(TagsLoader);
+ const findLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findTagsList = () => wrapper.findComponent(TagsList);
const findDeleteAlert = () => wrapper.findComponent(DeleteAlert);
const findDetailsHeader = () => wrapper.findComponent(DetailsHeader);
@@ -109,13 +108,13 @@ describe('Details Page', () => {
it('shows the loader', () => {
mountComponent();
- expect(findTagsLoader().exists()).toBe(true);
+ expect(findLoader().exists()).toBe(true);
});
- it('does not show the list', () => {
+ it('sets loading prop on tags list component', () => {
mountComponent();
- expect(findTagsList().exists()).toBe(false);
+ expect(findTagsList().props('isImageLoading')).toBe(true);
});
});
@@ -125,7 +124,7 @@ describe('Details Page', () => {
await waitForApolloRequestRender();
- expect(findTagsLoader().exists()).toBe(false);
+ expect(findLoader().exists()).toBe(false);
expect(findDetailsHeader().exists()).toBe(false);
expect(findTagsList().exists()).toBe(false);
expect(findPagination().exists()).toBe(false);
@@ -424,13 +423,15 @@ describe('Details Page', () => {
await waitForPromises();
- expect(findTagsLoader().exists()).toBe(true);
+ expect(findLoader().exists()).toBe(true);
+ expect(findTagsList().props('isImageLoading')).toBe(true);
findDeleteImage().vm.$emit('end');
await nextTick();
- expect(findTagsLoader().exists()).toBe(false);
+ expect(findLoader().exists()).toBe(false);
+ expect(findTagsList().props('isImageLoading')).toBe(false);
});
it('binds correctly to delete-image error event', async () => {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js
index 1823bbfe533..1f1f010e0c4 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js
@@ -15,6 +15,7 @@ import RegistryHeader from '~/packages_and_registries/container_registry/explore
import {
DELETE_IMAGE_SUCCESS_MESSAGE,
DELETE_IMAGE_ERROR_MESSAGE,
+ GRAPHQL_PAGE_SIZE,
SORT_FIELDS,
SETTINGS_TEXT,
} from '~/packages_and_registries/container_registry/explorer/constants';
@@ -22,6 +23,7 @@ import deleteContainerRepositoryMutation from '~/packages_and_registries/contain
import getContainerRepositoriesDetails from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repositories_details.query.graphql';
import component from '~/packages_and_registries/container_registry/explorer/pages/list.vue';
import Tracking from '~/tracking';
+import PersistedPagination from '~/packages_and_registries/shared/components/persisted_pagination.vue';
import PersistedSearch from '~/packages_and_registries/shared/components/persisted_search.vue';
import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
@@ -60,8 +62,10 @@ describe('List Page', () => {
const findEmptySearchMessage = () => wrapper.find('[data-testid="emptySearch"]');
const findDeleteImage = () => wrapper.findComponent(DeleteImage);
+ const findPersistedPagination = () => wrapper.findComponent(PersistedPagination);
+
const fireFirstSortUpdate = () => {
- findPersistedSearch().vm.$emit('update', { sort: 'UPDATED_DESC', filters: [] });
+ findPersistedSearch().vm.$emit('update', { sort: 'UPDATED_DESC', filters: [], pageInfo: {} });
};
const waitForApolloRequestRender = async () => {
@@ -218,7 +222,13 @@ describe('List Page', () => {
expect(findImageList().exists()).toBe(false);
});
- it('cli commands is not visible', () => {
+ it('pagination is set to empty object', () => {
+ mountComponent();
+
+ expect(findPersistedPagination().props('pagination')).toEqual({});
+ });
+
+ it('cli commands are not visible', () => {
mountComponent();
expect(findCliCommands().exists()).toBe(false);
@@ -233,11 +243,42 @@ describe('List Page', () => {
});
});
+ describe('when mutation is loading', () => {
+ beforeEach(async () => {
+ mountComponent();
+ fireFirstSortUpdate();
+ await waitForApolloRequestRender();
+ findImageList().vm.$emit('delete', deletedContainerRepository);
+ findDeleteModal().vm.$emit('confirmDelete');
+ findDeleteImage().vm.$emit('start');
+ });
+
+ it('shows the skeleton loader', () => {
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+
+ it('imagesList is not visible', () => {
+ expect(findImageList().exists()).toBe(false);
+ });
+
+ it('pagination is hidden', () => {
+ expect(findPersistedPagination().exists()).toBe(false);
+ });
+
+ it('cli commands are not visible', () => {
+ expect(findCliCommands().exists()).toBe(false);
+ });
+
+ it('title has the metadataLoading props set to true', () => {
+ expect(findRegistryHeader().props('metadataLoading')).toBe(true);
+ });
+ });
+
describe('list is empty', () => {
describe('project page', () => {
const resolver = jest.fn().mockResolvedValue(graphQLEmptyImageListMock);
- it('cli commands is not visible', async () => {
+ it('cli commands are not visible', async () => {
mountComponent({ resolver });
await waitForApolloRequestRender();
@@ -269,7 +310,7 @@ describe('List Page', () => {
expect(findGroupEmptyState().exists()).toBe(true);
});
- it('cli commands is not visible', async () => {
+ it('cli commands are not visible', async () => {
mountComponent({ resolver, config });
await waitForApolloRequestRender();
@@ -461,7 +502,15 @@ describe('List Page', () => {
});
describe('pagination', () => {
- it('prev-page event triggers a fetchMore request', async () => {
+ it('exists', async () => {
+ mountComponent();
+ fireFirstSortUpdate();
+ await waitForApolloRequestRender();
+
+ expect(findPersistedPagination().props('pagination')).toEqual(pageInfo);
+ });
+
+ it('prev event triggers a previous page request', async () => {
const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
const detailsResolver = jest
.fn()
@@ -470,18 +519,58 @@ describe('List Page', () => {
fireFirstSortUpdate();
await waitForApolloRequestRender();
- findImageList().vm.$emit('prev-page');
+ findPersistedPagination().vm.$emit('prev');
await waitForPromises();
expect(resolver).toHaveBeenCalledWith(
- expect.objectContaining({ before: pageInfo.startCursor }),
+ expect.objectContaining({
+ before: pageInfo.startCursor,
+ first: null,
+ last: GRAPHQL_PAGE_SIZE,
+ }),
);
expect(detailsResolver).toHaveBeenCalledWith(
- expect.objectContaining({ before: pageInfo.startCursor }),
+ expect.objectContaining({
+ before: pageInfo.startCursor,
+ first: null,
+ last: GRAPHQL_PAGE_SIZE,
+ }),
);
});
- it('next-page event triggers a fetchMore request', async () => {
+ it('calls resolver with pagination params when persisted search returns before', async () => {
+ const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
+ const detailsResolver = jest
+ .fn()
+ .mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
+ mountComponent({ resolver, detailsResolver });
+
+ findPersistedSearch().vm.$emit('update', {
+ sort: 'UPDATED_DESC',
+ filters: [],
+ pageInfo: { before: pageInfo.startCursor },
+ });
+ await waitForApolloRequestRender();
+
+ expect(resolver).toHaveBeenCalledWith(
+ expect.objectContaining({
+ sort: 'UPDATED_DESC',
+ before: pageInfo.startCursor,
+ first: null,
+ last: GRAPHQL_PAGE_SIZE,
+ }),
+ );
+ expect(detailsResolver).toHaveBeenCalledWith(
+ expect.objectContaining({
+ sort: 'UPDATED_DESC',
+ before: pageInfo.startCursor,
+ first: null,
+ last: GRAPHQL_PAGE_SIZE,
+ }),
+ );
+ });
+
+ it('next event triggers a next page request', async () => {
const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
const detailsResolver = jest
.fn()
@@ -490,14 +579,50 @@ describe('List Page', () => {
fireFirstSortUpdate();
await waitForApolloRequestRender();
- findImageList().vm.$emit('next-page');
+ findPersistedPagination().vm.$emit('next');
await waitForPromises();
expect(resolver).toHaveBeenCalledWith(
- expect.objectContaining({ after: pageInfo.endCursor }),
+ expect.objectContaining({
+ after: pageInfo.endCursor,
+ first: GRAPHQL_PAGE_SIZE,
+ }),
+ );
+ expect(detailsResolver).toHaveBeenCalledWith(
+ expect.objectContaining({
+ after: pageInfo.endCursor,
+ first: GRAPHQL_PAGE_SIZE,
+ }),
+ );
+ });
+
+ it('calls resolver with pagination params when persisted search returns after', async () => {
+ const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
+ const detailsResolver = jest
+ .fn()
+ .mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
+ mountComponent({ resolver, detailsResolver });
+
+ findPersistedSearch().vm.$emit('update', {
+ sort: 'UPDATED_DESC',
+ filters: [],
+ pageInfo: { after: pageInfo.endCursor },
+ });
+ await waitForApolloRequestRender();
+
+ expect(resolver).toHaveBeenCalledWith(
+ expect.objectContaining({
+ sort: 'UPDATED_DESC',
+ after: pageInfo.endCursor,
+ first: GRAPHQL_PAGE_SIZE,
+ }),
);
expect(detailsResolver).toHaveBeenCalledWith(
- expect.objectContaining({ after: pageInfo.endCursor }),
+ expect.objectContaining({
+ sort: 'UPDATED_DESC',
+ after: pageInfo.endCursor,
+ first: GRAPHQL_PAGE_SIZE,
+ }),
);
});
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/utils_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/utils_spec.js
index d7a9c200c7b..92cb6a3c9bc 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/utils_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/utils_spec.js
@@ -1,6 +1,9 @@
import {
getImageName,
timeTilRun,
+ getNextPageParams,
+ getPreviousPageParams,
+ getPageParams,
} from '~/packages_and_registries/container_registry/explorer/utils';
describe('Container registry utilities', () => {
@@ -35,4 +38,49 @@ describe('Container registry utilities', () => {
expect(result).toBe('');
});
});
+
+ describe('getNextPageParams', () => {
+ it('should return the next page params with the provided cursor', () => {
+ const cursor = 'abc123';
+ expect(getNextPageParams(cursor)).toEqual({
+ after: cursor,
+ first: 10,
+ });
+ });
+ });
+
+ describe('getPreviousPageParams', () => {
+ it('should return the previous page params with the provided cursor', () => {
+ const cursor = 'abc123';
+ expect(getPreviousPageParams(cursor)).toEqual({
+ first: null,
+ before: cursor,
+ last: 10,
+ });
+ });
+ });
+
+ describe('getPageParams', () => {
+ it('should return the previous page params if before cursor is available', () => {
+ const pageInfo = { before: 'abc123' };
+ expect(getPageParams(pageInfo)).toEqual({
+ first: null,
+ before: pageInfo.before,
+ last: 10,
+ });
+ });
+
+ it('should return the next page params if after cursor is available', () => {
+ const pageInfo = { after: 'abc123' };
+ expect(getPageParams(pageInfo)).toEqual({
+ after: pageInfo.after,
+ first: 10,
+ });
+ });
+
+ it('should return an empty object if both before and after cursors are not available', () => {
+ const pageInfo = {};
+ expect(getPageParams(pageInfo)).toEqual({});
+ });
+ });
});
diff --git a/spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js
index 8f445843aa8..521a38bee70 100644
--- a/spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js
+++ b/spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js
@@ -91,12 +91,6 @@ describe('Manifests List', () => {
});
describe('pagination', () => {
- it('is hidden when there is no next or prev pages', () => {
- createComponent({ ...defaultProps, pagination: {} });
-
- expect(findPagination().exists()).toBe(false);
- });
-
it('has the correct props', () => {
createComponent();
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js
index f8130287c12..204134f1ee9 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js
@@ -1,6 +1,7 @@
import { GlEmptyState } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import stubChildren from 'helpers/stub_children';
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js
index 7f56d3e216c..8e5386fc954 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import component from '~/packages_and_registries/infrastructure_registry/details/components/details_title.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js
index 94797f01d16..9bdd0e438d3 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import TerraformInstallation from '~/packages_and_registries/infrastructure_registry/details/components/terraform_installation.vue';
import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue';
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js
index a89247c0a97..2f252047d82 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import component from '~/packages_and_registries/infrastructure_registry/list/components/infrastructure_search.vue';
import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js
index 47d36d11e35..eb905fbec40 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js
@@ -1,6 +1,7 @@
import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import { createAlert, VARIANT_INFO } from '~/alert';
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_spec.js
index 51445942eaa..ad906d41435 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_spec.js
@@ -2,6 +2,7 @@ import { GlTable, GlPagination } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue from 'vue';
import { last } from 'lodash';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import stubChildren from 'helpers/stub_children';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
index 08e2de6c18f..250b33cbb14 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
@@ -24,7 +24,7 @@ exports[`packages_list_row renders 1`] = `
>
<gl-link-stub
class="gl-text-body gl-min-w-0"
- data-qa-selector="package_link"
+ data-testid="details-link"
href="foo"
>
<gl-truncate-stub
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
index b4ea6543446..99ee6ce01b2 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
@@ -13,47 +13,133 @@ exports[`PypiInstallation renders all the messages 1`] = `
<div>
<div
- class="dropdown b-dropdown gl-dropdown btn-group"
- id="__BVID__27"
- lazy=""
+ class="gl-new-dropdown"
>
- <!---->
<button
- aria-expanded="false"
- aria-haspopup="menu"
- class="btn dropdown-toggle btn-default btn-md gl-button gl-dropdown-toggle"
- id="__BVID__27__BV_toggle_"
+ aria-controls="base-dropdown-10"
+ aria-haspopup="listbox"
+ aria-labelledby="dropdown-toggle-btn-8"
+ class="btn btn-default btn-md gl-button gl-new-dropdown-toggle"
+ data-testid="base-dropdown-toggle"
+ id="dropdown-toggle-btn-8"
type="button"
>
<!---->
<!---->
-
+
<span
- class="gl-dropdown-button-text"
+ class="gl-button-text"
>
- Show PyPi commands
+ <span
+ class="gl-new-dropdown-button-text"
+ >
+
+ Show PyPi commands
+
+ </span>
+
+ <svg
+ aria-hidden="true"
+ class="gl-button-icon gl-new-dropdown-chevron gl-icon s16"
+ data-testid="chevron-down-icon"
+ role="img"
+ >
+ <use
+ href="file-mock#chevron-down"
+ />
+ </svg>
</span>
-
- <svg
- aria-hidden="true"
- class="gl-button-icon dropdown-chevron gl-icon s16"
- data-testid="chevron-down-icon"
- role="img"
- >
- <use
- href="file-mock#chevron-down"
- />
- </svg>
</button>
- <ul
- aria-labelledby="__BVID__27__BV_toggle_"
- class="dropdown-menu"
- role="menu"
- tabindex="-1"
+
+ <div
+ class="gl-new-dropdown-panel gl-w-31!"
+ data-testid="base-dropdown-menu"
+ id="base-dropdown-10"
>
- <!---->
- </ul>
+ <div
+ class="gl-new-dropdown-inner"
+ >
+
+ <!---->
+
+ <!---->
+
+ <ul
+ aria-labelledby="dropdown-toggle-btn-8"
+ class="gl-new-dropdown-contents gl-new-dropdown-contents-with-scrim-overlay gl-new-dropdown-contents"
+ id="listbox-9"
+ role="listbox"
+ tabindex="-1"
+ >
+ <li
+ aria-hidden="true"
+ class="top-scrim-wrapper"
+ data-testid="top-scrim"
+ >
+ <div
+ class="top-scrim top-scrim-light"
+ />
+ </li>
+
+ <li
+ aria-hidden="true"
+ />
+
+ <li
+ aria-selected="true"
+ class="gl-new-dropdown-item"
+ data-testid="listbox-item-pypi"
+ role="option"
+ tabindex="-1"
+ >
+ <span
+ class="gl-new-dropdown-item-content gl-bg-gray-50!"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-icon s16 gl-new-dropdown-item-check-icon gl-mt-3 gl-align-self-start"
+ data-testid="dropdown-item-checkbox"
+ role="img"
+ >
+ <use
+ href="file-mock#mobile-issue-close"
+ />
+ </svg>
+
+ <span
+ class="gl-new-dropdown-item-text-wrapper"
+ >
+
+ Show PyPi commands
+
+ </span>
+ </span>
+ </li>
+
+ <!---->
+
+ <!---->
+
+ <li
+ aria-hidden="true"
+ />
+
+ <li
+ aria-hidden="true"
+ class="bottom-scrim-wrapper"
+ data-testid="bottom-scrim"
+ >
+ <div
+ class="bottom-scrim"
+ />
+ </li>
+ </ul>
+
+ <!---->
+
+ </div>
+ </div>
</div>
</div>
</div>
@@ -80,7 +166,7 @@ exports[`PypiInstallation renders all the messages 1`] = `
id="installation-pip-command"
>
<label
- for="instruction-input_5"
+ for="instruction-input_11"
>
Pip Command
</label>
@@ -94,7 +180,7 @@ exports[`PypiInstallation renders all the messages 1`] = `
<input
class="form-control gl-font-monospace"
data-testid="instruction-input"
- id="instruction-input_5"
+ id="instruction-input_11"
readonly="readonly"
type="text"
/>
@@ -109,7 +195,7 @@ exports[`PypiInstallation renders all the messages 1`] = `
class="btn input-group-text btn-default btn-md gl-button btn-default-secondary btn-icon"
data-clipboard-handle-tooltip="false"
data-clipboard-text="pip install @gitlab-org/package-15 --index-url http://__token__:<your_personal_token>@gdk.test:3000/api/v4/projects/1/packages/pypi/simple"
- id="clipboard-button-6"
+ id="clipboard-button-12"
title="Copy Pip command"
type="button"
>
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
index 2c712feac86..5ba4b1f687e 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
@@ -176,22 +176,6 @@ describe('Package Files', () => {
disabled: false,
});
});
-
- it('hides pagination when only one page', async () => {
- createComponent({
- resolver: jest.fn().mockResolvedValue(
- packageFilesQuery({
- extendPagination: {
- hasNextPage: false,
- hasPreviousPage: false,
- },
- }),
- ),
- });
- await waitForPromises();
-
- expect(findPagination().exists()).toBe(false);
- });
});
describe('link', () => {
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
index 0443fb85dc9..e0e6c101029 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
@@ -32,7 +32,6 @@ exports[`packages_list_row renders 1`] = `
<router-link-stub
ariacurrentvalue="page"
class="gl-text-body gl-min-w-0"
- data-qa-selector="package_link"
data-testid="details-link"
event="click"
tag="a"
@@ -105,35 +104,33 @@ exports[`packages_list_row renders 1`] = `
<div
class="gl-w-9 gl-display-flex gl-justify-content-end gl-pr-1"
>
- <gl-dropdown-stub
+ <gl-disclosure-dropdown-stub
+ autoclose="true"
category="tertiary"
- clearalltext="Clear all"
- clearalltextclass="gl-px-5"
data-testid="delete-dropdown"
- headertext=""
- hideheaderborder="true"
- highlighteditemstitle="Selected"
- highlighteditemstitleclass="gl-px-5"
icon="ellipsis_v"
- no-caret=""
+ items=""
+ nocaret="true"
+ placement="left"
+ positioningstrategy="absolute"
size="medium"
- text="More actions"
textsronly="true"
+ toggleid="dropdown-toggle-btn-3"
+ toggletext="More actions"
variant="default"
>
- <gl-dropdown-item-stub
- avatarurl=""
+ <gl-disclosure-dropdown-item-stub
data-testid="action-delete"
- iconcolor=""
- iconname=""
- iconrightarialabel=""
- iconrightname=""
- secondarytext=""
- variant="danger"
>
- Delete package
- </gl-dropdown-item-stub>
- </gl-dropdown-stub>
+ <span
+ class="gl-text-red-500"
+ >
+
+ Delete package
+
+ </span>
+ </gl-disclosure-dropdown-item-stub>
+ </gl-disclosure-dropdown-stub>
</div>
</div>
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
index 523d5f855fc..9f8fd4e28e7 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
@@ -34,7 +34,8 @@ describe('packages_list_row', () => {
const packageWithTags = { ...packageWithoutTags, tags: { nodes: packageTags() } };
const findPackageTags = () => wrapper.findComponent(PackageTags);
- const findDeleteDropdown = () => wrapper.findByTestId('action-delete');
+ const findDeleteDropdown = () => wrapper.findByTestId('delete-dropdown');
+ const findDeleteButton = () => wrapper.findByTestId('action-delete');
const findPackageType = () => wrapper.findByTestId('package-type');
const findPackageLink = () => wrapper.findByTestId('details-link');
const findWarningIcon = () => wrapper.findByTestId('warning-icon');
@@ -103,7 +104,7 @@ describe('packages_list_row', () => {
});
});
- describe('delete button', () => {
+ describe('delete dropdown', () => {
it('does not exist when package cannot be destroyed', () => {
mountComponent({
packageEntity: { ...packageWithoutTags, canDestroy: false },
@@ -112,19 +113,39 @@ describe('packages_list_row', () => {
expect(findDeleteDropdown().exists()).toBe(false);
});
- it('exists and has the correct props', () => {
- mountComponent({ packageEntity: packageWithoutTags });
+ it('exists when package can be destroyed', () => {
+ mountComponent();
- expect(findDeleteDropdown().exists()).toBe(true);
- expect(findDeleteDropdown().attributes()).toMatchObject({
- variant: 'danger',
+ expect(findDeleteDropdown().props()).toMatchObject({
+ category: 'tertiary',
+ icon: 'ellipsis_v',
+ textSrOnly: true,
+ noCaret: true,
+ toggleText: 'More actions',
});
});
+ });
+
+ describe('delete button', () => {
+ it('does not exist when package cannot be destroyed', () => {
+ mountComponent({
+ packageEntity: { ...packageWithoutTags, canDestroy: false },
+ });
+
+ expect(findDeleteButton().exists()).toBe(false);
+ });
+
+ it('exists and has the correct text', () => {
+ mountComponent({ packageEntity: packageWithoutTags });
+
+ expect(findDeleteButton().exists()).toBe(true);
+ expect(findDeleteButton().text()).toBe('Delete package');
+ });
it('emits the delete event when the delete button is clicked', () => {
mountComponent({ packageEntity: packageWithoutTags });
- findDeleteDropdown().vm.$emit('click');
+ findDeleteButton().vm.$emit('action');
expect(wrapper.emitted('delete')).toHaveLength(1);
});
diff --git a/spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js b/spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js
index 41482e6e681..328f83394f9 100644
--- a/spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js
+++ b/spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js
@@ -1,6 +1,7 @@
import { GlDropdown } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import QuickstartDropdown from '~/packages_and_registries/shared/components/cli_commands.vue';
import {
diff --git a/spec/frontend/packages_and_registries/shared/components/persisted_pagination_spec.js b/spec/frontend/packages_and_registries/shared/components/persisted_pagination_spec.js
new file mode 100644
index 00000000000..3aa4b10cef6
--- /dev/null
+++ b/spec/frontend/packages_and_registries/shared/components/persisted_pagination_spec.js
@@ -0,0 +1,100 @@
+import { GlKeysetPagination } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PersistedPagination from '~/packages_and_registries/shared/components/persisted_pagination.vue';
+import UrlSync from '~/vue_shared/components/url_sync.vue';
+
+describe('Persisted Search', () => {
+ let wrapper;
+
+ const defaultProps = {
+ pagination: {
+ hasNextPage: true,
+ hasPreviousPage: true,
+ startCursor: 'eyJpZCI6IjI2In0',
+ endCursor: 'eyJpZCI6IjgifQ',
+ },
+ };
+
+ const findPagination = () => wrapper.findComponent(GlKeysetPagination);
+ const findUrlSync = () => wrapper.findComponent(UrlSync);
+
+ const mountComponent = ({ propsData = defaultProps, stubs = {} } = {}) => {
+ wrapper = shallowMountExtended(PersistedPagination, {
+ propsData,
+ stubs: {
+ UrlSync,
+ ...stubs,
+ },
+ });
+ };
+
+ it('has pagination component', () => {
+ mountComponent();
+
+ const { hasNextPage, hasPreviousPage, startCursor, endCursor } = defaultProps.pagination;
+ expect(findPagination().props('hasNextPage')).toBe(hasNextPage);
+ expect(findPagination().props('hasPreviousPage')).toBe(hasPreviousPage);
+ expect(findPagination().props('startCursor')).toBe(startCursor);
+ expect(findPagination().props('endCursor')).toBe(endCursor);
+ });
+
+ it('has a UrlSync component', () => {
+ mountComponent();
+
+ expect(findUrlSync().exists()).toBe(true);
+ });
+
+ describe('pagination events', () => {
+ const updateQueryMock = jest.fn();
+ const mockUrlSync = {
+ methods: {
+ updateQuery: updateQueryMock,
+ },
+ render() {
+ return this.$scopedSlots.default?.({ updateQuery: this.updateQuery });
+ },
+ };
+
+ beforeEach(() => {
+ mountComponent({ stubs: { UrlSync: mockUrlSync } });
+ });
+
+ afterEach(() => {
+ updateQueryMock.mockReset();
+ });
+
+ describe('prev event', () => {
+ beforeEach(() => {
+ findPagination().vm.$emit('prev');
+ });
+
+ it('calls updateQuery mock with right params', () => {
+ expect(updateQueryMock).toHaveBeenCalledWith({
+ before: defaultProps.pagination?.startCursor,
+ after: null,
+ });
+ });
+
+ it('re-emits prev event', () => {
+ expect(wrapper.emitted('prev')).toHaveLength(1);
+ });
+ });
+
+ describe('next event', () => {
+ beforeEach(() => {
+ findPagination().vm.$emit('next');
+ });
+
+ it('calls updateQuery mock with right params', () => {
+ expect(updateQueryMock).toHaveBeenCalledWith({
+ after: defaultProps.pagination.endCursor,
+ before: null,
+ });
+ });
+
+ it('re-emits next event', () => {
+ expect(wrapper.emitted('next')).toHaveLength(1);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js b/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
index c1e86080d29..296caf091d5 100644
--- a/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
+++ b/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
@@ -1,16 +1,21 @@
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import VueRouter from 'vue-router';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
import component from '~/packages_and_registries/shared/components/persisted_search.vue';
import UrlSync from '~/vue_shared/components/url_sync.vue';
-import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
-import { getQueryParams, extractFilterAndSorting } from '~/packages_and_registries/shared/utils';
+import {
+ getQueryParams,
+ extractFilterAndSorting,
+ extractPageInfo,
+} from '~/packages_and_registries/shared/utils';
jest.mock('~/packages_and_registries/shared/utils');
-useMockLocationHelper();
+Vue.use(VueRouter);
describe('Persisted Search', () => {
+ let router;
let wrapper;
const defaultQueryParamsMock = {
@@ -31,8 +36,11 @@ describe('Persisted Search', () => {
const findUrlSync = () => wrapper.findComponent(UrlSync);
const mountComponent = (propsData = defaultProps) => {
+ router = new VueRouter({ mode: 'history' });
+
wrapper = shallowMountExtended(component, {
propsData,
+ router,
stubs: {
UrlSync,
},
@@ -41,6 +49,10 @@ describe('Persisted Search', () => {
beforeEach(() => {
extractFilterAndSorting.mockReturnValue(defaultQueryParamsMock);
+ extractPageInfo.mockReturnValue({
+ after: '123',
+ before: null,
+ });
});
it('has a registry search component', async () => {
@@ -63,6 +75,48 @@ describe('Persisted Search', () => {
expect(findUrlSync().exists()).toBe(true);
});
+ it('emits update event on mount', () => {
+ mountComponent();
+
+ expect(wrapper.emitted('update')[0]).toEqual([
+ {
+ filters: ['foo'],
+ sort: 'TEST_DESC',
+ pageInfo: {
+ after: '123',
+ before: null,
+ },
+ },
+ ]);
+ });
+
+ it('re-emits update event when route changes', async () => {
+ mountComponent();
+
+ extractFilterAndSorting.mockReturnValue({
+ filters: [],
+ sorting: {},
+ });
+ extractPageInfo.mockReturnValue({
+ after: null,
+ before: '456',
+ });
+
+ await router.push({ query: { before: '456' } });
+
+ // there is always a first call on mounted that emits up default values
+ expect(wrapper.emitted('update')[1]).toEqual([
+ {
+ filters: [],
+ sort: 'TEST_DESC',
+ pageInfo: {
+ before: '456',
+ after: null,
+ },
+ },
+ ]);
+ });
+
it('on sorting:changed emits update event and update internal sort', async () => {
const payload = { sort: 'desc', orderBy: 'test' };
@@ -81,6 +135,7 @@ describe('Persisted Search', () => {
{
filters: ['foo'],
sort: 'TEST_DESC',
+ pageInfo: {},
},
]);
});
@@ -110,6 +165,10 @@ describe('Persisted Search', () => {
{
filters: ['foo'],
sort: 'TEST_DESC',
+ pageInfo: {
+ after: '123',
+ before: null,
+ },
},
]);
});
@@ -126,7 +185,7 @@ describe('Persisted Search', () => {
expect(UrlSync.methods.updateQuery).toHaveBeenCalled();
});
- it('sets the component sorting and filtering based on the querystring', async () => {
+ it('sets the component sorting, filtering and page info based on the querystring', async () => {
mountComponent();
await nextTick();
diff --git a/spec/frontend/packages_and_registries/shared/components/registry_list_spec.js b/spec/frontend/packages_and_registries/shared/components/registry_list_spec.js
index 66fca2ce12e..43dca2e6bf2 100644
--- a/spec/frontend/packages_and_registries/shared/components/registry_list_spec.js
+++ b/spec/frontend/packages_and_registries/shared/components/registry_list_spec.js
@@ -185,7 +185,7 @@ describe('Registry List', () => {
pagination = { hasPreviousPage: false, hasNextPage: true };
});
- it('has a pagination', () => {
+ it('has pagination', () => {
mountComponent({
propsData: { ...defaultPropsData, pagination },
});
@@ -193,24 +193,6 @@ describe('Registry List', () => {
expect(findPagination().props()).toMatchObject(pagination);
});
- it.each`
- hasPreviousPage | hasNextPage | visible
- ${true} | ${true} | ${true}
- ${true} | ${false} | ${true}
- ${false} | ${true} | ${true}
- ${false} | ${false} | ${false}
- `(
- 'when hasPreviousPage is $hasPreviousPage and hasNextPage is $hasNextPage is $visible that the pagination is shown',
- ({ hasPreviousPage, hasNextPage, visible }) => {
- pagination = { hasPreviousPage, hasNextPage };
- mountComponent({
- propsData: { ...defaultPropsData, pagination },
- });
-
- expect(findPagination().exists()).toBe(visible);
- },
- );
-
it('pagination emits the correct events', () => {
mountComponent({
propsData: { ...defaultPropsData, pagination },
diff --git a/spec/frontend/packages_and_registries/shared/utils_spec.js b/spec/frontend/packages_and_registries/shared/utils_spec.js
index d81cdbfd8bd..1dc6bb261de 100644
--- a/spec/frontend/packages_and_registries/shared/utils_spec.js
+++ b/spec/frontend/packages_and_registries/shared/utils_spec.js
@@ -3,6 +3,7 @@ import {
keyValueToFilterToken,
searchArrayToFilterTokens,
extractFilterAndSorting,
+ extractPageInfo,
beautifyPath,
getCommitLink,
} from '~/packages_and_registries/shared/utils';
@@ -61,6 +62,21 @@ describe('Packages And Registries shared utils', () => {
);
});
+ describe('extractPageInfo', () => {
+ it.each`
+ after | before | result
+ ${null} | ${null} | ${{ after: null, before: null }}
+ ${'123'} | ${null} | ${{ after: '123', before: null }}
+ ${null} | ${'123'} | ${{ after: null, before: '123' }}
+ `('returns pagination objects', ({ after, before, result }) => {
+ const queryObject = {
+ after,
+ before,
+ };
+ expect(extractPageInfo(queryObject)).toStrictEqual(result);
+ });
+ });
+
describe('beautifyPath', () => {
it('returns a string with spaces around /', () => {
expect(beautifyPath('foo/bar')).toBe('foo / bar');
diff --git a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
index 8a7fc57c409..0037934cbc5 100644
--- a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
+++ b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
@@ -2,6 +2,7 @@ import { GlEmptyState, GlLoadingIcon, GlTableLite } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
+import waitForPromises from 'helpers/wait_for_promises';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
@@ -60,10 +61,13 @@ describe('BulkImportsHistoryApp', () => {
let wrapper;
let mock;
+ const mockRealtimeChangesPath = '/import/realtime_changes.json';
function createComponent({ shallow = true } = {}) {
const mountFn = shallow ? shallowMount : mount;
- wrapper = mountFn(BulkImportsHistoryApp);
+ wrapper = mountFn(BulkImportsHistoryApp, {
+ provide: { realtimeChangesPath: mockRealtimeChangesPath },
+ });
}
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
@@ -220,4 +224,69 @@ describe('BulkImportsHistoryApp', () => {
expect(JSON.parse(detailsRowContent.text())).toStrictEqual(DUMMY_RESPONSE[1].failures);
});
});
+
+ describe('status polling', () => {
+ describe('when there are no isImporting imports', () => {
+ it('does not start polling', async () => {
+ createComponent({ shallow: false });
+ await waitForPromises();
+
+ expect(mock.history.get.map((x) => x.url)).toEqual([API_URL]);
+ });
+ });
+
+ describe('when there are isImporting imports', () => {
+ const mockCreatedImport = {
+ id: 3,
+ bulk_import_id: 3,
+ status: 'created',
+ entity_type: 'group',
+ source_full_path: 'top-level-group-12',
+ destination_full_path: 'h5bp/top-level-group-12',
+ destination_name: 'top-level-group-12',
+ destination_namespace: 'h5bp',
+ created_at: '2021-07-08T10:03:44.743Z',
+ failures: [],
+ };
+ const mockImportChanges = [{ id: 3, status_name: 'finished' }];
+ const pollInterval = 1;
+
+ beforeEach(async () => {
+ const RESPONSE = [mockCreatedImport, ...DUMMY_RESPONSE];
+ const POLL_HEADERS = { 'poll-interval': pollInterval };
+
+ mock.onGet(API_URL).reply(HTTP_STATUS_OK, RESPONSE, DEFAULT_HEADERS);
+ mock.onGet(mockRealtimeChangesPath).replyOnce(HTTP_STATUS_OK, [], POLL_HEADERS);
+ mock
+ .onGet(mockRealtimeChangesPath)
+ .replyOnce(HTTP_STATUS_OK, mockImportChanges, POLL_HEADERS);
+
+ createComponent({ shallow: false });
+
+ await waitForPromises();
+ });
+
+ it('starts polling for realtime changes', () => {
+ jest.advanceTimersByTime(pollInterval);
+
+ expect(mock.history.get.map((x) => x.url)).toEqual([API_URL, mockRealtimeChangesPath]);
+ expect(wrapper.findAll('tbody tr').at(0).text()).toContain('Pending');
+ });
+
+ it('stops polling when import is finished', async () => {
+ jest.advanceTimersByTime(pollInterval);
+ await waitForPromises();
+ // Wait an extra interval to make sure we've stopped polling
+ jest.advanceTimersByTime(pollInterval);
+ await waitForPromises();
+
+ expect(mock.history.get.map((x) => x.url)).toEqual([
+ API_URL,
+ mockRealtimeChangesPath,
+ mockRealtimeChangesPath,
+ ]);
+ expect(wrapper.findAll('tbody tr').at(0).text()).toContain('Complete');
+ });
+ });
+ });
});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index 722857a1420..7bc4cd4d541 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -118,6 +118,7 @@ describe('ForkForm component', () => {
const findInternalRadio = () => wrapper.find('[data-testid="radio-internal"]');
const findPublicRadio = () => wrapper.find('[data-testid="radio-public"]');
const findForkNameInput = () => wrapper.find('[data-testid="fork-name-input"]');
+ const findGlFormRadioGroup = () => wrapper.findComponent(GlFormRadioGroup);
const findForkUrlInput = () => wrapper.findComponent(ProjectNamespace);
const findForkSlugInput = () => wrapper.find('[data-testid="fork-slug-input"]');
const findForkDescriptionTextarea = () =>
@@ -235,7 +236,7 @@ describe('ForkForm component', () => {
it('resets the visibility to max allowed below current level', async () => {
createFullComponent({ projectVisibility: 'public' }, { namespaces });
- expect(wrapper.vm.form.fields.visibility.value).toBe('public');
+ expect(findGlFormRadioGroup().vm.$attrs.checked).toBe('public');
fillForm({
name: 'one',
@@ -250,7 +251,7 @@ describe('ForkForm component', () => {
it('does not reset the visibility when current level is allowed', async () => {
createFullComponent({ projectVisibility: 'public' }, { namespaces });
- expect(wrapper.vm.form.fields.visibility.value).toBe('public');
+ expect(findGlFormRadioGroup().vm.$attrs.checked).toBe('public');
fillForm({
name: 'two',
@@ -265,7 +266,7 @@ describe('ForkForm component', () => {
it('does not reset the visibility when visibility cap is increased', async () => {
createFullComponent({ projectVisibility: 'public' }, { namespaces });
- expect(wrapper.vm.form.fields.visibility.value).toBe('public');
+ expect(findGlFormRadioGroup().vm.$attrs.checked).toBe('public');
fillForm({
name: 'three',
@@ -290,7 +291,7 @@ describe('ForkForm component', () => {
{ namespaces },
);
- wrapper.vm.form.fields.visibility.value = 'internal';
+ await findGlFormRadioGroup().vm.$emit('input', 'internal');
fillForm({
name: 'five',
id: 5,
@@ -468,7 +469,8 @@ describe('ForkForm component', () => {
jest.spyOn(axios, 'post');
setupComponent();
- wrapper.vm.form.fields.visibility.value = null;
+ await findGlFormRadioGroup().vm.$emit('input', null);
+
await nextTick();
await submitForm();
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index 197a76f2c86..f5a7dfe6d11 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -132,7 +132,7 @@ describe('Interval Pattern Input Component', () => {
'Every day (at 4:00am)',
'Every week (Monday at 4:00am)',
'Every month (Day 1 at 4:00am)',
- 'Custom ( Learn more. )',
+ 'Custom (Learn more.)',
]);
});
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 02e510c9541..8b672ff3f32 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -519,26 +519,34 @@ describe('Settings Panel', () => {
});
it.each`
- projectVisibilityLevel | packageRegistryEnabled | packageRegistryApiForEveryoneEnabled | expectedAccessLevel
- ${VISIBILITY_LEVEL_PRIVATE_INTEGER} | ${false} | ${'disabled'} | ${featureAccessLevel.NOT_ENABLED}
- ${VISIBILITY_LEVEL_PRIVATE_INTEGER} | ${true} | ${false} | ${featureAccessLevel.PROJECT_MEMBERS}
- ${VISIBILITY_LEVEL_PRIVATE_INTEGER} | ${true} | ${true} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
- ${VISIBILITY_LEVEL_INTERNAL_INTEGER} | ${false} | ${'disabled'} | ${featureAccessLevel.NOT_ENABLED}
- ${VISIBILITY_LEVEL_INTERNAL_INTEGER} | ${true} | ${false} | ${featureAccessLevel.EVERYONE}
- ${VISIBILITY_LEVEL_INTERNAL_INTEGER} | ${true} | ${true} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
- ${VISIBILITY_LEVEL_PUBLIC_INTEGER} | ${false} | ${'hidden'} | ${featureAccessLevel.NOT_ENABLED}
- ${VISIBILITY_LEVEL_PUBLIC_INTEGER} | ${true} | ${'hidden'} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ projectVisibilityLevel | packageRegistryEnabled | packageRegistryAllowAnyoneToPullOption | packageRegistryApiForEveryoneEnabled | expectedAccessLevel
+ ${VISIBILITY_LEVEL_PRIVATE_INTEGER} | ${false} | ${true} | ${'disabled'} | ${featureAccessLevel.NOT_ENABLED}
+ ${VISIBILITY_LEVEL_PRIVATE_INTEGER} | ${true} | ${true} | ${false} | ${featureAccessLevel.PROJECT_MEMBERS}
+ ${VISIBILITY_LEVEL_PRIVATE_INTEGER} | ${true} | ${true} | ${true} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${VISIBILITY_LEVEL_INTERNAL_INTEGER} | ${false} | ${true} | ${'disabled'} | ${featureAccessLevel.NOT_ENABLED}
+ ${VISIBILITY_LEVEL_INTERNAL_INTEGER} | ${true} | ${true} | ${false} | ${featureAccessLevel.EVERYONE}
+ ${VISIBILITY_LEVEL_INTERNAL_INTEGER} | ${true} | ${true} | ${true} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${VISIBILITY_LEVEL_PUBLIC_INTEGER} | ${false} | ${true} | ${'hidden'} | ${featureAccessLevel.NOT_ENABLED}
+ ${VISIBILITY_LEVEL_PUBLIC_INTEGER} | ${true} | ${true} | ${'hidden'} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${VISIBILITY_LEVEL_PRIVATE_INTEGER} | ${false} | ${false} | ${'hidden'} | ${featureAccessLevel.NOT_ENABLED}
+ ${VISIBILITY_LEVEL_PRIVATE_INTEGER} | ${true} | ${false} | ${'hidden'} | ${featureAccessLevel.PROJECT_MEMBERS}
+ ${VISIBILITY_LEVEL_INTERNAL_INTEGER} | ${false} | ${false} | ${'hidden'} | ${featureAccessLevel.NOT_ENABLED}
+ ${VISIBILITY_LEVEL_INTERNAL_INTEGER} | ${true} | ${false} | ${'hidden'} | ${featureAccessLevel.EVERYONE}
+ ${VISIBILITY_LEVEL_PUBLIC_INTEGER} | ${false} | ${false} | ${'hidden'} | ${featureAccessLevel.NOT_ENABLED}
+ ${VISIBILITY_LEVEL_PUBLIC_INTEGER} | ${true} | ${false} | ${'hidden'} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
`(
'sets correct access level',
async ({
projectVisibilityLevel,
packageRegistryEnabled,
+ packageRegistryAllowAnyoneToPullOption,
packageRegistryApiForEveryoneEnabled,
expectedAccessLevel,
}) => {
wrapper = mountComponent({
packagesAvailable: true,
currentSettings: {
+ packageRegistryAllowAnyoneToPullOption,
visibilityLevel: projectVisibilityLevel,
},
});
@@ -678,7 +686,7 @@ describe('Settings Panel', () => {
});
describe('Default award emojis', () => {
- it('should show the "Show default award emojis" input', () => {
+ it('should show the "Show default emoji reactions" input', () => {
wrapper = mountComponent();
expect(findShowDefaultAwardEmojis().exists()).toBe(true);
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_export_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_export_spec.js
new file mode 100644
index 00000000000..b7002412561
--- /dev/null
+++ b/spec/frontend/pages/shared/wikis/components/wiki_export_spec.js
@@ -0,0 +1,48 @@
+import { GlDisclosureDropdown } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import WikiExport from '~/pages/shared/wikis/components/wiki_export.vue';
+import printMarkdownDom from '~/lib/print_markdown_dom';
+
+jest.mock('~/lib/print_markdown_dom');
+
+describe('pages/shared/wikis/components/wiki_export', () => {
+ let wrapper;
+
+ const createComponent = (provide) => {
+ wrapper = shallowMount(WikiExport, {
+ provide,
+ });
+ };
+
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findPrintItem = () =>
+ findDropdown()
+ .props('items')
+ .find((x) => x.text === 'Print as PDF');
+
+ describe('print', () => {
+ beforeEach(() => {
+ document.body.innerHTML = '<div id="content-body">Content</div>';
+ });
+
+ afterEach(() => {
+ document.body.innerHTML = '';
+ });
+
+ it('should print the content', () => {
+ createComponent({
+ target: '#content-body',
+ title: 'test title',
+ stylesheet: [],
+ });
+
+ findPrintItem().action();
+
+ expect(printMarkdownDom).toHaveBeenCalledWith({
+ target: document.querySelector('#content-body'),
+ title: 'test title',
+ stylesheet: [],
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
index db889abad88..3d838f365b9 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -239,10 +239,9 @@ describe('WikiForm', () => {
});
it('tracks editor type used', () => {
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'editor_type_used', {
- context: 'Wiki',
- editorType: 'editor_type_plain_text_editor',
- label: 'editor_tracking',
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
+ label: 'markdown_editor',
+ property: 'Wiki',
});
});
diff --git a/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js b/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
index d5307b87a11..99a178120cc 100644
--- a/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
+++ b/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
@@ -2,12 +2,14 @@ import { GlButton, GlLink, GlTableLite } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { createAlert } from '~/alert';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
import FailedJobsTable from '~/pipelines/components/jobs/failed_jobs_table.vue';
import RetryFailedJobMutation from '~/pipelines/graphql/mutations/retry_failed_job.mutation.graphql';
+import { TRACKING_CATEGORIES } from '~/pipelines/constants';
import {
successRetryMutationResponse,
failedRetryMutationResponse,
@@ -71,7 +73,9 @@ describe('Failed Jobs Table', () => {
expect(findFirstFailureMessage().text()).toBe('Job failed');
});
- it('calls the retry failed job mutation correctly', () => {
+ it('calls the retry failed job mutation and tracks the click', () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
createComponent(successRetryMutationHandler);
findRetryButton().trigger('click');
@@ -79,6 +83,12 @@ describe('Failed Jobs Table', () => {
expect(successRetryMutationHandler).toHaveBeenCalledWith({
id: mockFailedJobsData[0].id,
});
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_retry', {
+ label: TRACKING_CATEGORIES.failed,
+ });
+
+ unmockTracking();
});
it('redirects to the new job after the mutation', async () => {
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph_spec.js b/spec/frontend/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph_spec.js
deleted file mode 100644
index 69b223461bd..00000000000
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph_spec.js
+++ /dev/null
@@ -1,123 +0,0 @@
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import { GlLoadingIcon } from '@gitlab/ui';
-
-import { createAlert } from '~/alert';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import createMockApollo from 'helpers/mock_apollo_helper';
-
-import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
-import getPipelineStagesQuery from '~/pipelines/graphql/queries/get_pipeline_stages.query.graphql';
-import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
-import GraphqlPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph.vue';
-import * as sharedGraphQlUtils from '~/graphql_shared/utils';
-
-import {
- linkedPipelinesFetchError,
- stagesFetchError,
- mockPipelineStagesQueryResponse,
- mockUpstreamDownstreamQueryResponse,
-} from './mock_data';
-
-Vue.use(VueApollo);
-jest.mock('~/alert');
-
-describe('GraphqlPipelineMiniGraph', () => {
- let wrapper;
- let linkedPipelinesResponse;
- let pipelineStagesResponse;
-
- const fullPath = 'gitlab-org/gitlab';
- const iid = '315';
- const pipelineEtag = '/api/graphql:pipelines/id/315';
-
- const createComponent = ({
- pipelineStagesHandler = pipelineStagesResponse,
- linkedPipelinesHandler = linkedPipelinesResponse,
- } = {}) => {
- const handlers = [
- [getLinkedPipelinesQuery, linkedPipelinesHandler],
- [getPipelineStagesQuery, pipelineStagesHandler],
- ];
- const mockApollo = createMockApollo(handlers);
-
- wrapper = shallowMountExtended(GraphqlPipelineMiniGraph, {
- propsData: {
- fullPath,
- iid,
- pipelineEtag,
- },
- apolloProvider: mockApollo,
- });
-
- return waitForPromises();
- };
-
- const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
-
- beforeEach(() => {
- linkedPipelinesResponse = jest.fn().mockResolvedValue(mockUpstreamDownstreamQueryResponse);
- pipelineStagesResponse = jest.fn().mockResolvedValue(mockPipelineStagesQueryResponse);
- });
-
- describe('when initial queries are loading', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('shows a loading icon and no mini graph', () => {
- expect(findLoadingIcon().exists()).toBe(true);
- expect(findPipelineMiniGraph().exists()).toBe(false);
- });
- });
-
- describe('when queries have loaded', () => {
- it('does not show a loading icon', async () => {
- await createComponent();
-
- expect(findLoadingIcon().exists()).toBe(false);
- });
-
- it('renders the Pipeline Mini Graph', async () => {
- await createComponent();
-
- expect(findPipelineMiniGraph().exists()).toBe(true);
- });
-
- it('fires the queries', async () => {
- await createComponent();
-
- expect(linkedPipelinesResponse).toHaveBeenCalledWith({ iid, fullPath });
- expect(pipelineStagesResponse).toHaveBeenCalledWith({ iid, fullPath });
- });
- });
-
- describe('polling', () => {
- it('toggles query polling with visibility check', async () => {
- jest.spyOn(sharedGraphQlUtils, 'toggleQueryPollingByVisibility');
-
- createComponent();
-
- await waitForPromises();
-
- expect(sharedGraphQlUtils.toggleQueryPollingByVisibility).toHaveBeenCalledTimes(2);
- });
- });
-
- describe('when pipeline queries are unsuccessful', () => {
- const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
- it.each`
- query | handlerName | errorMessage
- ${'pipeline stages'} | ${'pipelineStagesHandler'} | ${stagesFetchError}
- ${'linked pipelines'} | ${'linkedPipelinesHandler'} | ${linkedPipelinesFetchError}
- `('throws an error for the $query query', async ({ errorMessage, handlerName }) => {
- await createComponent({ [handlerName]: failedHandler });
-
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalledWith({ message: errorMessage });
- });
- });
-});
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/job_item_spec.js b/spec/frontend/pipelines/components/pipeline_mini_graph/job_item_spec.js
new file mode 100644
index 00000000000..b89f27e5c05
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipeline_mini_graph/job_item_spec.js
@@ -0,0 +1,29 @@
+import { shallowMount } from '@vue/test-utils';
+import JobItem from '~/pipelines/components/pipeline_mini_graph/job_item.vue';
+
+describe('JobItem', () => {
+ let wrapper;
+
+ const defaultProps = {
+ job: { id: '3' },
+ };
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(JobItem, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the received HTML', () => {
+ expect(wrapper.html()).toContain(defaultProps.job.id);
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js b/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js
new file mode 100644
index 00000000000..6661bb079d2
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js
@@ -0,0 +1,122 @@
+import { mount } from '@vue/test-utils';
+import { pipelines } from 'test_fixtures/pipelines/pipelines.json';
+import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
+import PipelineStages from '~/pipelines/components/pipeline_mini_graph/pipeline_stages.vue';
+import mockLinkedPipelines from './linked_pipelines_mock_data';
+
+const mockStages = pipelines[0].details.stages;
+
+describe('Legacy Pipeline Mini Graph', () => {
+ let wrapper;
+
+ const findLegacyPipelineMiniGraph = () => wrapper.findComponent(LegacyPipelineMiniGraph);
+ const findPipelineStages = () => wrapper.findComponent(PipelineStages);
+
+ const findLinkedPipelineUpstream = () =>
+ wrapper.findComponent('[data-testid="pipeline-mini-graph-upstream"]');
+ const findLinkedPipelineDownstream = () =>
+ wrapper.findComponent('[data-testid="pipeline-mini-graph-downstream"]');
+ const findDownstreamArrowIcon = () => wrapper.find('[data-testid="downstream-arrow-icon"]');
+ const findUpstreamArrowIcon = () => wrapper.find('[data-testid="upstream-arrow-icon"]');
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(LegacyPipelineMiniGraph, {
+ propsData: {
+ stages: mockStages,
+ ...props,
+ },
+ });
+ };
+
+ describe('rendered state without upstream or downstream pipelines', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should render the pipeline stages', () => {
+ expect(findPipelineStages().exists()).toBe(true);
+ });
+
+ it('should have the correct props', () => {
+ expect(findLegacyPipelineMiniGraph().props()).toMatchObject({
+ downstreamPipelines: [],
+ isMergeTrain: false,
+ pipelinePath: '',
+ stages: expect.any(Array),
+ updateDropdown: false,
+ upstreamPipeline: undefined,
+ });
+ });
+
+ it('should have no linked pipelines', () => {
+ expect(findLinkedPipelineDownstream().exists()).toBe(false);
+ expect(findLinkedPipelineUpstream().exists()).toBe(false);
+ });
+
+ it('should not render arrow icons', () => {
+ expect(findUpstreamArrowIcon().exists()).toBe(false);
+ expect(findDownstreamArrowIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('rendered state with upstream pipeline', () => {
+ beforeEach(() => {
+ createComponent({
+ upstreamPipeline: mockLinkedPipelines.triggered_by,
+ });
+ });
+
+ it('should have the correct props', () => {
+ expect(findLegacyPipelineMiniGraph().props()).toMatchObject({
+ downstreamPipelines: [],
+ isMergeTrain: false,
+ pipelinePath: '',
+ stages: expect.any(Array),
+ updateDropdown: false,
+ upstreamPipeline: expect.any(Object),
+ });
+ });
+
+ it('should render the upstream linked pipelines mini list only', () => {
+ expect(findLinkedPipelineUpstream().exists()).toBe(true);
+ expect(findLinkedPipelineDownstream().exists()).toBe(false);
+ });
+
+ it('should render an upstream arrow icon only', () => {
+ expect(findDownstreamArrowIcon().exists()).toBe(false);
+ expect(findUpstreamArrowIcon().exists()).toBe(true);
+ expect(findUpstreamArrowIcon().props('name')).toBe('long-arrow');
+ });
+ });
+
+ describe('rendered state with downstream pipelines', () => {
+ beforeEach(() => {
+ createComponent({
+ downstreamPipelines: mockLinkedPipelines.triggered,
+ pipelinePath: 'my/pipeline/path',
+ });
+ });
+
+ it('should have the correct props', () => {
+ expect(findLegacyPipelineMiniGraph().props()).toMatchObject({
+ downstreamPipelines: expect.any(Array),
+ isMergeTrain: false,
+ pipelinePath: 'my/pipeline/path',
+ stages: expect.any(Array),
+ updateDropdown: false,
+ upstreamPipeline: undefined,
+ });
+ });
+
+ it('should render the downstream linked pipelines mini list only', () => {
+ expect(findLinkedPipelineDownstream().exists()).toBe(true);
+ expect(findLinkedPipelineUpstream().exists()).toBe(false);
+ });
+
+ it('should render a downstream arrow icon only', () => {
+ expect(findUpstreamArrowIcon().exists()).toBe(false);
+ expect(findDownstreamArrowIcon().exists()).toBe(true);
+ expect(findDownstreamArrowIcon().props('name')).toBe('long-arrow');
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage_spec.js b/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage_spec.js
new file mode 100644
index 00000000000..3697eaeea1a
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage_spec.js
@@ -0,0 +1,247 @@
+import { GlDropdown } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import LegacyPipelineStage from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage.vue';
+import eventHub from '~/pipelines/event_hub';
+import waitForPromises from 'helpers/wait_for_promises';
+import { stageReply } from '../../mock_data';
+
+const dropdownPath = 'path.json';
+
+describe('Pipelines stage component', () => {
+ let wrapper;
+ let mock;
+ let glTooltipDirectiveMock;
+
+ const createComponent = (props = {}) => {
+ glTooltipDirectiveMock = jest.fn();
+ wrapper = mount(LegacyPipelineStage, {
+ attachTo: document.body,
+ directives: {
+ GlTooltip: glTooltipDirectiveMock,
+ },
+ propsData: {
+ stage: {
+ status: {
+ group: 'success',
+ icon: 'status_success',
+ title: 'success',
+ },
+ dropdown_path: dropdownPath,
+ },
+ updateDropdown: false,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ jest.spyOn(eventHub, '$emit');
+ });
+
+ afterEach(() => {
+ eventHub.$emit.mockRestore();
+ mock.restore();
+ // eslint-disable-next-line @gitlab/vtu-no-explicit-wrapper-destroy
+ wrapper.destroy();
+ });
+
+ const findCiActionBtn = () => wrapper.find('.js-ci-action');
+ const findCiIcon = () => wrapper.findComponent(CiIcon);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownToggle = () => wrapper.find('button.dropdown-toggle');
+ const findDropdownMenu = () =>
+ wrapper.find('[data-testid="mini-pipeline-graph-dropdown-menu-list"]');
+ const findDropdownMenuTitle = () =>
+ wrapper.find('[data-testid="pipeline-stage-dropdown-menu-title"]');
+ const findMergeTrainWarning = () => wrapper.find('[data-testid="warning-message-merge-trains"]');
+ const findLoadingState = () => wrapper.find('[data-testid="pipeline-stage-loading-state"]');
+
+ const openStageDropdown = async () => {
+ await findDropdownToggle().trigger('click');
+ await waitForPromises();
+ await nextTick();
+ };
+
+ describe('loading state', () => {
+ beforeEach(async () => {
+ createComponent({ updateDropdown: true });
+
+ mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
+
+ await openStageDropdown();
+ });
+
+ it('displays loading state while jobs are being fetched', async () => {
+ jest.runOnlyPendingTimers();
+ await nextTick();
+
+ expect(findLoadingState().exists()).toBe(true);
+ expect(findLoadingState().text()).toBe(LegacyPipelineStage.i18n.loadingText);
+ });
+
+ it('does not display loading state after jobs have been fetched', async () => {
+ await waitForPromises();
+
+ expect(findLoadingState().exists()).toBe(false);
+ });
+ });
+
+ describe('default appearance', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('sets up the tooltip to not have a show delay animation', () => {
+ expect(glTooltipDirectiveMock.mock.calls[0][1].modifiers.ds0).toBe(true);
+ });
+
+ it('renders a dropdown with the status icon', () => {
+ expect(findDropdown().exists()).toBe(true);
+ expect(findDropdownToggle().exists()).toBe(true);
+ expect(findCiIcon().exists()).toBe(true);
+ });
+
+ it('renders a borderless ci-icon', () => {
+ expect(findCiIcon().exists()).toBe(true);
+ expect(findCiIcon().props('isBorderless')).toBe(true);
+ expect(findCiIcon().classes('borderless')).toBe(true);
+ });
+
+ it('renders a ci-icon with a custom border class', () => {
+ expect(findCiIcon().exists()).toBe(true);
+ expect(findCiIcon().classes('gl-border')).toBe(true);
+ });
+ });
+
+ describe('when user opens dropdown and stage request is successful', () => {
+ beforeEach(async () => {
+ mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
+ createComponent();
+
+ await openStageDropdown();
+ await jest.runAllTimers();
+ await axios.waitForAll();
+ });
+
+ it('renders the received data and emits the correct events', () => {
+ expect(findDropdownMenu().text()).toContain(stageReply.latest_statuses[0].name);
+ expect(findDropdownMenuTitle().text()).toContain(stageReply.name);
+ expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
+ expect(wrapper.emitted('miniGraphStageClick')).toEqual([[]]);
+ });
+
+ it('refreshes when updateDropdown is set to true', async () => {
+ expect(mock.history.get).toHaveLength(1);
+
+ wrapper.setProps({ updateDropdown: true });
+ await axios.waitForAll();
+
+ expect(mock.history.get).toHaveLength(2);
+ });
+ });
+
+ describe('when user opens dropdown and stage request fails', () => {
+ it('should close the dropdown', async () => {
+ mock.onGet(dropdownPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+ createComponent();
+
+ await openStageDropdown();
+ await axios.waitForAll();
+ await waitForPromises();
+
+ expect(findDropdown().classes('show')).toBe(false);
+ });
+ });
+
+ describe('update endpoint correctly', () => {
+ beforeEach(async () => {
+ const copyStage = { ...stageReply };
+ copyStage.latest_statuses[0].name = 'this is the updated content';
+ mock.onGet('bar.json').reply(HTTP_STATUS_OK, copyStage);
+ createComponent({
+ stage: {
+ status: {
+ group: 'running',
+ icon: 'status_running',
+ title: 'running',
+ },
+ dropdown_path: 'bar.json',
+ },
+ });
+ await axios.waitForAll();
+ });
+
+ it('should update the stage to request the new endpoint provided', async () => {
+ await openStageDropdown();
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
+
+ expect(findDropdownMenu().text()).toContain('this is the updated content');
+ });
+ });
+
+ describe('job update in dropdown', () => {
+ beforeEach(async () => {
+ mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
+ mock.onPost(`${stageReply.latest_statuses[0].status.action.path}.json`).reply(HTTP_STATUS_OK);
+
+ createComponent();
+ await waitForPromises();
+ await nextTick();
+ });
+
+ const clickCiAction = async () => {
+ await openStageDropdown();
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
+
+ await findCiActionBtn().trigger('click');
+ };
+
+ it('keeps dropdown open when job item action is clicked', async () => {
+ await clickCiAction();
+ await waitForPromises();
+
+ expect(findDropdown().classes('show')).toBe(true);
+ });
+ });
+
+ describe('With merge trains enabled', () => {
+ it('shows a warning on the dropdown', async () => {
+ mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
+ createComponent({
+ isMergeTrain: true,
+ });
+
+ await openStageDropdown();
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
+
+ const warning = findMergeTrainWarning();
+
+ expect(warning.text()).toBe('Merge train pipeline jobs can not be retried');
+ });
+ });
+
+ describe('With merge trains disabled', () => {
+ beforeEach(async () => {
+ mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
+ createComponent();
+
+ await openStageDropdown();
+ await axios.waitForAll();
+ });
+
+ it('does not show a warning on the dropdown', () => {
+ const warning = findMergeTrainWarning();
+
+ expect(warning.exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_mini_graph_spec.js b/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_mini_graph_spec.js
index e7415a6c596..b3e157f75f6 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_mini_graph_spec.js
+++ b/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_mini_graph_spec.js
@@ -1,122 +1,123 @@
-import { mount } from '@vue/test-utils';
-import { pipelines } from 'test_fixtures/pipelines/pipelines.json';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon } from '@gitlab/ui';
+
+import { createAlert } from '~/alert';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+
+import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
+import getPipelineStagesQuery from '~/pipelines/graphql/queries/get_pipeline_stages.query.graphql';
+import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
-import PipelineStages from '~/pipelines/components/pipeline_mini_graph/pipeline_stages.vue';
-import mockLinkedPipelines from './linked_pipelines_mock_data';
+import * as sharedGraphQlUtils from '~/graphql_shared/utils';
-const mockStages = pipelines[0].details.stages;
+import {
+ linkedPipelinesFetchError,
+ stagesFetchError,
+ mockPipelineStagesQueryResponse,
+ mockUpstreamDownstreamQueryResponse,
+} from './mock_data';
-describe('Pipeline Mini Graph', () => {
- let wrapper;
-
- const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
- const findPipelineStages = () => wrapper.findComponent(PipelineStages);
-
- const findLinkedPipelineUpstream = () =>
- wrapper.findComponent('[data-testid="pipeline-mini-graph-upstream"]');
- const findLinkedPipelineDownstream = () =>
- wrapper.findComponent('[data-testid="pipeline-mini-graph-downstream"]');
- const findDownstreamArrowIcon = () => wrapper.find('[data-testid="downstream-arrow-icon"]');
- const findUpstreamArrowIcon = () => wrapper.find('[data-testid="upstream-arrow-icon"]');
+Vue.use(VueApollo);
+jest.mock('~/alert');
- const createComponent = (props = {}) => {
- wrapper = mount(PipelineMiniGraph, {
+describe('PipelineMiniGraph', () => {
+ let wrapper;
+ let linkedPipelinesResponse;
+ let pipelineStagesResponse;
+
+ const fullPath = 'gitlab-org/gitlab';
+ const iid = '315';
+ const pipelineEtag = '/api/graphql:pipelines/id/315';
+
+ const createComponent = ({
+ pipelineStagesHandler = pipelineStagesResponse,
+ linkedPipelinesHandler = linkedPipelinesResponse,
+ } = {}) => {
+ const handlers = [
+ [getLinkedPipelinesQuery, linkedPipelinesHandler],
+ [getPipelineStagesQuery, pipelineStagesHandler],
+ ];
+ const mockApollo = createMockApollo(handlers);
+
+ wrapper = shallowMountExtended(PipelineMiniGraph, {
propsData: {
- stages: mockStages,
- ...props,
+ fullPath,
+ iid,
+ pipelineEtag,
},
+ apolloProvider: mockApollo,
});
+
+ return waitForPromises();
};
- describe('rendered state without upstream or downstream pipelines', () => {
+ const findLegacyPipelineMiniGraph = () => wrapper.findComponent(LegacyPipelineMiniGraph);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ beforeEach(() => {
+ linkedPipelinesResponse = jest.fn().mockResolvedValue(mockUpstreamDownstreamQueryResponse);
+ pipelineStagesResponse = jest.fn().mockResolvedValue(mockPipelineStagesQueryResponse);
+ });
+
+ describe('when initial queries are loading', () => {
beforeEach(() => {
createComponent();
});
- it('should render the pipeline stages', () => {
- expect(findPipelineStages().exists()).toBe(true);
+ it('shows a loading icon and no mini graph', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(false);
});
+ });
- it('should have the correct props', () => {
- expect(findPipelineMiniGraph().props()).toMatchObject({
- downstreamPipelines: [],
- isMergeTrain: false,
- pipelinePath: '',
- stages: expect.any(Array),
- updateDropdown: false,
- upstreamPipeline: undefined,
- });
+ describe('when queries have loaded', () => {
+ it('does not show a loading icon', async () => {
+ await createComponent();
+
+ expect(findLoadingIcon().exists()).toBe(false);
});
- it('should have no linked pipelines', () => {
- expect(findLinkedPipelineDownstream().exists()).toBe(false);
- expect(findLinkedPipelineUpstream().exists()).toBe(false);
+ it('renders the Pipeline Mini Graph', async () => {
+ await createComponent();
+
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(true);
});
- it('should not render arrow icons', () => {
- expect(findUpstreamArrowIcon().exists()).toBe(false);
- expect(findDownstreamArrowIcon().exists()).toBe(false);
+ it('fires the queries', async () => {
+ await createComponent();
+
+ expect(linkedPipelinesResponse).toHaveBeenCalledWith({ iid, fullPath });
+ expect(pipelineStagesResponse).toHaveBeenCalledWith({ iid, fullPath });
});
});
- describe('rendered state with upstream pipeline', () => {
- beforeEach(() => {
- createComponent({
- upstreamPipeline: mockLinkedPipelines.triggered_by,
- });
- });
+ describe('polling', () => {
+ it('toggles query polling with visibility check', async () => {
+ jest.spyOn(sharedGraphQlUtils, 'toggleQueryPollingByVisibility');
- it('should have the correct props', () => {
- expect(findPipelineMiniGraph().props()).toMatchObject({
- downstreamPipelines: [],
- isMergeTrain: false,
- pipelinePath: '',
- stages: expect.any(Array),
- updateDropdown: false,
- upstreamPipeline: expect.any(Object),
- });
- });
+ createComponent();
- it('should render the upstream linked pipelines mini list only', () => {
- expect(findLinkedPipelineUpstream().exists()).toBe(true);
- expect(findLinkedPipelineDownstream().exists()).toBe(false);
- });
+ await waitForPromises();
- it('should render an upstream arrow icon only', () => {
- expect(findDownstreamArrowIcon().exists()).toBe(false);
- expect(findUpstreamArrowIcon().exists()).toBe(true);
- expect(findUpstreamArrowIcon().props('name')).toBe('long-arrow');
+ expect(sharedGraphQlUtils.toggleQueryPollingByVisibility).toHaveBeenCalledTimes(2);
});
});
- describe('rendered state with downstream pipelines', () => {
- beforeEach(() => {
- createComponent({
- downstreamPipelines: mockLinkedPipelines.triggered,
- pipelinePath: 'my/pipeline/path',
- });
- });
+ describe('when pipeline queries are unsuccessful', () => {
+ const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+ it.each`
+ query | handlerName | errorMessage
+ ${'pipeline stages'} | ${'pipelineStagesHandler'} | ${stagesFetchError}
+ ${'linked pipelines'} | ${'linkedPipelinesHandler'} | ${linkedPipelinesFetchError}
+ `('throws an error for the $query query', async ({ errorMessage, handlerName }) => {
+ await createComponent({ [handlerName]: failedHandler });
- it('should have the correct props', () => {
- expect(findPipelineMiniGraph().props()).toMatchObject({
- downstreamPipelines: expect.any(Array),
- isMergeTrain: false,
- pipelinePath: 'my/pipeline/path',
- stages: expect.any(Array),
- updateDropdown: false,
- upstreamPipeline: undefined,
- });
- });
-
- it('should render the downstream linked pipelines mini list only', () => {
- expect(findLinkedPipelineDownstream().exists()).toBe(true);
- expect(findLinkedPipelineUpstream().exists()).toBe(false);
- });
+ await waitForPromises();
- it('should render a downstream arrow icon only', () => {
- expect(findUpstreamArrowIcon().exists()).toBe(false);
- expect(findDownstreamArrowIcon().exists()).toBe(true);
- expect(findDownstreamArrowIcon().props('name')).toBe('long-arrow');
+ expect(createAlert).toHaveBeenCalledWith({ message: errorMessage });
});
});
});
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stage_spec.js b/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stage_spec.js
index 21d92fec9bf..1989aad12b0 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stage_spec.js
+++ b/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stage_spec.js
@@ -1,247 +1,46 @@
-import { GlDropdown } from '@gitlab/ui';
-import { nextTick } from 'vue';
-import { mount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
-import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import PipelineStage from '~/pipelines/components/pipeline_mini_graph/pipeline_stage.vue';
-import eventHub from '~/pipelines/event_hub';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { stageReply } from '../../mock_data';
-
-const dropdownPath = 'path.json';
-
-describe('Pipelines stage component', () => {
- let wrapper;
- let mock;
- let glTooltipDirectiveMock;
-
- const createComponent = (props = {}) => {
- glTooltipDirectiveMock = jest.fn();
- wrapper = mount(PipelineStage, {
- attachTo: document.body,
- directives: {
- GlTooltip: glTooltipDirectiveMock,
- },
- propsData: {
- stage: {
- status: {
- group: 'success',
- icon: 'status_success',
- title: 'success',
- },
- dropdown_path: dropdownPath,
- },
- updateDropdown: false,
- ...props,
- },
- });
- };
+import createMockApollo from 'helpers/mock_apollo_helper';
- beforeEach(() => {
- mock = new MockAdapter(axios);
- jest.spyOn(eventHub, '$emit');
- });
+import getPipelineStageQuery from '~/pipelines/graphql/queries/get_pipeline_stage.query.graphql';
+import PipelineStage from '~/pipelines/components/pipeline_mini_graph/pipeline_stage.vue';
- afterEach(() => {
- eventHub.$emit.mockRestore();
- mock.restore();
- // eslint-disable-next-line @gitlab/vtu-no-explicit-wrapper-destroy
- wrapper.destroy();
- });
+Vue.use(VueApollo);
- const findCiActionBtn = () => wrapper.find('.js-ci-action');
- const findCiIcon = () => wrapper.findComponent(CiIcon);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownToggle = () => wrapper.find('button.dropdown-toggle');
- const findDropdownMenu = () =>
- wrapper.find('[data-testid="mini-pipeline-graph-dropdown-menu-list"]');
- const findDropdownMenuTitle = () =>
- wrapper.find('[data-testid="pipeline-stage-dropdown-menu-title"]');
- const findMergeTrainWarning = () => wrapper.find('[data-testid="warning-message-merge-trains"]');
- const findLoadingState = () => wrapper.find('[data-testid="pipeline-stage-loading-state"]');
+describe('PipelineStage', () => {
+ let wrapper;
+ let pipelineStageResponse;
- const openStageDropdown = async () => {
- await findDropdownToggle().trigger('click');
- await waitForPromises();
- await nextTick();
+ const defaultProps = {
+ pipelineEtag: '/etag',
+ stageId: '1',
};
- describe('loading state', () => {
- beforeEach(async () => {
- createComponent({ updateDropdown: true });
-
- mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
+ const createComponent = ({ pipelineStageHandler = pipelineStageResponse } = {}) => {
+ const handlers = [[getPipelineStageQuery, pipelineStageHandler]];
+ const mockApollo = createMockApollo(handlers);
- await openStageDropdown();
- });
-
- it('displays loading state while jobs are being fetched', async () => {
- jest.runOnlyPendingTimers();
- await nextTick();
-
- expect(findLoadingState().exists()).toBe(true);
- expect(findLoadingState().text()).toBe(PipelineStage.i18n.loadingText);
+ wrapper = shallowMountExtended(PipelineStage, {
+ propsData: {
+ ...defaultProps,
+ },
+ apolloProvider: mockApollo,
});
- it('does not display loading state after jobs have been fetched', async () => {
- await waitForPromises();
+ return waitForPromises();
+ };
- expect(findLoadingState().exists()).toBe(false);
- });
- });
+ const findPipelineStage = () => wrapper.findComponent(PipelineStage);
- describe('default appearance', () => {
+ describe('when mounted', () => {
beforeEach(() => {
createComponent();
});
- it('sets up the tooltip to not have a show delay animation', () => {
- expect(glTooltipDirectiveMock.mock.calls[0][1].modifiers.ds0).toBe(true);
- });
-
- it('renders a dropdown with the status icon', () => {
- expect(findDropdown().exists()).toBe(true);
- expect(findDropdownToggle().exists()).toBe(true);
- expect(findCiIcon().exists()).toBe(true);
- });
-
- it('renders a borderless ci-icon', () => {
- expect(findCiIcon().exists()).toBe(true);
- expect(findCiIcon().props('isBorderless')).toBe(true);
- expect(findCiIcon().classes('borderless')).toBe(true);
- });
-
- it('renders a ci-icon with a custom border class', () => {
- expect(findCiIcon().exists()).toBe(true);
- expect(findCiIcon().classes('gl-border')).toBe(true);
- });
- });
-
- describe('when user opens dropdown and stage request is successful', () => {
- beforeEach(async () => {
- mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
- createComponent();
-
- await openStageDropdown();
- await jest.runAllTimers();
- await axios.waitForAll();
- });
-
- it('renders the received data and emits the correct events', () => {
- expect(findDropdownMenu().text()).toContain(stageReply.latest_statuses[0].name);
- expect(findDropdownMenuTitle().text()).toContain(stageReply.name);
- expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
- expect(wrapper.emitted('miniGraphStageClick')).toEqual([[]]);
- });
-
- it('refreshes when updateDropdown is set to true', async () => {
- expect(mock.history.get).toHaveLength(1);
-
- wrapper.setProps({ updateDropdown: true });
- await axios.waitForAll();
-
- expect(mock.history.get).toHaveLength(2);
- });
- });
-
- describe('when user opens dropdown and stage request fails', () => {
- it('should close the dropdown', async () => {
- mock.onGet(dropdownPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
- createComponent();
-
- await openStageDropdown();
- await axios.waitForAll();
- await waitForPromises();
-
- expect(findDropdown().classes('show')).toBe(false);
- });
- });
-
- describe('update endpoint correctly', () => {
- beforeEach(async () => {
- const copyStage = { ...stageReply };
- copyStage.latest_statuses[0].name = 'this is the updated content';
- mock.onGet('bar.json').reply(HTTP_STATUS_OK, copyStage);
- createComponent({
- stage: {
- status: {
- group: 'running',
- icon: 'status_running',
- title: 'running',
- },
- dropdown_path: 'bar.json',
- },
- });
- await axios.waitForAll();
- });
-
- it('should update the stage to request the new endpoint provided', async () => {
- await openStageDropdown();
- jest.runOnlyPendingTimers();
- await waitForPromises();
-
- expect(findDropdownMenu().text()).toContain('this is the updated content');
- });
- });
-
- describe('job update in dropdown', () => {
- beforeEach(async () => {
- mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
- mock.onPost(`${stageReply.latest_statuses[0].status.action.path}.json`).reply(HTTP_STATUS_OK);
-
- createComponent();
- await waitForPromises();
- await nextTick();
- });
-
- const clickCiAction = async () => {
- await openStageDropdown();
- jest.runOnlyPendingTimers();
- await waitForPromises();
-
- await findCiActionBtn().trigger('click');
- };
-
- it('keeps dropdown open when job item action is clicked', async () => {
- await clickCiAction();
- await waitForPromises();
-
- expect(findDropdown().classes('show')).toBe(true);
- });
- });
-
- describe('With merge trains enabled', () => {
- it('shows a warning on the dropdown', async () => {
- mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
- createComponent({
- isMergeTrain: true,
- });
-
- await openStageDropdown();
- jest.runOnlyPendingTimers();
- await waitForPromises();
-
- const warning = findMergeTrainWarning();
-
- expect(warning.text()).toBe('Merge train pipeline jobs can not be retried');
- });
- });
-
- describe('With merge trains disabled', () => {
- beforeEach(async () => {
- mock.onGet(dropdownPath).reply(HTTP_STATUS_OK, stageReply);
- createComponent();
-
- await openStageDropdown();
- await axios.waitForAll();
- });
-
- it('does not show a warning on the dropdown', () => {
- const warning = findMergeTrainWarning();
-
- expect(warning.exists()).toBe(false);
+ it('renders job item', () => {
+ expect(findPipelineStage().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stages_spec.js b/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stages_spec.js
index 73e810bde99..c212087b7e3 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stages_spec.js
+++ b/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stages_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { pipelines } from 'test_fixtures/pipelines/pipelines.json';
-import PipelineStage from '~/pipelines/components/pipeline_mini_graph/pipeline_stage.vue';
+import LegacyPipelineStage from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage.vue';
import PipelineStages from '~/pipelines/components/pipeline_mini_graph/pipeline_stages.vue';
const mockStages = pipelines[0].details.stages;
@@ -8,8 +8,8 @@ const mockStages = pipelines[0].details.stages;
describe('Pipeline Stages', () => {
let wrapper;
- const findPipelineStages = () => wrapper.findAllComponents(PipelineStage);
- const findPipelineStagesAt = (i) => findPipelineStages().at(i);
+ const findLegacyPipelineStages = () => wrapper.findAllComponents(LegacyPipelineStage);
+ const findPipelineStagesAt = (i) => findLegacyPipelineStages().at(i);
const createComponent = (props = {}) => {
wrapper = shallowMount(PipelineStages, {
@@ -23,14 +23,14 @@ describe('Pipeline Stages', () => {
it('renders stages', () => {
createComponent();
- expect(findPipelineStages()).toHaveLength(mockStages.length);
+ expect(findLegacyPipelineStages()).toHaveLength(mockStages.length);
});
it('does not fail when stages are empty', () => {
createComponent({ stages: [] });
expect(wrapper.exists()).toBe(true);
- expect(findPipelineStages()).toHaveLength(0);
+ expect(findLegacyPipelineStages()).toHaveLength(0);
});
it('update dropdown is false by default', () => {
diff --git a/spec/frontend/pipelines/components/pipeline_tabs_spec.js b/spec/frontend/pipelines/components/pipeline_tabs_spec.js
index fde13128662..0951e1ffb46 100644
--- a/spec/frontend/pipelines/components/pipeline_tabs_spec.js
+++ b/spec/frontend/pipelines/components/pipeline_tabs_spec.js
@@ -1,10 +1,14 @@
-import { shallowMount } from '@vue/test-utils';
import { GlTab } from '@gitlab/ui';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import PipelineTabs from '~/pipelines/components/pipeline_tabs.vue';
+import { TRACKING_CATEGORIES } from '~/pipelines/constants';
describe('The Pipeline Tabs', () => {
let wrapper;
+ let trackingSpy;
+
+ const $router = { push: jest.fn() };
const findDagTab = () => wrapper.findByTestId('dag-tab');
const findFailedJobsTab = () => wrapper.findByTestId('failed-jobs-tab');
@@ -24,18 +28,19 @@ describe('The Pipeline Tabs', () => {
};
const createComponent = (provide = {}) => {
- wrapper = extendedWrapper(
- shallowMount(PipelineTabs, {
- provide: {
- ...defaultProvide,
- ...provide,
- },
- stubs: {
- GlTab,
- RouterView: true,
- },
- }),
- );
+ wrapper = shallowMountExtended(PipelineTabs, {
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
+ stubs: {
+ GlTab,
+ RouterView: true,
+ },
+ mocks: {
+ $router,
+ },
+ });
};
describe('Tabs', () => {
@@ -76,4 +81,34 @@ describe('The Pipeline Tabs', () => {
expect(badgeComponent().text()).toBe(badgeText);
});
});
+
+ describe('Tab tracking', () => {
+ beforeEach(() => {
+ createComponent();
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks failed jobs tab click', () => {
+ findFailedJobsTab().vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_tab', {
+ label: TRACKING_CATEGORIES.failed,
+ });
+ });
+
+ it('tracks tests tab click', () => {
+ findTestsTab().vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_tab', {
+ label: TRACKING_CATEGORIES.tests,
+ });
+ });
+ });
});
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_job_details_spec.js b/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_job_details_spec.js
index 4ba1b82e971..479ee854ecf 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_job_details_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_job_details_spec.js
@@ -8,6 +8,7 @@ import { createAlert } from '~/alert';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import FailedJobDetails from '~/pipelines/components/pipelines_list/failure_widget/failed_job_details.vue';
import RetryMrFailedJobMutation from '~/pipelines/graphql/mutations/retry_mr_failed_job.mutation.graphql';
+import { BRIDGE_KIND } from '~/pipelines/components/graph/constants';
import { job } from './mock';
Vue.use(VueApollo);
@@ -45,8 +46,7 @@ describe('FailedJobDetails component', () => {
const findArrowIcon = () => wrapper.findComponent(GlIcon);
const findJobId = () => wrapper.findComponent(GlLink);
- const findHiddenJobLog = () => wrapper.findByTestId('log-is-hidden');
- const findVisibleJobLog = () => wrapper.findByTestId('log-is-visible');
+ const findJobLog = () => wrapper.findByTestId('job-log');
const findJobName = () => wrapper.findByText(defaultProps.job.name);
const findRetryButton = () => wrapper.findByLabelText('Retry');
const findRow = () => wrapper.findByTestId('widget-row');
@@ -78,8 +78,7 @@ describe('FailedJobDetails component', () => {
});
it('does not renders the job lob', () => {
- expect(findHiddenJobLog().exists()).toBe(true);
- expect(findVisibleJobLog().exists()).toBe(false);
+ expect(findJobLog().exists()).toBe(false);
});
});
@@ -94,6 +93,16 @@ describe('FailedJobDetails component', () => {
});
});
+ describe('when the job is a bridge', () => {
+ beforeEach(() => {
+ createComponent({ props: { job: { ...job, kind: BRIDGE_KIND } } });
+ });
+
+ it('disables the retry button', () => {
+ expect(findRetryButton().props().disabled).toBe(true);
+ });
+ });
+
describe('when the job is retryable', () => {
describe('and user has permission to update the build', () => {
beforeEach(() => {
@@ -178,13 +187,11 @@ describe('FailedJobDetails component', () => {
});
it('does not renders the received html of the job log', () => {
- expect(findVisibleJobLog().html()).not.toContain(defaultProps.job.trace.htmlSummary);
+ expect(findJobLog().html()).not.toContain(defaultProps.job.trace.htmlSummary);
});
it('shows a permission error message', () => {
- expect(findVisibleJobLog().text()).toBe(
- "You do not have permission to read this job's log",
- );
+ expect(findJobLog().text()).toBe("You do not have permission to read this job's log.");
});
});
@@ -200,8 +207,7 @@ describe('FailedJobDetails component', () => {
describe('while collapsed', () => {
it('expands the job log', () => {
- expect(findHiddenJobLog().exists()).toBe(false);
- expect(findVisibleJobLog().exists()).toBe(true);
+ expect(findJobLog().exists()).toBe(true);
});
it('renders the down arrow', () => {
@@ -209,19 +215,17 @@ describe('FailedJobDetails component', () => {
});
it('renders the received html of the job log', () => {
- expect(findVisibleJobLog().html()).toContain(defaultProps.job.trace.htmlSummary);
+ expect(findJobLog().html()).toContain(defaultProps.job.trace.htmlSummary);
});
});
describe('while expanded', () => {
it('collapes the job log', async () => {
- expect(findHiddenJobLog().exists()).toBe(false);
- expect(findVisibleJobLog().exists()).toBe(true);
+ expect(findJobLog().exists()).toBe(true);
await findRow().trigger('click');
- expect(findHiddenJobLog().exists()).toBe(true);
- expect(findVisibleJobLog().exists()).toBe(false);
+ expect(findJobLog().exists()).toBe(false);
});
it('renders the right arrow', async () => {
@@ -236,14 +240,12 @@ describe('FailedJobDetails component', () => {
describe('when clicking on a link element within the row', () => {
it('does not expands/collapse the job log', async () => {
- expect(findHiddenJobLog().exists()).toBe(true);
- expect(findVisibleJobLog().exists()).toBe(false);
+ expect(findJobLog().exists()).toBe(false);
expect(findArrowIcon().props().name).toBe('chevron-right');
await findJobId().vm.$emit('click');
- expect(findHiddenJobLog().exists()).toBe(true);
- expect(findVisibleJobLog().exists()).toBe(false);
+ expect(findJobLog().exists()).toBe(false);
expect(findArrowIcon().props().name).toBe('chevron-right');
});
});
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_jobs_list_spec.js b/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_jobs_list_spec.js
index fc8263c6c4d..967812cc627 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_jobs_list_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_jobs_list_spec.js
@@ -23,14 +23,14 @@ describe('FailedJobsList component', () => {
const showToast = jest.fn();
const defaultProps = {
+ failedJobsCount: 0,
graphqlResourceEtag: 'api/graphql',
isPipelineActive: false,
pipelineIid: 1,
- pipelinePath: '/pipelines/1',
+ projectPath: 'namespace/project/',
};
const defaultProvide = {
- fullPath: 'namespace/project/',
graphqlPath: 'api/graphql',
};
@@ -65,6 +65,21 @@ describe('FailedJobsList component', () => {
mockFailedJobsResponse = jest.fn();
});
+ describe('on mount', () => {
+ beforeEach(() => {
+ mockFailedJobsResponse.mockResolvedValue(failedJobsMock);
+ createComponent();
+ });
+
+ it('fires the graphql query', () => {
+ expect(mockFailedJobsResponse).toHaveBeenCalledTimes(1);
+ expect(mockFailedJobsResponse).toHaveBeenCalledWith({
+ fullPath: defaultProps.projectPath,
+ pipelineIid: defaultProps.pipelineIid,
+ });
+ });
+ });
+
describe('when loading failed jobs', () => {
beforeEach(() => {
mockFailedJobsResponse.mockResolvedValue(failedJobsMock);
@@ -91,7 +106,7 @@ describe('FailedJobsList component', () => {
});
it('renders table column', () => {
- expect(findAllHeaders()).toHaveLength(4);
+ expect(findAllHeaders()).toHaveLength(3);
});
it('shows the list of failed jobs', () => {
@@ -184,6 +199,34 @@ describe('FailedJobsList component', () => {
});
});
+ describe('When the job count changes from REST', () => {
+ beforeEach(() => {
+ mockFailedJobsResponse.mockResolvedValue(failedJobsMockEmpty);
+
+ createComponent();
+ });
+
+ describe('and the count is the same', () => {
+ it('does not re-fetch the query', async () => {
+ expect(mockFailedJobsResponse).toHaveBeenCalledTimes(1);
+
+ await wrapper.setProps({ failedJobsCount: 0 });
+
+ expect(mockFailedJobsResponse).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('and the count is different', () => {
+ it('re-fetches the query', async () => {
+ expect(mockFailedJobsResponse).toHaveBeenCalledTimes(1);
+
+ await wrapper.setProps({ failedJobsCount: 10 });
+
+ expect(mockFailedJobsResponse).toHaveBeenCalledTimes(2);
+ });
+ });
+ });
+
describe('when an error occurs loading jobs', () => {
const errorMessage = "We couldn't fetch jobs for you because you are not qualified";
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js b/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js
index b047b57fc34..318d787a984 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js
+++ b/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js
@@ -3,17 +3,19 @@ export const job = {
allowFailure: false,
detailedStatus: {
id: 'status',
+ detailsPath: '/jobs/5241',
action: {
id: 'action',
path: '/retry',
icon: 'retry',
},
group: 'running',
- icon: 'running-icon',
+ icon: 'status_running_icon',
},
name: 'job-name',
retried: false,
retryable: true,
+ kind: 'BUILD',
stage: {
id: '1',
name: 'build',
@@ -25,7 +27,6 @@ export const job = {
readBuild: true,
updateBuild: true,
},
- webPath: '/',
};
export const allowedToFailJob = {
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js b/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js
index c1a885391e9..5bbb874edb0 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlIcon, GlPopover } from '@gitlab/ui';
+import { GlButton, GlCard, GlIcon, GlPopover } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import PipelineFailedJobsWidget from '~/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget.vue';
import FailedJobsList from '~/pipelines/components/pipelines_list/failure_widget/failed_jobs_list.vue';
@@ -13,6 +13,7 @@ describe('PipelineFailedJobsWidget component', () => {
isPipelineActive: false,
pipelineIid: 1,
pipelinePath: '/pipelines/1',
+ projectPath: 'namespace/project/',
};
const defaultProvide = {
@@ -29,9 +30,11 @@ describe('PipelineFailedJobsWidget component', () => {
...defaultProvide,
...provide,
},
+ stubs: { GlCard },
});
};
+ const findFailedJobsCard = () => wrapper.findByTestId('failed-jobs-card');
const findFailedJobsButton = () => wrapper.findComponent(GlButton);
const findFailedJobsList = () => wrapper.findAllComponents(FailedJobsList);
const findInfoIcon = () => wrapper.findComponent(GlIcon);
@@ -44,7 +47,7 @@ describe('PipelineFailedJobsWidget component', () => {
it('renders the show failed jobs button with a count of 0', () => {
expect(findFailedJobsButton().exists()).toBe(true);
- expect(findFailedJobsButton().text()).toBe('Show failed jobs (0)');
+ expect(findFailedJobsButton().text()).toBe('Failed jobs (0)');
});
});
@@ -55,9 +58,7 @@ describe('PipelineFailedJobsWidget component', () => {
it('renders the show failed jobs button with correct count', () => {
expect(findFailedJobsButton().exists()).toBe(true);
- expect(findFailedJobsButton().text()).toBe(
- `Show failed jobs (${defaultProps.failedJobsCount})`,
- );
+ expect(findFailedJobsButton().text()).toBe(`Failed jobs (${defaultProps.failedJobsCount})`);
});
it('renders the info icon', () => {
@@ -82,6 +83,24 @@ describe('PipelineFailedJobsWidget component', () => {
it('renders the failed jobs widget', () => {
expect(findFailedJobsList().exists()).toBe(true);
});
+
+ it('removes the CSS border classes', () => {
+ expect(findFailedJobsCard().attributes('class')).not.toContain(
+ 'gl-border-white gl-hover-border-gray-100',
+ );
+ });
+ });
+
+ describe('when the job details are not expanded', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('has the CSS border classes', () => {
+ expect(findFailedJobsCard().attributes('class')).toContain(
+ 'gl-border-white gl-hover-border-gray-100',
+ );
+ });
});
describe('when the job count changes', () => {
diff --git a/spec/frontend/pipelines/pipeline_graph/utils_spec.js b/spec/frontend/pipelines/pipeline_graph/utils_spec.js
index 41b020189d0..96b18fcf96f 100644
--- a/spec/frontend/pipelines/pipeline_graph/utils_spec.js
+++ b/spec/frontend/pipelines/pipeline_graph/utils_spec.js
@@ -1,5 +1,5 @@
import { createJobsHash, generateJobNeedsDict, getPipelineDefaultTab } from '~/pipelines/utils';
-import { validPipelineTabNames } from '~/pipelines/constants';
+import { validPipelineTabNames, pipelineTabName } from '~/pipelines/constants';
describe('utils functions', () => {
const jobName1 = 'build_1';
@@ -173,8 +173,8 @@ describe('utils functions', () => {
describe('getPipelineDefaultTab', () => {
const baseUrl = 'http://gitlab.com/user/multi-projects-small/-/pipelines/332/';
- it('returns null if there is only the base url', () => {
- expect(getPipelineDefaultTab(baseUrl)).toBe(null);
+ it('returns pipeline tab name if there is only the base url', () => {
+ expect(getPipelineDefaultTab(baseUrl)).toBe(pipelineTabName);
});
it('returns null if there was no valid last url part', () => {
diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
index 43336bbc748..0fdc45a5931 100644
--- a/spec/frontend/pipelines/pipeline_multi_actions_spec.js
+++ b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
@@ -28,6 +28,20 @@ describe('Pipeline Multi Actions Dropdown', () => {
path: '/download/path-two',
},
];
+ const newArtifacts = [
+ {
+ name: 'job-3 my-new-artifact',
+ path: '/new/download/path',
+ },
+ {
+ name: 'job-4 my-new-artifact-2',
+ path: '/new/download/path-two',
+ },
+ {
+ name: 'job-5 my-new-artifact-3',
+ path: '/new/download/path-three',
+ },
+ ];
const artifactItemTestId = 'artifact-item';
const artifactsEndpointPlaceholder = ':pipeline_artifacts_id';
const artifactsEndpoint = `endpoint/${artifactsEndpointPlaceholder}/artifacts.json`;
@@ -59,8 +73,15 @@ describe('Pipeline Multi Actions Dropdown', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAllArtifactItems = () => wrapper.findAllByTestId(artifactItemTestId);
const findFirstArtifactItem = () => wrapper.findByTestId(artifactItemTestId);
+ const findAllArtifactItemsData = () =>
+ wrapper.findAllByTestId(artifactItemTestId).wrappers.map((x) => ({
+ path: x.attributes('href'),
+ name: x.text(),
+ }));
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findEmptyMessage = () => wrapper.findByTestId('artifacts-empty-message');
+ const findWarning = () => wrapper.findByTestId('artifacts-fetch-warning');
+ const changePipelineId = (newId) => wrapper.setProps({ pipelineId: newId });
beforeEach(() => {
mockAxios = new MockAdapter(axios);
@@ -136,6 +157,80 @@ describe('Pipeline Multi Actions Dropdown', () => {
expect(findFirstArtifactItem().attributes('href')).toBe(artifacts[0].path);
expect(findFirstArtifactItem().text()).toBe(artifacts[0].name);
});
+
+ describe('when opened again with new artifacts', () => {
+ describe('with a successful refetch', () => {
+ beforeEach(async () => {
+ mockAxios.resetHistory();
+ mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_OK, { artifacts: newArtifacts });
+
+ findDropdown().vm.$emit('show');
+ await nextTick();
+ });
+
+ it('should hide list and render a loading spinner on dropdown click', () => {
+ expect(findAllArtifactItems()).toHaveLength(0);
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('should not render warning or empty message while loading', () => {
+ expect(findEmptyMessage().exists()).toBe(false);
+ expect(findWarning().exists()).toBe(false);
+ });
+
+ it('should render the correct new list', async () => {
+ await waitForPromises();
+
+ expect(findAllArtifactItemsData()).toEqual(newArtifacts);
+ });
+ });
+
+ describe('with a failing refetch', () => {
+ beforeEach(async () => {
+ mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+ });
+
+ it('should render warning', () => {
+ expect(findWarning().text()).toBe(i18n.artifactsFetchWarningMessage);
+ });
+
+ it('should render old list', () => {
+ expect(findAllArtifactItemsData()).toEqual(artifacts);
+ });
+ });
+ });
+
+ describe('pipeline id has changed', () => {
+ const newEndpoint = artifactsEndpoint.replace(
+ artifactsEndpointPlaceholder,
+ pipelineId + 1,
+ );
+
+ beforeEach(() => {
+ changePipelineId(pipelineId + 1);
+ });
+
+ describe('followed by a failing request', () => {
+ beforeEach(async () => {
+ mockAxios.onGet(newEndpoint).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+ });
+
+ it('should render error message and no warning', () => {
+ expect(findWarning().exists()).toBe(false);
+ expect(findAlert().text()).toBe(i18n.artifactsFetchErrorMessage);
+ });
+
+ it('should clear list', () => {
+ expect(findAllArtifactItems()).toHaveLength(0);
+ });
+ });
+ });
});
describe('artifacts list is empty', () => {
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index 5b77d44c5bd..cc85d6d99e0 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -744,9 +744,8 @@ describe('Pipelines', () => {
createComponent();
- stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
- restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
- cancelMock = jest.spyOn(wrapper.vm.service.cancelationSource, 'cancel');
+ stopMock = jest.spyOn(window, 'clearTimeout');
+ restartMock = jest.spyOn(axios, 'get');
});
describe('when a request is being made', () => {
@@ -765,13 +764,15 @@ describe('Pipelines', () => {
// cancelMock is getting overwritten in pipelines_service.js#L29
// so we have to spy on it again here
- cancelMock = jest.spyOn(wrapper.vm.service.cancelationSource, 'cancel');
+ cancelMock = jest.spyOn(axios.CancelToken, 'source');
await waitForPromises();
expect(cancelMock).toHaveBeenCalled();
expect(stopMock).toHaveBeenCalled();
- expect(restartMock).toHaveBeenCalled();
+ expect(restartMock).toHaveBeenCalledWith(
+ `${mockPipelinesResponse.pipelines[0].path}/stage.json?stage=build`,
+ );
});
it('stops polling & restarts polling', async () => {
@@ -781,7 +782,9 @@ describe('Pipelines', () => {
expect(cancelMock).not.toHaveBeenCalled();
expect(stopMock).toHaveBeenCalled();
- expect(restartMock).toHaveBeenCalled();
+ expect(restartMock).toHaveBeenCalledWith(
+ `${mockPipelinesResponse.pipelines[0].path}/stage.json?stage=build`,
+ );
});
});
});
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index 251d823cc37..950a6b21e16 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -4,7 +4,8 @@ import { mount } from '@vue/test-utils';
import fixture from 'test_fixtures/pipelines/pipelines.json';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
+import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
+import PipelineFailedJobsWidget from '~/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget.vue';
import PipelineOperations from '~/pipelines/components/pipelines_list/pipeline_operations.vue';
import PipelineTriggerer from '~/pipelines/components/pipelines_list/pipeline_triggerer.vue';
import PipelineUrl from '~/pipelines/components/pipelines_list/pipeline_url.vue';
@@ -70,10 +71,11 @@ describe('Pipelines Table', () => {
const findCiBadgeLink = () => wrapper.findComponent(CiBadgeLink);
const findPipelineInfo = () => wrapper.findComponent(PipelineUrl);
const findTriggerer = () => wrapper.findComponent(PipelineTriggerer);
- const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
+ const findLegacyPipelineMiniGraph = () => wrapper.findComponent(LegacyPipelineMiniGraph);
const findTimeAgo = () => wrapper.findComponent(PipelinesTimeago);
const findActions = () => wrapper.findComponent(PipelineOperations);
+ const findPipelineFailureWidget = () => wrapper.findComponent(PipelineFailedJobsWidget);
const findTableRows = () => wrapper.findAllByTestId('pipeline-table-row');
const findStatusTh = () => wrapper.findByTestId('status-th');
const findPipelineTh = () => wrapper.findByTestId('pipeline-th');
@@ -124,12 +126,12 @@ describe('Pipelines Table', () => {
describe('stages cell', () => {
it('should render pipeline mini graph', () => {
- expect(findPipelineMiniGraph().exists()).toBe(true);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(true);
});
it('should render the right number of stages', () => {
const stagesLength = pipeline.details.stages.length;
- expect(findPipelineMiniGraph().props('stages').length).toBe(stagesLength);
+ expect(findLegacyPipelineMiniGraph().props('stages').length).toBe(stagesLength);
});
it('should render the latest downstream pipelines only', () => {
@@ -137,7 +139,7 @@ describe('Pipelines Table', () => {
// because we retried the trigger job, so the mini pipeline graph will only
// render the newly created downstream pipeline instead
expect(pipeline.triggered).toHaveLength(2);
- expect(findPipelineMiniGraph().props('downstreamPipelines')).toHaveLength(1);
+ expect(findLegacyPipelineMiniGraph().props('downstreamPipelines')).toHaveLength(1);
});
describe('when pipeline does not have stages', () => {
@@ -149,7 +151,7 @@ describe('Pipelines Table', () => {
});
it('stages are not rendered', () => {
- expect(findPipelineMiniGraph().props('stages')).toHaveLength(0);
+ expect(findLegacyPipelineMiniGraph().props('stages')).toHaveLength(0);
});
});
});
@@ -189,6 +191,7 @@ describe('Pipelines Table', () => {
it('does not render', () => {
expect(findTableRows()).toHaveLength(1);
+ expect(findPipelineFailureWidget().exists()).toBe(false);
});
});
@@ -197,8 +200,21 @@ describe('Pipelines Table', () => {
beforeEach(() => {
createComponent({ pipelines: [pipeline] }, provideWithDetails);
});
+
it('renders', () => {
expect(findTableRows()).toHaveLength(2);
+ expect(findPipelineFailureWidget().exists()).toBe(true);
+ });
+
+ it('passes the expected props', () => {
+ expect(findPipelineFailureWidget().props()).toStrictEqual({
+ failedJobsCount: pipeline.failed_builds.length,
+ isPipelineActive: pipeline.active,
+ pipelineIid: pipeline.iid,
+ pipelinePath: pipeline.path,
+ // Make sure the forward slash was removed
+ projectPath: 'frontend-fixtures/pipelines-project',
+ });
});
});
@@ -212,6 +228,7 @@ describe('Pipelines Table', () => {
it('does not render', () => {
expect(findTableRows()).toHaveLength(1);
+ expect(findPipelineFailureWidget().exists()).toBe(false);
});
});
});
@@ -252,7 +269,7 @@ describe('Pipelines Table', () => {
});
it('tracks pipeline mini graph stage click', () => {
- findPipelineMiniGraph().vm.$emit('miniGraphStageClick');
+ findLegacyPipelineMiniGraph().vm.$emit('miniGraphStageClick');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_minigraph', {
label: TRACKING_CATEGORIES.table,
diff --git a/spec/frontend/pipelines/test_reports/test_reports_spec.js b/spec/frontend/pipelines/test_reports/test_reports_spec.js
index c8c917a1b9e..de16f496eff 100644
--- a/spec/frontend/pipelines/test_reports/test_reports_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_reports_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index 8eb83f17f4d..08b430fa703 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -1,5 +1,6 @@
import { GlButton, GlFriendlyWrap, GlLink, GlPagination, GlEmptyState } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
index cfe9ff564dc..a45946d5a03 100644
--- a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import SummaryTable from '~/pipelines/components/test_reports/test_summary_table.vue';
diff --git a/spec/frontend/profile/edit/components/profile_edit_app_spec.js b/spec/frontend/profile/edit/components/profile_edit_app_spec.js
new file mode 100644
index 00000000000..31a368aefa9
--- /dev/null
+++ b/spec/frontend/profile/edit/components/profile_edit_app_spec.js
@@ -0,0 +1,181 @@
+import { GlButton, GlForm } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import { readFileAsDataURL } from '~/lib/utils/file_utility';
+import axios from '~/lib/utils/axios_utils';
+import ProfileEditApp from '~/profile/edit/components/profile_edit_app.vue';
+import UserAvatar from '~/profile/edit/components/user_avatar.vue';
+import SetStatusForm from '~/set_status_modal/set_status_form.vue';
+import { VARIANT_DANGER, VARIANT_INFO, createAlert } from '~/alert';
+import { AVAILABILITY_STATUS } from '~/set_status_modal/constants';
+import { timeRanges } from '~/vue_shared/constants';
+
+jest.mock('~/alert');
+jest.mock('~/lib/utils/file_utility', () => ({
+ readFileAsDataURL: jest.fn().mockResolvedValue(),
+}));
+
+const [oneMinute, oneHour] = timeRanges;
+const defaultProvide = {
+ currentEmoji: 'basketball',
+ currentMessage: 'Foo bar',
+ currentAvailability: AVAILABILITY_STATUS.NOT_SET,
+ defaultEmoji: 'speech_balloon',
+ currentClearStatusAfter: oneMinute.shortcut,
+};
+
+describe('Profile Edit App', () => {
+ let wrapper;
+ let mockAxios;
+
+ const mockAvatarBlob = new Blob([''], { type: 'image/png' });
+
+ const mockAvatarFile = new File([mockAvatarBlob], 'avatar.png', { type: mockAvatarBlob.type });
+
+ const stubbedProfilePath = '/profile/edit';
+ const stubbedUserPath = '/user/test';
+ const successMessage = 'Profile was successfully updated.';
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ProfileEditApp, {
+ propsData: {
+ profilePath: stubbedProfilePath,
+ userPath: stubbedUserPath,
+ },
+ provide: defaultProvide,
+ });
+ };
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+
+ createComponent();
+ });
+
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findButtons = () => wrapper.findAllComponents(GlButton);
+ const findAvatar = () => wrapper.findComponent(UserAvatar);
+ const findSetStatusForm = () => wrapper.findComponent(SetStatusForm);
+ const submitForm = () => findForm().vm.$emit('submit', new Event('submit'));
+ const setAvatar = () => findAvatar().vm.$emit('blob-change', mockAvatarFile);
+ const setStatus = () => {
+ const setStatusForm = findSetStatusForm();
+
+ setStatusForm.vm.$emit('message-input', 'Foo bar baz');
+ setStatusForm.vm.$emit('emoji-click', 'baseball');
+ setStatusForm.vm.$emit('clear-status-after-click', oneHour);
+ setStatusForm.vm.$emit('availability-input', true);
+ };
+
+ it('renders the form for users to interact with', () => {
+ const form = findForm();
+ const buttons = findButtons();
+
+ expect(form.exists()).toBe(true);
+ expect(buttons).toHaveLength(2);
+
+ expect(wrapper.findByTestId('cancel-edit-button').attributes('href')).toBe(stubbedUserPath);
+ });
+
+ it('renders `SetStatusForm` component and passes correct props', () => {
+ expect(findSetStatusForm().props()).toMatchObject({
+ defaultEmoji: defaultProvide.defaultEmoji,
+ emoji: defaultProvide.currentEmoji,
+ message: defaultProvide.currentMessage,
+ availability: false,
+ clearStatusAfter: null,
+ currentClearStatusAfter: defaultProvide.currentClearStatusAfter,
+ });
+ });
+
+ describe('when form submit request is successful', () => {
+ it('shows success alert', async () => {
+ mockAxios.onPut(stubbedProfilePath).reply(200, {
+ message: successMessage,
+ });
+
+ submitForm();
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({ message: successMessage, variant: VARIANT_INFO });
+ });
+
+ it('syncs header avatars', async () => {
+ mockAxios.onPut(stubbedProfilePath).reply(200, {
+ message: successMessage,
+ });
+
+ setAvatar();
+ submitForm();
+
+ await waitForPromises();
+
+ expect(readFileAsDataURL).toHaveBeenCalledWith(mockAvatarFile);
+ });
+
+ it('contains changes from the status form', async () => {
+ mockAxios.onPut(stubbedProfilePath).reply(200, {
+ message: successMessage,
+ });
+
+ setStatus();
+ submitForm();
+
+ await waitForPromises();
+ const axiosRequestData = mockAxios.history.put[0].data;
+
+ expect(axiosRequestData.get('user[status][emoji]')).toBe('baseball');
+ expect(axiosRequestData.get('user[status][clear_status_after]')).toBe(oneHour.shortcut);
+ expect(axiosRequestData.get('user[status][message]')).toBe('Foo bar baz');
+ expect(axiosRequestData.get('user[status][availability]')).toBe(AVAILABILITY_STATUS.BUSY);
+ });
+
+ describe('when clear status after has not been changed', () => {
+ it('does not include it in the API request', async () => {
+ mockAxios.onPut(stubbedProfilePath).reply(200, {
+ message: successMessage,
+ });
+
+ submitForm();
+
+ await waitForPromises();
+ const axiosRequestData = mockAxios.history.put[0].data;
+
+ expect(axiosRequestData.get('user[status][emoji]')).toBe(defaultProvide.currentEmoji);
+ expect(axiosRequestData.get('user[status][clear_status_after]')).toBe(null);
+ expect(axiosRequestData.get('user[status][message]')).toBe(defaultProvide.currentMessage);
+ expect(axiosRequestData.get('user[status][availability]')).toBe(
+ AVAILABILITY_STATUS.NOT_SET,
+ );
+ });
+ });
+ });
+
+ describe('when form submit request is not successful', () => {
+ it('shows error alert', async () => {
+ mockAxios.onPut(stubbedProfilePath).reply(500);
+
+ submitForm();
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({ variant: VARIANT_DANGER }),
+ );
+ });
+ });
+
+ it('submits API request with avatar file', async () => {
+ mockAxios.onPut(stubbedProfilePath).reply(200);
+
+ setAvatar();
+ submitForm();
+
+ await waitForPromises();
+
+ const axiosRequestData = mockAxios.history.put[0].data;
+
+ expect(axiosRequestData.get('user[avatar]')).toEqual(mockAvatarFile);
+ });
+});
diff --git a/spec/frontend/profile/edit/components/user_avatar_spec.js b/spec/frontend/profile/edit/components/user_avatar_spec.js
new file mode 100644
index 00000000000..caa3356b49f
--- /dev/null
+++ b/spec/frontend/profile/edit/components/user_avatar_spec.js
@@ -0,0 +1,139 @@
+import { nextTick } from 'vue';
+import jQuery from 'jquery';
+import { GlAvatar, GlAvatarLink, GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { avatarI18n } from '~/profile/edit/constants';
+import { loadCSSFile } from '~/lib/utils/css_utils';
+
+import UserAvatar from '~/profile/edit/components/user_avatar.vue';
+
+const glCropDataMock = jest.fn().mockImplementation(() => ({
+ getBlob: jest.fn(),
+}));
+
+const jQueryMock = {
+ glCrop: jest.fn().mockReturnValue({
+ data: glCropDataMock,
+ }),
+};
+
+jest.mock(`~/lib/utils/css_utils`);
+jest.mock('jquery');
+
+describe('Edit User Avatar', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ jQuery.mockImplementation(() => jQueryMock);
+ });
+
+ const defaultProvides = {
+ avatarUrl: '/-/profile/avatarUrl',
+ brandProfileImageGuidelines: '',
+ cropperCssPath: '',
+ hasAvatar: true,
+ gravatarEnabled: true,
+ gravatarLink: {
+ hostname: 'gravatar.com',
+ url: 'gravatar.com',
+ },
+ profileAvatarPath: '/profile/avatar',
+ };
+
+ const createComponent = (provides = {}) => {
+ wrapper = shallowMountExtended(UserAvatar, {
+ provide: {
+ ...defaultProvides,
+ ...provides,
+ },
+ });
+ };
+
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
+ const findAvatarLink = () => wrapper.findComponent(GlAvatarLink);
+ const findHelpText = () => wrapper.findComponent(GlSprintf).attributes('message');
+ const findRemoveAvatarButton = () => wrapper.findByTestId('remove-avatar-button');
+
+ describe('renders correctly', () => {
+ it('under default condition', async () => {
+ createComponent();
+ await nextTick();
+
+ expect(jQueryMock.glCrop).toHaveBeenCalledWith({
+ filename: '.js-avatar-filename',
+ previewImage: '.avatar-image .gl-avatar',
+ modalCrop: '.modal-profile-crop',
+ pickImageEl: '.js-choose-user-avatar-button',
+ uploadImageBtn: '.js-upload-user-avatar',
+ modalCropImg: '.modal-profile-crop-image',
+ onBlobChange: expect.any(Function),
+ });
+ expect(glCropDataMock).toHaveBeenCalledWith('glcrop');
+ expect(loadCSSFile).toHaveBeenCalledWith(defaultProvides.cropperCssPath);
+ const avatar = findAvatar();
+
+ expect(avatar.exists()).toBe(true);
+ expect(avatar.attributes('src')).toBe(defaultProvides.avatarUrl);
+ expect(findAvatarLink().attributes('href')).toBe(defaultProvides.avatarUrl);
+
+ const removeAvatarButton = findRemoveAvatarButton();
+ expect(removeAvatarButton.exists()).toBe(true);
+ expect(removeAvatarButton.attributes('href')).toBe(defaultProvides.profileAvatarPath);
+ });
+
+ describe('when user has avatar', () => {
+ describe('while gravatar is enabled', () => {
+ it('shows help text for change or remove avatar', () => {
+ createComponent({
+ gravatarEnabled: true,
+ });
+
+ expect(findHelpText()).toBe(avatarI18n.changeOrRemoveAvatar);
+ });
+ });
+ describe('while gravatar is disabled', () => {
+ it('shows help text for change avatar', () => {
+ createComponent({
+ gravatarEnabled: false,
+ });
+
+ expect(findHelpText()).toBe(avatarI18n.changeAvatar);
+ });
+ });
+ });
+
+ describe('when user does not have an avatar', () => {
+ describe('while gravatar is enabled', () => {
+ it('shows help text for upload or change avatar', () => {
+ createComponent({
+ gravatarEnabled: true,
+ hasAvatar: false,
+ });
+ expect(findHelpText()).toBe(avatarI18n.uploadOrChangeAvatar);
+ });
+ });
+
+ describe('while gravatar is disabled', () => {
+ it('shows help text for upload avatar', () => {
+ createComponent({
+ gravatarEnabled: false,
+ hasAvatar: false,
+ });
+ expect(findHelpText()).toBe(avatarI18n.uploadAvatar);
+ expect(findRemoveAvatarButton().exists()).toBe(false);
+ });
+ });
+ });
+ });
+
+ it('can render profile image guidelines', () => {
+ const brandProfileImageGuidelines = 'brandProfileImageGuidelines';
+ createComponent({
+ brandProfileImageGuidelines,
+ });
+
+ expect(wrapper.findByTestId('brand-profile-image-guidelines').text()).toBe(
+ brandProfileImageGuidelines,
+ );
+ });
+});
diff --git a/spec/frontend/projects/commit/components/branches_dropdown_spec.js b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
index bff40c2bc39..6c595b81455 100644
--- a/spec/frontend/projects/commit/components/branches_dropdown_spec.js
+++ b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
@@ -1,6 +1,7 @@
import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BranchesDropdown from '~/projects/commit/components/branches_dropdown.vue';
diff --git a/spec/frontend/projects/commit/components/projects_dropdown_spec.js b/spec/frontend/projects/commit/components/projects_dropdown_spec.js
index baf2ea2656f..725840cb60b 100644
--- a/spec/frontend/projects/commit/components/projects_dropdown_spec.js
+++ b/spec/frontend/projects/commit/components/projects_dropdown_spec.js
@@ -1,6 +1,7 @@
import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import ProjectsDropdown from '~/projects/commit/components/projects_dropdown.vue';
diff --git a/spec/frontend/projects/commits/components/author_select_spec.js b/spec/frontend/projects/commits/components/author_select_spec.js
index 50e3f2d0f37..d345407c15e 100644
--- a/spec/frontend/projects/commits/components/author_select_spec.js
+++ b/spec/frontend/projects/commits/components/author_select_spec.js
@@ -1,6 +1,7 @@
import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
import setWindowLocation from 'helpers/set_window_location_helper';
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
index 974650a2c7c..4893ee26178 100644
--- a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Project remove modal initialized matches the snapshot 1`] = `
-<form
+<gl-form-stub
action="some/path"
method="post"
>
@@ -16,100 +16,23 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
type="hidden"
/>
+ <delete-modal-stub
+ confirmphrase="foo"
+ forkscount="3"
+ issuescount="1"
+ mergerequestscount="2"
+ starscount="4"
+ />
+
<gl-button-stub
buttontextclasses=""
category="primary"
data-qa-selector="delete_button"
icon=""
- role="button"
size="medium"
- tabindex="0"
variant="danger"
>
Delete project
</gl-button-stub>
-
- <gl-modal-stub
- actioncancel="[object Object]"
- actionprimary="[object Object]"
- arialabel=""
- dismisslabel="Close"
- footer-class="gl-bg-gray-10 gl-p-5"
- modalclass=""
- modalid="fakeUniqueId"
- ok-variant="danger"
- size="md"
- title-class="gl-text-red-500"
- titletag="h4"
- >
-
- <div>
- <gl-alert-stub
- class="gl-mb-5"
- dismisslabel="Dismiss"
- primarybuttonlink=""
- primarybuttontext=""
- secondarybuttonlink=""
- secondarybuttontext=""
- showicon="true"
- title=""
- variant="danger"
- >
- <h4
- class="gl-alert-title"
- data-testid="delete-alert-title"
- >
-
- You are about to delete this project containing:
-
- </h4>
-
- <ul>
- <li>
- 1 issue
- </li>
-
- <li>
- 2 merge requests
- </li>
-
- <li>
- 3 forks
- </li>
-
- <li>
- 4 stars
- </li>
- </ul>
- This project is
- <strong>
- NOT
- </strong>
- a fork. This process deletes the project repository and all related resources.
- </gl-alert-stub>
-
- <p
- class="gl-mb-1"
- >
- Enter the following to confirm:
- </p>
-
- <p>
- <code
- class="gl-white-space-pre-wrap"
- >
- foo
- </code>
- </p>
-
- <gl-form-input-stub
- data-qa-selector="confirm_name_field"
- id="confirm_name_input"
- name="confirm_name_input"
- type="text"
- />
-
- </div>
- </gl-modal-stub>
-</form>
+</gl-form-stub>
`;
diff --git a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
deleted file mode 100644
index ac020fe6915..00000000000
--- a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
+++ /dev/null
@@ -1,116 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Project remove modal intialized matches the snapshot 1`] = `
-<form
- action="some/path"
- method="post"
->
- <input
- name="_method"
- type="hidden"
- value="delete"
- />
-
- <input
- name="authenticity_token"
- type="hidden"
- value="test-csrf-token"
- />
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- data-qa-selector="delete_button"
- icon=""
- role="button"
- size="medium"
- tabindex="0"
- variant="danger"
- >
- Delete project
- </gl-button-stub>
-
- <div
- footer-class="gl-bg-gray-10 gl-p-5"
- ok-variant="danger"
- title-class="gl-text-red-500"
- >
- Are you absolutely sure?
- <div>
- <gl-alert-stub
- class="gl-mb-5"
- dismisslabel="Dismiss"
- primarybuttonlink=""
- primarybuttontext=""
- secondarybuttonlink=""
- secondarybuttontext=""
- showicon="true"
- title=""
- variant="danger"
- >
- <h4
- class="gl-alert-title"
- data-testid="delete-alert-title"
- >
-
- You are about to delete this project containing:
-
- </h4>
-
- <ul>
- <li>
- <gl-sprintf-stub
- message="1 issue"
- />
- </li>
-
- <li>
- <gl-sprintf-stub
- message="2 merge requests"
- />
- </li>
-
- <li>
- <gl-sprintf-stub
- message="3 forks"
- />
- </li>
-
- <li>
- <gl-sprintf-stub
- message="4 stars"
- />
- </li>
- </ul>
-
- <gl-sprintf-stub
- data-testid="delete-alert-body"
- message="This project is %{strongStart}NOT%{strongEnd} a fork. This process deletes the project repository and all related resources."
- />
- </gl-alert-stub>
-
- <p
- class="gl-mb-1"
- >
- Enter the following to confirm:
- </p>
-
- <p>
- <code
- class="gl-white-space-pre-wrap"
- >
- foo
- </code>
- </p>
-
- <gl-form-input-stub
- data-qa-selector="confirm_name_field"
- id="confirm_name_input"
- name="confirm_name_input"
- type="text"
- />
-
- </div>
- </div>
-</form>
-`;
diff --git a/spec/frontend/projects/components/shared/delete_button_spec.js b/spec/frontend/projects/components/shared/delete_button_spec.js
index 6b4ef341b0c..556c1ae7084 100644
--- a/spec/frontend/projects/components/shared/delete_button_spec.js
+++ b/spec/frontend/projects/components/shared/delete_button_spec.js
@@ -1,21 +1,17 @@
-import { GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { stubComponent } from 'helpers/stub_component';
-import SharedDeleteButton from '~/projects/components/shared/delete_button.vue';
+import { GlForm, GlButton } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import DeleteButton from '~/projects/components/shared/delete_button.vue';
+import DeleteModal from '~/projects/components/shared/delete_modal.vue';
jest.mock('~/lib/utils/csrf', () => ({ token: 'test-csrf-token' }));
-describe('Project remove modal', () => {
+describe('DeleteButton', () => {
let wrapper;
- const findFormElement = () => wrapper.find('form');
- const findConfirmButton = () => wrapper.find('.js-modal-action-primary');
- const findAuthenticityTokenInput = () => findFormElement().find('input[name=authenticity_token]');
- const findModal = () => wrapper.findComponent(GlModal);
- const findTitle = () => wrapper.find('[data-testid="delete-alert-title"]');
- const findAlertBody = () => wrapper.find('[data-testid="delete-alert-body"]');
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findModal = () => wrapper.findComponent(DeleteModal);
- const defaultProps = {
+ const defaultPropsData = {
confirmPhrase: 'foo',
formPath: 'some/path',
isFork: false,
@@ -25,88 +21,68 @@ describe('Project remove modal', () => {
starsCount: 4,
};
- const createComponent = (data = {}, stubs = {}, props = {}) => {
- wrapper = shallowMount(SharedDeleteButton, {
+ const createComponent = (propsData) => {
+ wrapper = shallowMountExtended(DeleteButton, {
propsData: {
- ...defaultProps,
- ...props,
+ ...defaultPropsData,
+ ...propsData,
},
- data: () => data,
- stubs: {
- GlModal: stubComponent(GlModal, {
- template: `
- <div>
- <slot name="modal-title"></slot>
- <slot></slot>
- </div>`,
- }),
- ...stubs,
+ scopedSlots: {
+ 'modal-footer': '<div data-testid="modal-footer-slot"></div>',
},
});
};
- describe('intialized', () => {
- beforeEach(() => {
- createComponent();
- });
+ it('renders modal and passes correct props', () => {
+ createComponent();
- it('matches the snapshot', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('sets a csrf token on the authenticity form input', () => {
- expect(findAuthenticityTokenInput().element.value).toEqual('test-csrf-token');
- });
+ const { formPath, ...expectedProps } = defaultPropsData;
- it('sets the form action to the provided path', () => {
- expect(findFormElement().attributes('action')).toEqual(defaultProps.formPath);
+ expect(findModal().props()).toMatchObject({
+ visible: false,
+ ...expectedProps,
});
});
- describe('when the user input does not match the confirmPhrase', () => {
- beforeEach(() => {
- createComponent({ userInput: 'bar' }, { GlModal });
- });
+ it('renders form with required inputs', () => {
+ createComponent();
- it('the confirm button is disabled', () => {
- expect(findConfirmButton().attributes('disabled')).toBeDefined();
- });
+ const form = findForm();
+
+ expect(form.find('input[name="_method"]').attributes('value')).toBe('delete');
+ expect(form.find('input[name="authenticity_token"]').attributes('value')).toBe(
+ 'test-csrf-token',
+ );
});
- describe('when the user input matches the confirmPhrase', () => {
+ describe('when button is clicked', () => {
beforeEach(() => {
- createComponent({ userInput: defaultProps.confirmPhrase }, { GlModal });
+ createComponent();
+ wrapper.findComponent(GlButton).vm.$emit('click');
});
- it('the confirm button is not disabled', () => {
- expect(findConfirmButton().attributes('disabled')).toBe(undefined);
+ it('opens modal', () => {
+ expect(findModal().props('visible')).toBe(true);
});
});
- describe('when the modal is confirmed', () => {
- beforeEach(() => {
+ describe('when modal emits `primary` event', () => {
+ it('submits the form', () => {
createComponent();
- findModal().vm.$emit('ok');
- });
- it('submits the form element', () => {
- expect(findFormElement().element.submit).toHaveBeenCalled();
- });
- });
+ const submitMock = jest.fn();
- describe('when project is a fork', () => {
- beforeEach(() => {
- createComponent({}, {}, { isFork: true });
- });
+ findForm().element.submit = submitMock;
- it('matches the fork title', () => {
- expect(findTitle().text()).toEqual('You are about to delete this forked project containing:');
- });
+ findModal().vm.$emit('primary');
- it('matches the fork body', () => {
- expect(findAlertBody().attributes().message).toEqual(
- 'This process deletes the project repository and all related resources.',
- );
+ expect(submitMock).toHaveBeenCalled();
});
});
+
+ it('renders `modal-footer` slot', () => {
+ createComponent();
+
+ expect(wrapper.findByTestId('modal-footer-slot').exists()).toBe(true);
+ });
});
diff --git a/spec/frontend/projects/components/shared/delete_modal_spec.js b/spec/frontend/projects/components/shared/delete_modal_spec.js
new file mode 100644
index 00000000000..c6213fd4b6d
--- /dev/null
+++ b/spec/frontend/projects/components/shared/delete_modal_spec.js
@@ -0,0 +1,167 @@
+import { GlFormInput, GlModal, GlAlert } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import DeleteModal from '~/projects/components/shared/delete_modal.vue';
+import { __, sprintf } from '~/locale';
+import { stubComponent } from 'helpers/stub_component';
+
+jest.mock('lodash/uniqueId', () => () => 'fake-id');
+
+describe('DeleteModal', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ visible: false,
+ confirmPhrase: 'foo',
+ isFork: false,
+ issuesCount: 1,
+ mergeRequestsCount: 2,
+ forksCount: 3,
+ starsCount: 4,
+ };
+
+ const createComponent = (propsData) => {
+ wrapper = mountExtended(DeleteModal, {
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
+ stubs: {
+ GlModal: stubComponent(GlModal),
+ },
+ scopedSlots: {
+ 'modal-footer': '<div data-testid="modal-footer-slot"></div>',
+ },
+ });
+ };
+
+ const findGlModal = () => wrapper.findComponent(GlModal);
+ const alertText = () => wrapper.findComponent(GlAlert).text();
+ const findFormInput = () => wrapper.findComponent(GlFormInput);
+
+ it('renders modal with correct props', () => {
+ createComponent();
+
+ expect(findGlModal().props()).toMatchObject({
+ visible: defaultPropsData.visible,
+ modalId: 'fake-id',
+ actionPrimary: {
+ text: __('Yes, delete project'),
+ attributes: {
+ variant: 'danger',
+ disabled: true,
+ 'data-qa-selector': 'confirm_delete_button',
+ },
+ },
+ actionCancel: {
+ text: __('Cancel, keep project'),
+ },
+ });
+ });
+
+ describe('when resource counts are set', () => {
+ it('displays resource counts', () => {
+ createComponent();
+
+ expect(alertText()).toContain(`${defaultPropsData.issuesCount} issue`);
+ expect(alertText()).toContain(`${defaultPropsData.mergeRequestsCount} merge requests`);
+ expect(alertText()).toContain(`${defaultPropsData.forksCount} forks`);
+ expect(alertText()).toContain(`${defaultPropsData.starsCount} stars`);
+ });
+ });
+
+ describe('when resource counts are not set', () => {
+ it('does not display resource counts', () => {
+ createComponent({
+ issuesCount: null,
+ mergeRequestsCount: null,
+ forksCount: null,
+ starsCount: null,
+ });
+
+ expect(alertText()).not.toContain('issue');
+ expect(alertText()).not.toContain('merge requests');
+ expect(alertText()).not.toContain('forks');
+ expect(alertText()).not.toContain('stars');
+ });
+ });
+
+ describe('when project is a fork', () => {
+ beforeEach(() => {
+ createComponent({
+ isFork: true,
+ });
+ });
+
+ it('displays correct alert title', () => {
+ expect(alertText()).toContain(DeleteModal.i18n.isForkAlertTitle);
+ });
+
+ it('displays correct alert body', () => {
+ expect(alertText()).toContain(DeleteModal.i18n.isForkAlertBody);
+ });
+ });
+
+ describe('when project is not a fork', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays correct alert title', () => {
+ expect(alertText()).toContain(
+ sprintf(DeleteModal.i18n.isNotForkAlertTitle, { strongStart: '', strongEnd: '' }),
+ );
+ });
+
+ it('displays correct alert body', () => {
+ expect(alertText()).toContain(
+ sprintf(DeleteModal.i18n.isNotForkAlertBody, { strongStart: '', strongEnd: '' }),
+ );
+ });
+ });
+
+ describe('when correct confirm phrase is used', () => {
+ beforeEach(() => {
+ createComponent();
+
+ findFormInput().vm.$emit('input', defaultPropsData.confirmPhrase);
+ });
+
+ it('enables the primary action', () => {
+ expect(findGlModal().props('actionPrimary').attributes.disabled).toBe(false);
+ });
+ });
+
+ describe('when correct confirm phrase is not used', () => {
+ beforeEach(() => {
+ createComponent();
+
+ findFormInput().vm.$emit('input', 'bar');
+ });
+
+ it('keeps the primary action disabled', () => {
+ expect(findGlModal().props('actionPrimary').attributes.disabled).toBe(true);
+ });
+ });
+
+ it('emits `primary` event', () => {
+ createComponent();
+
+ findGlModal().vm.$emit('primary');
+
+ expect(wrapper.emitted('primary')).toEqual([[]]);
+ });
+
+ it('emits `change` event', () => {
+ createComponent();
+
+ findGlModal().vm.$emit('change', true);
+
+ expect(wrapper.emitted('change')).toEqual([[true]]);
+ });
+
+ it('renders `modal-footer` slot', () => {
+ createComponent();
+
+ expect(wrapper.findByTestId('modal-footer-slot').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
index 38760a724ff..94cfa53d1cd 100644
--- a/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
@@ -82,15 +82,25 @@ describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', (
]);
});
- it('should select a different chart on change', async () => {
- findSegmentedControl().vm.$emit('input', 1);
+ describe('when the date range is updated', () => {
+ let chart;
- const chart = wrapper.findComponent(CiCdAnalyticsAreaChart);
+ beforeEach(async () => {
+ chart = wrapper.findComponent(CiCdAnalyticsAreaChart);
- await nextTick();
+ await findSegmentedControl().vm.$emit('input', 1);
+ });
+
+ it('should select a different chart on change', () => {
+ expect(chart.props('chartData')).toEqual(transformedAreaChartData);
+ expect(chart.text()).toBe('Date range: test range 2');
+ });
- expect(chart.props('chartData')).toEqual(transformedAreaChartData);
- expect(chart.text()).toBe('Date range: test range 2');
+ it('will emit a `select-chart` event', () => {
+ expect(wrapper.emitted()).toEqual({
+ 'select-chart': [[1]],
+ });
+ });
});
});
diff --git a/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js b/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
index ab2a12219e5..137100be34e 100644
--- a/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
@@ -72,6 +72,7 @@ describe('~/projects/pipelines/charts/components/pipeline_charts.vue', () => {
expect(charts.props()).toEqual({
charts: wrapper.vm.areaCharts,
chartOptions: wrapper.vm.$options.areaChartOptions,
+ loading: false,
});
});
});
diff --git a/spec/frontend/projects/project_new_spec.js b/spec/frontend/projects/project_new_spec.js
index 54d0cfaa8c6..7f6b5d1f7c8 100644
--- a/spec/frontend/projects/project_new_spec.js
+++ b/spec/frontend/projects/project_new_spec.js
@@ -131,9 +131,9 @@ describe('New Project', () => {
});
});
- describe("Name can contain only letters, digits, emojis, '_', '.', '+', dashes, or spaces", () => {
+ describe("Name can contain only letters, digits, emoji, '_', '.', '+', dashes, or spaces", () => {
const errormsg =
- 'Name can contain only lowercase or uppercase letters, digits, emojis, spaces, dots, underscores, dashes, or pluses.';
+ 'Name can contain only lowercase or uppercase letters, digits, emoji, spaces, dots, underscores, dashes, or pluses.';
it("'foo(#^.^#)foo' should error", () => {
const text = 'foo(#^.^#)foo';
expect(checkRules(text)).toBe(errormsg);
diff --git a/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js b/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js
index f28bc13895e..4fee969ee62 100644
--- a/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js
+++ b/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js
@@ -1,4 +1,4 @@
-import { GlToggle } from '@gitlab/ui';
+import { GlLink, GlSprintf, GlToggle } from '@gitlab/ui';
import MockAxiosAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -8,6 +8,8 @@ import { HTTP_STATUS_OK, HTTP_STATUS_UNAUTHORIZED } from '~/lib/utils/http_statu
import SharedRunnersToggleComponent from '~/projects/settings/components/shared_runners_toggle.vue';
const TEST_UPDATE_PATH = '/test/update_shared_runners';
+const mockParentName = 'My group';
+const mockGroupSettingsPath = '/groups/my-group/-/settings/ci_cd';
jest.mock('~/alert');
@@ -25,6 +27,9 @@ describe('projects/settings/components/shared_runners', () => {
isCreditCardValidationRequired: false,
...props,
},
+ stubs: {
+ GlSprintf,
+ },
});
};
@@ -55,8 +60,30 @@ describe('projects/settings/components/shared_runners', () => {
expect(isToggleDisabled()).toBe(true);
});
- it('alert should exist explaining why the toggle is disabled', () => {
- expect(findUnoverridableAlert().exists()).toBe(true);
+ it('renders text explaining why the toggle is disabled', () => {
+ expect(findSharedRunnersToggle().text()).toEqual(
+ 'Shared runners are disabled in the group settings.',
+ );
+ });
+
+ describe('when user can configure group', () => {
+ beforeEach(() => {
+ createComponent({
+ isDisabledAndUnoverridable: true,
+ groupName: mockParentName,
+ groupSettingsPath: mockGroupSettingsPath,
+ });
+ });
+
+ it('renders link to enable', () => {
+ expect(findSharedRunnersToggle().text()).toContain(
+ `Go to ${mockParentName} to enable them.`,
+ );
+
+ const link = findSharedRunnersToggle().findComponent(GlLink);
+ expect(link.text()).toBe(mockParentName);
+ expect(link.attributes('href')).toBe(mockGroupSettingsPath);
+ });
});
});
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_confirm_modal_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_confirm_modal_spec.js
new file mode 100644
index 00000000000..2ab50c93f6c
--- /dev/null
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_confirm_modal_spec.js
@@ -0,0 +1,52 @@
+import { GlModal } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import CustomEmailConfirmModal from '~/projects/settings_service_desk/components/custom_email_confirm_modal.vue';
+
+describe('CustomEmailConfirmModal', () => {
+ let wrapper;
+
+ const defaultProps = { visible: false, customEmail: 'user@example.com' };
+
+ const findModal = () => wrapper.findComponent(GlModal);
+
+ const createWrapper = (props = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(CustomEmailConfirmModal, { propsData: { ...defaultProps, ...props } }),
+ );
+ };
+
+ it('does not display modal', () => {
+ createWrapper();
+
+ expect(findModal().props('visible')).toBe(false);
+ });
+
+ describe('when visible', () => {
+ beforeEach(() => {
+ createWrapper({ visible: true });
+ });
+
+ it('displays the modal', () => {
+ expect(findModal().props('visible')).toBe(true);
+ });
+
+ it('emits remove event on primary button click', () => {
+ findModal().vm.$emit('primary');
+
+ expect(wrapper.emitted('remove')).toEqual([[]]);
+ });
+
+ it('emits cancel event on cancel button click', () => {
+ findModal().vm.$emit('canceled');
+
+ expect(wrapper.emitted('cancel')).toEqual([[]]);
+ });
+
+ it('emits cancel event on close button click', () => {
+ findModal().vm.$emit('hidden');
+
+ expect(wrapper.emitted('cancel')).toEqual([[]]);
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js
new file mode 100644
index 00000000000..ded8b181c4e
--- /dev/null
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js
@@ -0,0 +1,199 @@
+import { mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import CustomEmailForm from '~/projects/settings_service_desk/components/custom_email_form.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import { I18N_FORM_FORWARDING_CLIPBOARD_BUTTON_TITLE } from '~/projects/settings_service_desk/custom_email_constants';
+
+describe('CustomEmailForm', () => {
+ let wrapper;
+
+ const defaultProps = {
+ incomingEmail: 'incoming@example.com',
+ isSubmitting: false,
+ };
+
+ const findForm = () => wrapper.find('form');
+ const findClipboardButton = () => wrapper.findComponent(ClipboardButton);
+ const findInputByTestId = (testId) => wrapper.findByTestId(testId).find('input');
+ const findCustomEmailInput = () => findInputByTestId('form-custom-email');
+ const findSmtpAddressInput = () => findInputByTestId('form-smtp-address');
+ const findSmtpPortInput = () => findInputByTestId('form-smtp-port');
+ const findSmtpUsernameInput = () => findInputByTestId('form-smtp-username');
+ const findSmtpPasswordInput = () => findInputByTestId('form-smtp-password');
+ const findSubmit = () => wrapper.findByTestId('form-submit');
+
+ const clickButtonAndExpectNoSubmitEvent = async () => {
+ await nextTick();
+ findForm().trigger('submit');
+
+ expect(findSubmit().find('button').attributes('disabled')).toBeDefined();
+ expect(wrapper.emitted('submit')).toEqual(undefined);
+ };
+
+ const createWrapper = (props = {}) => {
+ wrapper = extendedWrapper(mount(CustomEmailForm, { propsData: { ...defaultProps, ...props } }));
+ };
+
+ it('renders a copy to clipboard button', () => {
+ createWrapper();
+
+ expect(findClipboardButton().exists()).toBe(true);
+ expect(findClipboardButton().props()).toEqual(
+ expect.objectContaining({
+ title: I18N_FORM_FORWARDING_CLIPBOARD_BUTTON_TITLE,
+ text: defaultProps.incomingEmail,
+ }),
+ );
+ });
+
+ it('form inputs are disabled when submitting', () => {
+ createWrapper({ isSubmitting: true });
+
+ expect(findCustomEmailInput().attributes('disabled')).toBeDefined();
+ expect(findSmtpAddressInput().attributes('disabled')).toBeDefined();
+ expect(findSmtpPortInput().attributes('disabled')).toBeDefined();
+ expect(findSmtpUsernameInput().attributes('disabled')).toBeDefined();
+ expect(findSmtpPasswordInput().attributes('disabled')).toBeDefined();
+ expect(findSubmit().props('loading')).toBe(true);
+ });
+
+ describe('form validation and submit event', () => {
+ it('is invalid when form inputs are empty', async () => {
+ createWrapper();
+
+ await nextTick();
+ findForm().trigger('submit');
+
+ expect(wrapper.emitted('submit')).toEqual(undefined);
+ });
+
+ describe('with inputs set', () => {
+ beforeEach(() => {
+ createWrapper();
+
+ findCustomEmailInput().setValue('user@example.com');
+ findCustomEmailInput().trigger('change');
+
+ findSmtpAddressInput().setValue('smtp.example.com');
+ findSmtpAddressInput().trigger('change');
+
+ findSmtpPortInput().setValue('587');
+ findSmtpPortInput().trigger('change');
+
+ findSmtpUsernameInput().setValue('user@example.com');
+ findSmtpUsernameInput().trigger('change');
+
+ findSmtpPasswordInput().setValue('supersecret');
+ findSmtpPasswordInput().trigger('change');
+ });
+
+ it('is invalid when malformed email provided', async () => {
+ findCustomEmailInput().setValue('userexample.com');
+ findCustomEmailInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findCustomEmailInput().classes()).toContain('is-invalid');
+ });
+
+ it('is invalid when email is not set', async () => {
+ findCustomEmailInput().setValue('');
+ findCustomEmailInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findCustomEmailInput().classes()).toContain('is-invalid');
+ });
+
+ it('is invalid when smtp address is not set', async () => {
+ findSmtpAddressInput().setValue('');
+ findSmtpAddressInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findSmtpAddressInput().classes()).toContain('is-invalid');
+ });
+
+ it('is invalid when smtp port is not set', async () => {
+ findSmtpPortInput().setValue('');
+ findSmtpPortInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findSmtpPortInput().classes()).toContain('is-invalid');
+ });
+
+ it('is invalid when smtp port is not an integer', async () => {
+ findSmtpPortInput().setValue('20m2');
+ findSmtpPortInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findSmtpPortInput().classes()).toContain('is-invalid');
+ });
+
+ it('is invalid when smtp port is 0', async () => {
+ findSmtpPortInput().setValue('0');
+ findSmtpPortInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findSmtpPortInput().classes()).toContain('is-invalid');
+ });
+
+ it('is invalid when smtp username is not set', async () => {
+ findSmtpUsernameInput().setValue('');
+ findSmtpUsernameInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findSmtpUsernameInput().classes()).toContain('is-invalid');
+ });
+
+ it('is invalid when password is too short', async () => {
+ findSmtpPasswordInput().setValue('2short');
+ findSmtpPasswordInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findSmtpPasswordInput().classes()).toContain('is-invalid');
+ });
+
+ it('is invalid when password is not set', async () => {
+ findSmtpPasswordInput().setValue('');
+ findSmtpPasswordInput().trigger('change');
+
+ await clickButtonAndExpectNoSubmitEvent();
+ expect(findSmtpPasswordInput().classes()).toContain('is-invalid');
+ });
+
+ it('sets smtpUsername automatically when empty based on customEmail', async () => {
+ const email = 'support@example.com';
+
+ findSmtpUsernameInput().setValue('');
+ findSmtpUsernameInput().trigger('change');
+
+ findCustomEmailInput().setValue(email);
+ findCustomEmailInput().trigger('change');
+
+ await nextTick();
+
+ expect(findSmtpUsernameInput().element.value).toBe(email);
+ expect(wrapper.html()).not.toContain('is-invalid');
+ });
+
+ it('is valid and emits submit event with form data', async () => {
+ await nextTick();
+
+ expect(wrapper.html()).not.toContain('is-invalid');
+
+ findForm().trigger('submit');
+
+ expect(wrapper.emitted('submit')).toEqual([
+ [
+ {
+ custom_email: 'user@example.com',
+ smtp_address: 'smtp.example.com',
+ smtp_password: 'supersecret',
+ smtp_port: '587',
+ smtp_username: 'user@example.com',
+ },
+ ],
+ ]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js
new file mode 100644
index 00000000000..2808a25296d
--- /dev/null
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js
@@ -0,0 +1,134 @@
+import { GlButton, GlToggle, GlBadge } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import CustomEmail from '~/projects/settings_service_desk/components/custom_email.vue';
+import {
+ I18N_VERIFICATION_ERRORS,
+ I18N_STATE_VERIFICATION_STARTED,
+ I18N_STATE_VERIFICATION_FAILED,
+ I18N_STATE_VERIFICATION_FAILED_RESET_PARAGRAPH,
+ I18N_STATE_VERIFICATION_STARTED_RESET_PARAGRAPH,
+ I18N_STATE_VERIFICATION_FINISHED_RESET_PARAGRAPH,
+} from '~/projects/settings_service_desk/custom_email_constants';
+
+describe('CustomEmail', () => {
+ let wrapper;
+
+ const defaultProps = {
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ verificationState: 'started',
+ verificationError: null,
+ enabled: false,
+ submitting: false,
+ };
+
+ const findButton = () => wrapper.findComponent(GlButton);
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findToggle = () => wrapper.findComponent(GlToggle);
+
+ const createWrapper = (props = {}) => {
+ wrapper = mount(CustomEmail, { propsData: { ...defaultProps, ...props } });
+ };
+
+ it('displays the custom email address and smtp address in the body', () => {
+ createWrapper();
+ const text = wrapper.text();
+
+ expect(text).toContain(defaultProps.customEmail);
+ expect(text).toContain(defaultProps.smtpAddress);
+ });
+
+ describe('when verificationState is started', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('displays badge with correct props', () => {
+ expect(findBadge().props('variant')).toBe('info');
+ expect(findBadge().text()).toBe(I18N_STATE_VERIFICATION_STARTED);
+ });
+
+ it('displays reset paragraph', () => {
+ expect(wrapper.text()).toContain(I18N_STATE_VERIFICATION_STARTED_RESET_PARAGRAPH);
+ });
+ });
+
+ describe('when verificationState is failed', () => {
+ beforeEach(() => {
+ createWrapper({ verificationState: 'failed' });
+ });
+
+ it('displays badge with correct props', () => {
+ expect(findBadge().props('variant')).toBe('danger');
+ expect(findBadge().text()).toBe(I18N_STATE_VERIFICATION_FAILED);
+ });
+
+ it('displays reset paragraph', () => {
+ expect(wrapper.text()).toContain(I18N_STATE_VERIFICATION_FAILED_RESET_PARAGRAPH);
+ });
+ });
+
+ describe('verification error', () => {
+ it.each([
+ 'smtp_host_issue',
+ 'invalid_credentials',
+ 'mail_not_received_within_timeframe',
+ 'incorrect_from',
+ 'incorrect_token',
+ ])('displays %s label and description', (error) => {
+ createWrapper({ verificationError: error });
+ const text = wrapper.text();
+
+ expect(text).toContain(I18N_VERIFICATION_ERRORS[error].label);
+ expect(text).toContain(I18N_VERIFICATION_ERRORS[error].description);
+ });
+ });
+
+ describe('when verificationState is finished', () => {
+ beforeEach(() => {
+ createWrapper({ verificationState: 'finished' });
+ });
+
+ it('displays reset paragraph', () => {
+ expect(wrapper.text()).toContain(I18N_STATE_VERIFICATION_FINISHED_RESET_PARAGRAPH);
+ });
+
+ it('toggle value is false', () => {
+ expect(findToggle().props('value')).toBe(false);
+ });
+
+ it('emits a toggle event when toggle is clicked', async () => {
+ findToggle().vm.$emit('change', true);
+ await nextTick();
+
+ expect(wrapper.emitted('toggle')).toEqual([[true]]);
+ });
+ });
+
+ describe('when enabled', () => {
+ beforeEach(() => {
+ createWrapper({ verificationState: 'finished', isEnabled: true });
+ });
+
+ it('value is true', () => {
+ expect(findToggle().props('value')).toBe(true);
+ });
+ });
+
+ describe('button', () => {
+ it('emits a reset event when button clicked', () => {
+ createWrapper();
+ findButton().trigger('click');
+
+ expect(wrapper.emitted('reset')).toEqual([[]]);
+ });
+
+ it('does not emit event when button clicked and submitting', () => {
+ createWrapper({ isSubmitting: true });
+ findButton().trigger('click');
+
+ expect(wrapper.emitted('reset')).toBeUndefined();
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js
new file mode 100644
index 00000000000..e54d09cf82f
--- /dev/null
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js
@@ -0,0 +1,336 @@
+import { nextTick } from 'vue';
+import { GlLink, GlLoadingIcon, GlAlert } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import { HTTP_STATUS_OK, HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
+import CustomEmailWrapper from '~/projects/settings_service_desk/components/custom_email_wrapper.vue';
+import CustomEmailForm from '~/projects/settings_service_desk/components/custom_email_form.vue';
+import CustomEmail from '~/projects/settings_service_desk/components/custom_email.vue';
+import CustomEmailConfirmModal from '~/projects/settings_service_desk/components/custom_email_confirm_modal.vue';
+
+import {
+ FEEDBACK_ISSUE_URL,
+ I18N_GENERIC_ERROR,
+ I18N_TOAST_SAVED,
+ I18N_TOAST_DELETED,
+ I18N_TOAST_ENABLED,
+ I18N_TOAST_DISABLED,
+} from '~/projects/settings_service_desk/custom_email_constants';
+import {
+ MOCK_CUSTOM_EMAIL_EMPTY,
+ MOCK_CUSTOM_EMAIL_STARTED,
+ MOCK_CUSTOM_EMAIL_FAILED,
+ MOCK_CUSTOM_EMAIL_FINISHED,
+ MOCK_CUSTOM_EMAIL_ENABLED,
+ MOCK_CUSTOM_EMAIL_DISABLED,
+ MOCK_CUSTOM_EMAIL_FORM_SUBMIT,
+} from './mock_data';
+
+describe('CustomEmailWrapper', () => {
+ let axiosMock;
+ let wrapper;
+
+ const defaultProps = {
+ incomingEmail: 'incoming@example.com',
+ customEmailEndpoint: '/flightjs/Flight/-/service_desk/custom_email',
+ };
+
+ const showToast = jest.fn();
+
+ const createWrapper = (props = {}) => {
+ wrapper = mount(CustomEmailWrapper, {
+ propsData: { ...defaultProps, ...props },
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
+ },
+ });
+ };
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findFeedbackLink = () => wrapper.findComponent(GlLink);
+ const findCustomEmailForm = () => wrapper.findComponent(CustomEmailForm);
+ const findCustomEmail = () => wrapper.findComponent(CustomEmail);
+ const findCustomEmailConfirmModal = () => wrapper.findComponent(CustomEmailConfirmModal);
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ jest.clearAllTimers();
+ });
+
+ it('displays link to feedback issue', () => {
+ createWrapper();
+
+ expect(findFeedbackLink().attributes('href')).toBe(FEEDBACK_ISSUE_URL);
+ });
+
+ describe('when initial resource loading returns no configured custom email', () => {
+ beforeEach(() => {
+ axiosMock
+ .onGet(defaultProps.customEmailEndpoint)
+ .reply(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_EMPTY);
+
+ createWrapper();
+ });
+
+ it('displays loading icon while fetching data', async () => {
+ // while loading
+ expect(findLoadingIcon().exists()).toBe(true);
+ await waitForPromises();
+ // loading completed
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('displays form', async () => {
+ await waitForPromises();
+
+ expect(findCustomEmailForm().exists()).toBe(true);
+ });
+
+ describe('when CustomEmailForm emits submit event with valid params', () => {
+ beforeEach(() => {
+ axiosMock
+ .onPost(defaultProps.customEmailEndpoint)
+ .replyOnce(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_STARTED);
+ });
+
+ it('creates custom email and displays CustomEmail component', async () => {
+ createWrapper();
+ await nextTick();
+
+ findCustomEmailForm().vm.$emit('submit', MOCK_CUSTOM_EMAIL_FORM_SUBMIT);
+
+ expect(findCustomEmailForm().emitted('submit')).toEqual([[MOCK_CUSTOM_EMAIL_FORM_SUBMIT]]);
+ await waitForPromises();
+
+ expect(showToast).toHaveBeenCalledWith(I18N_TOAST_SAVED);
+
+ expect(findCustomEmail().props()).toEqual({
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ verificationState: 'started',
+ verificationError: null,
+ isEnabled: false,
+ isSubmitting: false,
+ });
+ });
+ });
+ });
+
+ describe('when initial resource loading return started verification', () => {
+ beforeEach(async () => {
+ axiosMock
+ .onGet(defaultProps.customEmailEndpoint)
+ .reply(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_STARTED);
+
+ createWrapper();
+ await waitForPromises();
+ });
+
+ it('displays CustomEmail component', () => {
+ expect(findCustomEmail().props()).toEqual({
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ verificationState: 'started',
+ verificationError: null,
+ isEnabled: false,
+ isSubmitting: false,
+ });
+ });
+
+ it('schedules and executes polling', async () => {
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
+
+ // first after initial resource fetching, second after first polling
+ expect(axiosMock.history.get).toHaveLength(2);
+ expect(setTimeout).toHaveBeenCalledTimes(2);
+ expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), 8000);
+ });
+
+ describe('when CustomEmail triggers reset event', () => {
+ beforeEach(() => {
+ findCustomEmail().vm.$emit('reset');
+ });
+
+ it('shows confirm modal', () => {
+ expect(findCustomEmailConfirmModal().props('visible')).toBe(true);
+ });
+ });
+
+ it('deletes custom email on remove event', async () => {
+ axiosMock
+ .onDelete(defaultProps.customEmailEndpoint)
+ .reply(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_EMPTY);
+
+ findCustomEmailConfirmModal().vm.$emit('remove');
+ await waitForPromises();
+
+ expect(axiosMock.history.delete).toHaveLength(1);
+ expect(showToast).toHaveBeenCalledWith(I18N_TOAST_DELETED);
+
+ expect(findCustomEmailForm().exists()).toBe(true);
+ });
+ });
+
+ describe('when initial resource loading returns failed verification', () => {
+ beforeEach(async () => {
+ axiosMock.onGet(defaultProps.customEmailEndpoint).reply(200, MOCK_CUSTOM_EMAIL_FAILED);
+ createWrapper();
+ await waitForPromises();
+ });
+
+ it('fetches data from endpoint and displays CustomEmail component', () => {
+ expect(findCustomEmail().props()).toEqual({
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ verificationState: 'failed',
+ verificationError: 'smtp_host_issue',
+ isEnabled: false,
+ isSubmitting: false,
+ });
+ });
+
+ describe('when CustomEmail triggers reset event', () => {
+ beforeEach(() => {
+ findCustomEmail().vm.$emit('reset');
+ });
+
+ it('shows confirm modal', () => {
+ expect(findCustomEmailConfirmModal().props('visible')).toBe(true);
+ });
+ });
+ });
+
+ describe('when initial resource loading returns finished verification', () => {
+ beforeEach(async () => {
+ axiosMock
+ .onGet(defaultProps.customEmailEndpoint)
+ .reply(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_FINISHED);
+
+ createWrapper();
+ await waitForPromises();
+ });
+
+ it('fetches data from endpoint and displays CustomEmail component', () => {
+ expect(findCustomEmail().props()).toEqual({
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ verificationState: 'finished',
+ verificationError: null,
+ isEnabled: false,
+ isSubmitting: false,
+ });
+ });
+
+ describe('when CustomEmail triggers reset event', () => {
+ beforeEach(() => {
+ findCustomEmail().vm.$emit('reset');
+ });
+
+ it('shows confirm modal', () => {
+ expect(findCustomEmailConfirmModal().props('visible')).toBe(true);
+ });
+ });
+
+ it('enables custom email on toggle event', async () => {
+ axiosMock
+ .onPut(defaultProps.customEmailEndpoint)
+ .reply(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_ENABLED);
+
+ findCustomEmail().vm.$emit('toggle', true);
+
+ await waitForPromises();
+
+ expect(axiosMock.history.put).toHaveLength(1);
+ expect(showToast).toHaveBeenCalledWith(I18N_TOAST_ENABLED);
+
+ expect(findCustomEmail().props()).toEqual({
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ verificationState: 'finished',
+ verificationError: null,
+ isEnabled: true,
+ isSubmitting: false,
+ });
+ });
+ });
+
+ describe('when initial resource loading returns enabled custom email', () => {
+ beforeEach(async () => {
+ axiosMock
+ .onGet(defaultProps.customEmailEndpoint)
+ .reply(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_ENABLED);
+
+ createWrapper();
+ await waitForPromises();
+ });
+
+ it('fetches data from endpoint and displays CustomEmail component', () => {
+ expect(findCustomEmail().props()).toEqual({
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ verificationState: 'finished',
+ verificationError: null,
+ isEnabled: true,
+ isSubmitting: false,
+ });
+ });
+
+ it('disables custom email on toggle event', async () => {
+ axiosMock
+ .onPut(defaultProps.customEmailEndpoint)
+ .reply(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_DISABLED);
+
+ findCustomEmail().vm.$emit('toggle', false);
+
+ await waitForPromises();
+
+ expect(axiosMock.history.put).toHaveLength(1);
+ expect(showToast).toHaveBeenCalledWith(I18N_TOAST_DISABLED);
+
+ expect(findCustomEmail().props()).toEqual({
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ verificationState: 'finished',
+ verificationError: null,
+ isEnabled: false,
+ isSubmitting: false,
+ });
+ });
+ });
+
+ describe('when initial resource loading returns 404', () => {
+ beforeEach(async () => {
+ axiosMock.onGet(defaultProps.customEmailEndpoint).reply(HTTP_STATUS_NOT_FOUND);
+
+ createWrapper();
+ await waitForPromises();
+ });
+
+ it('displays error alert with correct text', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(I18N_GENERIC_ERROR);
+ });
+
+ it('dismissing the alert removes it', async () => {
+ expect(findAlert().exists()).toBe(true);
+
+ findAlert().vm.$emit('dismiss');
+
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings_service_desk/components/mock_data.js b/spec/frontend/projects/settings_service_desk/components/mock_data.js
index 934778ff601..0060aa8e990 100644
--- a/spec/frontend/projects/settings_service_desk/components/mock_data.js
+++ b/spec/frontend/projects/settings_service_desk/components/mock_data.js
@@ -6,3 +6,65 @@ export const TEMPLATES = [
{ name: 'Security release', project_id: 1 },
],
];
+
+export const MOCK_CUSTOM_EMAIL_EMPTY = {
+ custom_email: null,
+ custom_email_enabled: false,
+ custom_email_verification_state: null,
+ custom_email_verification_error: null,
+ custom_email_smtp_address: null,
+ error_message: null,
+};
+
+export const MOCK_CUSTOM_EMAIL_STARTED = {
+ custom_email: 'user@example.com',
+ custom_email_enabled: false,
+ custom_email_verification_state: 'started',
+ custom_email_verification_error: null,
+ custom_email_smtp_address: 'smtp.example.com',
+ error_message: null,
+};
+
+export const MOCK_CUSTOM_EMAIL_FAILED = {
+ custom_email: 'user@example.com',
+ custom_email_enabled: false,
+ custom_email_verification_state: 'failed',
+ custom_email_verification_error: 'smtp_host_issue',
+ custom_email_smtp_address: 'smtp.example.com',
+ error_message: null,
+};
+
+export const MOCK_CUSTOM_EMAIL_FINISHED = {
+ custom_email: 'user@example.com',
+ custom_email_enabled: false,
+ custom_email_verification_state: 'finished',
+ custom_email_verification_error: null,
+ custom_email_smtp_address: 'smtp.example.com',
+ error_message: null,
+};
+
+export const MOCK_CUSTOM_EMAIL_ENABLED = {
+ custom_email: 'user@example.com',
+ custom_email_enabled: true,
+ custom_email_verification_state: 'finished',
+ custom_email_verification_error: null,
+ custom_email_smtp_address: 'smtp.example.com',
+ error_message: null,
+};
+
+export const MOCK_CUSTOM_EMAIL_DISABLED = {
+ custom_email: 'user@example.com',
+ custom_email_enabled: false,
+ custom_email_verification_state: 'finished',
+ custom_email_verification_error: null,
+ custom_email_smtp_address: 'smtp.example.com',
+ error_message: null,
+};
+
+export const MOCK_CUSTOM_EMAIL_FORM_SUBMIT = {
+ custom_email: 'user@example.com',
+ smtp_address: 'smtp.example.com',
+ smtp_password: 'supersecret',
+ smtp_port: '587',
+ smtp_username: 'user@example.com',
+};
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index b84d1c9c0aa..8655845d1b7 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -6,6 +6,7 @@ import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ServiceDeskRoot from '~/projects/settings_service_desk/components/service_desk_root.vue';
import ServiceDeskSetting from '~/projects/settings_service_desk/components/service_desk_setting.vue';
+import CustomEmailWrapper from '~/projects/settings_service_desk/components/custom_email_wrapper.vue';
describe('ServiceDeskRoot', () => {
let axiosMock;
@@ -25,6 +26,10 @@ describe('ServiceDeskRoot', () => {
selectedFileTemplateProjectId: 42,
templates: ['Bug', 'Documentation'],
publicProject: false,
+ customEmailEndpoint: '/gitlab-org/gitlab-test/-/service_desk/custom_email',
+ glFeatures: {
+ serviceDeskCustomEmail: true,
+ },
};
const getAlertText = () => wrapper.findComponent(GlAlert).text();
@@ -186,4 +191,46 @@ describe('ServiceDeskRoot', () => {
});
});
});
+
+ describe('CustomEmailWrapper component', () => {
+ it('is rendered', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.findComponent(CustomEmailWrapper).exists()).toBe(true);
+ expect(wrapper.findComponent(CustomEmailWrapper).props()).toEqual({
+ incomingEmail: provideData.initialIncomingEmail,
+ customEmailEndpoint: provideData.customEmailEndpoint,
+ });
+ });
+
+ describe('when Service Desk is disabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ initialIsEnabled: false });
+ });
+
+ it('is not rendered', () => {
+ expect(wrapper.findComponent(CustomEmailWrapper).exists()).toBe(false);
+ });
+ });
+
+ describe('when issue tracker is disabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ isIssueTrackerEnabled: false });
+ });
+
+ it('is not rendered', () => {
+ expect(wrapper.findComponent(CustomEmailWrapper).exists()).toBe(false);
+ });
+ });
+
+ describe('when feature flag service_desk_custom_email is disabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ glFeatures: { serviceDeskCustomEmail: false } });
+ });
+
+ it('is not rendered', () => {
+ expect(wrapper.findComponent(CustomEmailWrapper).exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index 290cde29866..12ca0d053e9 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -3,6 +3,7 @@ import Vue, { nextTick } from 'vue';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { merge, last } from 'lodash';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import tags from 'test_fixtures/api/tags/tags.json';
import commit from 'test_fixtures/api/commits/commit.json';
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 69d8969f0ad..15436832be8 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -1,6 +1,7 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { merge } from 'lodash';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { nextTick } from 'vue';
import { GlDatepicker, GlFormCheckbox } from '@gitlab/ui';
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
index 8eee9acd808..53e78170f4a 100644
--- a/spec/frontend/releases/components/asset_links_form_spec.js
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import originalRelease from 'test_fixtures/api/releases/release.json';
import * as commonUtils from '~/lib/utils/common_utils';
diff --git a/spec/frontend/releases/components/confirm_delete_modal_spec.js b/spec/frontend/releases/components/confirm_delete_modal_spec.js
index b4699302779..42ddb0dc12c 100644
--- a/spec/frontend/releases/components/confirm_delete_modal_spec.js
+++ b/spec/frontend/releases/components/confirm_delete_modal_spec.js
@@ -1,4 +1,5 @@
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { GlModal } from '@gitlab/ui';
import originalOneReleaseForEditingQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json';
diff --git a/spec/frontend/releases/components/tag_field_exsting_spec.js b/spec/frontend/releases/components/tag_field_exsting_spec.js
index 0e896eb645c..14b2fe32239 100644
--- a/spec/frontend/releases/components/tag_field_exsting_spec.js
+++ b/spec/frontend/releases/components/tag_field_exsting_spec.js
@@ -1,6 +1,7 @@
import { GlFormInput } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import TagFieldExisting from '~/releases/components/tag_field_existing.vue';
import createStore from '~/releases/stores';
diff --git a/spec/frontend/repository/commits_service_spec.js b/spec/frontend/repository/commits_service_spec.js
index 22ef552c2f9..5fb683bd370 100644
--- a/spec/frontend/repository/commits_service_spec.js
+++ b/spec/frontend/repository/commits_service_spec.js
@@ -25,8 +25,13 @@ describe('commits service', () => {
resetRequestedCommits();
});
- const requestCommits = (offset, project = 'my-project', path = '', ref = 'main') =>
- loadCommits(project, path, ref, offset);
+ const requestCommits = (
+ offset,
+ project = 'my-project',
+ path = '',
+ ref = 'main',
+ refType = 'heads',
+ ) => loadCommits(project, path, ref, offset, refType);
it('calls axios get', async () => {
const offset = 10;
@@ -37,7 +42,9 @@ describe('commits service', () => {
await requestCommits(offset, project, path, ref);
- expect(axios.get).toHaveBeenCalledWith(testUrl, { params: { format: 'json', offset } });
+ expect(axios.get).toHaveBeenCalledWith(testUrl, {
+ params: { format: 'json', offset, ref_type: 'heads' },
+ });
});
it('encodes the path and ref', async () => {
diff --git a/spec/frontend/repository/components/blob_button_group_spec.js b/spec/frontend/repository/components/blob_button_group_spec.js
index 2c63deb99c9..1a077028704 100644
--- a/spec/frontend/repository/components/blob_button_group_spec.js
+++ b/spec/frontend/repository/components/blob_button_group_spec.js
@@ -17,6 +17,7 @@ const DEFAULT_PROPS = {
isLocked: false,
canLock: true,
showForkSuggestion: false,
+ isUsingLfs: true,
};
const DEFAULT_INJECT = {
@@ -146,7 +147,7 @@ describe('BlobButtonGroup component', () => {
createComponent();
const { targetBranch, originalBranch } = DEFAULT_INJECT;
- const { name, canPushCode, deletePath, emptyRepo } = DEFAULT_PROPS;
+ const { name, canPushCode, deletePath, emptyRepo, isUsingLfs } = DEFAULT_PROPS;
const title = `Delete ${name}`;
expect(findDeleteBlobModal().props()).toMatchObject({
@@ -157,6 +158,7 @@ describe('BlobButtonGroup component', () => {
canPushCode,
deletePath,
emptyRepo,
+ isUsingLfs,
});
});
});
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index e2bb7cdb2d7..5ac2627dc5d 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -1,5 +1,6 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Vue, { nextTick } from 'vue';
import axios from 'axios';
@@ -64,7 +65,7 @@ const mockRouter = {
push: mockRouterPush,
};
-const legacyViewerUrl = 'some_file.js?format=json&viewer=simple';
+const legacyViewerUrl = '/some_file.js?format=json&viewer=simple';
const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute = {}) => {
Vue.use(VueApollo);
@@ -78,7 +79,6 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
createMergeRequestIn = userPermissionsMock.createMergeRequestIn,
isBinary,
inject = {},
- highlightJs = true,
} = mockData;
const blobInfo = {
@@ -151,7 +151,6 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
originalBranch: 'default-ref',
...inject,
glFeatures: {
- highlightJs,
highlightJsWorker: false,
},
},
@@ -223,16 +222,6 @@ describe('Blob content viewer component', () => {
describe('legacy viewers', () => {
const fileType = 'text';
- const highlightJs = false;
-
- it('loads a legacy viewer when a the fileType is text and the highlightJs feature is turned off', async () => {
- await createComponent({
- blob: { ...simpleViewerMock, fileType, highlightJs },
- });
-
- expect(mockAxios.history.get).toHaveLength(1);
- expect(mockAxios.history.get[0].url).toBe(legacyViewerUrl);
- });
it('loads a legacy viewer when the source viewer emits an error', async () => {
loadViewer.mockReturnValueOnce(SourceViewer);
@@ -255,25 +244,25 @@ describe('Blob content viewer component', () => {
'loads the legacy viewer when a file type is identified as legacy',
async (type) => {
await createComponent({ blob: { ...simpleViewerMock, fileType: type, webPath: type } });
- expect(mockAxios.history.get[0].url).toBe(`${type}?format=json&viewer=simple`);
+ expect(mockAxios.history.get[0].url).toBe(`/${type}?format=json&viewer=simple`);
},
);
it('loads the LineHighlighter', async () => {
mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, 'test');
- await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });
+ await createComponent({ blob: { ...simpleViewerMock, fileType } });
expect(LineHighlighter).toHaveBeenCalled();
});
it('does not load the LineHighlighter for RichViewers', async () => {
mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, 'test');
- await createComponent({ blob: { ...richViewerMock, fileType, highlightJs } });
+ await createComponent({ blob: { ...richViewerMock, fileType } });
expect(LineHighlighter).not.toHaveBeenCalled();
});
it('scrolls to the hash', async () => {
mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, 'test');
- await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });
+ await createComponent({ blob: { ...simpleViewerMock, fileType } });
expect(handleLocationHash).toHaveBeenCalled();
});
});
@@ -348,7 +337,7 @@ describe('Blob content viewer component', () => {
await createComponent({ blob: { ...richViewerMock, fileType: 'unknown' } });
expect(mockAxios.history.get).toHaveLength(1);
- expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=rich');
+ expect(mockAxios.history.get[0].url).toEqual('/some_file.js?format=json&viewer=rich');
});
});
@@ -371,7 +360,7 @@ describe('Blob content viewer component', () => {
});
it('does not load a CodeIntelligence component when no viewers are loaded', async () => {
- const url = 'some_file.js?format=json&viewer=rich';
+ const url = '/some_file.js?format=json&viewer=rich';
mockAxios.onGet(url).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
await createComponent({ blob: { ...richViewerMock, fileType: 'unknown' } });
@@ -516,20 +505,13 @@ describe('Blob content viewer component', () => {
});
describe('blob info query', () => {
- it.each`
- highlightJs | shouldFetchRawText
- ${true} | ${true}
- ${false} | ${false}
- `(
- 'calls blob info query with shouldFetchRawText: $shouldFetchRawText when highlightJs (feature flag): $highlightJs',
- async ({ highlightJs, shouldFetchRawText }) => {
- await createComponent({ highlightJs });
+ it('calls blob info query with shouldFetchRawText: true', async () => {
+ await createComponent();
- expect(blobInfoMockResolver).toHaveBeenCalledWith(
- expect.objectContaining({ shouldFetchRawText }),
- );
- },
- );
+ expect(blobInfoMockResolver).toHaveBeenCalledWith(
+ expect.objectContaining({ shouldFetchRawText: true }),
+ );
+ });
it('is called with originalBranch value if the prop has a value', async () => {
await createComponent({ inject: { originalBranch: 'some-branch' } });
diff --git a/spec/frontend/repository/components/blob_viewers/image_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/image_viewer_spec.js
index c23de0efdfd..4455851529d 100644
--- a/spec/frontend/repository/components/blob_viewers/image_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_viewers/image_viewer_spec.js
@@ -7,19 +7,35 @@ describe('Image Viewer', () => {
const DEFAULT_BLOB_DATA = {
rawPath: 'some/image.png',
name: 'image.png',
+ externalStorageUrl: '',
};
- const createComponent = () => {
- wrapper = shallowMount(ImageViewer, { propsData: { blob: DEFAULT_BLOB_DATA } });
+ const createComponent = (blobData = DEFAULT_BLOB_DATA) => {
+ wrapper = shallowMount(ImageViewer, { propsData: { blob: blobData } });
};
const findImage = () => wrapper.find('[data-testid="image"]');
- it('renders a Source Editor component', () => {
- createComponent();
+ describe('When blob has externalStorageUrl', () => {
+ const externalStorageUrl = 'http://img.server.com/lfs-object/21/45/foo_bar';
- expect(findImage().exists()).toBe(true);
- expect(findImage().attributes('src')).toBe(DEFAULT_BLOB_DATA.rawPath);
- expect(findImage().attributes('alt')).toBe(DEFAULT_BLOB_DATA.name);
+ it('renders a Source Editor component with externalStorageUrl', () => {
+ const blobData = { ...DEFAULT_BLOB_DATA, externalStorageUrl };
+ createComponent(blobData);
+
+ expect(findImage().exists()).toBe(true);
+ expect(findImage().attributes('src')).toBe(externalStorageUrl);
+ expect(findImage().attributes('alt')).toBe(DEFAULT_BLOB_DATA.name);
+ });
+ });
+
+ describe('When blob does not have an externalStorageUrl', () => {
+ it('renders a Source Editor component with rawPath', () => {
+ createComponent(DEFAULT_BLOB_DATA);
+
+ expect(findImage().exists()).toBe(true);
+ expect(findImage().attributes('src')).toBe(DEFAULT_BLOB_DATA.rawPath);
+ expect(findImage().attributes('alt')).toBe(DEFAULT_BLOB_DATA.name);
+ });
});
});
diff --git a/spec/frontend/repository/components/blob_viewers/index_spec.js b/spec/frontend/repository/components/blob_viewers/index_spec.js
new file mode 100644
index 00000000000..d3ea46262e1
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/index_spec.js
@@ -0,0 +1,11 @@
+import { loadViewer, viewers } from '~/repository/components/blob_viewers';
+import { OPENAPI_FILE_TYPE, JSON_LANGUAGE } from '~/repository/constants';
+
+describe('Blob Viewers index', () => {
+ describe('loadViewer', () => {
+ it('loads the openapi viewer', () => {
+ const result = loadViewer(OPENAPI_FILE_TYPE, false, true, JSON_LANGUAGE);
+ expect(result).toBe(viewers[OPENAPI_FILE_TYPE]);
+ });
+ });
+});
diff --git a/spec/frontend/repository/components/delete_blob_modal_spec.js b/spec/frontend/repository/components/delete_blob_modal_spec.js
index 90f2150222c..e1723a091c4 100644
--- a/spec/frontend/repository/components/delete_blob_modal_spec.js
+++ b/spec/frontend/repository/components/delete_blob_modal_spec.js
@@ -1,7 +1,10 @@
-import { GlFormTextarea, GlModal, GlFormInput, GlToggle, GlForm } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
+import { GlFormTextarea, GlModal, GlFormInput, GlToggle, GlForm, GlSprintf } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { RENDER_ALL_SLOTS_TEMPLATE, stubComponent } from 'helpers/stub_component';
import DeleteBlobModal from '~/repository/components/delete_blob_modal.vue';
+import { sprintf } from '~/locale';
jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
@@ -17,6 +20,8 @@ const initialProps = {
emptyRepo: false,
};
+const { i18n } = DeleteBlobModal;
+
describe('DeleteBlobModal', () => {
let wrapper;
@@ -30,10 +35,14 @@ describe('DeleteBlobModal', () => {
static: true,
visible: true,
},
+ stubs: {
+ GlSprintf,
+ GlModal: stubComponent(GlModal, { template: RENDER_ALL_SLOTS_TEMPLATE }),
+ },
});
};
- const createComponent = createComponentFactory(shallowMount);
+ const createComponent = createComponentFactory(shallowMountExtended);
const createFullComponent = createComponentFactory(mount);
const findModal = () => wrapper.findComponent(GlModal);
@@ -49,6 +58,35 @@ describe('DeleteBlobModal', () => {
await findCommitTextarea().vm.$emit('input', commitText);
};
+ describe('LFS files', () => {
+ const lfsTitleText = i18n.LFS_WARNING_TITLE;
+ const primaryLfsText = sprintf(i18n.LFS_WARNING_PRIMARY_CONTENT, {
+ branch: initialProps.targetBranch,
+ });
+
+ const secondaryLfsText = sprintf(i18n.LFS_WARNING_SECONDARY_CONTENT, {
+ linkStart: '',
+ linkEnd: '',
+ });
+
+ beforeEach(() => createComponent({ isUsingLfs: true }));
+
+ it('renders a modal containing LFS text', () => {
+ expect(findModal().props('title')).toBe(lfsTitleText);
+ expect(findModal().text()).toContain(primaryLfsText);
+ expect(findModal().text()).toContain(secondaryLfsText);
+ });
+
+ it('hides the LFS content if the continue button is clicked', async () => {
+ findModal().vm.$emit('primary', { preventDefault: jest.fn() });
+ await nextTick();
+
+ expect(findModal().props('title')).not.toBe(lfsTitleText);
+ expect(findModal().text()).not.toContain(primaryLfsText);
+ expect(findModal().text()).not.toContain(secondaryLfsText);
+ });
+ });
+
it('renders Modal component', () => {
createComponent();
diff --git a/spec/frontend/repository/components/table/parent_row_spec.js b/spec/frontend/repository/components/table/parent_row_spec.js
index 77822a148b7..daba5a5f63f 100644
--- a/spec/frontend/repository/components/table/parent_row_spec.js
+++ b/spec/frontend/repository/components/table/parent_row_spec.js
@@ -16,6 +16,9 @@ function factory(path, loadingPath) {
path,
loadingPath,
},
+ provide: {
+ refType: 'heads',
+ },
stubs: {
RouterLink: RouterLinkStub,
},
@@ -28,16 +31,14 @@ function factory(path, loadingPath) {
describe('Repository parent row component', () => {
it.each`
path | to
- ${'app'} | ${'/-/tree/main/'}
+ ${'app'} | ${'/-/tree/main'}
${'app/assets'} | ${'/-/tree/main/app'}
${'app/assets#/test'} | ${'/-/tree/main/app/assets%23'}
${'app/assets#/test/world'} | ${'/-/tree/main/app/assets%23/test'}
`('renders link in $path to $to', ({ path, to }) => {
factory(path);
- expect(vm.findComponent(RouterLinkStub).props().to).toEqual({
- path: to,
- });
+ expect(vm.findComponent(RouterLinkStub).props().to).toBe(`${to}?ref_type=heads`);
});
it('pushes new router when clicking row', () => {
@@ -45,9 +46,7 @@ describe('Repository parent row component', () => {
vm.find('td').trigger('click');
- expect($router.push).toHaveBeenCalledWith({
- path: '/-/tree/main/app',
- });
+ expect($router.push).toHaveBeenCalledWith('/-/tree/main/app?ref_type=heads');
});
// We test that it does not get called when clicking any internal
@@ -57,9 +56,7 @@ describe('Repository parent row component', () => {
vm.find('a').trigger('click');
- expect($router.push).not.toHaveBeenCalledWith({
- path: '/-/tree/main/app',
- });
+ expect($router.push).not.toHaveBeenCalled();
});
it('renders loading icon when loading parent', () => {
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index 02b505c828c..80471d8734b 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -44,6 +44,9 @@ function factory({ mockData = { ref: 'main', escapedRef: 'main' }, propsData = {
type: 'tree',
...propsData,
},
+ provide: {
+ refType: 'heads',
+ },
directives: {
GlHoverLoad: createMockDirective('gl-hover-load'),
},
@@ -157,9 +160,9 @@ describe('Repository table row component', () => {
},
});
- expect(wrapper.findComponent({ ref: 'link' }).props('to')).toEqual({
- path: `/-/tree/main/${encodeURIComponent(path)}`,
- });
+ expect(wrapper.findComponent({ ref: 'link' }).props('to')).toBe(
+ `/-/tree/main/${encodeURIComponent(path)}?ref_type=heads`,
+ );
});
it('renders link for directory with hash', () => {
@@ -173,7 +176,7 @@ describe('Repository table row component', () => {
},
});
- expect(wrapper.find('.tree-item-link').props('to')).toEqual({ path: '/-/tree/main/test%23' });
+ expect(wrapper.find('.tree-item-link').props('to')).toBe(`/-/tree/main/test%23?ref_type=heads`);
});
it('renders commit ID for submodule', () => {
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
index 8d45e24e9e6..c0eb65b28fe 100644
--- a/spec/frontend/repository/components/tree_content_spec.js
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -51,6 +51,7 @@ describe('Repository table component', () => {
propsData: {
path,
},
+ provide: { refType: 'heads' },
});
};
@@ -170,8 +171,8 @@ describe('Repository table component', () => {
expect(isRequested).toHaveBeenCalledWith(rowNumber);
expect(loadCommits.mock.calls).toEqual([
- ['', path, '', rowNumber],
- ['', path, '', rowNumber - 25],
+ ['', path, '', rowNumber, 'heads'],
+ ['', path, '', rowNumber - 25, 'heads'],
]);
});
@@ -179,7 +180,7 @@ describe('Repository table component', () => {
createComponent({ path });
findFileTable().vm.$emit('row-appear', 0);
- expect(loadCommits.mock.calls).toEqual([['', path, '', 0]]);
+ expect(loadCommits.mock.calls).toEqual([['', path, '', 0, 'heads']]);
});
});
diff --git a/spec/frontend/repository/mixins/highlight_mixin_spec.js b/spec/frontend/repository/mixins/highlight_mixin_spec.js
index fd14f01747a..50cfd71d686 100644
--- a/spec/frontend/repository/mixins/highlight_mixin_spec.js
+++ b/spec/frontend/repository/mixins/highlight_mixin_spec.js
@@ -2,13 +2,8 @@ import { shallowMount } from '@vue/test-utils';
import { splitIntoChunks } from '~/vue_shared/components/source_viewer/workers/highlight_utils';
import highlightMixin from '~/repository/mixins/highlight_mixin';
import LineHighlighter from '~/blob/line_highlighter';
-import Tracking from '~/tracking';
import { TEXT_FILE_TYPE } from '~/repository/constants';
-import {
- EVENT_ACTION,
- EVENT_LABEL_FALLBACK,
- LINES_PER_CHUNK,
-} from '~/vue_shared/components/source_viewer/constants';
+import { LINES_PER_CHUNK } from '~/vue_shared/components/source_viewer/constants';
const lineHighlighter = new LineHighlighter();
jest.mock('~/blob/line_highlighter', () => jest.fn().mockReturnValue({ highlightHash: jest.fn() }));
@@ -24,7 +19,7 @@ describe('HighlightMixin', () => {
const hash = '#L50';
const contentArray = Array.from({ length: 140 }, () => 'newline'); // simulate 140 lines of code
const rawTextBlob = contentArray.join('\n');
- const languageMock = 'javascript';
+ const languageMock = 'json';
const createComponent = ({ fileType = TEXT_FILE_TYPE, language = languageMock } = {}) => {
const simpleViewer = { fileType };
@@ -50,26 +45,13 @@ describe('HighlightMixin', () => {
describe('initHighlightWorker', () => {
const firstSeventyLines = contentArray.slice(0, LINES_PER_CHUNK).join('\n');
- it('does not instruct worker if file is not a text file', () => {
+ it('does not instruct worker if file is not a JSON file', () => {
workerMock.postMessage.mockClear();
- createComponent({ fileType: 'markdown' });
+ createComponent({ language: 'javascript' });
expect(workerMock.postMessage).not.toHaveBeenCalled();
});
- it('tracks event if a language is not supported and does not instruct worker', () => {
- const unsupportedLanguage = 'some_unsupported_language';
- const eventData = { label: EVENT_LABEL_FALLBACK, property: unsupportedLanguage };
-
- jest.spyOn(Tracking, 'event');
- workerMock.postMessage.mockClear();
- createComponent({ language: unsupportedLanguage });
-
- expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
- expect(onErrorMock).toHaveBeenCalled();
- expect(workerMock.postMessage).not.toHaveBeenCalled();
- });
-
it('generates a chunk for the first 70 lines of raw text', () => {
expect(splitIntoChunks).toHaveBeenCalledWith(languageMock, firstSeventyLines);
});
diff --git a/spec/frontend/repository/pages/index_spec.js b/spec/frontend/repository/pages/index_spec.js
index e50557e7d61..a67a472b936 100644
--- a/spec/frontend/repository/pages/index_spec.js
+++ b/spec/frontend/repository/pages/index_spec.js
@@ -9,7 +9,9 @@ describe('Repository index page component', () => {
let wrapper;
function factory() {
- wrapper = shallowMount(IndexPage);
+ wrapper = shallowMount(IndexPage, {
+ propsData: { refType: 'heads' },
+ });
}
afterEach(() => {
@@ -35,6 +37,6 @@ describe('Repository index page component', () => {
const child = wrapper.findComponent(TreePage);
expect(child.exists()).toBe(true);
- expect(child.props()).toEqual({ path: '/' });
+ expect(child.props()).toEqual({ path: '/', refType: 'heads' });
});
});
diff --git a/spec/frontend/search/mock_data.js b/spec/frontend/search/mock_data.js
index 3f23803bbf6..a063f20aca6 100644
--- a/spec/frontend/search/mock_data.js
+++ b/spec/frontend/search/mock_data.js
@@ -884,23 +884,4 @@ export const MOCK_FILTERED_LABELS = [
},
];
-export const MOCK_FILTERED_UNAPPLIED_SELECTED_LABELS = [
- {
- key: '6',
- count: 12,
- title: 'Cosche',
- color: '#cea786',
- type: 'GroupLabel',
- parent_full_name: 'Toolbox',
- },
- {
- key: '73',
- count: 12,
- title: 'Accent',
- color: '#a5c6fb',
- type: 'ProjectLabel',
- parent_full_name: 'Toolbox / Gitlab Smoke Tests',
- },
-];
-
export const CURRENT_SCOPE = 'blobs';
diff --git a/spec/frontend/search/sidebar/components/app_spec.js b/spec/frontend/search/sidebar/components/app_spec.js
index ba492833ec4..a4559c2dc34 100644
--- a/spec/frontend/search/sidebar/components/app_spec.js
+++ b/spec/frontend/search/sidebar/components/app_spec.js
@@ -1,12 +1,15 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_QUERY } from 'jest/search/mock_data';
import GlobalSearchSidebar from '~/search/sidebar/components/app.vue';
import IssuesFilters from '~/search/sidebar/components/issues_filters.vue';
+import MergeRequestsFilters from '~/search/sidebar/components/merge_requests_filters.vue';
+import BlobsFilters from '~/search/sidebar/components/blobs_filters.vue';
+import ProjectsFilters from '~/search/sidebar/components/projects_filters.vue';
import ScopeLegacyNavigation from '~/search/sidebar/components/scope_legacy_navigation.vue';
import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
-import LanguageFilter from '~/search/sidebar/components/language_filter/index.vue';
Vue.use(Vuex);
@@ -17,7 +20,7 @@ describe('GlobalSearchSidebar', () => {
currentScope: jest.fn(() => 'issues'),
};
- const createComponent = (initialState = {}, featureFlags = {}) => {
+ const createComponent = (initialState = {}, ff = false) => {
const store = new Vuex.Store({
state: {
urlQuery: MOCK_QUERY,
@@ -30,17 +33,19 @@ describe('GlobalSearchSidebar', () => {
store,
provide: {
glFeatures: {
- ...featureFlags,
+ searchProjectsHideArchived: ff,
},
},
});
};
const findSidebarSection = () => wrapper.find('section');
- const findFilters = () => wrapper.findComponent(IssuesFilters);
+ const findIssuesFilters = () => wrapper.findComponent(IssuesFilters);
+ const findMergeRequestsFilters = () => wrapper.findComponent(MergeRequestsFilters);
+ const findBlobsFilters = () => wrapper.findComponent(BlobsFilters);
+ const findProjectsFilters = () => wrapper.findComponent(ProjectsFilters);
const findScopeLegacyNavigation = () => wrapper.findComponent(ScopeLegacyNavigation);
const findScopeSidebarNavigation = () => wrapper.findComponent(ScopeSidebarNavigation);
- const findLanguageAggregation = () => wrapper.findComponent(LanguageFilter);
describe('renders properly', () => {
describe('always', () => {
@@ -53,23 +58,33 @@ describe('GlobalSearchSidebar', () => {
});
describe.each`
- scope | showFilters | showsLanguage
- ${'issues'} | ${true} | ${false}
- ${'merge_requests'} | ${true} | ${false}
- ${'projects'} | ${false} | ${false}
- ${'blobs'} | ${false} | ${true}
- `('sidebar scope: $scope', ({ scope, showFilters, showsLanguage }) => {
+ scope | filter
+ ${'issues'} | ${findIssuesFilters}
+ ${'merge_requests'} | ${findMergeRequestsFilters}
+ ${'blobs'} | ${findBlobsFilters}
+ `('with sidebar $scope scope:', ({ scope, filter }) => {
beforeEach(() => {
getterSpies.currentScope = jest.fn(() => scope);
createComponent({ urlQuery: { scope } });
});
- it(`${!showFilters ? "doesn't" : ''} shows filters`, () => {
- expect(findFilters().exists()).toBe(showFilters);
+ it(`shows filter ${filter.name.replace('find', '')}`, () => {
+ expect(filter().exists()).toBe(true);
+ });
+ });
+
+ describe.each`
+ featureFlag
+ ${false}
+ ${true}
+  `('with sidebar projects scope and searchProjectsHideArchived=$featureFlag:', ({ featureFlag }) => {
+ beforeEach(() => {
+ getterSpies.currentScope = jest.fn(() => 'projects');
+ createComponent({ urlQuery: { scope: 'projects' } }, featureFlag);
});
- it(`${!showsLanguage ? "doesn't" : ''} shows language filters`, () => {
- expect(findLanguageAggregation().exists()).toBe(showsLanguage);
+    it(`shows filter ProjectsFilters`, () => {
+ expect(findProjectsFilters().exists()).toBe(featureFlag);
});
});
diff --git a/spec/frontend/search/sidebar/components/archived_filter_spec.js b/spec/frontend/search/sidebar/components/archived_filter_spec.js
new file mode 100644
index 00000000000..69bf2ebd72e
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/archived_filter_spec.js
@@ -0,0 +1,73 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+import { GlFormCheckboxGroup } from '@gitlab/ui';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
+
+import { archivedFilterData } from '~/search/sidebar/components/archived_filter/data';
+
+Vue.use(Vuex);
+
+describe('ArchivedFilter', () => {
+ let wrapper;
+
+ const createComponent = (state) => {
+ const store = new Vuex.Store({
+ state,
+ });
+
+ wrapper = shallowMount(ArchivedFilter, {
+ store,
+ });
+ };
+
+ const findCheckboxFilter = () => wrapper.findComponent(GlFormCheckboxGroup);
+ const findH5 = () => wrapper.findComponent('h5');
+
+ describe('old sidebar', () => {
+ beforeEach(() => {
+ createComponent({ useNewNavigation: false });
+ });
+
+ it('renders the component', () => {
+ expect(findCheckboxFilter().exists()).toBe(true);
+ });
+
+ it('renders the divider', () => {
+ expect(findH5().exists()).toBe(true);
+ expect(findH5().text()).toBe(archivedFilterData.headerLabel);
+ });
+ });
+
+ describe('new sidebar', () => {
+ beforeEach(() => {
+ createComponent({ useNewNavigation: true });
+ });
+
+ it('renders the component', () => {
+ expect(findCheckboxFilter().exists()).toBe(true);
+ });
+
+    it('renders the header label', () => {
+ expect(findH5().exists()).toBe(true);
+ expect(findH5().text()).toBe(archivedFilterData.headerLabel);
+ });
+ });
+
+ describe.each`
+ include_archived | checkboxState
+ ${''} | ${'false'}
+ ${'false'} | ${'false'}
+ ${'true'} | ${'true'}
+ ${'sdfsdf'} | ${'false'}
+ `('selectedFilter', ({ include_archived, checkboxState }) => {
+ beforeEach(() => {
+ createComponent({ urlQuery: { include_archived } });
+ });
+
+ it('renders the component', () => {
+ expect(findCheckboxFilter().attributes('checked')).toBe(checkboxState);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/blobs_filters_spec.js b/spec/frontend/search/sidebar/components/blobs_filters_spec.js
new file mode 100644
index 00000000000..ff93e6f32e4
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/blobs_filters_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import BlobsFilters from '~/search/sidebar/components/blobs_filters.vue';
+import LanguageFilter from '~/search/sidebar/components/language_filter/index.vue';
+import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+
+describe('GlobalSearch BlobsFilters', () => {
+ let wrapper;
+
+ const findLanguageFilter = () => wrapper.findComponent(LanguageFilter);
+ const findFiltersTemplate = () => wrapper.findComponent(FiltersTemplate);
+
+ const createComponent = () => {
+ wrapper = shallowMount(BlobsFilters);
+ };
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+    it('renders LanguageFilter', () => {
+ expect(findLanguageFilter().exists()).toBe(true);
+ });
+
+    it('renders FiltersTemplate', () => {
+ expect(findFiltersTemplate().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/checkbox_filter_spec.js b/spec/frontend/search/sidebar/components/checkbox_filter_spec.js
index 54fdf6e869e..b551e8c659c 100644
--- a/spec/frontend/search/sidebar/components/checkbox_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/checkbox_filter_spec.js
@@ -1,5 +1,6 @@
import { GlFormCheckboxGroup, GlFormCheckbox } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js b/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
index 1f65884e959..6444ec10466 100644
--- a/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
@@ -1,7 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
-import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter.vue';
+import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter/index.vue';
import RadioFilter from '~/search/sidebar/components/radio_filter.vue';
Vue.use(Vuex);
@@ -20,33 +21,24 @@ describe('ConfidentialityFilter', () => {
};
const findRadioFilter = () => wrapper.findComponent(RadioFilter);
- const findHR = () => wrapper.findComponent('hr');
describe('old sidebar', () => {
beforeEach(() => {
- createComponent({ useNewNavigation: false });
+ createComponent({ useSidebarNavigation: false });
});
it('renders the component', () => {
expect(findRadioFilter().exists()).toBe(true);
});
-
- it('renders the divider', () => {
- expect(findHR().exists()).toBe(true);
- });
});
describe('new sidebar', () => {
beforeEach(() => {
- createComponent({ useNewNavigation: true });
+ createComponent({ useSidebarNavigation: true });
});
it('renders the component', () => {
expect(findRadioFilter().exists()).toBe(true);
});
-
- it("doesn't render the divider", () => {
- expect(findHR().exists()).toBe(false);
- });
});
});
diff --git a/spec/frontend/search/sidebar/components/filters_spec.js b/spec/frontend/search/sidebar/components/filters_spec.js
index a92fafd3508..d3c774929f5 100644
--- a/spec/frontend/search/sidebar/components/filters_spec.js
+++ b/spec/frontend/search/sidebar/components/filters_spec.js
@@ -1,11 +1,11 @@
-import { GlButton, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_QUERY } from 'jest/search/mock_data';
import IssuesFilters from '~/search/sidebar/components/issues_filters.vue';
-import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter.vue';
-import StatusFilter from '~/search/sidebar/components/status_filter.vue';
+import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter/index.vue';
+import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
Vue.use(Vuex);
@@ -36,11 +36,8 @@ describe('GlobalSearchSidebarFilters', () => {
});
};
- const findSidebarForm = () => wrapper.find('form');
const findStatusFilter = () => wrapper.findComponent(StatusFilter);
const findConfidentialityFilter = () => wrapper.findComponent(ConfidentialityFilter);
- const findApplyButton = () => wrapper.findComponent(GlButton);
- const findResetLinkButton = () => wrapper.findComponent(GlLink);
describe('Renders correctly', () => {
beforeEach(() => {
@@ -53,82 +50,6 @@ describe('GlobalSearchSidebarFilters', () => {
it('renders ConfidentialityFilter', () => {
expect(findConfidentialityFilter().exists()).toBe(true);
});
-
- it('renders ApplyButton', () => {
- expect(findApplyButton().exists()).toBe(true);
- });
- });
-
- describe('ApplyButton', () => {
- describe('when sidebarDirty is false', () => {
- beforeEach(() => {
- createComponent({ sidebarDirty: false });
- });
-
- it('disables the button', () => {
- expect(findApplyButton().attributes('disabled')).toBeDefined();
- });
- });
-
- describe('when sidebarDirty is true', () => {
- beforeEach(() => {
- createComponent({ sidebarDirty: true });
- });
-
- it('enables the button', () => {
- expect(findApplyButton().attributes('disabled')).toBe(undefined);
- });
- });
- });
-
- describe('ResetLinkButton', () => {
- describe('with no filter selected', () => {
- beforeEach(() => {
- createComponent({ urlQuery: {} });
- });
-
- it('does not render', () => {
- expect(findResetLinkButton().exists()).toBe(false);
- });
- });
-
- describe('with filter selected', () => {
- beforeEach(() => {
- createComponent({ urlQuery: MOCK_QUERY });
- });
-
- it('does render', () => {
- expect(findResetLinkButton().exists()).toBe(true);
- });
- });
-
- describe('with filter selected and user updated query back to default', () => {
- beforeEach(() => {
- createComponent({ urlQuery: MOCK_QUERY, query: {} });
- });
-
- it('does render', () => {
- expect(findResetLinkButton().exists()).toBe(true);
- });
- });
- });
-
- describe('actions', () => {
- beforeEach(() => {
- createComponent({});
- });
-
- it('clicking ApplyButton calls applyQuery', () => {
- findSidebarForm().trigger('submit');
-
- expect(actionSpies.applyQuery).toHaveBeenCalled();
- });
-
- it('clicking ResetLinkButton calls resetQuery', () => {
- findResetLinkButton().vm.$emit('click');
-
- expect(actionSpies.resetQuery).toHaveBeenCalled();
- });
});
describe.each`
diff --git a/spec/frontend/search/sidebar/components/filters_template_spec.js b/spec/frontend/search/sidebar/components/filters_template_spec.js
new file mode 100644
index 00000000000..f1a807c5ceb
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/filters_template_spec.js
@@ -0,0 +1,168 @@
+import { GlForm, GlButton, GlLink } from '@gitlab/ui';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { MOCK_QUERY, MOCK_AGGREGATIONS } from 'jest/search/mock_data';
+
+import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+
+import {
+ TRACKING_ACTION_CLICK,
+ TRACKING_LABEL_APPLY,
+ TRACKING_LABEL_RESET,
+} from '~/search/sidebar/constants/index';
+
+Vue.use(Vuex);
+
+describe('GlobalSearchFiltersTemplate', () => {
+ let wrapper;
+ let trackingSpy;
+
+ const actionSpies = {
+ applyQuery: jest.fn(),
+ resetQuery: jest.fn(),
+ };
+
+ const getterSpies = {
+ currentScope: jest.fn(() => 'issues'),
+ };
+
+ const createComponent = (initialState) => {
+ const store = new Vuex.Store({
+ state: {
+ query: MOCK_QUERY,
+ urlQuery: MOCK_QUERY,
+ aggregations: MOCK_AGGREGATIONS,
+ sidebarDirty: false,
+ ...initialState,
+ },
+ actions: actionSpies,
+ getters: getterSpies,
+ });
+
+ wrapper = shallowMountExtended(FiltersTemplate, {
+ store,
+ slots: {
+ default: '<p>Filters Content</p>',
+ },
+ });
+ };
+
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findDividers = () => wrapper.findAll('hr');
+ const findApplyButton = () => wrapper.findComponent(GlButton);
+ const findResetButton = () => wrapper.findComponent(GlLink);
+ const findSlotContent = () => wrapper.findByText('Filters Content');
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders form', () => {
+ expect(findForm().exists()).toBe(true);
+ });
+
+ it('renders dividers', () => {
+ expect(findDividers()).toHaveLength(2);
+ });
+
+ it('renders slot content', () => {
+ expect(findSlotContent().exists()).toBe(true);
+ });
+
+ it('renders ApplyButton', () => {
+ expect(findApplyButton().exists()).toBe(true);
+ });
+
+ it('renders reset button', () => {
+ expect(findResetButton().exists()).toBe(false);
+ });
+ });
+
+ describe('resetButton', () => {
+ describe.each`
+ description | sidebarDirty | queryLangFilters | exists
+ ${'sidebar dirty only'} | ${true} | ${[]} | ${true}
+ ${'query filters only'} | ${false} | ${['JSON', 'C']} | ${false}
+ ${'sidebar dirty and query filters'} | ${true} | ${['JSON', 'C']} | ${true}
+ ${'sidebar not dirty and no query filters'} | ${false} | ${[]} | ${false}
+ `('$description', ({ sidebarDirty, queryLangFilters, exists }) => {
+ beforeEach(() => {
+ getterSpies.queryLanguageFilters = jest.fn(() => queryLangFilters);
+
+ const query = {
+ ...MOCK_QUERY,
+ language: queryLangFilters,
+ state: undefined,
+ labels: undefined,
+ confidential: undefined,
+ };
+
+ createComponent({
+ sidebarDirty,
+ query,
+ urlQuery: query,
+ });
+ });
+
+ it(`button is ${exists ? 'shown' : 'hidden'}`, () => {
+ expect(findResetButton().exists()).toBe(exists);
+ });
+ });
+ });
+
+ describe('ApplyButton', () => {
+ describe('when sidebarDirty is false', () => {
+ beforeEach(() => {
+ createComponent({ sidebarDirty: false });
+ });
+
+ it('disables the button', () => {
+ expect(findApplyButton().attributes('disabled')).toBeDefined();
+ });
+ });
+
+ describe('when sidebarDirty is true', () => {
+ beforeEach(() => {
+ createComponent({ sidebarDirty: true });
+ });
+
+ it('enables the button', () => {
+ expect(findApplyButton().attributes('disabled')).toBe(undefined);
+ });
+ });
+ });
+
+ describe('actions', () => {
+ beforeEach(() => {
+ createComponent({ sidebarDirty: true });
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('clicking ApplyButton calls applyQuery', () => {
+ findForm().vm.$emit('submit', { preventDefault: () => {} });
+
+ expect(actionSpies.applyQuery).toHaveBeenCalled();
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_ACTION_CLICK, TRACKING_LABEL_APPLY, {
+ label: getterSpies.currentScope(),
+ });
+ });
+
+ it('clicking resetButton calls resetQuery', () => {
+ findResetButton().vm.$emit('click');
+
+ expect(actionSpies.resetQuery).toHaveBeenCalled();
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_ACTION_CLICK, TRACKING_LABEL_RESET, {
+ label: getterSpies.currentScope(),
+ });
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/issues_filters_spec.js b/spec/frontend/search/sidebar/components/issues_filters_spec.js
new file mode 100644
index 00000000000..84c4258cbdb
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/issues_filters_spec.js
@@ -0,0 +1,107 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+import { MOCK_QUERY } from 'jest/search/mock_data';
+import IssuesFilters from '~/search/sidebar/components/issues_filters.vue';
+import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter/index.vue';
+import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
+import LabelFilter from '~/search/sidebar/components/label_filter/index.vue';
+
+Vue.use(Vuex);
+
+describe('GlobalSearch IssuesFilters', () => {
+ let wrapper;
+
+ const defaultGetters = {
+ currentScope: () => 'issues',
+ };
+
+ const createComponent = (initialState, ff = true) => {
+ const store = new Vuex.Store({
+ state: {
+ urlQuery: MOCK_QUERY,
+ ...initialState,
+ },
+ getters: defaultGetters,
+ });
+
+ wrapper = shallowMount(IssuesFilters, {
+ store,
+ provide: {
+ glFeatures: {
+ searchIssueLabelAggregation: ff,
+ },
+ },
+ });
+ };
+
+ const findStatusFilter = () => wrapper.findComponent(StatusFilter);
+ const findConfidentialityFilter = () => wrapper.findComponent(ConfidentialityFilter);
+ const findLabelFilter = () => wrapper.findComponent(LabelFilter);
+ const findDividers = () => wrapper.findAll('hr');
+
+ describe('Renders correctly with FF enabled', () => {
+ beforeEach(() => {
+ createComponent({ urlQuery: MOCK_QUERY });
+ });
+ it('renders StatusFilter', () => {
+ expect(findStatusFilter().exists()).toBe(true);
+ });
+
+ it('renders ConfidentialityFilter', () => {
+ expect(findConfidentialityFilter().exists()).toBe(true);
+ });
+
+ it('renders LabelFilter', () => {
+ expect(findLabelFilter().exists()).toBe(true);
+ });
+
+ it('renders dividers correctly', () => {
+ expect(findDividers()).toHaveLength(2);
+ });
+ });
+
+ describe('Renders correctly with FF disabled', () => {
+ beforeEach(() => {
+ createComponent({ urlQuery: MOCK_QUERY }, false);
+ });
+ it('renders StatusFilter', () => {
+ expect(findStatusFilter().exists()).toBe(true);
+ });
+
+ it('renders ConfidentialityFilter', () => {
+ expect(findConfidentialityFilter().exists()).toBe(true);
+ });
+
+ it("doesn't render LabelFilter", () => {
+ expect(findLabelFilter().exists()).toBe(false);
+ });
+
+ it('renders divider correctly', () => {
+ expect(findDividers()).toHaveLength(1);
+ });
+ });
+
+ describe('Renders correctly with wrong scope', () => {
+ beforeEach(() => {
+ defaultGetters.currentScope = () => 'blobs';
+ createComponent({ urlQuery: MOCK_QUERY });
+ });
+ it("doesn't render StatusFilter", () => {
+ expect(findStatusFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render ConfidentialityFilter", () => {
+ expect(findConfidentialityFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render LabelFilter", () => {
+ expect(findLabelFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render dividers", () => {
+ expect(findDividers()).toHaveLength(0);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/label_dropdown_items_spec.js b/spec/frontend/search/sidebar/components/label_dropdown_items_spec.js
index 135b12956b2..9124da5cfe1 100644
--- a/spec/frontend/search/sidebar/components/label_dropdown_items_spec.js
+++ b/spec/frontend/search/sidebar/components/label_dropdown_items_spec.js
@@ -1,5 +1,6 @@
import { GlFormCheckbox } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
import { PROCESS_LABELS_DATA } from 'jest/search/mock_data';
diff --git a/spec/frontend/search/sidebar/components/label_filter_spec.js b/spec/frontend/search/sidebar/components/label_filter_spec.js
index 2a5b3a96045..07b2e176610 100644
--- a/spec/frontend/search/sidebar/components/label_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/label_filter_spec.js
@@ -9,6 +9,7 @@ import {
GlDropdownDivider,
} from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
diff --git a/spec/frontend/search/sidebar/components/language_filter_spec.js b/spec/frontend/search/sidebar/components/language_filter_spec.js
index 817199d7cfe..b45f365e1e1 100644
--- a/spec/frontend/search/sidebar/components/language_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/language_filter_spec.js
@@ -1,5 +1,6 @@
-import { GlAlert, GlFormCheckbox, GlForm } from '@gitlab/ui';
+import { GlAlert, GlFormCheckbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -13,13 +14,11 @@ import CheckboxFilter from '~/search/sidebar/components/language_filter/checkbox
import {
TRACKING_LABEL_SHOW_MORE,
- TRACKING_CATEGORY,
TRACKING_PROPERTY_MAX,
TRACKING_LABEL_MAX,
TRACKING_LABEL_FILTERS,
TRACKING_ACTION_SHOW,
TRACKING_ACTION_CLICK,
- TRACKING_LABEL_APPLY,
TRACKING_LABEL_ALL,
} from '~/search/sidebar/components/language_filter/tracking';
@@ -61,10 +60,7 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
});
};
- const findForm = () => wrapper.findComponent(GlForm);
const findCheckboxFilter = () => wrapper.findComponent(CheckboxFilter);
- const findApplyButton = () => wrapper.findByTestId('apply-button');
- const findResetButton = () => wrapper.findByTestId('reset-button');
const findShowMoreButton = () => wrapper.findByTestId('show-more-button');
const findAlert = () => wrapper.findComponent(GlAlert);
const findAllCheckboxes = () => wrapper.findAllComponents(GlFormCheckbox);
@@ -80,10 +76,6 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
unmockTracking();
});
- it('renders form', () => {
- expect(findForm().exists()).toBe(true);
- });
-
it('renders checkbox-filter', () => {
expect(findCheckboxFilter().exists()).toBe(true);
});
@@ -93,10 +85,6 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
expect(findAllCheckboxes()).toHaveLength(10);
});
- it('renders ApplyButton', () => {
- expect(findApplyButton().exists()).toBe(true);
- });
-
it('renders Show More button', () => {
expect(findShowMoreButton().exists()).toBe(true);
});
@@ -106,47 +94,6 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
});
});
- describe('resetButton', () => {
- describe.each`
- description | sidebarDirty | queryFilters | exists
- ${'sidebar dirty only'} | ${true} | ${[]} | ${false}
- ${'query filters only'} | ${false} | ${['JSON', 'C']} | ${false}
- ${'sidebar dirty and query filters'} | ${true} | ${['JSON', 'C']} | ${true}
- ${'no sidebar and no query filters'} | ${false} | ${[]} | ${false}
- `('$description', ({ sidebarDirty, queryFilters, exists }) => {
- beforeEach(() => {
- getterSpies.queryLanguageFilters = jest.fn(() => queryFilters);
- createComponent({ sidebarDirty, query: { ...MOCK_QUERY, language: queryFilters } });
- });
-
- it(`button is ${exists ? 'shown' : 'hidden'}`, () => {
- expect(findResetButton().exists()).toBe(exists);
- });
- });
- });
-
- describe('ApplyButton', () => {
- describe('when sidebarDirty is false', () => {
- beforeEach(() => {
- createComponent({ sidebarDirty: false });
- });
-
- it('disables the button', () => {
- expect(findApplyButton().attributes('disabled')).toBeDefined();
- });
- });
-
- describe('when sidebarDirty is true', () => {
- beforeEach(() => {
- createComponent({ sidebarDirty: true });
- });
-
- it('enables the button', () => {
- expect(findApplyButton().attributes('disabled')).toBe(undefined);
- });
- });
- });
-
describe('Show All button works', () => {
beforeEach(() => {
createComponent();
@@ -211,19 +158,5 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
it('uses action fetchAllAggregation', () => {
expect(actionSpies.fetchAllAggregation).toHaveBeenCalled();
});
-
- it('clicking ApplyButton calls applyQuery', () => {
- findForm().vm.$emit('submit', { preventDefault: () => {} });
-
- expect(actionSpies.applyQuery).toHaveBeenCalled();
- });
-
- it('sends tracking information clicking ApplyButton', () => {
- findForm().vm.$emit('submit', { preventDefault: () => {} });
-
- expect(trackingSpy).toHaveBeenCalledWith(TRACKING_ACTION_CLICK, TRACKING_LABEL_APPLY, {
- label: TRACKING_CATEGORY,
- });
- });
});
});
diff --git a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
new file mode 100644
index 00000000000..0932f8e47d2
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import MergeRequestsFilters from '~/search/sidebar/components/merge_requests_filters.vue';
+import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
+import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+
+describe('GlobalSearch MergeRequestsFilters', () => {
+ let wrapper;
+
+ const findStatusFilter = () => wrapper.findComponent(StatusFilter);
+ const findFiltersTemplate = () => wrapper.findComponent(FiltersTemplate);
+
+ const createComponent = () => {
+ wrapper = shallowMount(MergeRequestsFilters);
+ };
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+    it('renders StatusFilter', () => {
+ expect(findStatusFilter().exists()).toBe(true);
+ });
+
+ it('renders FiltersTemplate', () => {
+ expect(findFiltersTemplate().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/projects_filters_specs.js b/spec/frontend/search/sidebar/components/projects_filters_specs.js
new file mode 100644
index 00000000000..15e3254e289
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/projects_filters_specs.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import ProjectsFilters from '~/search/sidebar/components/projects_filters.vue';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
+import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+
+describe('GlobalSearch ProjectsFilters', () => {
+ let wrapper;
+
+ const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
+ const findFiltersTemplate = () => wrapper.findComponent(FiltersTemplate);
+
+ const createComponent = () => {
+ wrapper = shallowMount(ProjectsFilters);
+ };
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
+ });
+
+ it('renders FiltersTemplate', () => {
+ expect(findFiltersTemplate().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/radio_filter_spec.js b/spec/frontend/search/sidebar/components/radio_filter_spec.js
index 47235b828c3..b99daf9e2f3 100644
--- a/spec/frontend/search/sidebar/components/radio_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/radio_filter_spec.js
@@ -1,11 +1,12 @@
import { GlFormRadioGroup, GlFormRadio } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_QUERY } from 'jest/search/mock_data';
import RadioFilter from '~/search/sidebar/components/radio_filter.vue';
-import { confidentialFilterData } from '~/search/sidebar/constants/confidential_filter_data';
-import { stateFilterData } from '~/search/sidebar/constants/state_filter_data';
+import { confidentialFilterData } from '~/search/sidebar/components/confidentiality_filter/data';
+import { statusFilterData } from '~/search/sidebar/components/status_filter/data';
Vue.use(Vuex);
@@ -21,7 +22,7 @@ describe('RadioFilter', () => {
};
const defaultProps = {
- filterData: stateFilterData,
+ filterData: statusFilterData,
};
const createComponent = (initialState, props = {}) => {
@@ -60,20 +61,20 @@ describe('RadioFilter', () => {
describe('Status Filter', () => {
it('renders a radio button for each filterOption', () => {
expect(findGlRadioButtonsText()).toStrictEqual(
- stateFilterData.filterByScope[stateFilterData.scopes.ISSUES].map((f) => {
- return f.value === stateFilterData.filters.ANY.value
- ? `Any ${stateFilterData.header.toLowerCase()}`
+ statusFilterData.filterByScope[statusFilterData.scopes.ISSUES].map((f) => {
+ return f.value === statusFilterData.filters.ANY.value
+ ? `Any ${statusFilterData.header.toLowerCase()}`
: f.label;
}),
);
});
it('clicking a radio button item calls setQuery', () => {
- const filter = stateFilterData.filters[Object.keys(stateFilterData.filters)[0]].value;
+ const filter = statusFilterData.filters[Object.keys(statusFilterData.filters)[0]].value;
findGlRadioButtonGroup().vm.$emit('input', filter);
expect(actionSpies.setQuery).toHaveBeenCalledWith(expect.any(Object), {
- key: stateFilterData.filterParam,
+ key: statusFilterData.filterParam,
value: filter,
});
});
diff --git a/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js b/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js
index 786ad806ea6..63d8b34fcf0 100644
--- a/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js
+++ b/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js
@@ -1,6 +1,7 @@
import { GlNav, GlNavItem, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_QUERY, MOCK_NAVIGATION } from 'jest/search/mock_data';
import ScopeLegacyNavigation from '~/search/sidebar/components/scope_legacy_navigation.vue';
diff --git a/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js b/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js
index 86939bdc5d6..d85942b9634 100644
--- a/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js
+++ b/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
import NavItem from '~/super_sidebar/components/nav_item.vue';
diff --git a/spec/frontend/search/sidebar/components/status_filter_spec.js b/spec/frontend/search/sidebar/components/status_filter_spec.js
index a332a43e624..c230341c172 100644
--- a/spec/frontend/search/sidebar/components/status_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/status_filter_spec.js
@@ -1,8 +1,9 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import RadioFilter from '~/search/sidebar/components/radio_filter.vue';
-import StatusFilter from '~/search/sidebar/components/status_filter.vue';
+import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
Vue.use(Vuex);
@@ -20,33 +21,24 @@ describe('StatusFilter', () => {
};
const findRadioFilter = () => wrapper.findComponent(RadioFilter);
- const findHR = () => wrapper.findComponent('hr');
describe('old sidebar', () => {
beforeEach(() => {
- createComponent({ useNewNavigation: false });
+ createComponent({ useSidebarNavigation: false });
});
it('renders the component', () => {
expect(findRadioFilter().exists()).toBe(true);
});
-
- it('renders the divider', () => {
- expect(findHR().exists()).toBe(true);
- });
});
describe('new sidebar', () => {
beforeEach(() => {
- createComponent({ useNewNavigation: true });
+ createComponent({ useSidebarNavigation: true });
});
it('renders the component', () => {
expect(findRadioFilter().exists()).toBe(true);
});
-
- it("doesn't render the divider", () => {
- expect(findHR().exists()).toBe(false);
- });
});
});
diff --git a/spec/frontend/search/sort/components/app_spec.js b/spec/frontend/search/sort/components/app_spec.js
index 09c295e3ea9..f701952701c 100644
--- a/spec/frontend/search/sort/components/app_spec.js
+++ b/spec/frontend/search/sort/components/app_spec.js
@@ -1,6 +1,7 @@
import { GlButtonGroup, GlButton, GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_QUERY, MOCK_SORT_OPTIONS } from 'jest/search/mock_data';
import GlobalSearchSort from '~/search/sort/components/app.vue';
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index 2051e731647..cc9c555b6c7 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -181,25 +181,36 @@ describe('Global Search Store Actions', () => {
});
describe('applyQuery', () => {
- it('calls visitUrl and setParams with the state.query', () => {
- return testAction(actions.applyQuery, null, state, [], [], () => {
- expect(urlUtils.setUrlParams).toHaveBeenCalledWith({ ...state.query, page: null });
- expect(urlUtils.visitUrl).toHaveBeenCalled();
- });
+ it('calls visitUrl and setParams with the state.query', async () => {
+ await testAction(actions.applyQuery, null, state, [], []);
+ expect(urlUtils.setUrlParams).toHaveBeenCalledWith(
+ { ...state.query, page: null },
+ 'http://test.host/',
+ false,
+ true,
+ );
+ expect(urlUtils.visitUrl).toHaveBeenCalled();
});
});
describe('resetQuery', () => {
- it('calls visitUrl and setParams with empty values', () => {
- return testAction(actions.resetQuery, null, state, [], [], () => {
- expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
+ it('calls visitUrl and setParams with empty values', async () => {
+ await testAction(actions.resetQuery, null, state, [], []);
+ const resetParams = SIDEBAR_PARAMS.reduce((acc, param) => {
+ acc[param] = null;
+ return acc;
+ }, {});
+
+ expect(urlUtils.setUrlParams).toHaveBeenCalledWith(
+ {
...state.query,
page: null,
- state: null,
- confidential: null,
- });
- expect(urlUtils.visitUrl).toHaveBeenCalled();
- });
+ ...resetParams,
+ },
+ undefined,
+ true,
+ );
+ expect(urlUtils.visitUrl).toHaveBeenCalled();
});
});
@@ -327,28 +338,6 @@ describe('Global Search Store Actions', () => {
});
});
- describe('resetLanguageQueryWithRedirect', () => {
- it('calls visitUrl and setParams with the state.query', () => {
- return testAction(actions.resetLanguageQueryWithRedirect, null, state, [], [], () => {
- expect(urlUtils.setUrlParams).toHaveBeenCalledWith({ ...state.query, page: null });
- expect(urlUtils.visitUrl).toHaveBeenCalled();
- });
- });
- });
-
- describe('resetLanguageQuery', () => {
- it('calls commit SET_QUERY with value []', () => {
- state = { ...state, query: { ...state.query, language: ['YAML', 'Text', 'Markdown'] } };
- return testAction(
- actions.resetLanguageQuery,
- null,
- state,
- [{ type: types.SET_QUERY, payload: { key: 'language', value: [] } }],
- [],
- );
- });
- });
-
describe('closeLabel', () => {
beforeEach(() => {
state = createState({
diff --git a/spec/frontend/search/store/getters_spec.js b/spec/frontend/search/store/getters_spec.js
index 772acb39a57..571525bd025 100644
--- a/spec/frontend/search/store/getters_spec.js
+++ b/spec/frontend/search/store/getters_spec.js
@@ -17,7 +17,6 @@ import {
MOCK_LABEL_SEARCH_RESULT,
MOCK_FILTERED_APPLIED_SELECTED_LABELS,
MOCK_FILTERED_UNSELECTED_LABELS,
- MOCK_FILTERED_UNAPPLIED_SELECTED_LABELS,
} from '../mock_data';
describe('Global Search Store Getters', () => {
@@ -70,18 +69,6 @@ describe('Global Search Store Getters', () => {
});
});
- describe('currentUrlQueryHasLanguageFilters', () => {
- it.each`
- description | lang | result
- ${'has valid language'} | ${{ language: ['a', 'b'] }} | ${true}
- ${'has empty lang'} | ${{ language: [] }} | ${false}
- ${'has no lang'} | ${{}} | ${false}
- `('$description', ({ lang, result }) => {
- state.urlQuery = lang;
- expect(getters.currentUrlQueryHasLanguageFilters(state)).toBe(result);
- });
- });
-
describe('navigationItems', () => {
it('returns the re-mapped navigation data', () => {
state.navigation = MOCK_NAVIGATION;
@@ -133,25 +120,6 @@ describe('Global Search Store Getters', () => {
});
});
- describe('filteredUnappliedSelectedLabels', () => {
- beforeEach(() => {
- state.query.labels = ['6', '73'];
- });
-
- it('returns all labels that are selected (part of URL) no search', () => {
- expect(getters.filteredUnappliedSelectedLabels(state)).toStrictEqual(
- MOCK_FILTERED_UNAPPLIED_SELECTED_LABELS,
- );
- });
-
- it('returns labels that are selected (part of URL) and result of search', () => {
- state.searchLabelString = 'ACC';
- expect(getters.filteredUnappliedSelectedLabels(state)).toStrictEqual([
- MOCK_FILTERED_UNAPPLIED_SELECTED_LABELS[1],
- ]);
- });
- });
-
describe('filteredUnselectedLabels', () => {
it('returns all labels that are selected (part of URL) no search', () => {
expect(getters.filteredUnselectedLabels(state)).toStrictEqual(
diff --git a/spec/frontend/search/topbar/components/app_spec.js b/spec/frontend/search/topbar/components/app_spec.js
index 9dc14b97ce0..62d0e377d74 100644
--- a/spec/frontend/search/topbar/components/app_spec.js
+++ b/spec/frontend/search/topbar/components/app_spec.js
@@ -1,6 +1,7 @@
import { GlSearchBoxByClick, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_QUERY } from 'jest/search/mock_data';
import { stubComponent } from 'helpers/stub_component';
diff --git a/spec/frontend/search/topbar/components/group_filter_spec.js b/spec/frontend/search/topbar/components/group_filter_spec.js
index 94882d181d3..fa8036a7f97 100644
--- a/spec/frontend/search/topbar/components/group_filter_spec.js
+++ b/spec/frontend/search/topbar/components/group_filter_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_GROUP, MOCK_QUERY, CURRENT_SCOPE } from 'jest/search/mock_data';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
diff --git a/spec/frontend/search/topbar/components/project_filter_spec.js b/spec/frontend/search/topbar/components/project_filter_spec.js
index c25d2b94027..e7808370098 100644
--- a/spec/frontend/search/topbar/components/project_filter_spec.js
+++ b/spec/frontend/search/topbar/components/project_filter_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_PROJECT, MOCK_QUERY, CURRENT_SCOPE } from 'jest/search/mock_data';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
diff --git a/spec/frontend/search/topbar/components/searchable_dropdown_spec.js b/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
index f7d847674eb..5acaa1c1900 100644
--- a/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
+++ b/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
@@ -1,6 +1,7 @@
import { GlDropdown, GlDropdownItem, GlSearchBoxByType, GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { MOCK_GROUPS, MOCK_GROUP, MOCK_QUERY } from 'jest/search/mock_data';
diff --git a/spec/frontend/service_desk/components/empty_state_with_any_issues_spec.js b/spec/frontend/service_desk/components/empty_state_with_any_issues_spec.js
new file mode 100644
index 00000000000..ce8a78767d4
--- /dev/null
+++ b/spec/frontend/service_desk/components/empty_state_with_any_issues_spec.js
@@ -0,0 +1,74 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import EmptyStateWithAnyIssues from '~/service_desk/components/empty_state_with_any_issues.vue';
+import {
+ noSearchResultsTitle,
+ noSearchResultsDescription,
+ infoBannerUserNote,
+ noOpenIssuesTitle,
+ noClosedIssuesTitle,
+} from '~/service_desk/constants';
+
+describe('EmptyStateWithAnyIssues component', () => {
+ let wrapper;
+
+ const defaultProvide = {
+ emptyStateSvgPath: 'empty/state/svg/path',
+ newIssuePath: 'new/issue/path',
+ showNewIssueLink: false,
+ };
+
+ const findGlEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ const mountComponent = (props = {}) => {
+ wrapper = shallowMount(EmptyStateWithAnyIssues, {
+ propsData: {
+ hasSearch: true,
+ isOpenTab: true,
+ ...props,
+ },
+ provide: defaultProvide,
+ });
+ };
+
+ describe('when there is a search (with no results)', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('shows empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ description: noSearchResultsDescription,
+ title: noSearchResultsTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ });
+ });
+ });
+
+ describe('when "Open" tab is active', () => {
+ beforeEach(() => {
+ mountComponent({ hasSearch: false });
+ });
+
+ it('shows empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ description: infoBannerUserNote,
+ title: noOpenIssuesTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ });
+ });
+ });
+
+ describe('when "Closed" tab is active', () => {
+ beforeEach(() => {
+ mountComponent({ hasSearch: false, isClosedTab: true, isOpenTab: false });
+ });
+
+ it('shows empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ title: noClosedIssuesTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/service_desk/components/empty_state_without_any_issues_spec.js b/spec/frontend/service_desk/components/empty_state_without_any_issues_spec.js
new file mode 100644
index 00000000000..c67f9588ed4
--- /dev/null
+++ b/spec/frontend/service_desk/components/empty_state_without_any_issues_spec.js
@@ -0,0 +1,86 @@
+import { GlEmptyState, GlLink } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import EmptyStateWithoutAnyIssues from '~/service_desk/components/empty_state_without_any_issues.vue';
+import { infoBannerTitle, noIssuesSignedOutButtonText, learnMore } from '~/service_desk/constants';
+
+describe('EmptyStateWithoutAnyIssues component', () => {
+ let wrapper;
+
+ const defaultProvide = {
+ emptyStateSvgPath: 'empty/state/svg/path',
+ isSignedIn: true,
+ signInPath: 'sign/in/path',
+ canAdminIssues: true,
+ isServiceDeskEnabled: true,
+ serviceDeskEmailAddress: 'email@address.com',
+ serviceDeskHelpPath: 'service/desk/help/path',
+ };
+
+ const findGlEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findGlLink = () => wrapper.findComponent(GlLink);
+ const findIssuesHelpPageLink = () => wrapper.findByRole('link', { name: learnMore });
+
+ const mountComponent = ({ provide = {} } = {}) => {
+ wrapper = mountExtended(EmptyStateWithoutAnyIssues, {
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
+ });
+ };
+
+ describe('when signed in', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('renders empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ title: infoBannerTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ contentClass: 'gl-max-w-80!',
+ });
+ });
+
+ it('renders description with service desk docs link', () => {
+ expect(findIssuesHelpPageLink().attributes('href')).toBe(defaultProvide.serviceDeskHelpPath);
+ });
+
+ it('renders email address, when user can admin issues and service desk is enabled', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.serviceDeskEmailAddress);
+ });
+
+ it('does not render email address, when user can not admin issues', () => {
+ mountComponent({ provide: { canAdminIssues: false } });
+
+ expect(wrapper.text()).not.toContain(wrapper.vm.serviceDeskEmailAddress);
+ });
+
+ it('does not render email address, when service desk is not setup', () => {
+ mountComponent({ provide: { isServiceDeskEnabled: false } });
+
+ expect(wrapper.text()).not.toContain(wrapper.vm.serviceDeskEmailAddress);
+ });
+ });
+
+ describe('when signed out', () => {
+ beforeEach(() => {
+ mountComponent({ provide: { isSignedIn: false } });
+ });
+
+ it('renders empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ title: infoBannerTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ primaryButtonText: noIssuesSignedOutButtonText,
+ primaryButtonLink: defaultProvide.signInPath,
+ contentClass: 'gl-max-w-80!',
+ });
+ });
+
+ it('renders service desk docs link', () => {
+ expect(findGlLink().attributes('href')).toBe(defaultProvide.serviceDeskHelpPath);
+ expect(findGlLink().text()).toBe(learnMore);
+ });
+ });
+});
diff --git a/spec/frontend/service_desk/components/service_desk_list_app_spec.js b/spec/frontend/service_desk/components/service_desk_list_app_spec.js
index 2ac789745aa..bdb6a48895e 100644
--- a/spec/frontend/service_desk/components/service_desk_list_app_spec.js
+++ b/spec/frontend/service_desk/components/service_desk_list_app_spec.js
@@ -1,61 +1,107 @@
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+import { cloneDeep } from 'lodash';
+import VueRouter from 'vue-router';
import * as Sentry from '@sentry/browser';
import createMockApollo from 'helpers/mock_apollo_helper';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import { issuableListTabs } from '~/vue_shared/issuable/list/constants';
-import { STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';
-import getServiceDeskIssuesQuery from '~/service_desk/queries/get_service_desk_issues.query.graphql';
-import getServiceDeskIssuesCountsQuery from '~/service_desk/queries/get_service_desk_issues_counts.query.graphql';
+import { TYPENAME_USER } from '~/graphql_shared/constants';
+import { convertToGraphQLId } from '~/graphql_shared/utils';
+import { STATUS_CLOSED, STATUS_OPEN, STATUS_ALL } from '~/service_desk/constants';
+import getServiceDeskIssuesQuery from 'ee_else_ce/service_desk/queries/get_service_desk_issues.query.graphql';
+import getServiceDeskIssuesCountsQuery from 'ee_else_ce/service_desk/queries/get_service_desk_issues_counts.query.graphql';
import ServiceDeskListApp from '~/service_desk/components/service_desk_list_app.vue';
import InfoBanner from '~/service_desk/components/info_banner.vue';
+import EmptyStateWithAnyIssues from '~/service_desk/components/empty_state_with_any_issues.vue';
+import EmptyStateWithoutAnyIssues from '~/service_desk/components/empty_state_without_any_issues.vue';
+
+import {
+ TOKEN_TYPE_ASSIGNEE,
+ TOKEN_TYPE_AUTHOR,
+ TOKEN_TYPE_CONFIDENTIAL,
+ TOKEN_TYPE_LABEL,
+ TOKEN_TYPE_MILESTONE,
+ TOKEN_TYPE_MY_REACTION,
+ TOKEN_TYPE_RELEASE,
+ TOKEN_TYPE_SEARCH_WITHIN,
+} from '~/vue_shared/components/filtered_search_bar/constants';
import {
getServiceDeskIssuesQueryResponse,
+ getServiceDeskIssuesQueryEmptyResponse,
getServiceDeskIssuesCountsQueryResponse,
+ filteredTokens,
+ urlParams,
+ locationSearch,
} from '../mock_data';
jest.mock('@sentry/browser');
-describe('ServiceDeskListApp', () => {
+describe('CE ServiceDeskListApp', () => {
let wrapper;
+ let router;
Vue.use(VueApollo);
+ Vue.use(VueRouter);
const defaultProvide = {
+ releasesPath: 'releases/path',
+ autocompleteAwardEmojisPath: 'autocomplete/award/emojis/path',
+ hasIterationsFeature: true,
+ hasIssueWeightsFeature: true,
+ hasIssuableHealthStatusFeature: true,
+ groupPath: 'group/path',
emptyStateSvgPath: 'empty-state.svg',
isProject: true,
isSignedIn: true,
fullPath: 'path/to/project',
isServiceDeskSupported: true,
hasAnyIssues: true,
+ initialSort: '',
+ issuablesLoading: false,
};
- const defaultQueryResponse = getServiceDeskIssuesQueryResponse;
+ let defaultQueryResponse = getServiceDeskIssuesQueryResponse;
+ if (IS_EE) {
+ defaultQueryResponse = cloneDeep(getServiceDeskIssuesQueryResponse);
+ defaultQueryResponse.data.project.issues.nodes[0].healthStatus = null;
+ defaultQueryResponse.data.project.issues.nodes[0].weight = 5;
+ }
- const mockServiceDeskIssuesQueryResponse = jest.fn().mockResolvedValue(defaultQueryResponse);
- const mockServiceDeskIssuesCountsQueryResponse = jest
+ const mockServiceDeskIssuesQueryResponseHandler = jest
+ .fn()
+ .mockResolvedValue(defaultQueryResponse);
+ const mockServiceDeskIssuesQueryEmptyResponseHandler = jest
+ .fn()
+ .mockResolvedValue(getServiceDeskIssuesQueryEmptyResponse);
+ const mockServiceDeskIssuesCountsQueryResponseHandler = jest
.fn()
.mockResolvedValue(getServiceDeskIssuesCountsQueryResponse);
const findIssuableList = () => wrapper.findComponent(IssuableList);
const findInfoBanner = () => wrapper.findComponent(InfoBanner);
+ const findLabelsToken = () =>
+ findIssuableList()
+ .props('searchTokens')
+ .find((token) => token.type === TOKEN_TYPE_LABEL);
- const mountComponent = ({
+ const createComponent = ({
provide = {},
- data = {},
- serviceDeskIssuesQueryResponse = mockServiceDeskIssuesQueryResponse,
- serviceDeskIssuesCountsQueryResponse = mockServiceDeskIssuesCountsQueryResponse,
- stubs = {},
- mountFn = shallowMount,
+ serviceDeskIssuesQueryResponseHandler = mockServiceDeskIssuesQueryResponseHandler,
+ serviceDeskIssuesCountsQueryResponseHandler = mockServiceDeskIssuesCountsQueryResponseHandler,
} = {}) => {
const requestHandlers = [
- [getServiceDeskIssuesQuery, serviceDeskIssuesQueryResponse],
- [getServiceDeskIssuesCountsQuery, serviceDeskIssuesCountsQueryResponse],
+ [getServiceDeskIssuesQuery, serviceDeskIssuesQueryResponseHandler],
+ [getServiceDeskIssuesCountsQuery, serviceDeskIssuesCountsQueryResponseHandler],
];
- return mountFn(ServiceDeskListApp, {
+ router = new VueRouter({ mode: 'history' });
+
+ return shallowMount(ServiceDeskListApp, {
apolloProvider: createMockApollo(
requestHandlers,
{},
@@ -71,26 +117,35 @@ describe('ServiceDeskListApp', () => {
},
},
),
+ router,
provide: {
...defaultProvide,
...provide,
},
- data() {
- return data;
- },
- stubs,
});
};
beforeEach(() => {
- wrapper = mountComponent();
+ setWindowLocation(TEST_HOST);
+ wrapper = createComponent();
return waitForPromises();
});
+ it('renders the issuable list with skeletons while fetching service desk issues', async () => {
+ wrapper = createComponent();
+ await nextTick();
+
+ expect(findIssuableList().props('issuablesLoading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findIssuableList().props('issuablesLoading')).toBe(false);
+ });
+
it('fetches service desk issues and renders them in the issuable list', () => {
expect(findIssuableList().props()).toMatchObject({
namespace: 'service-desk',
- recentSearchesStorageKey: 'issues',
+ recentSearchesStorageKey: 'service-desk-issues',
issuables: defaultQueryResponse.data.project.issues.nodes,
tabs: issuableListTabs,
currentTab: STATUS_OPEN,
@@ -107,18 +162,140 @@ describe('ServiceDeskListApp', () => {
expect(findInfoBanner().exists()).toBe(true);
});
- it('does not render, when there are no issues', async () => {
- wrapper = mountComponent({ provide: { hasAnyIssues: false } });
- await waitForPromises();
+ it('does not render when Service Desk is not supported and has any number of issues', () => {
+ wrapper = createComponent({ provide: { isServiceDeskSupported: false } });
+
+ expect(findInfoBanner().exists()).toBe(false);
+ });
+
+ it('does not render, when there are no issues', () => {
+ wrapper = createComponent({
+ serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
+ });
expect(findInfoBanner().exists()).toBe(false);
});
});
+ describe('Empty states', () => {
+ describe('when there are issues', () => {
+ it('shows EmptyStateWithAnyIssues component', () => {
+ setWindowLocation(locationSearch);
+ wrapper = createComponent({
+ serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
+ });
+
+ expect(wrapper.findComponent(EmptyStateWithAnyIssues).props()).toEqual({
+ hasSearch: true,
+ isOpenTab: true,
+ });
+ });
+ });
+
+ describe('when there are no issues', () => {
+ it('shows EmptyStateWithoutAnyIssues component', () => {
+ wrapper = createComponent({
+ provide: { hasAnyIssues: false },
+ serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
+ });
+
+ expect(wrapper.findComponent(EmptyStateWithoutAnyIssues).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('Initial url params', () => {
+ describe('search', () => {
+ it('is set from the url params', () => {
+ setWindowLocation(locationSearch);
+ wrapper = createComponent();
+
+ expect(router.history.current.query).toMatchObject({ search: 'find issues' });
+ });
+ });
+
+ describe('state', () => {
+ it('is set from the url params', async () => {
+ const initialState = STATUS_ALL;
+ setWindowLocation(`?state=${initialState}`);
+ wrapper = createComponent();
+ await waitForPromises();
+
+ expect(findIssuableList().props('currentTab')).toBe(initialState);
+ });
+ });
+
+ describe('filter tokens', () => {
+ it('are set from the url params', () => {
+ setWindowLocation(locationSearch);
+ wrapper = createComponent();
+
+ expect(findIssuableList().props('initialFilterValue')).toEqual(filteredTokens);
+ });
+ });
+ });
+
+ describe('Tokens', () => {
+ const mockCurrentUser = {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ avatar_url: 'avatar/url',
+ };
+
+ describe('when user is signed out', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ provide: { isSignedIn: false } });
+ return waitForPromises();
+ });
+
+ it('does not render My-Reaction or Confidential tokens', () => {
+ expect(findIssuableList().props('searchTokens')).not.toMatchObject([
+ { type: TOKEN_TYPE_AUTHOR, preloadedUsers: [mockCurrentUser] },
+ { type: TOKEN_TYPE_ASSIGNEE, preloadedUsers: [mockCurrentUser] },
+ { type: TOKEN_TYPE_MY_REACTION },
+ { type: TOKEN_TYPE_CONFIDENTIAL },
+ ]);
+ });
+ });
+
+ describe('when all tokens are available', () => {
+ beforeEach(() => {
+ window.gon = {
+ current_user_id: mockCurrentUser.id,
+ current_user_fullname: mockCurrentUser.name,
+ current_username: mockCurrentUser.username,
+ current_user_avatar_url: mockCurrentUser.avatar_url,
+ };
+
+ wrapper = createComponent();
+ return waitForPromises();
+ });
+
+ it('renders all tokens alphabetically', () => {
+ const preloadedUsers = [
+ { ...mockCurrentUser, id: convertToGraphQLId(TYPENAME_USER, mockCurrentUser.id) },
+ ];
+
+ expect(findIssuableList().props('searchTokens')).toMatchObject([
+ { type: TOKEN_TYPE_ASSIGNEE, preloadedUsers },
+ { type: TOKEN_TYPE_CONFIDENTIAL },
+ { type: TOKEN_TYPE_LABEL },
+ { type: TOKEN_TYPE_MILESTONE },
+ { type: TOKEN_TYPE_MY_REACTION },
+ { type: TOKEN_TYPE_RELEASE },
+ { type: TOKEN_TYPE_SEARCH_WITHIN },
+ ]);
+ });
+ });
+ });
+
describe('Events', () => {
describe('when "click-tab" event is emitted by IssuableList', () => {
- beforeEach(() => {
- mountComponent();
+ beforeEach(async () => {
+ wrapper = createComponent();
+ router.push = jest.fn();
+ await waitForPromises();
findIssuableList().vm.$emit('click-tab', STATUS_CLOSED);
});
@@ -126,26 +303,74 @@ describe('ServiceDeskListApp', () => {
it('updates ui to the new tab', () => {
expect(findIssuableList().props('currentTab')).toBe(STATUS_CLOSED);
});
+
+ it('updates url to the new tab', () => {
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining({ state: STATUS_CLOSED }),
+ });
+ });
+ });
+
+ describe('when "filter" event is emitted by IssuableList', () => {
+ it('updates IssuableList with url params', async () => {
+ wrapper = createComponent();
+ router.push = jest.fn();
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('filter', filteredTokens);
+ await nextTick();
+
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining(urlParams),
+ });
+ });
});
});
describe('Errors', () => {
describe.each`
- error | mountOption | message
- ${'fetching issues'} | ${'serviceDeskIssuesQueryResponse'} | ${ServiceDeskListApp.i18n.errorFetchingIssues}
- ${'fetching issue counts'} | ${'serviceDeskIssuesCountsQueryResponse'} | ${ServiceDeskListApp.i18n.errorFetchingCounts}
- `('when there is an error $error', ({ mountOption, message }) => {
+ error | responseHandler
+ ${'fetching issues'} | ${'serviceDeskIssuesQueryResponseHandler'}
+ ${'fetching issue counts'} | ${'serviceDeskIssuesCountsQueryResponseHandler'}
+ `('when there is an error $error', ({ responseHandler }) => {
beforeEach(() => {
- wrapper = mountComponent({
- [mountOption]: jest.fn().mockRejectedValue(new Error('ERROR')),
+ wrapper = createComponent({
+ [responseHandler]: jest.fn().mockRejectedValue(new Error('ERROR')),
});
return waitForPromises();
});
it('shows an error message', () => {
- expect(findIssuableList().props('error')).toBe(message);
expect(Sentry.captureException).toHaveBeenCalledWith(new Error('ERROR'));
});
});
});
+
+ describe('When providing token for labels', () => {
+ it('passes function to fetchLatestLabels property if frontend caching is enabled', async () => {
+ wrapper = createComponent({
+ provide: {
+ glFeatures: {
+ frontendCaching: true,
+ },
+ },
+ });
+ await waitForPromises();
+
+ expect(typeof findLabelsToken().fetchLatestLabels).toBe('function');
+ });
+
+ it('passes null to fetchLatestLabels property if frontend caching is disabled', async () => {
+ wrapper = createComponent({
+ provide: {
+ glFeatures: {
+ frontendCaching: false,
+ },
+ },
+ });
+ await waitForPromises();
+
+ expect(findLabelsToken().fetchLatestLabels).toBe(null);
+ });
+ });
});
diff --git a/spec/frontend/service_desk/mock_data.js b/spec/frontend/service_desk/mock_data.js
index 17b400e8670..dc875cb5c1e 100644
--- a/spec/frontend/service_desk/mock_data.js
+++ b/spec/frontend/service_desk/mock_data.js
@@ -1,3 +1,20 @@
+import {
+ FILTERED_SEARCH_TERM,
+ OPERATOR_IS,
+ OPERATOR_NOT,
+ OPERATOR_OR,
+ TOKEN_TYPE_ASSIGNEE,
+ TOKEN_TYPE_CONFIDENTIAL,
+ TOKEN_TYPE_EPIC,
+ TOKEN_TYPE_ITERATION,
+ TOKEN_TYPE_LABEL,
+ TOKEN_TYPE_MILESTONE,
+ TOKEN_TYPE_MY_REACTION,
+ TOKEN_TYPE_RELEASE,
+ TOKEN_TYPE_WEIGHT,
+ TOKEN_TYPE_HEALTH,
+} from '~/vue_shared/components/filtered_search_bar/constants';
+
export const getServiceDeskIssuesQueryResponse = {
data: {
project: {
@@ -116,3 +133,104 @@ export const getServiceDeskIssuesCountsQueryResponse = {
},
},
};
+
+export const filteredTokens = [
+ { type: FILTERED_SEARCH_TERM, value: { data: 'find issues', operator: 'undefined' } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'bart', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lisa', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: '5', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'patty', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'selma', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'carl', operator: OPERATOR_OR } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lenny', operator: OPERATOR_OR } },
+ { type: TOKEN_TYPE_MILESTONE, value: { data: 'season 3', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_MILESTONE, value: { data: 'season 4', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_MILESTONE, value: { data: 'season 20', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_MILESTONE, value: { data: 'season 30', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'cartoon', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'tv', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'live action', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'drama', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'comedy', operator: OPERATOR_OR } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'sitcom', operator: OPERATOR_OR } },
+ { type: TOKEN_TYPE_RELEASE, value: { data: 'v3', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_RELEASE, value: { data: 'v4', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_RELEASE, value: { data: 'v20', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_RELEASE, value: { data: 'v30', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_MY_REACTION, value: { data: 'thumbsup', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_MY_REACTION, value: { data: 'thumbsdown', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_CONFIDENTIAL, value: { data: 'yes', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_ITERATION, value: { data: '4', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_ITERATION, value: { data: '12', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_ITERATION, value: { data: '20', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_ITERATION, value: { data: '42', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_EPIC, value: { data: '12', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_EPIC, value: { data: '34', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_WEIGHT, value: { data: '1', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_WEIGHT, value: { data: '3', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_HEALTH, value: { data: 'atRisk', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_HEALTH, value: { data: 'onTrack', operator: OPERATOR_NOT } },
+];
+
+export const urlParams = {
+ search: 'find issues',
+ 'assignee_username[]': ['bart', 'lisa', '5'],
+ 'not[assignee_username][]': ['patty', 'selma'],
+ 'or[assignee_username][]': ['carl', 'lenny'],
+ milestone_title: ['season 3', 'season 4'],
+ 'not[milestone_title]': ['season 20', 'season 30'],
+ 'label_name[]': ['cartoon', 'tv'],
+ 'not[label_name][]': ['live action', 'drama'],
+ 'or[label_name][]': ['comedy', 'sitcom'],
+ release_tag: ['v3', 'v4'],
+ 'not[release_tag]': ['v20', 'v30'],
+ my_reaction_emoji: 'thumbsup',
+ 'not[my_reaction_emoji]': 'thumbsdown',
+ confidential: 'yes',
+ iteration_id: ['4', '12'],
+ 'not[iteration_id]': ['20', '42'],
+ epic_id: '12',
+ 'not[epic_id]': '34',
+ weight: '1',
+ 'not[weight]': '3',
+ health_status: 'atRisk',
+ 'not[health_status]': 'onTrack',
+};
+
+export const locationSearch = [
+ '?search=find+issues',
+ 'assignee_username[]=bart',
+ 'assignee_username[]=lisa',
+ 'assignee_username[]=5',
+ 'not[assignee_username][]=patty',
+ 'not[assignee_username][]=selma',
+ 'or[assignee_username][]=carl',
+ 'or[assignee_username][]=lenny',
+ 'milestone_title=season+3',
+ 'milestone_title=season+4',
+ 'not[milestone_title]=season+20',
+ 'not[milestone_title]=season+30',
+ 'label_name[]=cartoon',
+ 'label_name[]=tv',
+ 'not[label_name][]=live action',
+ 'not[label_name][]=drama',
+ 'or[label_name][]=comedy',
+ 'or[label_name][]=sitcom',
+ 'release_tag=v3',
+ 'release_tag=v4',
+ 'not[release_tag]=v20',
+ 'not[release_tag]=v30',
+ 'my_reaction_emoji=thumbsup',
+ 'not[my_reaction_emoji]=thumbsdown',
+ 'confidential=yes',
+ 'iteration_id=4',
+ 'iteration_id=12',
+ 'not[iteration_id]=20',
+ 'not[iteration_id]=42',
+ 'epic_id=12',
+ 'not[epic_id]=34',
+ 'weight=1',
+ 'not[weight]=3',
+ 'health_status=atRisk',
+ 'not[health_status]=onTrack',
+].join('&');
diff --git a/spec/frontend/sessions/new/components/email_verification_spec.js b/spec/frontend/sessions/new/components/email_verification_spec.js
new file mode 100644
index 00000000000..30ba2782f2f
--- /dev/null
+++ b/spec/frontend/sessions/new/components/email_verification_spec.js
@@ -0,0 +1,251 @@
+import { GlForm, GlFormInput } from '@gitlab/ui';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { s__ } from '~/locale';
+import { createAlert, VARIANT_SUCCESS } from '~/alert';
+import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import EmailVerification from '~/sessions/new/components/email_verification.vue';
+import UpdateEmail from '~/sessions/new/components/update_email.vue';
+import { visitUrl } from '~/lib/utils/url_utility';
+import {
+ I18N_EMAIL_EMPTY_CODE,
+ I18N_EMAIL_INVALID_CODE,
+ I18N_GENERIC_ERROR,
+ I18N_UPDATE_EMAIL,
+ I18N_RESEND_LINK,
+ I18N_EMAIL_RESEND_SUCCESS,
+} from '~/sessions/new/constants';
+
+jest.mock('~/alert');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
+
+describe('EmailVerification', () => {
+ let wrapper;
+ let axiosMock;
+
+ const defaultPropsData = {
+ obfuscatedEmail: 'al**@g*****.com',
+ verifyPath: '/users/sign_in',
+ resendPath: '/users/resend_verification_code',
+ isOfferEmailReset: true,
+ updateEmailPath: '/users/update_email',
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = mountExtended(EmailVerification, {
+ propsData: { ...defaultPropsData, ...props },
+ });
+ };
+
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findCodeInput = () => wrapper.findComponent(GlFormInput);
+ const findUpdateEmail = () => wrapper.findComponent(UpdateEmail);
+ const findSubmitButton = () => wrapper.find('[type="submit"]');
+ const findResendLink = () => wrapper.findByText(I18N_RESEND_LINK);
+ const findUpdateEmailLink = () => wrapper.findByText(I18N_UPDATE_EMAIL);
+ const enterCode = (code) => findCodeInput().setValue(code);
+ const submitForm = () => findForm().trigger('submit');
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ createComponent();
+ });
+
+ afterEach(() => {
+ createAlert.mockClear();
+ axiosMock.restore();
+ });
+
+ describe('rendering the form', () => {
+ it('contains the obfuscated email address', () => {
+ expect(wrapper.text()).toContain(defaultPropsData.obfuscatedEmail);
+ });
+ });
+
+ describe('verifying the code', () => {
+ describe('when successfully verifying the code', () => {
+ const redirectPath = 'root';
+
+ beforeEach(async () => {
+ enterCode('123456');
+
+ axiosMock
+ .onPost(defaultPropsData.verifyPath)
+ .reply(HTTP_STATUS_OK, { status: 'success', redirect_path: redirectPath });
+
+ await submitForm();
+ await axios.waitForAll();
+ });
+
+ it('redirects to the returned redirect path', () => {
+ expect(visitUrl).toHaveBeenCalledWith(redirectPath);
+ });
+ });
+
+ describe('error messages', () => {
+ it.each`
+ scenario | code | submit | codeValid | errorShown | message
+ ${'shows no error messages before submitting the form'} | ${''} | ${false} | ${false} | ${false} | ${null}
+ ${'shows no error messages before submitting the form'} | ${'xxx'} | ${false} | ${false} | ${false} | ${null}
+ ${'shows no error messages before submitting the form'} | ${'123456'} | ${false} | ${true} | ${false} | ${null}
+ ${'shows empty code error message when submitting the form'} | ${''} | ${true} | ${false} | ${true} | ${I18N_EMAIL_EMPTY_CODE}
+ ${'shows invalid error message when submitting the form'} | ${'xxx'} | ${true} | ${false} | ${true} | ${I18N_EMAIL_INVALID_CODE}
+ ${'shows incorrect code error message returned from the server'} | ${'123456'} | ${true} | ${true} | ${true} | ${s__('IdentityVerification|The code is incorrect. Enter it again, or send a new code.')}
+ `(`$scenario with code $code`, async ({ code, submit, codeValid, errorShown, message }) => {
+ enterCode(code);
+
+ if (submit && codeValid) {
+ axiosMock
+ .onPost(defaultPropsData.verifyPath)
+ .replyOnce(HTTP_STATUS_OK, { status: 'failure', message });
+ }
+
+ if (submit) {
+ await submitForm();
+ await axios.waitForAll();
+ }
+
+ expect(findCodeInput().classes('is-invalid')).toBe(errorShown);
+ expect(findSubmitButton().props('disabled')).toBe(errorShown);
+ if (message) expect(wrapper.text()).toContain(message);
+ });
+
+ it('keeps showing error messages for invalid codes after submitting the form', async () => {
+ const serverErrorMessage = 'error message';
+
+ enterCode('123456');
+
+ axiosMock
+ .onPost(defaultPropsData.verifyPath)
+ .replyOnce(HTTP_STATUS_OK, { status: 'failure', message: serverErrorMessage });
+
+ await submitForm();
+ await axios.waitForAll();
+
+ expect(wrapper.text()).toContain(serverErrorMessage);
+
+ await enterCode('');
+ expect(wrapper.text()).toContain(I18N_EMAIL_EMPTY_CODE);
+
+ await enterCode('xxx');
+ expect(wrapper.text()).toContain(I18N_EMAIL_INVALID_CODE);
+ });
+
+ it('captures the error and shows an alert message when the request failed', async () => {
+ enterCode('123456');
+
+ axiosMock.onPost(defaultPropsData.verifyPath).replyOnce(HTTP_STATUS_OK, null);
+
+ await submitForm();
+ await axios.waitForAll();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: I18N_GENERIC_ERROR,
+ captureError: true,
+ error: expect.any(Error),
+ });
+ });
+
+ it('captures the error and shows an alert message when the request undefined', async () => {
+ enterCode('123456');
+
+ axiosMock.onPost(defaultPropsData.verifyPath).reply(HTTP_STATUS_OK, { status: undefined });
+
+ await submitForm();
+ await axios.waitForAll();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: I18N_GENERIC_ERROR,
+ captureError: true,
+ error: undefined,
+ });
+ });
+ });
+ });
+
+ describe('resending the code', () => {
+ const failedMessage = 'Failure sending the code';
+ const successAlertObject = {
+ message: I18N_EMAIL_RESEND_SUCCESS,
+ variant: VARIANT_SUCCESS,
+ };
+ const failedAlertObject = {
+ message: failedMessage,
+ };
+ const undefinedAlertObject = {
+ captureError: true,
+ error: undefined,
+ message: I18N_GENERIC_ERROR,
+ };
+ const genericAlertObject = {
+ message: I18N_GENERIC_ERROR,
+ captureError: true,
+ error: expect.any(Error),
+ };
+
+ it.each`
+ scenario | statusCode | response | alertObject
+ ${'the code was successfully resend'} | ${HTTP_STATUS_OK} | ${{ status: 'success' }} | ${successAlertObject}
+ ${'there was a problem resending the code'} | ${HTTP_STATUS_OK} | ${{ status: 'failure', message: failedMessage }} | ${failedAlertObject}
+ ${'when the request is undefined'} | ${HTTP_STATUS_OK} | ${{ status: undefined }} | ${undefinedAlertObject}
+ ${'when the request failed'} | ${HTTP_STATUS_NOT_FOUND} | ${null} | ${genericAlertObject}
+ `(`shows an alert message when $scenario`, async ({ statusCode, response, alertObject }) => {
+ enterCode('xxx');
+
+ await submitForm();
+
+ axiosMock.onPost(defaultPropsData.resendPath).replyOnce(statusCode, response);
+
+ findResendLink().trigger('click');
+
+ await axios.waitForAll();
+
+ expect(createAlert).toHaveBeenCalledWith(alertObject);
+ expect(findCodeInput().element.value).toBe('');
+ });
+ });
+
+ describe('updating the email', () => {
+ it('contains the link to show the update email form', () => {
+ expect(findUpdateEmailLink().exists()).toBe(true);
+ });
+
+ describe('when the isOfferEmailReset property is set to false', () => {
+ beforeEach(() => {
+ createComponent({ isOfferEmailReset: false });
+ });
+
+ it('does not contain the link to show the update email form', () => {
+ expect(findUpdateEmailLink().exists()).toBe(false);
+ });
+ });
+
+ it('shows the UpdateEmail component when clicking the link', async () => {
+ expect(findUpdateEmail().exists()).toBe(false);
+
+ await findUpdateEmailLink().trigger('click');
+
+ expect(findUpdateEmail().exists()).toBe(true);
+ });
+
+ describe('when the UpdateEmail component triggers verifyToken', () => {
+ const newEmail = 'new@ema.il';
+
+ beforeEach(async () => {
+ enterCode('123');
+ await findUpdateEmailLink().trigger('click');
+ findUpdateEmail().vm.$emit('verifyToken', newEmail);
+ });
+
+ it('hides the UpdateEmail component, shows the updated email address and resets the form', () => {
+ expect(findUpdateEmail().exists()).toBe(false);
+ expect(wrapper.text()).toContain(newEmail);
+ expect(findCodeInput().element.value).toBe('');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/sessions/new/components/update_email_spec.js b/spec/frontend/sessions/new/components/update_email_spec.js
new file mode 100644
index 00000000000..822720da898
--- /dev/null
+++ b/spec/frontend/sessions/new/components/update_email_spec.js
@@ -0,0 +1,180 @@
+import { GlForm, GlFormInput } from '@gitlab/ui';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { createAlert, VARIANT_SUCCESS } from '~/alert';
+import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import UpdateEmail from '~/sessions/new/components/update_email.vue';
+import {
+ I18N_CANCEL,
+ I18N_EMAIL_INVALID,
+ I18N_UPDATE_EMAIL_SUCCESS,
+ I18N_GENERIC_ERROR,
+ SUCCESS_RESPONSE,
+ FAILURE_RESPONSE,
+} from '~/sessions/new/constants';
+
+const validEmailAddress = 'foo+bar@ema.il';
+const invalidEmailAddress = 'invalid@ema@il';
+
+jest.mock('~/alert');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
+
+describe('EmailVerification', () => {
+ let wrapper;
+ let axiosMock;
+
+ const defaultPropsData = {
+ updateEmailPath: '/users/update_email',
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = mountExtended(UpdateEmail, {
+ propsData: { ...defaultPropsData, ...props },
+ });
+ };
+
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findEmailInput = () => wrapper.findComponent(GlFormInput);
+ const findSubmitButton = () => wrapper.find('[type="submit"]');
+ const findCancelLink = () => wrapper.findByText(I18N_CANCEL);
+ const enterEmail = (email) => findEmailInput().setValue(email);
+ const submitForm = () => findForm().trigger('submit');
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ createComponent();
+ });
+
+ afterEach(() => {
+ createAlert.mockClear();
+ axiosMock.restore();
+ });
+
+ describe('when successfully verifying the email address', () => {
+ beforeEach(async () => {
+ enterEmail(validEmailAddress);
+
+ axiosMock
+ .onPatch(defaultPropsData.updateEmailPath)
+ .reply(HTTP_STATUS_OK, { status: SUCCESS_RESPONSE });
+
+ submitForm();
+ await axios.waitForAll();
+ });
+
+ it('shows a successfully updated alert', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: I18N_UPDATE_EMAIL_SUCCESS,
+ variant: VARIANT_SUCCESS,
+ });
+ });
+
+ it('emits a verifyToken event with the updated email address', () => {
+ expect(wrapper.emitted('verifyToken')[0]).toEqual([validEmailAddress]);
+ });
+ });
+
+ describe('error messages', () => {
+ beforeEach(() => {
+ enterEmail(invalidEmailAddress);
+ });
+
+ describe('when trying to submit an invalid email address', () => {
+ it('shows no error message before submitting the form', () => {
+ expect(wrapper.text()).not.toContain(I18N_EMAIL_INVALID);
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ });
+
+ describe('when submitting the form', () => {
+ beforeEach(async () => {
+ submitForm();
+ await axios.waitForAll();
+ });
+
+ it('shows an error message and disables the submit button', () => {
+ expect(wrapper.text()).toContain(I18N_EMAIL_INVALID);
+ expect(findSubmitButton().props('disabled')).toBe(true);
+ });
+
+ describe('when entering a valid email address', () => {
+ beforeEach(() => {
+ enterEmail(validEmailAddress);
+ });
+
+ it('hides the error message and enables the submit button again', () => {
+ expect(wrapper.text()).not.toContain(I18N_EMAIL_INVALID);
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('when the server responds with an error message', () => {
+ const serverErrorMessage = 'server error message';
+
+ beforeEach(async () => {
+ enterEmail(validEmailAddress);
+
+ axiosMock
+ .onPatch(defaultPropsData.updateEmailPath)
+ .replyOnce(HTTP_STATUS_OK, { status: FAILURE_RESPONSE, message: serverErrorMessage });
+
+ submitForm();
+ await axios.waitForAll();
+ });
+
+ it('shows the error message and disables the submit button', () => {
+ expect(wrapper.text()).toContain(serverErrorMessage);
+ expect(findSubmitButton().props('disabled')).toBe(true);
+ });
+
+ describe('when entering a valid email address', () => {
+ beforeEach(async () => {
+ await enterEmail('');
+ enterEmail(validEmailAddress);
+ });
+
+ it('hides the error message and enables the submit button again', () => {
+ expect(wrapper.text()).not.toContain(serverErrorMessage);
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ });
+ });
+ });
+
+ describe('when the server responds unexpectedly', () => {
+ it.each`
+ scenario | statusCode
+ ${'the response is undefined'} | ${HTTP_STATUS_OK}
+ ${'the request failed'} | ${HTTP_STATUS_NOT_FOUND}
+ `(`shows an alert when $scenario`, async ({ statusCode }) => {
+ enterEmail(validEmailAddress);
+
+ axiosMock.onPatch(defaultPropsData.updateEmailPath).replyOnce(statusCode);
+
+ submitForm();
+
+ await axios.waitForAll();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: I18N_GENERIC_ERROR,
+ captureError: true,
+ error: expect.any(Error),
+ });
+ });
+ });
+ });
+
+ describe('when clicking the cancel link', () => {
+ beforeEach(() => {
+ findCancelLink().trigger('click');
+ });
+
+ it('emits a verifyToken event without an email address', () => {
+ expect(wrapper.emitted('verifyToken')[0]).toEqual([]);
+ });
+ });
+});
diff --git a/spec/frontend/set_status_modal/set_status_form_spec.js b/spec/frontend/set_status_modal/set_status_form_spec.js
index df740d4a431..e24561a9862 100644
--- a/spec/frontend/set_status_modal/set_status_form_spec.js
+++ b/spec/frontend/set_status_modal/set_status_form_spec.js
@@ -186,7 +186,7 @@ describe('SetStatusForm', () => {
it('emits `clear-status-after-click`', async () => {
await createComponent();
- await wrapper.findByTestId('thirtyMinutes').trigger('click');
+ await wrapper.findByTestId('listbox-item-thirtyMinutes').trigger('click');
expect(wrapper.emitted('clear-status-after-click')).toEqual([[thirtyMinutes]]);
});
diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
index 60267cf31be..9c79d564625 100644
--- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
@@ -164,7 +164,7 @@ describe('SetStatusModalWrapper', () => {
findAvailabilityCheckbox().vm.$emit('input', true);
// set the currentClearStatusAfter to 30 minutes
- await wrapper.find('[data-testid="thirtyMinutes"]').trigger('click');
+ await wrapper.find('[data-testid="listbox-item-thirtyMinutes"]').trigger('click');
findModal().vm.$emit('primary');
await nextTick();
diff --git a/spec/frontend/sidebar/components/confidential/confidentiality_dropdown_spec.js b/spec/frontend/sidebar/components/confidential/confidentiality_dropdown_spec.js
new file mode 100644
index 00000000000..571c2add626
--- /dev/null
+++ b/spec/frontend/sidebar/components/confidential/confidentiality_dropdown_spec.js
@@ -0,0 +1,62 @@
+import { GlCollapsibleListbox } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import ConfidentialityDropdown from '~/sidebar/components/confidential/confidentiality_dropdown.vue';
+
+describe('ConfidentialityDropdown component', () => {
+ let wrapper;
+
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findHiddenInput = () => wrapper.find('input');
+
+ function createComponent() {
+ wrapper = shallowMount(ConfidentialityDropdown, {
+ stubs: {
+ GlCollapsibleListbox,
+ },
+ });
+ }
+
+ describe('with no value selected', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('hidden input value is undefined', () => {
+ expect(findHiddenInput().attributes('value')).toBeUndefined();
+ });
+
+ it('renders default text', () => {
+ expect(findDropdown().props('toggleText')).toBe('Select confidentiality');
+ });
+ });
+
+ describe('when selecting a value', () => {
+ const optionToSelect = { text: 'Not confidential', value: 'false' };
+
+ beforeEach(() => {
+ createComponent();
+ findDropdown().vm.$emit('select', optionToSelect.value);
+ });
+
+ it('updates value of the hidden input', () => {
+ expect(findHiddenInput().attributes('value')).toBe(optionToSelect.value);
+ });
+ });
+
+ describe('when reset is triggered', () => {
+ beforeEach(() => {
+ createComponent();
+ findDropdown().vm.$emit('select', 'true');
+ });
+
+ it('clears dropdown selection', async () => {
+ expect(findDropdown().props('toggleText')).not.toBe('Select confidentiality');
+
+ findDropdown().vm.$emit('reset');
+ await nextTick();
+
+ expect(findDropdown().props('toggleText')).toBe('Select confidentiality');
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/incidents/escalation_status_spec.js b/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
index e356f02a36b..5ef56e30eb0 100644
--- a/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
+++ b/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
@@ -1,7 +1,5 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { nextTick } from 'vue';
-import waitForPromises from 'helpers/wait_for_promises';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
import EscalationStatus from '~/sidebar/components/incidents/escalation_status.vue';
import { STATUS_LABELS, STATUS_TRIGGERED, STATUS_ACKNOWLEDGED } from '~/sidebar/constants';
@@ -9,7 +7,7 @@ describe('EscalationStatus', () => {
let wrapper;
function createComponent(props) {
- wrapper = mountExtended(EscalationStatus, {
+ wrapper = mount(EscalationStatus, {
propsData: {
value: STATUS_TRIGGERED,
...props,
@@ -17,67 +15,43 @@ describe('EscalationStatus', () => {
});
}
- const findDropdownComponent = () => wrapper.findComponent(GlDropdown);
- const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findDropdownMenu = () => findDropdownComponent().find('.dropdown-menu');
- const toggleDropdown = async () => {
- await findDropdownComponent().findComponent('button').trigger('click');
- await waitForPromises();
- };
+ const findDropdownComponent = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findDropdownItem = (at) => wrapper.findAllComponents(GlListboxItem).at(at);
describe('status', () => {
it('shows the current status', () => {
createComponent({ value: STATUS_ACKNOWLEDGED });
- expect(findDropdownComponent().props('text')).toBe(STATUS_LABELS[STATUS_ACKNOWLEDGED]);
+ expect(findDropdownComponent().props('toggleText')).toBe(STATUS_LABELS[STATUS_ACKNOWLEDGED]);
});
it('shows the None option when status is null', () => {
createComponent({ value: null });
- expect(findDropdownComponent().props('text')).toBe('None');
+ expect(findDropdownComponent().props('toggleText')).toBe('None');
});
- });
+ it('renders headerText when it is provided', () => {
+ const headerText = 'some text';
+ createComponent({ headerText });
- describe('events', () => {
- it('selects an item', async () => {
- createComponent();
+ expect(findDropdownComponent().text()).toContain(headerText);
+ });
- await findDropdownItems().at(1).vm.$emit('click');
+ it('renders subtext when it is provided', () => {
+ const subText = 'some subtext';
+ const statusSubtexts = { [STATUS_ACKNOWLEDGED]: subText };
+ createComponent({ statusSubtexts });
- expect(wrapper.emitted().input[0][0]).toBe(STATUS_ACKNOWLEDGED);
+ expect(findDropdownItem(1).text()).toContain(subText);
});
});
- describe('close behavior', () => {
- it('allows the dropdown to be closed by default', async () => {
+ describe('events', () => {
+ it('selects an item', () => {
createComponent();
- // Open dropdown
- await toggleDropdown();
- jest.runOnlyPendingTimers();
- await nextTick();
-
- expect(findDropdownMenu().classes('show')).toBe(true);
+ findDropdownComponent().vm.$emit('select', STATUS_ACKNOWLEDGED);
- // Attempt to close dropdown
- await toggleDropdown();
-
- expect(findDropdownMenu().classes('show')).toBe(false);
- });
-
- it('preventDropdownClose prevents the dropdown from closing', async () => {
- createComponent({ preventDropdownClose: true });
- // Open dropdown
- await toggleDropdown();
- jest.runOnlyPendingTimers();
- await nextTick();
-
- expect(findDropdownMenu().classes('show')).toBe(true);
-
- // Attempt to close dropdown
- await toggleDropdown();
-
- expect(findDropdownMenu().classes('show')).toBe(true);
+ expect(wrapper.emitted().input[0][0]).toBe(STATUS_ACKNOWLEDGED);
});
});
});
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_button_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_button_spec.js
index 084ca5ed3fc..2ca19c0927a 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_button_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_button_spec.js
@@ -1,6 +1,7 @@
import { GlIcon, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DropdownButton from '~/sidebar/components/labels/labels_select_vue/dropdown_button.vue';
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js
index 7e53fcfe850..cd391765dde 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js
@@ -1,5 +1,6 @@
import { GlButton, GlFormInput, GlLink, GlLoadingIcon } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DropdownContentsCreateView from '~/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view.vue';
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_labels_view_spec.js
index 5c6358a94ab..392171390e1 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -6,6 +6,7 @@ import {
GlLink,
} from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import DropdownContentsLabelsView from '~/sidebar/components/labels/labels_select_vue/dropdown_contents_labels_view.vue';
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_spec.js
index d74cea2827c..fa04089a681 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DropdownContents from '~/sidebar/components/labels/labels_select_vue/dropdown_contents.vue';
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_title_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_title_spec.js
index 367f6007194..3603ef64a9b 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_title_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_title_spec.js
@@ -1,6 +1,7 @@
import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DropdownTitle from '~/sidebar/components/labels/labels_select_vue/dropdown_title.vue';
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_value_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_value_spec.js
index 70aafceb00c..656b5ef5cd8 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_value_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_value_spec.js
@@ -1,6 +1,7 @@
import { GlLabel } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import DropdownValue from '~/sidebar/components/labels/labels_select_vue/dropdown_value.vue';
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/labels_select_root_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/labels_select_root_spec.js
index 3add96f2c03..32ca7c24217 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/labels_select_root_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/labels_select_root_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { isInViewport } from '~/lib/utils/common_utils';
diff --git a/spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js b/spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js
index da79daebb93..e1c41fb8b46 100644
--- a/spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js
+++ b/spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js
@@ -1,6 +1,7 @@
import { GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
diff --git a/spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js b/spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js
new file mode 100644
index 00000000000..657fb52d62c
--- /dev/null
+++ b/spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js
@@ -0,0 +1,416 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlModal, GlAlert } from '@gitlab/ui';
+import setIssueTimeEstimateWithErrors from 'test_fixtures/graphql/issue_set_time_estimate_with_errors.json';
+import setIssueTimeEstimateWithoutErrors from 'test_fixtures/graphql/issue_set_time_estimate_without_errors.json';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import SetTimeEstimateForm from '~/sidebar/components/time_tracking/set_time_estimate_form.vue';
+import issueSetTimeEstimateMutation from '~/sidebar/queries/issue_set_time_estimate.mutation.graphql';
+
+const mockProjectFullPath = 'group/project';
+const mockMutationErrorMessage = setIssueTimeEstimateWithErrors.errors[0].message;
+const mockIssuableIid = '1';
+const mockMutationTimeEstimateInHumanReadableFormat = '1d 2h';
+const mockTimeTrackingData = {
+ timeEstimate: 3600,
+ humanTimeEstimate: '1h',
+};
+
+const resolvedMutationWithoutErrorsMock = jest
+ .fn()
+ .mockResolvedValue(setIssueTimeEstimateWithoutErrors);
+const resolvedMutationWithErrorsMock = jest.fn().mockResolvedValue(setIssueTimeEstimateWithErrors);
+
+const rejectedMutationMock = jest.fn().mockRejectedValue();
+const modalCloseMock = jest.fn();
+
+describe('Set Time Estimate Form', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findModalTitle = () => findModal().props('title');
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findDocsLink = () => wrapper.findByTestId('timetracking-docs-link');
+ const findSaveButton = () => findModal().props('actionPrimary');
+ const findSaveButtonLoadingState = () => findSaveButton().attributes.loading;
+ const findSaveButtonDisabledState = () => findSaveButton().attributes.disabled;
+ const findResetButton = () => findModal().props('actionSecondary');
+ const findResetButtonLoadingState = () => findResetButton().attributes.loading;
+ const findResetButtonDisabledState = () => findResetButton().attributes.disabled;
+ const findTimeEstiamteInput = () => wrapper.findByTestId('time-estimate');
+
+ const triggerSave = () => {
+ const mockEvent = { preventDefault: jest.fn() };
+ findModal().vm.$emit('primary', mockEvent);
+ };
+
+ const triggerReset = () => {
+ const mockEvent = { preventDefault: jest.fn() };
+ findModal().vm.$emit('secondary', mockEvent);
+ };
+
+ const mountComponent = async ({
+ timeTracking = mockTimeTrackingData,
+ data,
+ providedProps,
+ mutationResolverMock = resolvedMutationWithoutErrorsMock,
+ } = {}) => {
+ wrapper = shallowMountExtended(SetTimeEstimateForm, {
+ data() {
+ return {
+ ...data,
+ };
+ },
+ provide: {
+ issuableType: 'issue',
+ ...providedProps,
+ },
+ propsData: {
+ issuableIid: mockIssuableIid,
+ fullPath: mockProjectFullPath,
+ timeTracking,
+ },
+ apolloProvider: createMockApollo([[issueSetTimeEstimateMutation, mutationResolverMock]]),
+ });
+
+ wrapper.vm.$refs.modal.close = modalCloseMock;
+
+ findModal().vm.$emit('show');
+ await nextTick();
+ };
+
+ describe('modal title', () => {
+ it('is `Set time estimate` when the current estimate is 0', async () => {
+ await mountComponent({
+ timeTracking: { timeEstimate: 0, humanTimeEstimate: '0h' },
+ mutationResolverMock: resolvedMutationWithoutErrorsMock,
+ });
+
+ expect(findModalTitle()).toBe('Set time estimate');
+ });
+
+ it('is `Edit time estimate` when the current estimate is not 0', async () => {
+ await mountComponent();
+
+ expect(findModalTitle()).toBe('Edit time estimate');
+ });
+ });
+
+ describe('modal', () => {
+ it('shows the provided human time estimate from the timeTracking prop', async () => {
+ await mountComponent();
+
+ expect(findTimeEstiamteInput().attributes('value')).toBe(
+ mockTimeTrackingData.humanTimeEstimate,
+ );
+ });
+ });
+
+ describe('save button', () => {
+ it('is not loading by default', async () => {
+ await mountComponent();
+
+ expect(findSaveButtonLoadingState()).toBe(false);
+ });
+
+ it('is disabled and not loading when time estimate is empty', async () => {
+ await mountComponent({ data: { timeEstimate: '' } });
+
+ expect(findSaveButtonLoadingState()).toBe(false);
+ expect(findSaveButtonDisabledState()).toBe(true);
+ });
+
+ it('is enabled and not loading when time estimate is not empty', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ });
+
+ expect(findSaveButtonLoadingState()).toBe(false);
+ expect(findSaveButtonDisabledState()).toBe(false);
+ });
+
+    it('is disabled and loading when the save button is clicked', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ });
+
+ triggerSave();
+
+ await nextTick();
+
+ expect(findSaveButtonLoadingState()).toBe(true);
+ expect(findSaveButtonDisabledState()).toBe(true);
+ });
+
+    it('is disabled and loading when the reset button is clicked', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ });
+
+ triggerReset();
+
+ await nextTick();
+
+ expect(findSaveButtonLoadingState()).toBe(false);
+ expect(findSaveButtonDisabledState()).toBe(true);
+ });
+
+    it('is enabled and not loading when the save button is clicked and the mutation had errors', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: rejectedMutationMock,
+ });
+
+ triggerSave();
+
+ await waitForPromises();
+
+ expect(rejectedMutationMock).toHaveBeenCalledWith({
+ input: {
+ projectPath: mockProjectFullPath,
+ iid: mockIssuableIid,
+ timeEstimate: mockMutationTimeEstimateInHumanReadableFormat,
+ },
+ });
+ expect(findSaveButtonLoadingState()).toBe(false);
+ expect(findSaveButtonDisabledState()).toBe(false);
+ });
+
+    it('is enabled and not loading when the save button is clicked and the mutation returns errors', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: resolvedMutationWithErrorsMock,
+ });
+
+ triggerSave();
+
+ await waitForPromises();
+
+ expect(resolvedMutationWithErrorsMock).toHaveBeenCalledWith({
+ input: {
+ projectPath: mockProjectFullPath,
+ iid: mockIssuableIid,
+ timeEstimate: mockMutationTimeEstimateInHumanReadableFormat,
+ },
+ });
+ expect(findSaveButtonLoadingState()).toBe(false);
+ expect(findSaveButtonDisabledState()).toBe(false);
+ });
+
+ it('closes the modal after submission and the mutation did not return any error', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: resolvedMutationWithoutErrorsMock,
+ });
+
+ triggerSave();
+
+ await waitForPromises();
+
+ expect(resolvedMutationWithoutErrorsMock).toHaveBeenCalledWith({
+ input: {
+ projectPath: mockProjectFullPath,
+ iid: mockIssuableIid,
+ timeEstimate: mockMutationTimeEstimateInHumanReadableFormat,
+ },
+ });
+ expect(modalCloseMock).toHaveBeenCalled();
+ });
+ });
+
+ describe('reset button', () => {
+ it('is not visible when the current estimate is 0', async () => {
+ await mountComponent({
+ timeTracking: { timeEstimate: 0, humanTimeEstimate: '0h' },
+ mutationResolverMock: resolvedMutationWithoutErrorsMock,
+ });
+
+ expect(findResetButton()).toBe(null);
+ });
+
+ it('is enabled and not loading even if time estimate is empty', async () => {
+ await mountComponent({ data: { timeEstimate: '' } });
+
+ expect(findResetButtonLoadingState()).toBe(false);
+ expect(findResetButtonDisabledState()).toBe(false);
+ });
+
+ it('is enabled and not loading when time estimate is not empty', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ });
+
+ expect(findResetButtonLoadingState()).toBe(false);
+ expect(findResetButtonDisabledState()).toBe(false);
+ });
+
+    it('is disabled and loading when the reset button is clicked', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ });
+
+ triggerReset();
+
+ await nextTick();
+
+ expect(findResetButtonLoadingState()).toBe(true);
+ expect(findResetButtonDisabledState()).toBe(true);
+ });
+
+    it('is disabled and loading when the save button is clicked', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ });
+
+ triggerSave();
+
+ await nextTick();
+
+ expect(findResetButtonLoadingState()).toBe(false);
+ expect(findResetButtonDisabledState()).toBe(true);
+ });
+
+    it('is enabled and not loading when the reset button is clicked and the mutation had errors', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: rejectedMutationMock,
+ });
+
+ triggerReset();
+
+ await waitForPromises();
+
+ expect(rejectedMutationMock).toHaveBeenCalledWith({
+ input: {
+ projectPath: mockProjectFullPath,
+ iid: mockIssuableIid,
+ timeEstimate: '0',
+ },
+ });
+ expect(findSaveButtonLoadingState()).toBe(false);
+ expect(findSaveButtonDisabledState()).toBe(false);
+ });
+
+    it('is enabled and not loading when the reset button is clicked and the mutation returns errors', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: resolvedMutationWithErrorsMock,
+ });
+
+ triggerReset();
+
+ await waitForPromises();
+
+ expect(resolvedMutationWithErrorsMock).toHaveBeenCalledWith({
+ input: {
+ projectPath: mockProjectFullPath,
+ iid: mockIssuableIid,
+ timeEstimate: '0',
+ },
+ });
+ expect(findSaveButtonLoadingState()).toBe(false);
+ expect(findSaveButtonDisabledState()).toBe(false);
+ });
+
+ it('closes the modal after submission and the mutation did not return any error', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: resolvedMutationWithoutErrorsMock,
+ });
+
+ triggerReset();
+
+ await waitForPromises();
+ await nextTick();
+
+ expect(resolvedMutationWithoutErrorsMock).toHaveBeenCalledWith({
+ input: {
+ projectPath: mockProjectFullPath,
+ iid: mockIssuableIid,
+ timeEstimate: '0',
+ },
+ });
+ expect(modalCloseMock).toHaveBeenCalled();
+ });
+ });
+
+ describe('alert', () => {
+ it('is hidden by default', async () => {
+ await mountComponent();
+
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ describe('when saving a change', () => {
+ it('shows an error if the submission fails with a handled error', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: resolvedMutationWithErrorsMock,
+ });
+
+ triggerSave();
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(mockMutationErrorMessage);
+ });
+
+ it('shows an error if the submission fails with an unhandled error', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: rejectedMutationMock,
+ });
+
+ triggerSave();
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe('An error occurred while saving the time estimate.');
+ });
+ });
+
+ describe('when resetting the time estimate', () => {
+ it('shows an error if the submission fails with a handled error', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: resolvedMutationWithErrorsMock,
+ });
+
+ triggerReset();
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(mockMutationErrorMessage);
+ });
+
+ it('shows an error if the submission fails with an unhandled error', async () => {
+ await mountComponent({
+ data: { timeEstimate: mockMutationTimeEstimateInHumanReadableFormat },
+ mutationResolverMock: rejectedMutationMock,
+ });
+
+ triggerReset();
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe('An error occurred while saving the time estimate.');
+ });
+ });
+ });
+
+ describe('docs link message', () => {
+ it('is present', async () => {
+ await mountComponent();
+
+ expect(findDocsLink().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
index e23d24f9629..f43fb17ca37 100644
--- a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
@@ -291,6 +291,68 @@ describe('Issuable Time Tracker', () => {
},
);
});
+
+ describe('Set time estimate button', () => {
+ const findSetTimeEstimateButton = () => findByTestId('set-time-estimate-button');
+
+ it.each`
+ visibility | canSetTimeEstimate
+ ${'not visible'} | ${false}
+ ${'visible'} | ${true}
+ `(
+ 'is $visibility when canSetTimeEstimate is $canSetTimeEstimate',
+ async ({ canSetTimeEstimate }) => {
+ wrapper = mountComponent({
+ props: {
+ initialTimeTracking: {
+ timeEstimate: 0,
+ totalTimeSpent: 0,
+ humanTimeEstimate: '',
+ humanTotalTimeSpent: '',
+ },
+ canSetTimeEstimate,
+ },
+ });
+ await nextTick();
+
+ expect(findSetTimeEstimateButton().exists()).toBe(canSetTimeEstimate);
+ },
+ );
+
+ it('shows a tooltip with `Set estimate` when the current estimate is 0', async () => {
+ wrapper = mountComponent({
+ props: {
+ initialTimeTracking: {
+ timeEstimate: 0,
+ totalTimeSpent: 0,
+ humanTimeEstimate: '',
+ humanTotalTimeSpent: '',
+ },
+ canSetTimeEstimate: true,
+ },
+ });
+ await nextTick();
+
+ expect(findSetTimeEstimateButton().attributes('title')).toBe('Set estimate');
+ });
+
+ it('shows a tooltip with `Edit estimate` when the current estimate is not 0', async () => {
+ wrapper = mountComponent({
+ props: {
+ initialTimeTracking: {
+ timeEstimate: 60,
+ totalTimeSpent: 0,
+ humanTimeEstimate: '1m',
+ humanTotalTimeSpent: '',
+ },
+ canSetTimeEstimate: true,
+ },
+ });
+ await nextTick();
+
+ expect(findSetTimeEstimateButton().attributes('title')).toBe('Edit estimate');
+ });
+ });
});
describe('Event listeners', () => {
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index 05c1a6dd11d..e783927f87b 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -89,10 +89,12 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
</div>
<div
- class="js-vue-md-preview md-preview-holder gl-px-5 md"
+ class="js-vue-md-preview md-preview-holder gl-px-5"
style="display: none;"
>
- <div />
+ <div
+ class="md"
+ />
</div>
<!---->
diff --git a/spec/frontend/super_sidebar/components/context_header_spec.js b/spec/frontend/super_sidebar/components/context_header_spec.js
new file mode 100644
index 00000000000..943b659c997
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/context_header_spec.js
@@ -0,0 +1,50 @@
+import { GlAvatar } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContextHeader from '~/super_sidebar/components/context_header.vue';
+
+describe('ContextHeader component', () => {
+ let wrapper;
+
+ const context = {
+ id: 1,
+ title: 'Title',
+ avatar: '/path/to/avatar.png',
+ };
+
+ const findGlAvatar = () => wrapper.getComponent(GlAvatar);
+
+ const createWrapper = (props = {}) => {
+ wrapper = shallowMountExtended(ContextHeader, {
+ propsData: {
+ context,
+ expanded: false,
+ ...props,
+ },
+ });
+ };
+
+ describe('with an avatar', () => {
+ it('passes the correct props to GlAvatar', () => {
+ createWrapper();
+ const avatar = findGlAvatar();
+
+ expect(avatar.props('shape')).toBe('rect');
+ expect(avatar.props('entityName')).toBe(context.title);
+ expect(avatar.props('entityId')).toBe(context.id);
+ expect(avatar.props('src')).toBe(context.avatar);
+ });
+
+ it('renders the avatar with a custom shape', () => {
+ const customShape = 'circle';
+ createWrapper({
+ context: {
+ ...context,
+ avatar_shape: customShape,
+ },
+ });
+ const avatar = findGlAvatar();
+
+ expect(avatar.props('shape')).toBe(customShape);
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/context_switcher_spec.js b/spec/frontend/super_sidebar/components/context_switcher_spec.js
index 4317f451377..dd8f39e7cb7 100644
--- a/spec/frontend/super_sidebar/components/context_switcher_spec.js
+++ b/spec/frontend/super_sidebar/components/context_switcher_spec.js
@@ -15,7 +15,7 @@ import { trackContextAccess, formatContextSwitcherItems } from '~/super_sidebar/
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import waitForPromises from 'helpers/wait_for_promises';
import { stubComponent } from 'helpers/stub_component';
-import { searchUserProjectsAndGroupsResponseMock } from '../mock_data';
+import { contextSwitcherLinks, searchUserProjectsAndGroupsResponseMock } from '../mock_data';
jest.mock('~/super_sidebar/utils', () => ({
getStorageKeyFor: jest.requireActual('~/super_sidebar/utils').getStorageKeyFor,
@@ -26,9 +26,6 @@ jest.mock('~/super_sidebar/utils', () => ({
}));
const focusInputMock = jest.fn();
-const persistentLinks = [
- { title: 'Explore', link: '/explore', icon: 'compass', link_classes: 'persistent-link-class' },
-];
const username = 'root';
const projectsPath = 'projectsPath';
const groupsPath = 'groupsPath';
@@ -71,8 +68,10 @@ describe('ContextSwitcher component', () => {
wrapper = shallowMountExtended(ContextSwitcher, {
apolloProvider: mockApollo,
+ provide: {
+ contextSwitcherLinks,
+ },
propsData: {
- persistentLinks,
username,
projectsPath,
groupsPath,
@@ -107,14 +106,14 @@ describe('ContextSwitcher component', () => {
createWrapper();
});
- it('renders the persistent links', () => {
+ it('renders the context switcher links', () => {
const navItems = findNavItems();
const firstNavItem = navItems.at(0);
- expect(navItems.length).toBe(persistentLinks.length);
- expect(firstNavItem.props('item')).toBe(persistentLinks[0]);
+ expect(navItems.length).toBe(contextSwitcherLinks.length);
+ expect(firstNavItem.props('item')).toBe(contextSwitcherLinks[0]);
expect(firstNavItem.props('linkClasses')).toEqual({
- [persistentLinks[0].link_classes]: persistentLinks[0].link_classes,
+ [contextSwitcherLinks[0].link_classes]: contextSwitcherLinks[0].link_classes,
});
});
diff --git a/spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js b/spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js
index 7172b60d0fa..c20d3c2745f 100644
--- a/spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js
+++ b/spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js
@@ -1,4 +1,4 @@
-import { GlAvatar } from '@gitlab/ui';
+import { GlIcon } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ContextSwitcherToggle from '~/super_sidebar/components/context_switcher_toggle.vue';
@@ -11,7 +11,7 @@ describe('ContextSwitcherToggle component', () => {
avatar: '/path/to/avatar.png',
};
- const findGlAvatar = () => wrapper.getComponent(GlAvatar);
+ const findGlIcon = () => wrapper.getComponent(GlIcon);
const createWrapper = (props = {}) => {
wrapper = shallowMountExtended(ContextSwitcherToggle, {
@@ -23,28 +23,17 @@ describe('ContextSwitcherToggle component', () => {
});
};
- describe('with an avatar', () => {
- it('passes the correct props to GlAvatar', () => {
- createWrapper();
- const avatar = findGlAvatar();
+ it('renders "chevron-down" icon when not expanded', () => {
+ createWrapper();
- expect(avatar.props('shape')).toBe('rect');
- expect(avatar.props('entityName')).toBe(context.title);
- expect(avatar.props('entityId')).toBe(context.id);
- expect(avatar.props('src')).toBe(context.avatar);
- });
+ expect(findGlIcon().props('name')).toBe('chevron-down');
+ });
- it('renders the avatar with a custom shape', () => {
- const customShape = 'circle';
- createWrapper({
- context: {
- ...context,
- avatar_shape: customShape,
- },
- });
- const avatar = findGlAvatar();
-
- expect(avatar.props('shape')).toBe(customShape);
+ it('renders "chevron-up" icon when expanded', () => {
+ createWrapper({
+ expanded: true,
});
+
+ expect(findGlIcon().props('name')).toBe('chevron-up');
});
});
diff --git a/spec/frontend/super_sidebar/components/flyout_menu_spec.js b/spec/frontend/super_sidebar/components/flyout_menu_spec.js
new file mode 100644
index 00000000000..b894d29c875
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/flyout_menu_spec.js
@@ -0,0 +1,25 @@
+import { shallowMount } from '@vue/test-utils';
+import FlyoutMenu from '~/super_sidebar/components/flyout_menu.vue';
+
+jest.mock('@floating-ui/dom');
+
+describe('FlyoutMenu', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(FlyoutMenu, {
+ propsData: {
+ targetId: 'section-1',
+ items: [],
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the component', () => {
+ expect(wrapper.exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js b/spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js
new file mode 100644
index 00000000000..e63768a03c0
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js
@@ -0,0 +1,63 @@
+import { shallowMount } from '@vue/test-utils';
+import FrequentItems from '~/super_sidebar/components/global_search/components/frequent_items.vue';
+import FrequentGroups from '~/super_sidebar/components/global_search/components/frequent_groups.vue';
+
+describe('FrequentlyVisitedGroups', () => {
+ let wrapper;
+
+ const groupsPath = '/mock/group/path';
+
+ const createComponent = (options) => {
+ wrapper = shallowMount(FrequentGroups, {
+ provide: {
+ groupsPath,
+ },
+ ...options,
+ });
+ };
+
+ const findFrequentItems = () => wrapper.findComponent(FrequentItems);
+ const receivedAttrs = (wrapperInstance) => ({
+ // See https://github.com/vuejs/test-utils/issues/2151.
+ ...wrapperInstance.vm.$attrs,
+ });
+
+ it('passes group-specific props', () => {
+ createComponent();
+
+ expect(findFrequentItems().props()).toMatchObject({
+ emptyStateText: 'Groups you visit often will appear here.',
+ groupName: 'Frequently visited groups',
+ maxItems: 3,
+ storageKey: null,
+ viewAllItemsIcon: 'group',
+ viewAllItemsText: 'View all my groups',
+ viewAllItemsPath: groupsPath,
+ });
+ });
+
+ it('with a user, passes a storage key string to FrequentItems', () => {
+ gon.current_username = 'test_user';
+ createComponent();
+
+ expect(findFrequentItems().props('storageKey')).toBe('test_user/frequent-groups');
+ });
+
+ it('passes attrs to FrequentItems', () => {
+ createComponent({ attrs: { bordered: true, class: 'test-class' } });
+
+ expect(findFrequentItems().classes()).toContain('test-class');
+ expect(receivedAttrs(findFrequentItems())).toMatchObject({
+ bordered: true,
+ });
+ });
+
+ it('forwards listeners to FrequentItems', () => {
+ const spy = jest.fn();
+ createComponent({ listeners: { 'nothing-to-render': spy } });
+
+ findFrequentItems().vm.$emit('nothing-to-render');
+
+ expect(spy).toHaveBeenCalledTimes(1);
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js b/spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js
new file mode 100644
index 00000000000..aae1fc543f9
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js
@@ -0,0 +1,98 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import FrequentItem from '~/super_sidebar/components/global_search/components/frequent_item.vue';
+import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
+import { stubComponent } from 'helpers/stub_component';
+
+describe('FrequentlyVisitedItem', () => {
+ let wrapper;
+
+ const mockItem = {
+ id: 123,
+ title: 'mockTitle',
+ subtitle: 'mockSubtitle',
+ avatar: '/mock/avatar.png',
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(FrequentItem, {
+ propsData: {
+ item: mockItem,
+ },
+ stubs: {
+ GlButton: stubComponent(GlButton, {
+ template: '<button type="button" v-on="$listeners"></button>',
+ }),
+ },
+ });
+ };
+
+ const findProjectAvatar = () => wrapper.findComponent(ProjectAvatar);
+ const findRemoveButton = () => wrapper.findByRole('button');
+ const findSubtitle = () => wrapper.findByTestId('subtitle');
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the project avatar with the expected props', () => {
+ expect(findProjectAvatar().props()).toMatchObject({
+ projectId: mockItem.id,
+ projectName: mockItem.title,
+ projectAvatarUrl: mockItem.avatar,
+ size: 24,
+ });
+ });
+
+ it('renders the title and subtitle', () => {
+ expect(wrapper.text()).toContain(mockItem.title);
+ expect(findSubtitle().text()).toContain(mockItem.subtitle);
+ });
+
+ it('does not render the subtitle if not given', async () => {
+ await wrapper.setProps({ item: { ...mockItem, subtitle: null } });
+ expect(findSubtitle().exists()).toBe(false);
+ });
+
+ describe('clicking the remove button', () => {
+ const bubbledClickSpy = jest.fn();
+ const clickSpy = jest.fn();
+
+ beforeEach(() => {
+ wrapper.element.addEventListener('click', bubbledClickSpy);
+ const button = findRemoveButton();
+ button.element.addEventListener('click', clickSpy);
+ button.trigger('click');
+ });
+
+ it('emits a remove event on clicking the remove button', () => {
+ expect(wrapper.emitted('remove')).toEqual([[mockItem]]);
+ });
+
+ it('stops the native event from bubbling and prevents its default behavior', () => {
+ expect(bubbledClickSpy).not.toHaveBeenCalled();
+ expect(clickSpy.mock.calls[0][0].defaultPrevented).toBe(true);
+ });
+ });
+
+ describe('pressing enter on the remove button', () => {
+ const bubbledKeydownSpy = jest.fn();
+ const keydownSpy = jest.fn();
+
+ beforeEach(() => {
+ wrapper.element.addEventListener('keydown', bubbledKeydownSpy);
+ const button = findRemoveButton();
+ button.element.addEventListener('keydown', keydownSpy);
+ button.trigger('keydown.enter');
+ });
+
+ it('emits a remove event on clicking the remove button', () => {
+ expect(wrapper.emitted('remove')).toEqual([[mockItem]]);
+ });
+
+ it('stops the native event from bubbling and prevents its default behavior', () => {
+ expect(bubbledKeydownSpy).not.toHaveBeenCalled();
+ expect(keydownSpy.mock.calls[0][0].defaultPrevented).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js b/spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js
new file mode 100644
index 00000000000..4700e9c7e10
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js
@@ -0,0 +1,159 @@
+import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem, GlIcon } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import GlobalSearchFrequentItems from '~/super_sidebar/components/global_search/components/frequent_items.vue';
+import FrequentItem from '~/super_sidebar/components/global_search/components/frequent_item.vue';
+import { getItemsFromLocalStorage, removeItemFromLocalStorage } from '~/super_sidebar/utils';
+import { cachedFrequentProjects } from 'jest/super_sidebar/mock_data';
+
+jest.mock('~/super_sidebar/utils', () => {
+ const original = jest.requireActual('~/super_sidebar/utils');
+
+ return {
+ ...original,
+ getItemsFromLocalStorage: jest.fn(),
+ removeItemFromLocalStorage: jest.fn(),
+ };
+});
+
+describe('FrequentlyVisitedItems', () => {
+ let wrapper;
+ const storageKey = 'mockStorageKey';
+ const mockStoredItems = JSON.parse(cachedFrequentProjects);
+ const mockProps = {
+ emptyStateText: 'mock empty state text',
+ groupName: 'mock group name',
+ maxItems: 42,
+ storageKey,
+ viewAllItemsText: 'View all items',
+ viewAllItemsIcon: 'question-o',
+ viewAllItemsPath: '/mock/all_items',
+ };
+
+ const createComponent = (props) => {
+ wrapper = shallowMountExtended(GlobalSearchFrequentItems, {
+ propsData: {
+ ...mockProps,
+ ...props,
+ },
+ stubs: {
+ GlDisclosureDropdownGroup,
+ },
+ });
+ };
+
+ const findItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
+ const findItemRenderer = (root) => root.findComponent(FrequentItem);
+
+ const setStoredItems = (items) => {
+ getItemsFromLocalStorage.mockReturnValue(items);
+ };
+
+ beforeEach(() => {
+ setStoredItems(mockStoredItems);
+ });
+
+ describe('without a storage key', () => {
+ beforeEach(() => {
+ createComponent({ storageKey: null });
+ });
+
+ it('does not render anything', () => {
+ expect(wrapper.html()).toBe('');
+ });
+
+ it('emits a nothing-to-render event', () => {
+ expect(wrapper.emitted('nothing-to-render')).toEqual([[]]);
+ });
+ });
+
+ describe('with a storageKey', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('common behavior', () => {
+ it('calls getItemsFromLocalStorage', () => {
+ expect(getItemsFromLocalStorage).toHaveBeenCalledWith({
+ storageKey,
+ maxItems: mockProps.maxItems,
+ });
+ });
+
+ it('renders the group name', () => {
+ expect(wrapper.text()).toContain(mockProps.groupName);
+ });
+
+ it('renders the view all items link', () => {
+ const lastItem = findItems().at(-1);
+ expect(lastItem.props('item')).toMatchObject({
+ text: mockProps.viewAllItemsText,
+ href: mockProps.viewAllItemsPath,
+ });
+
+ const icon = lastItem.findComponent(GlIcon);
+ expect(icon.props('name')).toBe(mockProps.viewAllItemsIcon);
+ });
+ });
+
+ describe('with stored items', () => {
+ it('renders the items', () => {
+ const items = findItems();
+
+ mockStoredItems.forEach((storedItem, index) => {
+ const dropdownItem = items.at(index);
+
+ // Check GlDisclosureDropdownItem's item has the right structure
+ expect(dropdownItem.props('item')).toMatchObject({
+ text: storedItem.name,
+ href: storedItem.webUrl,
+ });
+
+ // Check FrequentItem's item has the right structure
+ expect(findItemRenderer(dropdownItem).props('item')).toMatchObject({
+ id: storedItem.id,
+ title: storedItem.name,
+ subtitle: expect.any(String),
+ avatar: storedItem.avatarUrl,
+ });
+ });
+ });
+
+ it('does not render the empty state text', () => {
+ expect(wrapper.text()).not.toContain('mock empty state text');
+ });
+
+ describe('removing an item', () => {
+ let itemToRemove;
+
+ beforeEach(() => {
+ const itemRenderer = findItemRenderer(findItems().at(0));
+ itemToRemove = itemRenderer.props('item');
+ itemRenderer.vm.$emit('remove', itemToRemove);
+ });
+
+ it('calls removeItemFromLocalStorage when an item emits a remove event', () => {
+ expect(removeItemFromLocalStorage).toHaveBeenCalledWith({
+ storageKey,
+ item: itemToRemove,
+ });
+ });
+
+ it('no longer renders that item', () => {
+ const renderedItemTexts = findItems().wrappers.map((item) => item.props('item').text);
+ expect(renderedItemTexts).not.toContain(itemToRemove.text);
+ });
+ });
+ });
+ });
+
+ describe('with no stored items', () => {
+ beforeEach(() => {
+ setStoredItems([]);
+ createComponent();
+ });
+
+ it('renders the empty state text', () => {
+ expect(wrapper.text()).toContain(mockProps.emptyStateText);
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js b/spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js
new file mode 100644
index 00000000000..7554c123574
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js
@@ -0,0 +1,63 @@
+import { shallowMount } from '@vue/test-utils';
+import FrequentItems from '~/super_sidebar/components/global_search/components/frequent_items.vue';
+import FrequentProjects from '~/super_sidebar/components/global_search/components/frequent_projects.vue';
+
+describe('FrequentlyVisitedProjects', () => {
+ let wrapper;
+
+ const projectsPath = '/mock/project/path';
+
+ const createComponent = (options) => {
+ wrapper = shallowMount(FrequentProjects, {
+ provide: {
+ projectsPath,
+ },
+ ...options,
+ });
+ };
+
+ const findFrequentItems = () => wrapper.findComponent(FrequentItems);
+ const receivedAttrs = (wrapperInstance) => ({
+ // See https://github.com/vuejs/test-utils/issues/2151.
+ ...wrapperInstance.vm.$attrs,
+ });
+
+ it('passes project-specific props', () => {
+ createComponent();
+
+ expect(findFrequentItems().props()).toMatchObject({
+ emptyStateText: 'Projects you visit often will appear here.',
+ groupName: 'Frequently visited projects',
+ maxItems: 5,
+ storageKey: null,
+ viewAllItemsIcon: 'project',
+ viewAllItemsText: 'View all my projects',
+ viewAllItemsPath: projectsPath,
+ });
+ });
+
+ it('with a user, passes a storage key string to FrequentItems', () => {
+ gon.current_username = 'test_user';
+ createComponent();
+
+ expect(findFrequentItems().props('storageKey')).toBe('test_user/frequent-projects');
+ });
+
+ it('passes attrs to FrequentItems', () => {
+ createComponent({ attrs: { bordered: true, class: 'test-class' } });
+
+ expect(findFrequentItems().classes()).toContain('test-class');
+ expect(receivedAttrs(findFrequentItems())).toMatchObject({
+ bordered: true,
+ });
+ });
+
+ it('forwards listeners to FrequentItems', () => {
+ const spy = jest.fn();
+ createComponent({ listeners: { 'nothing-to-render': spy } });
+
+ findFrequentItems().vm.$emit('nothing-to-render');
+
+ expect(spy).toHaveBeenCalledTimes(1);
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_autocomplete_items_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_autocomplete_items_spec.js
index aac321bd8e0..5af9b0372f7 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/global_search_autocomplete_items_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_autocomplete_items_spec.js
@@ -7,6 +7,7 @@ import {
} from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import GlobalSearchAutocompleteItems from '~/super_sidebar/components/global_search/components/global_search_autocomplete_items.vue';
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_default_issuables_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_default_issuables_spec.js
new file mode 100644
index 00000000000..8130cceb61d
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_default_issuables_spec.js
@@ -0,0 +1,140 @@
+import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem } from '@gitlab/ui';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
+import GlobalSearchDefaultIssuables from '~/super_sidebar/components/global_search/components/global_search_default_issuables.vue';
+import {
+ MOCK_SEARCH_CONTEXT,
+ MOCK_PROJECT_SEARCH_CONTEXT,
+ MOCK_GROUP_SEARCH_CONTEXT,
+ MOCK_DEFAULT_SEARCH_OPTIONS,
+} from '../mock_data';
+
+Vue.use(Vuex);
+
+describe('GlobalSearchDefaultPlaces', () => {
+ let wrapper;
+
+ const createComponent = ({
+ searchContext = null,
+ mockDefaultSearchOptions = [],
+ ...options
+ } = {}) => {
+ const store = new Vuex.Store({
+ state: {
+ searchContext,
+ },
+ getters: {
+ defaultSearchOptions: () => mockDefaultSearchOptions,
+ },
+ });
+
+ wrapper = shallowMount(GlobalSearchDefaultIssuables, {
+ store,
+ stubs: {
+ GlDisclosureDropdownGroup,
+ },
+ ...options,
+ });
+ };
+
+ const findGroup = () => wrapper.findComponent(GlDisclosureDropdownGroup);
+ const findItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
+
+ describe('given no contextSwitcherLinks', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders nothing', () => {
+ expect(wrapper.html()).toBe('');
+ });
+
+ it('emits a nothing-to-render event', () => {
+ expect(wrapper.emitted('nothing-to-render')).toEqual([[]]);
+ });
+ });
+
+ describe('given some contextSwitcherLinks', () => {
+ beforeEach(() => {
+ createComponent({
+ searchContext: MOCK_SEARCH_CONTEXT,
+ mockDefaultSearchOptions: MOCK_DEFAULT_SEARCH_OPTIONS,
+ attrs: {
+ bordered: true,
+ class: 'test-class',
+ },
+ });
+ });
+
+ it('renders a disclosure dropdown group', () => {
+ expect(findGroup().exists()).toBe(true);
+ });
+
+ it('renders the expected header', () => {
+ expect(wrapper.text()).toContain('All GitLab');
+ });
+
+ it('passes attrs down', () => {
+ const group = findGroup();
+ expect(group.props('bordered')).toBe(true);
+ expect(group.classes()).toContain('test-class');
+ });
+
+ it('renders the links', () => {
+ const itemProps = findItems().wrappers.map((item) => item.props('item'));
+
+ expect(itemProps).toEqual([
+ {
+ text: 'Issues assigned to me',
+ href: '/dashboard/issues/?assignee_username=anyone',
+ },
+ {
+ text: "Issues I've created",
+ href: '/dashboard/issues/?author_username=anyone',
+ },
+ {
+ text: 'Merge requests assigned to me',
+ href: '/dashboard/merge_requests/?assignee_username=anyone',
+ },
+ {
+ text: "Merge requests that I'm a reviewer",
+ href: '/dashboard/merge_requests/?reviewer_username=anyone',
+ },
+ {
+ text: "Merge requests I've created",
+ href: '/dashboard/merge_requests/?author_username=anyone',
+ },
+ ]);
+ });
+ });
+
+ describe('group name', () => {
+ describe('in a project context', () => {
+ beforeEach(() => {
+ createComponent({
+ searchContext: MOCK_PROJECT_SEARCH_CONTEXT,
+ mockDefaultSearchOptions: MOCK_DEFAULT_SEARCH_OPTIONS,
+ });
+ });
+
+ it('renders the expected header', () => {
+ expect(wrapper.text()).toContain('MockProject');
+ });
+ });
+
+ describe('in a group context', () => {
+ beforeEach(() => {
+ createComponent({
+ searchContext: MOCK_GROUP_SEARCH_CONTEXT,
+ mockDefaultSearchOptions: MOCK_DEFAULT_SEARCH_OPTIONS,
+ });
+ });
+
+ it('renders the expected header', () => {
+ expect(wrapper.text()).toContain('MockGroup');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_default_items_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_default_items_spec.js
index 52e9aa52c14..d0d812c10ed 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/global_search_default_items_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_default_items_spec.js
@@ -1,75 +1,76 @@
-import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem } from '@gitlab/ui';
-import Vue from 'vue';
-import Vuex from 'vuex';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import GlobalSearchDefaultItems from '~/super_sidebar/components/global_search/components/global_search_default_items.vue';
-import { MOCK_SEARCH_CONTEXT, MOCK_DEFAULT_SEARCH_OPTIONS } from '../mock_data';
+import { shallowMount } from '@vue/test-utils';
-Vue.use(Vuex);
+import GlobalSearchDefaultItems from '~/super_sidebar/components/global_search/components/global_search_default_items.vue';
+import GlobalSearchDefaultPlaces from '~/super_sidebar/components/global_search/components/global_search_default_places.vue';
+import FrequentProjects from '~/super_sidebar/components/global_search/components/frequent_projects.vue';
+import FrequentGroups from '~/super_sidebar/components/global_search/components/frequent_groups.vue';
+import GlobalSearchDefaultIssuables from '~/super_sidebar/components/global_search/components/global_search_default_issuables.vue';
describe('GlobalSearchDefaultItems', () => {
let wrapper;
- const createComponent = (initialState, props) => {
- const store = new Vuex.Store({
- state: {
- searchContext: MOCK_SEARCH_CONTEXT,
- ...initialState,
- },
- getters: {
- defaultSearchOptions: () => MOCK_DEFAULT_SEARCH_OPTIONS,
- },
- });
+ const createComponent = () => {
+ wrapper = shallowMount(GlobalSearchDefaultItems);
+ };
- wrapper = shallowMountExtended(GlobalSearchDefaultItems, {
- store,
- propsData: {
- ...props,
- },
- stubs: {
- GlDisclosureDropdownGroup,
- },
+ const findPlaces = () => wrapper.findComponent(GlobalSearchDefaultPlaces);
+ const findProjects = () => wrapper.findComponent(FrequentProjects);
+ const findGroups = () => wrapper.findComponent(FrequentGroups);
+ const findIssuables = () => wrapper.findComponent(GlobalSearchDefaultIssuables);
+ const receivedAttrs = (wrapperInstance) => ({
+ // See https://github.com/vuejs/test-utils/issues/2151.
+ ...wrapperInstance.vm.$attrs,
+ });
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('all child components can render', () => {
+ it('renders the components', () => {
+ expect(findPlaces().exists()).toBe(true);
+ expect(findProjects().exists()).toBe(true);
+ expect(findGroups().exists()).toBe(true);
+ expect(findIssuables().exists()).toBe(true);
});
- };
- const findItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
- const findItemsData = () => findItems().wrappers.map((w) => w.props('item'));
+ it('sets the expected props on first component', () => {
+ const places = findPlaces();
+ expect(receivedAttrs(places)).toEqual({});
+ expect(places.classes()).toEqual([]);
+ });
- describe('template', () => {
- describe('Dropdown items', () => {
- beforeEach(() => {
- createComponent();
+ it('sets the expected props on the second component onwards', () => {
+ const components = [findProjects(), findGroups(), findIssuables()];
+ components.forEach((component) => {
+ expect(receivedAttrs(component)).toEqual({ bordered: true });
+ expect(component.classes()).toEqual(['gl-mt-3']);
});
+ });
+ });
- it('renders item for each option in defaultSearchOptions', () => {
- expect(findItems()).toHaveLength(MOCK_DEFAULT_SEARCH_OPTIONS.length);
- });
+ describe('when child components emit nothing-to-render', () => {
+ beforeEach(() => {
+ // Emit from two elements to guard against naive index-based splicing
+ findPlaces().vm.$emit('nothing-to-render');
+ findIssuables().vm.$emit('nothing-to-render');
+ });
- it('provides the `item` prop to the `GlDisclosureDropdownItem` component', () => {
- expect(findItemsData()).toStrictEqual(MOCK_DEFAULT_SEARCH_OPTIONS);
- });
+ it('does not render the components', () => {
+ expect(findPlaces().exists()).toBe(false);
+ expect(findIssuables().exists()).toBe(false);
});
- describe.each`
- group | project | groupHeader
- ${null} | ${null} | ${'All GitLab'}
- ${{ name: 'Test Group' }} | ${null} | ${'Test Group'}
- ${{ name: 'Test Group' }} | ${{ name: 'Test Project' }} | ${'Test Project'}
- `('Group Header', ({ group, project, groupHeader }) => {
- describe(`when group is ${group?.name} and project is ${project?.name}`, () => {
- beforeEach(() => {
- createComponent({
- searchContext: {
- group,
- project,
- },
- });
- });
+ it('sets the expected props on first component', () => {
+ const projects = findProjects();
+ expect(receivedAttrs(projects)).toEqual({});
+ expect(projects.classes()).toEqual([]);
+ });
- it(`should render as ${groupHeader}`, () => {
- expect(wrapper.text()).toContain(groupHeader);
- });
- });
+ it('sets the expected props on the second component', () => {
+ const groups = findGroups();
+ expect(receivedAttrs(groups)).toEqual({ bordered: true });
+ expect(groups.classes()).toEqual(['gl-mt-3']);
});
});
});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js
new file mode 100644
index 00000000000..c6126a348f5
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js
@@ -0,0 +1,78 @@
+import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import GlobalSearchDefaultPlaces from '~/super_sidebar/components/global_search/components/global_search_default_places.vue';
+import { contextSwitcherLinks } from '../../../mock_data';
+
+describe('GlobalSearchDefaultPlaces', () => {
+ let wrapper;
+
+ const createComponent = ({ links = [], attrs } = {}) => {
+ wrapper = shallowMount(GlobalSearchDefaultPlaces, {
+ provide: {
+ contextSwitcherLinks: links,
+ },
+ attrs,
+ stubs: {
+ GlDisclosureDropdownGroup,
+ },
+ });
+ };
+
+ const findGroup = () => wrapper.findComponent(GlDisclosureDropdownGroup);
+ const findItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
+
+ describe('given no contextSwitcherLinks', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders nothing', () => {
+ expect(wrapper.html()).toBe('');
+ });
+
+ it('emits a nothing-to-render event', () => {
+ expect(wrapper.emitted('nothing-to-render')).toEqual([[]]);
+ });
+ });
+
+ describe('given some contextSwitcherLinks', () => {
+ beforeEach(() => {
+ createComponent({
+ links: contextSwitcherLinks,
+ attrs: {
+ bordered: true,
+ class: 'test-class',
+ },
+ });
+ });
+
+ it('renders a disclosure dropdown group', () => {
+ expect(findGroup().exists()).toBe(true);
+ });
+
+ it('renders the expected header', () => {
+ expect(wrapper.text()).toContain('Places');
+ });
+
+ it('passes attrs down', () => {
+ const group = findGroup();
+ expect(group.props('bordered')).toBe(true);
+ expect(group.classes()).toContain('test-class');
+ });
+
+ it('renders the links', () => {
+ const itemProps = findItems().wrappers.map((item) => item.props('item'));
+
+ expect(itemProps).toEqual([
+ {
+ text: 'Explore',
+ href: '/explore',
+ },
+ {
+ text: 'Admin area',
+ href: '/admin',
+ },
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_scoped_items_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_scoped_items_spec.js
index 4976f3be4cd..164eea991e5 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/global_search_scoped_items_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_scoped_items_spec.js
@@ -1,6 +1,7 @@
import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem, GlToken, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { trimText } from 'helpers/text_helper';
import GlobalSearchScopedItems from '~/super_sidebar/components/global_search/components/global_search_scoped_items.vue';
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
index 55108e116bd..f9a6690a391 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
@@ -1,5 +1,6 @@
import { GlModal, GlSearchBoxByType, GlToken, GlIcon } from '@gitlab/ui';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { s__, sprintf } from '~/locale';
@@ -64,12 +65,13 @@ describe('GlobalSearchModal', () => {
scopedSearchOptions: () => MOCK_SCOPED_SEARCH_OPTIONS,
};
- const createComponent = (
+ const createComponent = ({
initialState = deafaultMockState,
mockGetters = defaultMockGetters,
stubs,
glFeatures = { commandPalette: false },
- ) => {
+ ...mountOptions
+ } = {}) => {
const store = new Vuex.Store({
state: {
...deafaultMockState,
@@ -88,6 +90,7 @@ describe('GlobalSearchModal', () => {
store,
stubs,
provide: { glFeatures },
+ ...mountOptions,
});
};
@@ -148,7 +151,7 @@ describe('GlobalSearchModal', () => {
describe(`when search is ${search}`, () => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
- createComponent({ search }, {});
+ createComponent({ initialState: { search }, mockGetters: {} });
findGlobalSearchInput().vm.$emit('click');
});
@@ -180,15 +183,15 @@ describe('GlobalSearchModal', () => {
describe(`search is "${search}" and loading is ${loading}`, () => {
beforeEach(() => {
window.gon.current_username = username;
- createComponent(
- {
+ createComponent({
+ initialState: {
search,
loading,
},
- {
+ mockGetters: {
searchOptions: () => searchOptions,
},
- );
+ });
});
it(`sets description to ${expectedDesc}`, () => {
@@ -208,7 +211,7 @@ describe('GlobalSearchModal', () => {
`('token', ({ search, hasToken }) => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
- createComponent({ search });
+ createComponent({ initialState: { search } });
findGlobalSearchInput().vm.$emit('click');
});
@@ -220,12 +223,12 @@ describe('GlobalSearchModal', () => {
describe.each(MOCK_SCOPED_SEARCH_OPTIONS)('token content', (searchOption) => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
- createComponent(
- { search: MOCK_SEARCH },
- {
+ createComponent({
+ initialState: { search: MOCK_SEARCH },
+ mockGetters: {
searchOptions: () => [searchOption],
},
- );
+ });
findGlobalSearchInput().vm.$emit('click');
});
@@ -247,12 +250,12 @@ describe('GlobalSearchModal', () => {
`('token', ({ searchOptions, iconName }) => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
- createComponent(
- { search: MOCK_SEARCH },
- {
+ createComponent({
+ initialState: { search: MOCK_SEARCH },
+ mockGetters: {
searchOptions: () => searchOptions,
},
- );
+ });
findGlobalSearchInput().vm.$emit('click');
});
@@ -287,8 +290,11 @@ describe('GlobalSearchModal', () => {
'when FF `command_palette` is enabled and search handle is %s',
(handle) => {
beforeEach(() => {
- createComponent({ search: handle }, undefined, undefined, {
- commandPalette: true,
+ createComponent({
+ initialState: { search: handle },
+ glFeatures: {
+ commandPalette: true,
+ },
});
});
@@ -358,12 +364,18 @@ describe('GlobalSearchModal', () => {
describe('Submitting a search', () => {
const submitSearch = () =>
- findGlobalSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
+ findGlobalSearchInput().vm.$emit(
+ 'keydown',
+ new KeyboardEvent('keydown', { key: ENTER_KEY }),
+ );
describe('in command mode', () => {
beforeEach(() => {
- createComponent({ search: '>' }, undefined, undefined, {
- commandPalette: true,
+ createComponent({
+ initialState: { search: '>' },
+ glFeatures: {
+ commandPalette: true,
+ },
});
submitSearch();
});
@@ -375,7 +387,7 @@ describe('GlobalSearchModal', () => {
describe('in search mode', () => {
it('will NOT submit a search with less than min characters', () => {
- createComponent({ search: 'x' });
+ createComponent({ initialState: { search: 'x' } });
submitSearch();
expect(visitUrl).not.toHaveBeenCalledWith(MOCK_SEARCH_QUERY);
});
@@ -391,7 +403,7 @@ describe('GlobalSearchModal', () => {
describe('Modal events', () => {
beforeEach(() => {
- createComponent({ search: 'searchQuery' });
+ createComponent({ initialState: { search: 'searchQuery' } });
});
it('should emit `shown` event when modal shown`', () => {
@@ -406,4 +418,101 @@ describe('GlobalSearchModal', () => {
});
});
});
+
+ describe('Navigating results', () => {
+ const findSearchInput = () => wrapper.findByRole('searchbox');
+ const triggerKeydownEvent = (target, code) => {
+ const event = new KeyboardEvent('keydown', { bubbles: true, cancelable: true, code });
+ target.dispatchEvent(event);
+ return event;
+ };
+
+ beforeEach(() => {
+ createComponent({
+ stubs: {
+ GlSearchBoxByType: {
+ inheritAttrs: false,
+ template: '<div><input v-bind="$attrs" v-on="$listeners"></div>',
+ },
+ GlobalSearchDefaultItems: {
+ template: `
+ <ul>
+ <li
+ v-for="n in 5"
+ class="gl-new-dropdown-item"
+ tabindex="0"
+ :data-testid="'test-result-' + n"
+ >Result {{ n }}</li>
+ </ul>`,
+ },
+ },
+ attachTo: document.body,
+ });
+ });
+
+ describe('when the search input has focus', () => {
+ beforeEach(() => {
+ findSearchInput().element.focus();
+ });
+
+ it('Home key keeps focus in input', () => {
+ const event = triggerKeydownEvent(findSearchInput().element, 'Home');
+ expect(document.activeElement).toBe(findSearchInput().element);
+ expect(event.defaultPrevented).toBe(false);
+ });
+
+ it('End key keeps focus on input', () => {
+ const event = triggerKeydownEvent(findSearchInput().element, 'End');
+ findSearchInput().trigger('keydown', { code: 'End' });
+ expect(document.activeElement).toBe(findSearchInput().element);
+ expect(event.defaultPrevented).toBe(false);
+ });
+
+ it('ArrowUp keeps focus on input', () => {
+ const event = triggerKeydownEvent(findSearchInput().element, 'ArrowUp');
+ expect(document.activeElement).toBe(findSearchInput().element);
+ expect(event.defaultPrevented).toBe(false);
+ });
+
+ it('ArrowDown focuses the first item', () => {
+ const event = triggerKeydownEvent(findSearchInput().element, 'ArrowDown');
+ expect(document.activeElement).toBe(wrapper.findByTestId('test-result-1').element);
+ expect(event.defaultPrevented).toBe(true);
+ });
+ });
+
+ describe('when search result item has focus', () => {
+ beforeEach(() => {
+ wrapper.findByTestId('test-result-2').element.focus();
+ });
+
+ it('Home key focuses first item', () => {
+ const event = triggerKeydownEvent(document.activeElement, 'Home');
+ expect(document.activeElement).toBe(wrapper.findByTestId('test-result-1').element);
+ expect(event.defaultPrevented).toBe(true);
+ });
+
+ it('End key focuses last item', () => {
+ const event = triggerKeydownEvent(document.activeElement, 'End');
+ expect(document.activeElement).toBe(wrapper.findByTestId('test-result-5').element);
+ expect(event.defaultPrevented).toBe(true);
+ });
+
+ it('ArrowUp focuses previous item if any, else input', () => {
+ let event = triggerKeydownEvent(document.activeElement, 'ArrowUp');
+ expect(document.activeElement).toBe(wrapper.findByTestId('test-result-1').element);
+ expect(event.defaultPrevented).toBe(true);
+
+ event = triggerKeydownEvent(document.activeElement, 'ArrowUp');
+ expect(document.activeElement).toBe(findSearchInput().element);
+ expect(event.defaultPrevented).toBe(true);
+ });
+
+ it('ArrowDown focuses next item', () => {
+ const event = triggerKeydownEvent(document.activeElement, 'ArrowDown');
+ expect(document.activeElement).toBe(wrapper.findByTestId('test-result-3').element);
+ expect(event.defaultPrevented).toBe(true);
+ });
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/global_search/mock_data.js b/spec/frontend/super_sidebar/components/global_search/mock_data.js
index ad7e7b0b30b..dfa8b458844 100644
--- a/spec/frontend/super_sidebar/components/global_search/mock_data.js
+++ b/spec/frontend/super_sidebar/components/global_search/mock_data.js
@@ -62,6 +62,24 @@ export const MOCK_SEARCH_CONTEXT = {
group_metadata: {},
};
+export const MOCK_GROUP_SEARCH_CONTEXT = {
+ ...MOCK_SEARCH_CONTEXT,
+ group: MOCK_GROUP,
+ group_metadata: {
+ issues_path: `${MOCK_GROUP.path}/issues`,
+ mr_path: `${MOCK_GROUP.path}/merge_requests`,
+ },
+};
+
+export const MOCK_PROJECT_SEARCH_CONTEXT = {
+ ...MOCK_GROUP_SEARCH_CONTEXT,
+ project: MOCK_PROJECT,
+ project_metadata: {
+ issues_path: `${MOCK_PROJECT.path}/issues`,
+ mr_path: `${MOCK_PROJECT.path}/merge_requests`,
+ },
+};
+
export const MOCK_DEFAULT_SEARCH_OPTIONS = [
{
text: MSG_ISSUES_ASSIGNED_TO_ME,
diff --git a/spec/frontend/super_sidebar/components/global_search/store/getters_spec.js b/spec/frontend/super_sidebar/components/global_search/store/getters_spec.js
index 68583d04b31..de636d1feec 100644
--- a/spec/frontend/super_sidebar/components/global_search/store/getters_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/store/getters_spec.js
@@ -7,6 +7,8 @@ import {
MOCK_MR_PATH,
MOCK_AUTOCOMPLETE_PATH,
MOCK_SEARCH_CONTEXT,
+ MOCK_GROUP_SEARCH_CONTEXT,
+ MOCK_PROJECT_SEARCH_CONTEXT,
MOCK_DEFAULT_SEARCH_OPTIONS,
MOCK_SCOPED_SEARCH_OPTIONS,
MOCK_SCOPED_SEARCH_GROUP,
@@ -74,37 +76,47 @@ describe('Global Search Store Getters', () => {
});
describe.each`
- group | group_metadata | project | project_metadata | expectedPath
- ${null} | ${null} | ${null} | ${null} | ${MOCK_ISSUE_PATH}
- ${{ name: 'Test Group' }} | ${{ issues_path: 'group/path' }} | ${null} | ${null} | ${'group/path'}
- ${{ name: 'Test Group' }} | ${{ issues_path: 'group/path' }} | ${{ name: 'Test Project' }} | ${{ issues_path: 'project/path' }} | ${'project/path'}
- `('scopedIssuesPath', ({ group, group_metadata, project, project_metadata, expectedPath }) => {
- describe(`when group is ${group?.name} and project is ${project?.name}`, () => {
- beforeEach(() => {
- createState({
- searchContext: {
- group,
- group_metadata,
- project,
- project_metadata,
- },
+ group | group_metadata | project | project_metadata | user | expectedPath
+ ${null} | ${null} | ${null} | ${null} | ${'a_user'} | ${MOCK_ISSUE_PATH}
+ ${null} | ${null} | ${null} | ${null} | ${null} | ${false}
+ ${{ name: 'Test Group' }} | ${{ issues_path: 'group/path' }} | ${null} | ${null} | ${null} | ${'group/path'}
+ ${{ name: 'Test Group' }} | ${{ issues_path: 'group/path' }} | ${{ id: '123' }} | ${{ issues_path: 'project/path' }} | ${null} | ${'project/path'}
+ ${{ name: 'Test Group' }} | ${{ issues_path: 'group/path' }} | ${{ id: '123' }} | ${{}} | ${null} | ${false}
+ `(
+ 'scopedIssuesPath',
+ ({ group, group_metadata, project, project_metadata, user, expectedPath }) => {
+ describe(`when group is ${group?.name} and project is ${project?.name}`, () => {
+ beforeEach(() => {
+ window.gon.current_username = user;
+
+ createState({
+ searchContext: {
+ group,
+ group_metadata,
+ project,
+ project_metadata,
+ },
+ });
});
- });
- it(`should return ${expectedPath}`, () => {
- expect(getters.scopedIssuesPath(state)).toBe(expectedPath);
+ it(`should return ${expectedPath}`, () => {
+ expect(getters.scopedIssuesPath(state)).toBe(expectedPath);
+ });
});
- });
- });
+ },
+ );
describe.each`
- group | group_metadata | project | project_metadata | expectedPath
- ${null} | ${null} | ${null} | ${null} | ${MOCK_MR_PATH}
- ${{ name: 'Test Group' }} | ${{ mr_path: 'group/path' }} | ${null} | ${null} | ${'group/path'}
- ${{ name: 'Test Group' }} | ${{ mr_path: 'group/path' }} | ${{ name: 'Test Project' }} | ${{ mr_path: 'project/path' }} | ${'project/path'}
- `('scopedMRPath', ({ group, group_metadata, project, project_metadata, expectedPath }) => {
+ group | group_metadata | project | project_metadata | user | expectedPath
+ ${null} | ${null} | ${null} | ${null} | ${'a_user'} | ${MOCK_MR_PATH}
+ ${null} | ${null} | ${null} | ${null} | ${null} | ${false}
+ ${{ name: 'Test Group' }} | ${{ mr_path: 'group/path' }} | ${null} | ${null} | ${null} | ${'group/path'}
+ ${{ name: 'Test Group' }} | ${{ mr_path: 'group/path' }} | ${{ name: 'Test Project' }} | ${{ mr_path: 'project/path' }} | ${null} | ${'project/path'}
+ `('scopedMRPath', ({ group, group_metadata, project, project_metadata, user, expectedPath }) => {
describe(`when group is ${group?.name} and project is ${project?.name}`, () => {
beforeEach(() => {
+ window.gon.current_username = user;
+
createState({
searchContext: {
group,
@@ -227,27 +239,88 @@ describe('Global Search Store Getters', () => {
});
describe('defaultSearchOptions', () => {
- const mockGetters = {
- scopedIssuesPath: MOCK_ISSUE_PATH,
- scopedMRPath: MOCK_MR_PATH,
- };
+ let mockGetters;
beforeEach(() => {
createState();
- window.gon.current_username = MOCK_USERNAME;
+ mockGetters = {
+ scopedIssuesPath: MOCK_ISSUE_PATH,
+ scopedMRPath: MOCK_MR_PATH,
+ };
});
- it('returns the correct array', () => {
- expect(getters.defaultSearchOptions(state, mockGetters)).toStrictEqual(
- MOCK_DEFAULT_SEARCH_OPTIONS,
- );
+ describe('with a user', () => {
+ beforeEach(() => {
+ window.gon.current_username = MOCK_USERNAME;
+ });
+
+ it('returns the correct array', () => {
+ expect(getters.defaultSearchOptions(state, mockGetters)).toStrictEqual(
+ MOCK_DEFAULT_SEARCH_OPTIONS,
+ );
+ });
+
+ it('returns the correct array if issues path is false', () => {
+ mockGetters.scopedIssuesPath = undefined;
+ expect(getters.defaultSearchOptions(state, mockGetters)).toStrictEqual(
+ MOCK_DEFAULT_SEARCH_OPTIONS.slice(2, MOCK_DEFAULT_SEARCH_OPTIONS.length),
+ );
+ });
});
- it('returns the correct array if issues path is false', () => {
- mockGetters.scopedIssuesPath = undefined;
- expect(getters.defaultSearchOptions(state, mockGetters)).toStrictEqual(
- MOCK_DEFAULT_SEARCH_OPTIONS.slice(2, MOCK_DEFAULT_SEARCH_OPTIONS.length),
- );
+ describe('without a user', () => {
+ describe('with no project or group context', () => {
+ beforeEach(() => {
+ mockGetters = {
+ scopedIssuesPath: false,
+ scopedMRPath: false,
+ };
+ });
+
+ it('returns an empty array', () => {
+ expect(getters.defaultSearchOptions(state, mockGetters)).toEqual([]);
+ });
+ });
+
+ describe('with a group context', () => {
+ beforeEach(() => {
+ createState({
+ searchContext: MOCK_GROUP_SEARCH_CONTEXT,
+ });
+
+ mockGetters = {
+ scopedIssuesPath: state.searchContext.group_metadata.issues_path,
+ scopedMRPath: state.searchContext.group_metadata.mr_path,
+ };
+ });
+
+ it('returns recent issues/merge requests options', () => {
+ expect(getters.defaultSearchOptions(state, mockGetters)).toEqual([
+ { href: '/mock-group/issues', text: 'Recent issues' },
+ { href: '/mock-group/merge_requests', text: 'Recent merge requests' },
+ ]);
+ });
+ });
+
+ describe('with a project context', () => {
+ beforeEach(() => {
+ createState({
+ searchContext: MOCK_PROJECT_SEARCH_CONTEXT,
+ });
+
+ mockGetters = {
+ scopedIssuesPath: state.searchContext.project_metadata.issues_path,
+ scopedMRPath: state.searchContext.project_metadata.mr_path,
+ };
+ });
+
+ it('returns recent issues/merge requests options', () => {
+ expect(getters.defaultSearchOptions(state, mockGetters)).toEqual([
+ { href: '/mock-project/issues', text: 'Recent issues' },
+ { href: '/mock-project/merge_requests', text: 'Recent merge requests' },
+ ]);
+ });
+ });
});
});
diff --git a/spec/frontend/super_sidebar/components/menu_section_spec.js b/spec/frontend/super_sidebar/components/menu_section_spec.js
index 556e07a2e31..288e317d4c6 100644
--- a/spec/frontend/super_sidebar/components/menu_section_spec.js
+++ b/spec/frontend/super_sidebar/components/menu_section_spec.js
@@ -2,6 +2,7 @@ import { GlCollapse } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import MenuSection from '~/super_sidebar/components/menu_section.vue';
import NavItem from '~/super_sidebar/components/nav_item.vue';
+import FlyoutMenu from '~/super_sidebar/components/flyout_menu.vue';
import { stubComponent } from 'helpers/stub_component';
describe('MenuSection component', () => {
@@ -9,10 +10,11 @@ describe('MenuSection component', () => {
const findButton = () => wrapper.find('button');
const findCollapse = () => wrapper.getComponent(GlCollapse);
+ const findFlyout = () => wrapper.findComponent(FlyoutMenu);
const findNavItems = () => wrapper.findAllComponents(NavItem);
const createWrapper = (item, otherProps) => {
wrapper = shallowMountExtended(MenuSection, {
- propsData: { item, ...otherProps },
+ propsData: { item: { items: [], ...item }, ...otherProps },
stubs: {
GlCollapse: stubComponent(GlCollapse, {
props: ['visible'],
@@ -68,6 +70,59 @@ describe('MenuSection component', () => {
});
});
+ describe('flyout behavior', () => {
+ describe('when hasFlyout is false', () => {
+ it('is not rendered', () => {
+ createWrapper({ title: 'Asdf' }, { 'has-flyout': false });
+ expect(findFlyout().exists()).toBe(false);
+ });
+ });
+
+ describe('when hasFlyout is true', () => {
+ it('is rendered', () => {
+ createWrapper({ title: 'Asdf' }, { 'has-flyout': true });
+ expect(findFlyout().exists()).toBe(true);
+ });
+
+ describe('on mouse hover', () => {
+ describe('when section is expanded', () => {
+ it('is not shown', async () => {
+ createWrapper({ title: 'Asdf' }, { 'has-flyout': true, expanded: true });
+ await findButton().trigger('pointerover', { pointerType: 'mouse' });
+ expect(findFlyout().isVisible()).toBe(false);
+ });
+ });
+
+ describe('when section is not expanded', () => {
+ it('is shown', async () => {
+ createWrapper({ title: 'Asdf' }, { 'has-flyout': true, expanded: false });
+ await findButton().trigger('pointerover', { pointerType: 'mouse' });
+ expect(findFlyout().isVisible()).toBe(true);
+ });
+ });
+ });
+
+ describe('when section gets closed', () => {
+ beforeEach(async () => {
+ createWrapper({ title: 'Asdf' }, { expanded: true, 'has-flyout': true });
+ await findButton().trigger('click');
+ await findButton().trigger('pointerover', { pointerType: 'mouse' });
+ });
+
+ it('shows the flyout only after section title gets hovered out and in again', async () => {
+ expect(findCollapse().props('visible')).toBe(false);
+ expect(findFlyout().isVisible()).toBe(false);
+
+ await findButton().trigger('pointerleave');
+ await findButton().trigger('pointerover', { pointerType: 'mouse' });
+
+ expect(findCollapse().props('visible')).toBe(false);
+ expect(findFlyout().isVisible()).toBe(true);
+ });
+ });
+ });
+ });
+
describe('`separated` prop', () => {
describe('by default (false)', () => {
it('does not render a separator', () => {
diff --git a/spec/frontend/super_sidebar/components/nav_item_spec.js b/spec/frontend/super_sidebar/components/nav_item_spec.js
index 54ac4965ad8..f41f6954ed1 100644
--- a/spec/frontend/super_sidebar/components/nav_item_spec.js
+++ b/spec/frontend/super_sidebar/components/nav_item_spec.js
@@ -113,7 +113,7 @@ describe('NavItem component', () => {
createWrapper({ item: { title: 'Foo', to: { name: 'foo' } } });
expect(findNavItemRouterLink().findByTestId('active-indicator').classes()).toContain(
- 'gl-bg-transparent',
+ 'gl-opacity-0',
);
});
});
@@ -126,7 +126,7 @@ describe('NavItem component', () => {
});
expect(findNavItemRouterLink().findByTestId('active-indicator').classes()).toContain(
- 'gl-bg-blue-500',
+ 'gl-opacity-10',
);
});
});
@@ -138,7 +138,7 @@ describe('NavItem component', () => {
createWrapper({ item: { title: 'Foo', link: '/foo', is_active: false } });
expect(findNavItemLink().findByTestId('active-indicator').classes()).toContain(
- 'gl-bg-transparent',
+ 'gl-opacity-0',
);
});
});
@@ -148,7 +148,7 @@ describe('NavItem component', () => {
createWrapper({ item: { title: 'Foo', link: '/foo', is_active: true } });
expect(findNavItemLink().findByTestId('active-indicator').classes()).toContain(
- 'gl-bg-blue-500',
+ 'gl-opacity-10',
);
});
});
diff --git a/spec/frontend/super_sidebar/components/pinned_section_spec.js b/spec/frontend/super_sidebar/components/pinned_section_spec.js
index fd6e2b7343e..00cc7cf29c9 100644
--- a/spec/frontend/super_sidebar/components/pinned_section_spec.js
+++ b/spec/frontend/super_sidebar/components/pinned_section_spec.js
@@ -2,10 +2,12 @@ import { nextTick } from 'vue';
import Cookies from '~/lib/utils/cookies';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import PinnedSection from '~/super_sidebar/components/pinned_section.vue';
+import MenuSection from '~/super_sidebar/components/menu_section.vue';
import NavItem from '~/super_sidebar/components/nav_item.vue';
import { SIDEBAR_PINS_EXPANDED_COOKIE, SIDEBAR_COOKIE_EXPIRATION } from '~/super_sidebar/constants';
import { setCookie } from '~/lib/utils/common_utils';
+jest.mock('@floating-ui/dom');
jest.mock('~/lib/utils/common_utils', () => ({
getCookie: jest.requireActual('~/lib/utils/common_utils').getCookie,
setCookie: jest.fn(),
@@ -16,10 +18,11 @@ describe('PinnedSection component', () => {
const findToggle = () => wrapper.find('button');
- const createWrapper = () => {
+ const createWrapper = (props = {}) => {
wrapper = mountExtended(PinnedSection, {
propsData: {
items: [{ title: 'Pin 1', href: '/page1' }],
+ ...props,
},
});
};
@@ -72,4 +75,16 @@ describe('PinnedSection component', () => {
});
});
});
+
+ describe('hasFlyout prop', () => {
+ describe.each([true, false])(`when %s`, (hasFlyout) => {
+ beforeEach(() => {
+ createWrapper({ hasFlyout });
+ });
+
+ it(`passes ${hasFlyout} to the section's hasFlyout prop`, () => {
+ expect(wrapper.findComponent(MenuSection).props('hasFlyout')).toBe(hasFlyout);
+ });
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/sidebar_menu_spec.js b/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
index 21e5220edd9..5d9a35fbf70 100644
--- a/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
@@ -1,3 +1,4 @@
+import { GlBreakpointInstance } from '@gitlab/ui/dist/utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SidebarMenu from '~/super_sidebar/components/sidebar_menu.vue';
import PinnedSection from '~/super_sidebar/components/pinned_section.vue';
@@ -15,11 +16,16 @@ const menuItems = [
describe('Sidebar Menu', () => {
let wrapper;
+ let flyoutFlag = false;
const createWrapper = (extraProps = {}) => {
wrapper = shallowMountExtended(SidebarMenu, {
+ provide: {
+ glFeatures: { superSidebarFlyoutMenus: flyoutFlag },
+ },
propsData: {
items: sidebarData.current_menu_items,
+ isLoggedIn: sidebarData.is_logged_in,
pinnedItemIds: sidebarData.pinned_items,
panelType: sidebarData.panel_type,
updatePinsUrl: sidebarData.update_pins_url,
@@ -117,6 +123,65 @@ describe('Sidebar Menu', () => {
);
});
});
+
+ describe('flyout menus', () => {
+ describe('when feature is disabled', () => {
+ beforeEach(() => {
+ createWrapper({
+ items: menuItems,
+ });
+ });
+
+ it('does not add flyout menus to sections', () => {
+ expect(findNonStaticSectionItems().wrappers.map((w) => w.props('hasFlyout'))).toEqual([
+ false,
+ false,
+ ]);
+ });
+ });
+
+ describe('when feature is enabled', () => {
+ beforeEach(() => {
+ flyoutFlag = true;
+ });
+
+ describe('when screen width is smaller than "md" breakpoint', () => {
+ beforeEach(() => {
+ jest.spyOn(GlBreakpointInstance, 'windowWidth').mockImplementation(() => {
+ return 767;
+ });
+ createWrapper({
+ items: menuItems,
+ });
+ });
+
+ it('does not add flyout menus to sections', () => {
+ expect(findNonStaticSectionItems().wrappers.map((w) => w.props('hasFlyout'))).toEqual([
+ false,
+ false,
+ ]);
+ });
+ });
+
+ describe('when screen width is equal or larger than "md" breakpoint', () => {
+ beforeEach(() => {
+ jest.spyOn(GlBreakpointInstance, 'windowWidth').mockImplementation(() => {
+ return 768;
+ });
+ createWrapper({
+ items: menuItems,
+ });
+ });
+
+ it('adds flyout menus to sections', () => {
+ expect(findNonStaticSectionItems().wrappers.map((w) => w.props('hasFlyout'))).toEqual([
+ true,
+ true,
+ ]);
+ });
+ });
+ });
+ });
});
describe('Separators', () => {
diff --git a/spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js b/spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js
index abd9c1dc44d..94ef072a951 100644
--- a/spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js
+++ b/spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js
@@ -35,8 +35,10 @@ describe('SidebarPeek component', () => {
let wrapper;
let trackingSpy = null;
- const createComponent = () => {
- wrapper = mount(SidebarPeek);
+ const createComponent = (props = { isMouseOverSidebar: false }) => {
+ wrapper = mount(SidebarPeek, {
+ propsData: props,
+ });
};
const moveMouse = (clientX) => {
@@ -163,6 +165,17 @@ describe('SidebarPeek component', () => {
expect(lastNChangeEvents(2)).toEqual([STATE_OPEN, STATE_CLOSED]);
});
+ it('does not transition to will-close or closed when mouse is over sidebar child element', () => {
+ createComponent({ isMouseOverSidebar: true });
+ moveMouse(0);
+ jest.runOnlyPendingTimers();
+
+ moveMouse(X_SIDEBAR_EDGE);
+ moveMouse(X_AWAY_FROM_SIDEBAR);
+
+ expect(lastNChangeEvents(1)).toEqual([STATE_OPEN]);
+ });
+
it('immediately transitions will-close -> closed if mouse moves far away', () => {
moveMouse(0);
jest.runOnlyPendingTimers();
diff --git a/spec/frontend/super_sidebar/components/super_sidebar_spec.js b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
index 0c785109b5e..7b7b8a7be13 100644
--- a/spec/frontend/super_sidebar/components/super_sidebar_spec.js
+++ b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
@@ -11,6 +11,7 @@ import SidebarPeekBehavior, {
STATE_WILL_CLOSE,
} from '~/super_sidebar/components/sidebar_peek_behavior.vue';
import SidebarPortalTarget from '~/super_sidebar/components/sidebar_portal_target.vue';
+import ContextHeader from '~/super_sidebar/components/context_header.vue';
import ContextSwitcher from '~/super_sidebar/components/context_switcher.vue';
import SidebarMenu from '~/super_sidebar/components/sidebar_menu.vue';
import { sidebarState } from '~/super_sidebar/constants';
@@ -20,7 +21,7 @@ import {
} from '~/super_sidebar/super_sidebar_collapsed_state_manager';
import { stubComponent } from 'helpers/stub_component';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import { sidebarData as mockSidebarData } from '../mock_data';
+import { sidebarData as mockSidebarData, loggedOutSidebarData } from '../mock_data';
const initialSidebarState = { ...sidebarState };
@@ -42,6 +43,7 @@ describe('SuperSidebar component', () => {
const findSidebar = () => wrapper.findByTestId('super-sidebar');
const findUserBar = () => wrapper.findComponent(UserBar);
+ const findContextHeader = () => wrapper.findComponent(ContextHeader);
const findContextSwitcher = () => wrapper.findComponent(ContextSwitcher);
const findNavContainer = () => wrapper.findByTestId('nav-container');
const findHelpCenter = () => wrapper.findComponent(HelpCenter);
@@ -230,6 +232,15 @@ describe('SuperSidebar component', () => {
expect(findSidebar().classes()).not.toContain(peekHintClass);
},
);
+
+ it('keeps track of if sidebar has mouseover or not', async () => {
+ createWrapper({ sidebarState: { isCollapsed: false, isPeekable: true } });
+ expect(findPeekBehavior().props('isMouseOverSidebar')).toBe(false);
+ await findSidebar().trigger('mouseenter');
+ expect(findPeekBehavior().props('isMouseOverSidebar')).toBe(true);
+ await findSidebar().trigger('mouseleave');
+ expect(findPeekBehavior().props('isMouseOverSidebar')).toBe(false);
+ });
});
describe('nav container', () => {
@@ -259,4 +270,15 @@ describe('SuperSidebar component', () => {
expect(findTrialStatusPopover().exists()).toBe(true);
});
});
+
+ describe('Logged out', () => {
+ beforeEach(() => {
+ createWrapper({ sidebarData: loggedOutSidebarData });
+ });
+
+ it('renders context header instead of context switcher', () => {
+ expect(findContextHeader().exists()).toBe(true);
+ expect(findContextSwitcher().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/user_bar_spec.js b/spec/frontend/super_sidebar/components/user_bar_spec.js
index 272e0237219..c6dd8441094 100644
--- a/spec/frontend/super_sidebar/components/user_bar_spec.js
+++ b/spec/frontend/super_sidebar/components/user_bar_spec.js
@@ -1,9 +1,11 @@
import { GlBadge } from '@gitlab/ui';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Vue, { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { __ } from '~/locale';
import CreateMenu from '~/super_sidebar/components/create_menu.vue';
+import UserMenu from '~/super_sidebar/components/user_menu.vue';
import SearchModal from '~/super_sidebar/components/global_search/components/global_search.vue';
import BrandLogo from 'jh_else_ce/super_sidebar/components/brand_logo.vue';
import MergeRequestMenu from '~/super_sidebar/components/merge_request_menu.vue';
@@ -11,13 +13,14 @@ import UserBar from '~/super_sidebar/components/user_bar.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import waitForPromises from 'helpers/wait_for_promises';
import { userCounts } from '~/super_sidebar/user_counts_manager';
-import { sidebarData } from '../mock_data';
+import { sidebarData as mockSidebarData, loggedOutSidebarData } from '../mock_data';
import { MOCK_DEFAULT_SEARCH_OPTIONS } from './global_search/mock_data';
describe('UserBar component', () => {
let wrapper;
const findCreateMenu = () => wrapper.findComponent(CreateMenu);
+ const findUserMenu = () => wrapper.findComponent(UserMenu);
const findIssuesCounter = () => wrapper.findByTestId('issues-shortcut-button');
const findMRsCounter = () => wrapper.findByTestId('merge-requests-shortcut-button');
const findTodosCounter = () => wrapper.findByTestId('todos-shortcut-button');
@@ -37,13 +40,13 @@ describe('UserBar component', () => {
});
const createWrapper = ({
hasCollapseButton = true,
- extraSidebarData = {},
+ sidebarData = mockSidebarData,
provideOverrides = {},
} = {}) => {
wrapper = shallowMountExtended(UserBar, {
propsData: {
hasCollapseButton,
- sidebarData: { ...sidebarData, ...extraSidebarData },
+ sidebarData,
},
provide: {
toggleNewNavEndpoint: '/-/profile/preferences',
@@ -63,17 +66,17 @@ describe('UserBar component', () => {
});
it('passes the "Create new..." menu groups to the create-menu component', () => {
- expect(findCreateMenu().props('groups')).toBe(sidebarData.create_new_menu_groups);
+ expect(findCreateMenu().props('groups')).toBe(mockSidebarData.create_new_menu_groups);
});
it('passes the "Merge request" menu groups to the merge_request_menu component', () => {
- expect(findMergeRequestMenu().props('items')).toBe(sidebarData.merge_request_menu);
+ expect(findMergeRequestMenu().props('items')).toBe(mockSidebarData.merge_request_menu);
});
it('renders issues counter', () => {
const isuesCounter = findIssuesCounter();
expect(isuesCounter.props('count')).toBe(userCounts.assigned_issues);
- expect(isuesCounter.props('href')).toBe(sidebarData.issues_dashboard_path);
+ expect(isuesCounter.props('href')).toBe(mockSidebarData.issues_dashboard_path);
expect(isuesCounter.props('label')).toBe(__('Issues'));
expect(isuesCounter.attributes('data-track-action')).toBe('click_link');
expect(isuesCounter.attributes('data-track-label')).toBe('issues_link');
@@ -95,7 +98,7 @@ describe('UserBar component', () => {
describe('Todos counter', () => {
it('renders it', () => {
const todosCounter = findTodosCounter();
- expect(todosCounter.props('href')).toBe(sidebarData.todos_dashboard_path);
+ expect(todosCounter.props('href')).toBe(mockSidebarData.todos_dashboard_path);
expect(todosCounter.props('label')).toBe(__('To-Do list'));
expect(todosCounter.attributes('data-track-action')).toBe('click_link');
expect(todosCounter.attributes('data-track-label')).toBe('todos_link');
@@ -114,7 +117,7 @@ describe('UserBar component', () => {
it('renders branding logo', () => {
expect(findBrandLogo().exists()).toBe(true);
- expect(findBrandLogo().props('logoUrl')).toBe(sidebarData.logo_url);
+ expect(findBrandLogo().props('logoUrl')).toBe(mockSidebarData.logo_url);
});
it('does not render the "Stop impersonating" button', () => {
@@ -134,16 +137,16 @@ describe('UserBar component', () => {
describe('GitLab Next badge', () => {
describe('when on canary', () => {
it('should render a badge to switch off GitLab Next', () => {
- createWrapper({ extraSidebarData: { gitlab_com_and_canary: true } });
+ createWrapper({ sidebarData: { ...mockSidebarData, gitlab_com_and_canary: true } });
const badge = wrapper.findComponent(GlBadge);
expect(badge.text()).toBe('Next');
- expect(badge.attributes('href')).toBe(sidebarData.canary_toggle_com_url);
+ expect(badge.attributes('href')).toBe(mockSidebarData.canary_toggle_com_url);
});
});
describe('when not on canary', () => {
it('should not render the GitLab Next badge', () => {
- createWrapper({ extraSidebarData: { gitlab_com_and_canary: false } });
+ createWrapper({ sidebarData: { ...mockSidebarData, gitlab_com_and_canary: false } });
const badge = wrapper.findComponent(GlBadge);
expect(badge.exists()).toBe(false);
});
@@ -206,8 +209,36 @@ describe('UserBar component', () => {
it('sets the href and data-method attributes', () => {
const btn = findStopImpersonationButton();
- expect(btn.attributes('href')).toBe(sidebarData.stop_impersonation_path);
+ expect(btn.attributes('href')).toBe(mockSidebarData.stop_impersonation_path);
expect(btn.attributes('data-method')).toBe('delete');
});
});
+
+ describe('Logged out', () => {
+ beforeEach(() => {
+ createWrapper({ sidebarData: loggedOutSidebarData, gitlab_com_and_canary: true });
+ });
+
+ it('does not render brand logo', () => {
+ expect(findBrandLogo().exists()).toBe(false);
+ });
+
+ it('does not render Next badge', () => {
+ expect(wrapper.findComponent(GlBadge).exists()).toBe(false);
+ });
+
+ it('does not render create menu', () => {
+ expect(findCreateMenu().exists()).toBe(false);
+ });
+
+ it('does not render user menu', () => {
+ expect(findUserMenu().exists()).toBe(false);
+ });
+
+ it('does not render counters', () => {
+ expect(findIssuesCounter().exists()).toBe(false);
+ expect(findMRsCounter().exists()).toBe(false);
+ expect(findTodosCounter().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/user_name_group_spec.js b/spec/frontend/super_sidebar/components/user_name_group_spec.js
index bd02f3c17e3..a31ad93d143 100644
--- a/spec/frontend/super_sidebar/components/user_name_group_spec.js
+++ b/spec/frontend/super_sidebar/components/user_name_group_spec.js
@@ -97,6 +97,22 @@ describe('UserNameGroup component', () => {
it("sets the tooltip's target to the status container", () => {
expect(findGlTooltip().props('target')?.()).toBe(findUserStatus().element);
});
+
+ describe('Tooltip', () => {
+ it('renders the tooltip when message has some text', () => {
+ createWrapper({
+ status: { ...userMenuMockStatus, customized: true, message_html: 'Has text' },
+ });
+ expect(findGlTooltip().exists()).toBe(true);
+ });
+
+ it('does not render the tooltip when message is empty', () => {
+ createWrapper({
+ status: { ...userMenuMockStatus, customized: true, message_html: '' },
+ });
+ expect(findGlTooltip().exists()).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/super_sidebar/mock_data.js b/spec/frontend/super_sidebar/mock_data.js
index 72c67e34038..6fb9715824f 100644
--- a/spec/frontend/super_sidebar/mock_data.js
+++ b/spec/frontend/super_sidebar/mock_data.js
@@ -71,7 +71,13 @@ export const mergeRequestMenuGroup = [
},
];
+export const contextSwitcherLinks = [
+ { title: 'Explore', link: '/explore', icon: 'compass', link_classes: 'persistent-link-class' },
+ { title: 'Admin area', link: '/admin', icon: 'admin' },
+];
+
export const sidebarData = {
+ is_logged_in: true,
current_menu_items: [],
current_context_header: {
title: 'Your Work',
@@ -103,7 +109,7 @@ export const sidebarData = {
gitlab_version_check: { severity: 'success' },
gitlab_com_and_canary: false,
canary_toggle_com_url: 'https://next.gitlab.com',
- context_switcher_links: [],
+ context_switcher_links: contextSwitcherLinks,
search: {
search_path: '/search',
},
@@ -120,6 +126,26 @@ export const sidebarData = {
],
};
+export const loggedOutSidebarData = {
+ is_logged_in: false,
+ current_menu_items: [],
+ current_context_header: {
+ title: 'Your Work',
+ icon: 'work',
+ },
+ support_path: '/support',
+ display_whats_new: true,
+ whats_new_most_recent_release_items_count: 5,
+ whats_new_version_digest: 1,
+ show_version_check: false,
+ gitlab_version: { major: 16, minor: 0 },
+ gitlab_version_check: { severity: 'success' },
+ search: {
+ search_path: '/search',
+ },
+ panel_type: 'your_work',
+};
+
export const userMenuMockStatus = {
can_update: false,
busy: false,
diff --git a/spec/frontend/super_sidebar/utils_spec.js b/spec/frontend/super_sidebar/utils_spec.js
index 8c8673ddbc4..536599e6c12 100644
--- a/spec/frontend/super_sidebar/utils_spec.js
+++ b/spec/frontend/super_sidebar/utils_spec.js
@@ -1,14 +1,21 @@
+import * as Sentry from '@sentry/browser';
import {
getTopFrequentItems,
trackContextAccess,
formatContextSwitcherItems,
+ getItemsFromLocalStorage,
+ removeItemFromLocalStorage,
ariaCurrent,
} from '~/super_sidebar/utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import AccessorUtilities from '~/lib/utils/accessor';
import { FREQUENT_ITEMS, FIFTEEN_MINUTES_IN_MS } from '~/frequent_items/constants';
import { unsortedFrequentItems, sortedFrequentItems } from '../frequent_items/mock_data';
-import { searchUserProjectsAndGroupsResponseMock } from './mock_data';
+import { cachedFrequentProjects, searchUserProjectsAndGroupsResponseMock } from './mock_data';
+
+jest.mock('@sentry/browser');
+
+useLocalStorageSpy();
describe('Super sidebar utils spec', () => {
describe('getTopFrequentItems', () => {
@@ -35,8 +42,6 @@ describe('Super sidebar utils spec', () => {
});
describe('trackContextAccess', () => {
- useLocalStorageSpy();
-
const username = 'root';
const context = {
namespace: 'groups',
@@ -65,7 +70,7 @@ describe('Super sidebar utils spec', () => {
);
});
- it('updates existing item if it was persisted to the local storage over 15 minutes ago', () => {
+ it('updates existing item frequency/access time if it was persisted to the local storage over 15 minutes ago', () => {
window.localStorage.setItem(
storageKey,
JSON.stringify([
@@ -90,7 +95,7 @@ describe('Super sidebar utils spec', () => {
);
});
- it('leaves item as is if it was persisted to the local storage under 15 minutes ago', () => {
+ it('leaves item frequency/access time as is if it was persisted to the local storage under 15 minutes ago', () => {
const jsonString = JSON.stringify([
{
id: 1,
@@ -109,6 +114,39 @@ describe('Super sidebar utils spec', () => {
expect(window.localStorage.setItem).toHaveBeenLastCalledWith(storageKey, jsonString);
});
+ it('always updates stored item metadata', () => {
+ window.localStorage.setItem(
+ storageKey,
+ JSON.stringify([
+ {
+ id: 1,
+ frequency: 2,
+ lastAccessedOn: Date.now(),
+ },
+ ]),
+ );
+
+ trackContextAccess(username, {
+ ...context,
+ item: {
+ ...context.item,
+ avatarUrl: '/group.png',
+ },
+ });
+
+ expect(window.localStorage.setItem).toHaveBeenCalledWith(
+ storageKey,
+ JSON.stringify([
+ {
+ id: 1,
+ avatarUrl: '/group.png',
+ frequency: 2,
+ lastAccessedOn: Date.now(),
+ },
+ ]),
+ );
+ });
+
it('replaces the least popular item in the local storage once the persisted items limit has been hit', () => {
// Add the maximum amount of items to the local storage, in increasing popularity
const storedItems = Array.from({ length: FREQUENT_ITEMS.MAX_COUNT }).map((_, i) => ({
@@ -159,6 +197,125 @@ describe('Super sidebar utils spec', () => {
});
});
+ describe('getItemsFromLocalStorage', () => {
+ const storageKey = 'mockStorageKey';
+ const maxItems = 5;
+ const storedItems = JSON.parse(cachedFrequentProjects);
+
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, cachedFrequentProjects);
+ });
+
+ describe('when localStorage cannot be accessed', () => {
+ beforeEach(() => {
+ jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
+ });
+
+ it('returns an empty array', () => {
+ const items = getItemsFromLocalStorage({ storageKey, maxItems });
+ expect(items).toEqual([]);
+ });
+ });
+
+ describe('when localStorage contains parseable data', () => {
+ it('returns an array of items limited by max items', () => {
+ const items = getItemsFromLocalStorage({ storageKey, maxItems });
+ expect(items.length).toEqual(maxItems);
+
+ items.forEach((item) => {
+ expect(storedItems).toContainEqual(item);
+ });
+ });
+
+ it('returns all items if max items is large', () => {
+ const items = getItemsFromLocalStorage({ storageKey, maxItems: 1 });
+ expect(items.length).toEqual(1);
+
+ expect(storedItems).toContainEqual(items[0]);
+ });
+ });
+
+ describe('when localStorage contains unparseable data', () => {
+ let items;
+
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, 'unparseable');
+ items = getItemsFromLocalStorage({ storageKey, maxItems });
+ });
+
+ it('logs an error to Sentry', () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+
+ it('returns an empty array', () => {
+ expect(items).toEqual([]);
+ });
+ });
+ });
+
+ describe('removeItemFromLocalStorage', () => {
+ const storageKey = 'mockStorageKey';
+ const originalStoredItems = JSON.parse(cachedFrequentProjects);
+
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, cachedFrequentProjects);
+ });
+
+ describe('when given an item to delete', () => {
+ let items;
+ let modifiedStoredItems;
+
+ beforeEach(() => {
+ items = removeItemFromLocalStorage({ storageKey, item: { id: 3 } });
+ modifiedStoredItems = JSON.parse(window.localStorage.getItem(storageKey));
+ });
+
+ it('removes the item from localStorage', () => {
+ expect(modifiedStoredItems.length).toBe(originalStoredItems.length - 1);
+ expect(modifiedStoredItems).not.toContainEqual(originalStoredItems[2]);
+ });
+
+ it('returns the resulting stored structure', () => {
+ expect(items).toEqual(modifiedStoredItems);
+ });
+ });
+
+ describe('when given an unknown item to delete', () => {
+ let items;
+ let modifiedStoredItems;
+
+ beforeEach(() => {
+ items = removeItemFromLocalStorage({ storageKey, item: { id: 'does-not-exist' } });
+ modifiedStoredItems = JSON.parse(window.localStorage.getItem(storageKey));
+ });
+
+ it('does not change the stored value', () => {
+ expect(modifiedStoredItems).toEqual(originalStoredItems);
+ });
+
+ it('returns the stored structure', () => {
+ expect(items).toEqual(originalStoredItems);
+ });
+ });
+
+ describe('when localStorage has unparseable data', () => {
+ let items;
+
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, 'unparseable');
+ items = removeItemFromLocalStorage({ storageKey, item: { id: 3 } });
+ });
+
+ it('logs an error to Sentry', () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+
+ it('returns an empty array', () => {
+ expect(items).toEqual([]);
+ });
+ });
+ });
+
describe('ariaCurrent', () => {
it.each`
isActive | expected
diff --git a/spec/frontend/tags/components/delete_tag_modal_spec.js b/spec/frontend/tags/components/delete_tag_modal_spec.js
index 5a3104fad9b..682145c8d6c 100644
--- a/spec/frontend/tags/components/delete_tag_modal_spec.js
+++ b/spec/frontend/tags/components/delete_tag_modal_spec.js
@@ -5,6 +5,7 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import DeleteTagModal from '~/tags/components/delete_tag_modal.vue';
import eventHub from '~/tags/event_hub';
+import { I18N_DELETE_TAG_MODAL } from '~/tags/constants';
let wrapper;
@@ -52,18 +53,17 @@ const findForm = () => wrapper.find('form');
describe('Delete tag modal', () => {
describe('Deleting a regular tag', () => {
- const expectedTitle = 'Delete tag. Are you ABSOLUTELY SURE?';
- const expectedMessage = "You're about to permanently delete the tag test-tag.";
+ const expectedMessage = 'Deleting the test-tag tag cannot be undone.';
beforeEach(() => {
createComponent();
});
it('renders the modal correctly', () => {
- expect(findModal().props('title')).toBe(expectedTitle);
+ expect(findModal().props('title')).toBe(I18N_DELETE_TAG_MODAL.modalTitle);
expect(findModalMessage().text()).toMatchInterpolatedText(expectedMessage);
- expect(findCancelButton().text()).toBe('Cancel, keep tag');
- expect(findDeleteButton().text()).toBe('Yes, delete tag');
+ expect(findCancelButton().text()).toBe(I18N_DELETE_TAG_MODAL.cancelButtonText);
+ expect(findDeleteButton().text()).toBe(I18N_DELETE_TAG_MODAL.deleteButtonText);
expect(findForm().attributes('action')).toBe(path);
});
@@ -92,11 +92,8 @@ describe('Delete tag modal', () => {
});
describe('Deleting a protected tag (for owner or maintainer)', () => {
- const expectedTitleProtected = 'Delete protected tag. Are you ABSOLUTELY SURE?';
- const expectedMessageProtected =
- "You're about to permanently delete the protected tag test-tag.";
- const expectedConfirmationText =
- 'After you confirm and select Yes, delete protected tag, you cannot recover this tag. Please type the following to confirm: test-tag';
+ const expectedMessage = 'Deleting the test-tag protected tag cannot be undone.';
+ const expectedConfirmationText = 'Please type the following to confirm: test-tag';
beforeEach(() => {
createComponent({ isProtected: true });
@@ -104,11 +101,11 @@ describe('Delete tag modal', () => {
describe('rendering the modal correctly for a protected tag', () => {
it('sets the modal title for a protected tag', () => {
- expect(findModal().props('title')).toBe(expectedTitleProtected);
+ expect(findModal().props('title')).toBe(I18N_DELETE_TAG_MODAL.modalTitleProtectedTag);
});
it('renders the correct text in the modal message', () => {
- expect(findModalMessage().text()).toMatchInterpolatedText(expectedMessageProtected);
+ expect(findModalMessage().text()).toMatchInterpolatedText(expectedMessage);
});
it('renders the protected tag name confirmation form with expected text and action', () => {
@@ -117,8 +114,8 @@ describe('Delete tag modal', () => {
});
it('renders the buttons with the correct button text', () => {
- expect(findCancelButton().text()).toBe('Cancel, keep tag');
- expect(findDeleteButton().text()).toBe('Yes, delete protected tag');
+ expect(findCancelButton().text()).toBe(I18N_DELETE_TAG_MODAL.cancelButtonText);
+ expect(findDeleteButton().text()).toBe(I18N_DELETE_TAG_MODAL.deleteButtonTextProtectedTag);
});
});
diff --git a/spec/frontend/token_access/inbound_token_access_spec.js b/spec/frontend/token_access/inbound_token_access_spec.js
index 1ca58053e68..d82d65e3549 100644
--- a/spec/frontend/token_access/inbound_token_access_spec.js
+++ b/spec/frontend/token_access/inbound_token_access_spec.js
@@ -21,6 +21,7 @@ import {
} from './mock_data';
const projectPath = 'root/my-repo';
+const testProjectPath = 'root/test';
const message = 'An error occurred';
const error = new Error(message);
@@ -53,10 +54,11 @@ describe('TokenAccess component', () => {
const findToggle = () => wrapper.findComponent(GlToggle);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findAddProjectBtn = () => wrapper.findByRole('button', { name: 'Add project' });
+ const findAddProjectBtn = () => wrapper.findByTestId('add-project-btn');
const findCancelBtn = () => wrapper.findByRole('button', { name: 'Cancel' });
const findProjectInput = () => wrapper.findComponent(GlFormInput);
const findRemoveProjectBtn = () => wrapper.findByRole('button', { name: 'Remove access' });
+ const findToggleFormBtn = () => wrapper.findByTestId('toggle-form-btn');
const findTokenDisabledAlert = () => wrapper.findComponent(GlAlert);
const createMockApolloProvider = (requestHandlers) => {
@@ -69,11 +71,6 @@ describe('TokenAccess component', () => {
fullPath: projectPath,
},
apolloProvider: createMockApolloProvider(requestHandlers),
- data() {
- return {
- targetProjectPath: 'root/test',
- };
- },
});
};
@@ -222,11 +219,13 @@ describe('TokenAccess component', () => {
await waitForPromises();
+ await findToggleFormBtn().trigger('click');
+ await findProjectInput().vm.$emit('input', testProjectPath);
findAddProjectBtn().trigger('click');
expect(inboundAddProjectSuccessResponseHandler).toHaveBeenCalledWith({
projectPath,
- targetProjectPath: 'root/test',
+ targetProjectPath: testProjectPath,
});
});
@@ -242,6 +241,8 @@ describe('TokenAccess component', () => {
await waitForPromises();
+ await findToggleFormBtn().trigger('click');
+ await findProjectInput().vm.$emit('input', testProjectPath);
findAddProjectBtn().trigger('click');
await waitForPromises();
@@ -249,7 +250,7 @@ describe('TokenAccess component', () => {
expect(createAlert).toHaveBeenCalledWith({ message });
});
- it('clicking cancel clears target path', async () => {
+ it('clicking cancel hides the form and clears the target path', async () => {
createComponent(
[
[inboundGetCIJobTokenScopeQuery, inboundJobTokenScopeEnabledResponseHandler],
@@ -260,10 +261,18 @@ describe('TokenAccess component', () => {
await waitForPromises();
- expect(findProjectInput().element.value).toBe('root/test');
+ await findToggleFormBtn().trigger('click');
+
+ expect(findProjectInput().exists()).toBe(true);
+
+ await findProjectInput().vm.$emit('input', testProjectPath);
await findCancelBtn().trigger('click');
+ expect(findProjectInput().exists()).toBe(false);
+
+ await findToggleFormBtn().trigger('click');
+
expect(findProjectInput().element.value).toBe('');
});
});
diff --git a/spec/frontend/token_access/outbound_token_access_spec.js b/spec/frontend/token_access/outbound_token_access_spec.js
index f9eb201eb5c..c5224d5d942 100644
--- a/spec/frontend/token_access/outbound_token_access_spec.js
+++ b/spec/frontend/token_access/outbound_token_access_spec.js
@@ -38,9 +38,9 @@ describe('TokenAccess component', () => {
const findToggle = () => wrapper.findComponent(GlToggle);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAddProjectBtn = () => wrapper.findByRole('button', { name: 'Add project' });
const findRemoveProjectBtn = () => wrapper.findByRole('button', { name: 'Remove access' });
const findDeprecationAlert = () => wrapper.findByTestId('deprecation-alert');
- const findProjectPathInput = () => wrapper.findByTestId('project-path-input');
const createMockApolloProvider = (requestHandlers) => {
return createMockApollo(requestHandlers);
@@ -247,7 +247,7 @@ describe('TokenAccess component', () => {
});
describe('adding a new project', () => {
- it('disables the input to add new projects', async () => {
+ it('disables the button for adding new projects', async () => {
createComponent(
[
[getCIJobTokenScopeQuery, disabledJobTokenScopeHandler],
@@ -260,7 +260,7 @@ describe('TokenAccess component', () => {
await waitForPromises();
- expect(findProjectPathInput().attributes('disabled')).toBe('disabled');
+ expect(findAddProjectBtn().attributes('disabled')).toBe('disabled');
});
});
});
diff --git a/spec/frontend/tracing/components/tracing_details_spec.js b/spec/frontend/tracing/components/tracing_details_spec.js
new file mode 100644
index 00000000000..c5efa2a7eb5
--- /dev/null
+++ b/spec/frontend/tracing/components/tracing_details_spec.js
@@ -0,0 +1,103 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import TracingDetails from '~/tracing/components/tracing_details.vue';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
+import { visitUrl, isSafeURL } from '~/lib/utils/url_utility';
+
+jest.mock('~/alert');
+jest.mock('~/lib/utils/url_utility');
+
+describe('TracingDetails', () => {
+ let wrapper;
+ let observabilityClientMock;
+
+ const TRACE_ID = 'test-trace-id';
+ const TRACING_INDEX_URL = 'https://www.gitlab.com/flightjs/Flight/-/tracing';
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findTraceDetails = () => wrapper.findComponentByTestId('trace-details');
+
+ const props = {
+ traceId: TRACE_ID,
+ tracingIndexUrl: TRACING_INDEX_URL,
+ };
+
+ const mountComponent = async () => {
+ wrapper = shallowMountExtended(TracingDetails, {
+ propsData: {
+ ...props,
+ observabilityClient: observabilityClientMock,
+ },
+ });
+ await waitForPromises();
+ };
+
+ beforeEach(() => {
+ isSafeURL.mockReturnValue(true);
+
+ observabilityClientMock = {
+ isTracingEnabled: jest.fn(),
+ fetchTrace: jest.fn(),
+ };
+ });
+
+ it('renders the loading indicator while checking if tracing is enabled', () => {
+ mountComponent();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(observabilityClientMock.isTracingEnabled).toHaveBeenCalled();
+ });
+
+ describe('when tracing is enabled', () => {
+ const mockTrace = { traceId: 'test-trace-id', foo: 'bar' };
+ beforeEach(async () => {
+ observabilityClientMock.isTracingEnabled.mockResolvedValueOnce(true);
+ observabilityClientMock.fetchTrace.mockResolvedValueOnce(mockTrace);
+
+ await mountComponent();
+ });
+
+ it('fetches the trace and renders the trace details', () => {
+ expect(observabilityClientMock.isTracingEnabled).toHaveBeenCalled();
+ expect(observabilityClientMock.fetchTrace).toHaveBeenCalled();
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(findTraceDetails().exists()).toBe(true);
+ });
+ });
+
+ describe('when tracing is not enabled', () => {
+ beforeEach(async () => {
+ observabilityClientMock.isTracingEnabled.mockResolvedValueOnce(false);
+
+ await mountComponent();
+ });
+
+ it('redirects to tracingIndexUrl', () => {
+ expect(visitUrl).toHaveBeenCalledWith(props.tracingIndexUrl);
+ });
+ });
+
+ describe('error handling', () => {
+ it('if isTracingEnabled fails, it renders an alert and empty page', async () => {
+ observabilityClientMock.isTracingEnabled.mockRejectedValueOnce('error');
+
+ await mountComponent();
+
+ expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to load trace details.' });
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(findTraceDetails().exists()).toBe(false);
+ });
+
+ it('if fetchTrace fails, it renders an alert and empty page', async () => {
+ observabilityClientMock.isTracingEnabled.mockReturnValueOnce(true);
+ observabilityClientMock.fetchTrace.mockRejectedValueOnce('error');
+
+ await mountComponent();
+
+ expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to load trace details.' });
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(findTraceDetails().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/tracing/components/tracing_empty_state_spec.js b/spec/frontend/tracing/components/tracing_empty_state_spec.js
index c3df187e1c5..d91c62a1dad 100644
--- a/spec/frontend/tracing/components/tracing_empty_state_spec.js
+++ b/spec/frontend/tracing/components/tracing_empty_state_spec.js
@@ -8,12 +8,7 @@ describe('TracingEmptyState', () => {
const findEnableButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
- wrapper = shallowMountExtended(TracingEmptyState, {
- propsData: {
- enableTracing: jest.fn(),
- },
- stubs: { GlButton },
- });
+ wrapper = shallowMountExtended(TracingEmptyState);
});
it('renders the component properly', () => {
@@ -36,9 +31,9 @@ describe('TracingEmptyState', () => {
expect(enableButton.text()).toBe('Enable');
});
- it('calls enableTracing method when enable button is clicked', () => {
+ it('emits enable-tracing when enable button is clicked', () => {
findEnableButton().vm.$emit('click');
- expect(wrapper.props().enableTracing).toHaveBeenCalled();
+ expect(wrapper.emitted('enable-tracing')).toHaveLength(1);
});
});
diff --git a/spec/frontend/tracing/components/tracing_list_filtered_search_spec.js b/spec/frontend/tracing/components/tracing_list_filtered_search_spec.js
new file mode 100644
index 00000000000..ad15dd4a371
--- /dev/null
+++ b/spec/frontend/tracing/components/tracing_list_filtered_search_spec.js
@@ -0,0 +1,38 @@
+import { GlFilteredSearch } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import TracingListFilteredSearch from '~/tracing/components/tracing_list_filtered_search.vue';
+
+describe('TracingListFilteredSearch', () => {
+ let wrapper;
+ const initialFilters = [
+ { type: 'period', value: '1h' },
+ { type: 'service_name', value: 'example-service' },
+ ];
+ beforeEach(() => {
+ wrapper = shallowMountExtended(TracingListFilteredSearch, {
+ propsData: {
+ initialFilters,
+ },
+ });
+ });
+
+ it('renders the component', () => {
+ expect(wrapper.exists()).toBe(true);
+ });
+
+ it('sets initialFilters prop correctly', () => {
+ expect(wrapper.findComponent(GlFilteredSearch).props('value')).toEqual(initialFilters);
+ });
+
+ it('emits submit event on filtered search submit', () => {
+ wrapper
+ .findComponent(GlFilteredSearch)
+ .vm.$emit('submit', { filters: [{ type: 'period', value: '1h' }] });
+
+ expect(wrapper.emitted('submit')).toHaveLength(1);
+ expect(wrapper.emitted('submit')[0][0]).toEqual({
+ filters: [{ type: 'period', value: '1h' }],
+ });
+ });
+});
diff --git a/spec/frontend/tracing/components/tracing_list_spec.js b/spec/frontend/tracing/components/tracing_list_spec.js
index 183578cff31..9aa37ac9c9c 100644
--- a/spec/frontend/tracing/components/tracing_list_spec.js
+++ b/spec/frontend/tracing/components/tracing_list_spec.js
@@ -5,8 +5,19 @@ import TracingEmptyState from '~/tracing/components/tracing_empty_state.vue';
import TracingTableList from '~/tracing/components/tracing_table_list.vue';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
+import * as urlUtility from '~/lib/utils/url_utility';
+import {
+ queryToFilterObj,
+ filterObjToQuery,
+ filterObjToFilterToken,
+ filterTokensToFilterObj,
+} from '~/tracing/filters';
+import FilteredSearch from '~/tracing/components/tracing_list_filtered_search.vue';
+import UrlSync from '~/vue_shared/components/url_sync.vue';
+import setWindowLocation from 'helpers/set_window_location_helper';
jest.mock('~/alert');
+jest.mock('~/tracing/filters');
describe('TracingList', () => {
let wrapper;
@@ -15,16 +26,13 @@ describe('TracingList', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findEmptyState = () => wrapper.findComponent(TracingEmptyState);
const findTableList = () => wrapper.findComponent(TracingTableList);
+ const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
+ const findUrlSync = () => wrapper.findComponent(UrlSync);
const mountComponent = async () => {
wrapper = shallowMountExtended(TracingList, {
propsData: {
observabilityClient: observabilityClientMock,
- stubs: {
- GlLoadingIcon: true,
- TracingEmptyState: true,
- TracingTableList: true,
- },
},
});
await waitForPromises();
@@ -41,6 +49,10 @@ describe('TracingList', () => {
it('renders the loading indicator while checking if tracing is enabled', () => {
mountComponent();
expect(findLoadingIcon().exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTableList().exists()).toBe(false);
+ expect(findFilteredSearch().exists()).toBe(false);
+ expect(findUrlSync().exists()).toBe(false);
expect(observabilityClientMock.isTracingEnabled).toHaveBeenCalled();
});
@@ -52,12 +64,15 @@ describe('TracingList', () => {
await mountComponent();
});
- it('fetches the traces and renders the trace list', () => {
+
+ it('fetches the traces and renders the trace list with filtered search', () => {
expect(observabilityClientMock.isTracingEnabled).toHaveBeenCalled();
expect(observabilityClientMock.fetchTraces).toHaveBeenCalled();
expect(findLoadingIcon().exists()).toBe(false);
expect(findEmptyState().exists()).toBe(false);
expect(findTableList().exists()).toBe(true);
+ expect(findFilteredSearch().exists()).toBe(true);
+ expect(findUrlSync().exists()).toBe(true);
expect(findTableList().props('traces')).toBe(mockTraces);
});
@@ -69,6 +84,76 @@ describe('TracingList', () => {
expect(observabilityClientMock.fetchTraces).toHaveBeenCalledTimes(1);
});
+
+ it('on trace selection it redirects to the details url', () => {
+ setWindowLocation('base_path');
+ const visitUrlMock = jest.spyOn(urlUtility, 'visitUrl').mockReturnValue({});
+
+ findTableList().vm.$emit('trace-selected', { trace_id: 'test-trace-id' });
+
+ expect(visitUrlMock).toHaveBeenCalledTimes(1);
+ expect(visitUrlMock).toHaveBeenCalledWith('/base_path/test-trace-id');
+ });
+ });
+
+ describe('filtered search', () => {
+ let mockFilterObj;
+ let mockFilterToken;
+ let mockQuery;
+ let mockUpdatedFilterObj;
+
+ beforeEach(async () => {
+ observabilityClientMock.isTracingEnabled.mockResolvedValue(true);
+ observabilityClientMock.fetchTraces.mockResolvedValue([]);
+
+ setWindowLocation('?trace-id=foo');
+
+ mockFilterObj = { mock: 'filter-obj' };
+ queryToFilterObj.mockReturnValue(mockFilterObj);
+
+ mockFilterToken = ['mock-token'];
+ filterObjToFilterToken.mockReturnValue(mockFilterToken);
+
+ mockQuery = { mock: 'query' };
+ filterObjToQuery.mockReturnValueOnce(mockQuery);
+
+ mockUpdatedFilterObj = { mock: 'filter-obj-upd' };
+ filterTokensToFilterObj.mockReturnValue(mockUpdatedFilterObj);
+
+ await mountComponent();
+ });
+
+ it('renders FilteredSearch with initial filters parsed from window.location', () => {
+ expect(queryToFilterObj).toHaveBeenCalledWith('?trace-id=foo');
+ expect(filterObjToFilterToken).toHaveBeenCalledWith(mockFilterObj);
+ expect(findFilteredSearch().props('initialFilters')).toBe(mockFilterToken);
+ });
+
+ it('renders UrlSync and sets query prop', () => {
+ expect(filterObjToQuery).toHaveBeenCalledWith(mockFilterObj);
+ expect(findUrlSync().props('query')).toBe(mockQuery);
+ });
+
+ it('process filters on search submit', async () => {
+ const mockUpdatedQuery = { mock: 'updated-query' };
+ filterObjToQuery.mockReturnValueOnce(mockUpdatedQuery);
+ const mockFilters = { mock: 'some-filter' };
+
+ findFilteredSearch().vm.$emit('submit', mockFilters);
+ await waitForPromises();
+
+ expect(filterTokensToFilterObj).toHaveBeenCalledWith(mockFilters);
+ expect(filterObjToQuery).toHaveBeenCalledWith(mockUpdatedFilterObj);
+ expect(findUrlSync().props('query')).toBe(mockUpdatedQuery);
+ });
+
+ it('fetches traces with filters', () => {
+ expect(observabilityClientMock.fetchTraces).toHaveBeenCalledWith(mockFilterObj);
+
+ findFilteredSearch().vm.$emit('submit', {});
+
+ expect(observabilityClientMock.fetchTraces).toHaveBeenLastCalledWith(mockUpdatedFilterObj);
+ });
});
describe('when tracing is not enabled', () => {
@@ -83,8 +168,8 @@ describe('TracingList', () => {
expect(findEmptyState().exists()).toBe(true);
});
- it('set enableTracing as TracingEmptyState enable-tracing callback', () => {
- findEmptyState().props('enableTracing')();
+ it('calls enableTracing when TracingEmptyState emits enable-tracing', () => {
+ findEmptyState().vm.$emit('enable-tracing');
expect(observabilityClientMock.enableTraces).toHaveBeenCalled();
});
@@ -119,7 +204,7 @@ describe('TracingList', () => {
await mountComponent();
- findEmptyState().props('enableTracing')();
+ findEmptyState().vm.$emit('enable-tracing');
await waitForPromises();
expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to enable tracing.' });
diff --git a/spec/frontend/tracing/components/tracing_table_list_spec.js b/spec/frontend/tracing/components/tracing_table_list_spec.js
index 773b3eb8ed2..aa96b9b370f 100644
--- a/spec/frontend/tracing/components/tracing_table_list_spec.js
+++ b/spec/frontend/tracing/components/tracing_table_list_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import TracingTableList from '~/tracing/components/tracing_table_list.vue';
@@ -27,13 +28,18 @@ describe('TracingTableList', () => {
};
const getRows = () => wrapper.findComponent({ name: 'GlTable' }).find('tbody').findAll('tr');
-
+ const getRow = (idx) => getRows().at(idx);
const getCells = (trIdx) => getRows().at(trIdx).findAll('td');
const getCell = (trIdx, tdIdx) => {
return getCells(trIdx).at(tdIdx);
};
+ const selectRow = async (idx) => {
+ getRow(idx).trigger('click');
+ await nextTick();
+ };
+
it('renders traces as table', () => {
mountComponent();
@@ -50,6 +56,14 @@ describe('TracingTableList', () => {
});
});
+ it('emits trace-selected on row selection', async () => {
+ mountComponent();
+
+ await selectRow(0);
+ expect(wrapper.emitted('trace-selected')).toHaveLength(1);
+ expect(wrapper.emitted('trace-selected')[0][0]).toBe(mockTraces[0]);
+ });
+
it('renders the empty state when no traces are provided', () => {
mountComponent({ traces: [] });
diff --git a/spec/frontend/tracing/details_index_spec.js b/spec/frontend/tracing/details_index_spec.js
new file mode 100644
index 00000000000..e0d368b6cb7
--- /dev/null
+++ b/spec/frontend/tracing/details_index_spec.js
@@ -0,0 +1,42 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import DetailsIndex from '~/tracing/details_index.vue';
+import TracingDetails from '~/tracing/components/tracing_details.vue';
+import ObservabilityContainer from '~/observability/components/observability_container.vue';
+
+describe('DetailsIndex', () => {
+ const props = {
+ traceId: 'test-trace-id',
+ tracingIndexUrl: 'https://example.com/tracing/index',
+ oauthUrl: 'https://example.com/oauth',
+ tracingUrl: 'https://example.com/tracing',
+ provisioningUrl: 'https://example.com/provisioning',
+ };
+
+ let wrapper;
+
+ const mountComponent = () => {
+ wrapper = shallowMountExtended(DetailsIndex, {
+ propsData: props,
+ });
+ };
+
+ it('renders ObservabilityContainer component', () => {
+ mountComponent();
+
+ const observabilityContainer = wrapper.findComponent(ObservabilityContainer);
+ expect(observabilityContainer.exists()).toBe(true);
+ expect(observabilityContainer.props('oauthUrl')).toBe(props.oauthUrl);
+ expect(observabilityContainer.props('tracingUrl')).toBe(props.tracingUrl);
+ expect(observabilityContainer.props('provisioningUrl')).toBe(props.provisioningUrl);
+ });
+
+ it('renders TracingDetails component inside ObservabilityContainer', () => {
+ mountComponent();
+
+ const observabilityContainer = wrapper.findComponent(ObservabilityContainer);
+ const detailsCmp = observabilityContainer.findComponent(TracingDetails);
+ expect(detailsCmp.exists()).toBe(true);
+ expect(detailsCmp.props('traceId')).toBe(props.traceId);
+ expect(detailsCmp.props('tracingIndexUrl')).toBe(props.tracingIndexUrl);
+ });
+});
diff --git a/spec/frontend/tracing/filters_spec.js b/spec/frontend/tracing/filters_spec.js
new file mode 100644
index 00000000000..ee396326f45
--- /dev/null
+++ b/spec/frontend/tracing/filters_spec.js
@@ -0,0 +1,141 @@
+import {
+ filterToQueryObject,
+ urlQueryToFilter,
+ prepareTokens,
+ processFilters,
+} from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
+import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
+
+import {
+ PERIOD_FILTER_TOKEN_TYPE,
+ SERVICE_NAME_FILTER_TOKEN_TYPE,
+ OPERATION_FILTER_TOKEN_TYPE,
+ TRACE_ID_FILTER_TOKEN_TYPE,
+ DURATION_MS_FILTER_TOKEN_TYPE,
+ queryToFilterObj,
+ filterObjToQuery,
+ filterObjToFilterToken,
+ filterTokensToFilterObj,
+} from '~/tracing/filters';
+
+jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils');
+
+describe('utils', () => {
+ describe('queryToFilterObj', () => {
+ it('should build a filter obj', () => {
+ const url = 'http://example.com/';
+ urlQueryToFilter.mockReturnValue({
+ period: '7d',
+ service: 'my_service',
+ operation: 'my_operation',
+ trace_id: 'my_trace_id',
+ durationMs: '500',
+ [FILTERED_SEARCH_TERM]: 'test',
+ });
+
+ const filterObj = queryToFilterObj(url);
+
+ expect(urlQueryToFilter).toHaveBeenCalledWith(url, {
+ customOperators: [
+ { operator: '>', prefix: 'gt' },
+ { operator: '<', prefix: 'lt' },
+ ],
+ filteredSearchTermKey: 'search',
+ });
+ expect(filterObj).toEqual({
+ period: '7d',
+ service: 'my_service',
+ operation: 'my_operation',
+ traceId: 'my_trace_id',
+ durationMs: '500',
+ search: 'test',
+ });
+ });
+ });
+
+ describe('filterObjToQuery', () => {
+ it('should convert filter object to URL query', () => {
+ filterToQueryObject.mockReturnValue('mockquery');
+
+ const query = filterObjToQuery({
+ period: '7d',
+ serviceName: 'my_service',
+ operation: 'my_operation',
+ traceId: 'my_trace_id',
+ durationMs: '500',
+ search: 'test',
+ });
+
+ expect(filterToQueryObject).toHaveBeenCalledWith(
+ {
+ period: '7d',
+ service: 'my_service',
+ operation: 'my_operation',
+ trace_id: 'my_trace_id',
+ durationMs: '500',
+ 'filtered-search-term': 'test',
+ },
+ {
+ customOperators: [
+ { applyOnlyToKey: 'durationMs', operator: '>', prefix: 'gt' },
+ { applyOnlyToKey: 'durationMs', operator: '<', prefix: 'lt' },
+ ],
+ filteredSearchTermKey: 'search',
+ },
+ );
+ expect(query).toBe('mockquery');
+ });
+ });
+
+ describe('filterObjToFilterToken', () => {
+ it('should convert filter object to filter tokens', () => {
+ const mockTokens = [];
+ prepareTokens.mockReturnValue(mockTokens);
+
+ const tokens = filterObjToFilterToken({
+ period: '7d',
+ serviceName: 'my_service',
+ operation: 'my_operation',
+ traceId: 'my_trace_id',
+ durationMs: '500',
+ search: 'test',
+ });
+
+ expect(prepareTokens).toHaveBeenCalledWith({
+ [PERIOD_FILTER_TOKEN_TYPE]: '7d',
+ [SERVICE_NAME_FILTER_TOKEN_TYPE]: 'my_service',
+ [OPERATION_FILTER_TOKEN_TYPE]: 'my_operation',
+ [TRACE_ID_FILTER_TOKEN_TYPE]: 'my_trace_id',
+ [DURATION_MS_FILTER_TOKEN_TYPE]: '500',
+ [FILTERED_SEARCH_TERM]: 'test',
+ });
+ expect(tokens).toBe(mockTokens);
+ });
+ });
+
+ describe('filterTokensToFilterObj', () => {
+ it('should convert filter tokens to filter object', () => {
+ const mockTokens = [];
+ processFilters.mockReturnValue({
+ [SERVICE_NAME_FILTER_TOKEN_TYPE]: 'my_service',
+ [PERIOD_FILTER_TOKEN_TYPE]: '7d',
+ [OPERATION_FILTER_TOKEN_TYPE]: 'my_operation',
+ [TRACE_ID_FILTER_TOKEN_TYPE]: 'my_trace_id',
+ [DURATION_MS_FILTER_TOKEN_TYPE]: '500',
+ [FILTERED_SEARCH_TERM]: 'test',
+ });
+
+ const filterObj = filterTokensToFilterObj(mockTokens);
+
+ expect(processFilters).toHaveBeenCalledWith(mockTokens);
+ expect(filterObj).toEqual({
+ serviceName: 'my_service',
+ period: '7d',
+ operation: 'my_operation',
+ traceId: 'my_trace_id',
+ durationMs: '500',
+ search: 'test',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/tracking/internal_events_spec.js b/spec/frontend/tracking/internal_events_spec.js
index ad2ffa7cef4..ca244c25b06 100644
--- a/spec/frontend/tracking/internal_events_spec.js
+++ b/spec/frontend/tracking/internal_events_spec.js
@@ -2,12 +2,16 @@ import API from '~/api';
import { mockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import InternalEvents from '~/tracking/internal_events';
-import { GITLAB_INTERNAL_EVENT_CATEGORY, SERVICE_PING_SCHEMA } from '~/tracking/constants';
+import {
+ GITLAB_INTERNAL_EVENT_CATEGORY,
+ SERVICE_PING_SCHEMA,
+ LOAD_INTERNAL_EVENTS_SELECTOR,
+} from '~/tracking/constants';
import * as utils from '~/tracking/utils';
import { Tracker } from '~/tracking/tracker';
jest.mock('~/api', () => ({
- trackRedisHllUserEvent: jest.fn(),
+ trackInternalEvent: jest.fn(),
}));
jest.mock('~/tracking/utils', () => ({
@@ -19,13 +23,13 @@ Tracker.enabled = jest.fn();
describe('InternalEvents', () => {
describe('track_event', () => {
- it('track_event calls trackRedisHllUserEvent with correct arguments', () => {
+ it('track_event calls API.trackInternalEvent with correct arguments', () => {
const event = 'TestEvent';
InternalEvents.track_event(event);
- expect(API.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
- expect(API.trackRedisHllUserEvent).toHaveBeenCalledWith(event);
+ expect(API.trackInternalEvent).toHaveBeenCalledTimes(1);
+ expect(API.trackInternalEvent).toHaveBeenCalledWith(event);
});
it('track_event calls tracking.event functions with correct arguments', () => {
@@ -97,4 +101,48 @@ describe('InternalEvents', () => {
expect(result).toEqual({ name: 'click', func: expect.any(Function) });
});
});
+
+ describe('trackInternalLoadEvents', () => {
+ let querySelectorAllMock;
+ let mockElements;
+ const action = 'i_devops_action';
+
+ beforeEach(() => {
+ Tracker.enabled.mockReturnValue(true);
+ querySelectorAllMock = jest.fn();
+ document.querySelectorAll = querySelectorAllMock;
+ });
+
+ it('should return an empty array if Tracker is not enabled', () => {
+ Tracker.enabled.mockReturnValue(false);
+ const result = InternalEvents.trackInternalLoadEvents();
+ expect(result).toEqual([]);
+ });
+
+ describe('tracking', () => {
+ let trackEventSpy;
+ beforeEach(() => {
+ trackEventSpy = jest.spyOn(InternalEvents, 'track_event');
+ });
+
+ it('should track event if action exists', () => {
+ mockElements = [{ dataset: { eventTracking: action, eventTrackingLoad: true } }];
+ querySelectorAllMock.mockReturnValue(mockElements);
+
+ const result = InternalEvents.trackInternalLoadEvents();
+ expect(trackEventSpy).toHaveBeenCalledWith(action);
+ expect(trackEventSpy).toHaveBeenCalledTimes(1);
+ expect(querySelectorAllMock).toHaveBeenCalledWith(LOAD_INTERNAL_EVENTS_SELECTOR);
+ expect(result).toEqual(mockElements);
+ });
+
+ it('should not track event if action is not present', () => {
+ mockElements = [{ dataset: { eventTracking: undefined, eventTrackingLoad: true } }];
+ querySelectorAllMock.mockReturnValue(mockElements);
+
+ InternalEvents.trackInternalLoadEvents();
+ expect(trackEventSpy).toHaveBeenCalledTimes(0);
+ });
+ });
+ });
});
diff --git a/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js b/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
index 1a200090805..88ab51cf135 100644
--- a/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
+++ b/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
@@ -1,16 +1,24 @@
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ProjectStorageApp from '~/usage_quotas/storage/components/project_storage_app.vue';
import UsageGraph from '~/usage_quotas/storage/components/usage_graph.vue';
-import { TOTAL_USAGE_DEFAULT_TEXT } from '~/usage_quotas/storage/constants';
+import {
+ descendingStorageUsageSort,
+ getStorageTypesFromProjectStatistics,
+} from '~/usage_quotas/storage/utils';
+import {
+ storageTypeHelpPaths,
+ PROJECT_STORAGE_TYPES,
+ NAMESPACE_STORAGE_TYPES,
+ TOTAL_USAGE_DEFAULT_TEXT,
+} from '~/usage_quotas/storage/constants';
import getProjectStorageStatistics from '~/usage_quotas/storage/queries/project_storage.query.graphql';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
import {
- projectData,
mockGetProjectStorageStatisticsGraphQLResponse,
mockEmptyResponse,
defaultProjectProvideValues,
@@ -36,25 +44,26 @@ describe('ProjectStorageApp', () => {
};
const createComponent = ({ provide = {}, mockApollo } = {}) => {
- wrapper = extendedWrapper(
- shallowMount(ProjectStorageApp, {
- apolloProvider: mockApollo,
- provide: {
- ...defaultProjectProvideValues,
- ...provide,
- },
- }),
- );
+ wrapper = shallowMountExtended(ProjectStorageApp, {
+ apolloProvider: mockApollo,
+ provide: {
+ ...defaultProjectProvideValues,
+ ...provide,
+ },
+ });
};
const findAlert = () => wrapper.findComponent(GlAlert);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findUsagePercentage = () => wrapper.findByTestId('total-usage');
- const findUsageQuotasHelpLink = () => wrapper.findByTestId('usage-quotas-help-link');
const findUsageGraph = () => wrapper.findComponent(UsageGraph);
+ const findProjectDetailsTable = () => wrapper.findByTestId('usage-quotas-project-usage-details');
+ const findNamespaceDetailsTable = () =>
+ wrapper.findByTestId('usage-quotas-namespace-usage-details');
describe('with apollo fetching successful', () => {
let mockApollo;
+ const mockProjectData = mockGetProjectStorageStatisticsGraphQLResponse.data.project;
beforeEach(async () => {
mockApollo = createMockApolloProvider({
@@ -65,13 +74,33 @@ describe('ProjectStorageApp', () => {
});
it('renders correct total usage', () => {
- expect(findUsagePercentage().text()).toBe(projectData.storage.totalUsage);
+ const expectedValue = numberToHumanSize(
+ mockGetProjectStorageStatisticsGraphQLResponse.data.project.statistics.storageSize,
+ 1,
+ );
+ expect(findUsagePercentage().text()).toBe(expectedValue);
+ });
+
+ it('passes project storage entities to project details table', () => {
+ const expectedValue = getStorageTypesFromProjectStatistics(
+ PROJECT_STORAGE_TYPES,
+ mockProjectData.statistics,
+ mockProjectData.statisticsDetailsPaths,
+ storageTypeHelpPaths,
+ ).sort(descendingStorageUsageSort('value'));
+
+ expect(findProjectDetailsTable().props('storageTypes')).toStrictEqual(expectedValue);
});
- it('renders correct usage quotas help link', () => {
- expect(findUsageQuotasHelpLink().attributes('href')).toBe(
- defaultProjectProvideValues.helpLinks.usageQuotas,
+ it('passes namespace storage entities to namespace details table', () => {
+ const expectedValue = getStorageTypesFromProjectStatistics(
+ NAMESPACE_STORAGE_TYPES,
+ mockProjectData.statistics,
+ mockProjectData.statisticsDetailsPaths,
+ storageTypeHelpPaths,
);
+
+ expect(findNamespaceDetailsTable().props('storageTypes')).toStrictEqual(expectedValue);
});
});
@@ -104,6 +133,14 @@ describe('ProjectStorageApp', () => {
it('shows default text for total usage', () => {
expect(findUsagePercentage().text()).toBe(TOTAL_USAGE_DEFAULT_TEXT);
});
+
+ it('passes empty array to project details table', () => {
+ expect(findProjectDetailsTable().props('storageTypes')).toStrictEqual([]);
+ });
+
+ it('passes empty array to namespace details table', () => {
+ expect(findNamespaceDetailsTable().props('storageTypes')).toStrictEqual([]);
+ });
});
describe('with apollo fetching error', () => {
diff --git a/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js b/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js
index 37fc9602315..364517a474f 100644
--- a/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js
+++ b/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js
@@ -1,15 +1,36 @@
-import { GlTableLite, GlPopover } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { GlTableLite } from '@gitlab/ui';
+import { mount, Wrapper } from '@vue/test-utils'; // eslint-disable-line no-unused-vars
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import ProjectStorageDetail from '~/usage_quotas/storage/components/project_storage_detail.vue';
-import { containerRegistryPopoverId, containerRegistryId } from '~/usage_quotas/storage/constants';
import { numberToHumanSize } from '~/lib/utils/number_utils';
-import { projectData, projectHelpLinks } from '../mock_data';
describe('ProjectStorageDetail', () => {
+ /** @type { Wrapper } */
let wrapper;
- const { storageTypes } = projectData.storage;
+ const generateStorageType = (props) => {
+ return {
+ id: 'id',
+ name: 'name',
+ description: 'description',
+ helpPath: '/help-path',
+ detailsPath: '/details-link',
+ value: 42,
+ ...props,
+ };
+ };
+
+ const storageTypes = [
+ generateStorageType({ id: 'one' }),
+ generateStorageType({ id: 'two' }),
+ generateStorageType({
+ id: 'three',
+ warning: {
+ content: 'warning message',
+ },
+ }),
+ ];
+
const defaultProps = { storageTypes };
const createComponent = (props = {}) => {
@@ -26,23 +47,7 @@ describe('ProjectStorageDetail', () => {
);
};
- const generateStorageType = (id = 'buildArtifacts') => {
- return {
- storageType: {
- id,
- name: 'Test Name',
- description: 'Test Description',
- helpPath: '/test-type',
- },
- value: 400000,
- };
- };
-
const findTable = () => wrapper.findComponent(GlTableLite);
- const findPopoverById = (id) =>
- wrapper.findAllComponents(GlPopover).filter((p) => p.attributes('data-testid') === id);
- const findContainerRegistryPopover = () => findPopoverById(containerRegistryPopoverId);
- const findContainerRegistryWarningIcon = () => wrapper.find(`#${containerRegistryPopoverId}`);
beforeEach(() => {
createComponent();
@@ -51,33 +56,23 @@ describe('ProjectStorageDetail', () => {
describe('with storage types', () => {
it.each(storageTypes)(
'renders table row correctly %o',
- ({ storageType: { id, name, description } }) => {
+ ({ id, name, value, description, helpPath, warning }) => {
expect(wrapper.findByTestId(`${id}-name`).text()).toBe(name);
expect(wrapper.findByTestId(`${id}-description`).text()).toBe(description);
expect(wrapper.findByTestId(`${id}-icon`).props('name')).toBe(id);
- expect(wrapper.findByTestId(`${id}-help-link`).attributes('href')).toBe(
- projectHelpLinks[id],
- );
+ expect(wrapper.findByTestId(`${id}-help-link`).attributes('href')).toBe(helpPath);
+ expect(wrapper.findByTestId(`${id}-value`).text()).toContain(numberToHumanSize(value, 1));
+
+ expect(wrapper.findByTestId(`${id}-warning-icon`).exists()).toBe(Boolean(warning));
+ expect(wrapper.findByTestId(`${id}-popover`).exists()).toBe(Boolean(warning));
},
);
-
- it('should render items in order from the biggest usage size to the smallest', () => {
- const rows = findTable().find('tbody').findAll('tr');
- // Cloning array not to mutate the source
- const sortedStorageTypes = [...storageTypes].sort((a, b) => b.value - a.value);
-
- sortedStorageTypes.forEach((storageType, i) => {
- const rowUsageAmount = rows.wrappers[i].find('td:last-child').text();
- const expectedUsageAmount = numberToHumanSize(storageType.value, 1);
- expect(rowUsageAmount).toBe(expectedUsageAmount);
- });
- });
});
describe('with details links', () => {
it.each(storageTypes)('each $storageType.id', (item) => {
- const shouldExist = Boolean(item.storageType.detailsPath && item.value);
- const detailsLink = wrapper.findByTestId(`${item.storageType.id}-details-link`);
+ const shouldExist = Boolean(item.detailsPath && item.value);
+ const detailsLink = wrapper.findByTestId(`${item.id}-details-link`);
expect(detailsLink.exists()).toBe(shouldExist);
});
});
@@ -95,21 +90,4 @@ describe('ProjectStorageDetail', () => {
expect(findTable().find('td').exists()).toBe(false);
});
});
-
- describe.each`
- description | mockStorageTypes | rendersContainerRegistryPopover
- ${'without any storage type that has popover'} | ${[generateStorageType()]} | ${false}
- ${'with container registry storage type'} | ${[generateStorageType(containerRegistryId)]} | ${true}
- `('$description', ({ mockStorageTypes, rendersContainerRegistryPopover }) => {
- beforeEach(() => {
- createComponent({ storageTypes: mockStorageTypes });
- });
-
- it(`does ${
- rendersContainerRegistryPopover ? '' : ' not'
- } render container registry warning icon and popover`, () => {
- expect(findContainerRegistryWarningIcon().exists()).toBe(rendersContainerRegistryPopover);
- expect(findContainerRegistryPopover().exists()).toBe(rendersContainerRegistryPopover);
- });
- });
});
diff --git a/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js b/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js
index 7fef20c900e..fc116211bf0 100644
--- a/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js
+++ b/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js
@@ -44,7 +44,6 @@ describe('UsageGraph', () => {
buildArtifactsSize,
lfsObjectsSize,
packagesSize,
- containerRegistrySize,
repositorySize,
wikiSize,
snippetsSize,
@@ -57,14 +56,11 @@ describe('UsageGraph', () => {
expect(types.at(2).text()).toMatchInterpolatedText(
`Packages ${numberToHumanSize(packagesSize)}`,
);
- expect(types.at(3).text()).toMatchInterpolatedText(
- `Container Registry ${numberToHumanSize(containerRegistrySize)}`,
- );
- expect(types.at(4).text()).toMatchInterpolatedText(`LFS ${numberToHumanSize(lfsObjectsSize)}`);
- expect(types.at(5).text()).toMatchInterpolatedText(
+ expect(types.at(3).text()).toMatchInterpolatedText(`LFS ${numberToHumanSize(lfsObjectsSize)}`);
+ expect(types.at(4).text()).toMatchInterpolatedText(
`Snippets ${numberToHumanSize(snippetsSize)}`,
);
- expect(types.at(6).text()).toMatchInterpolatedText(
+ expect(types.at(5).text()).toMatchInterpolatedText(
`Job artifacts ${numberToHumanSize(buildArtifactsSize)}`,
);
});
@@ -102,7 +98,6 @@ describe('UsageGraph', () => {
'0.29411764705882354',
'0.23529411764705882',
'0.17647058823529413',
- '0.14705882352941177',
'0.11764705882352941',
'0.11764705882352941',
'0.041176470588235294',
@@ -121,7 +116,6 @@ describe('UsageGraph', () => {
'0.29411764705882354',
'0.23529411764705882',
'0.17647058823529413',
- '0.14705882352941177',
'0.11764705882352941',
'0.11764705882352941',
'0.041176470588235294',
diff --git a/spec/frontend/usage_quotas/storage/mock_data.js b/spec/frontend/usage_quotas/storage/mock_data.js
index 452fa83b9a7..16c03a13028 100644
--- a/spec/frontend/usage_quotas/storage/mock_data.js
+++ b/spec/frontend/usage_quotas/storage/mock_data.js
@@ -3,95 +3,6 @@ import mockGetProjectStorageStatisticsGraphQLResponse from 'test_fixtures/graphq
export { mockGetProjectStorageStatisticsGraphQLResponse };
export const mockEmptyResponse = { data: { project: null } };
-export const projectData = {
- storage: {
- totalUsage: '13.4 MiB',
- storageTypes: [
- {
- storageType: {
- id: 'containerRegistry',
- name: 'Container Registry',
- description: 'Gitlab-integrated Docker Container Registry for storing Docker Images.',
- helpPath: '/container_registry',
- detailsPath: 'http://localhost/frontend-fixtures/builds-project/container_registry',
- },
- value: 3900000,
- },
- {
- storageType: {
- id: 'buildArtifacts',
- name: 'Job artifacts',
- description: 'Job artifacts created by CI/CD.',
- helpPath: '/build-artifacts',
- detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/artifacts',
- },
- value: 400000,
- },
- {
- storageType: {
- id: 'lfsObjects',
- name: 'LFS',
- description: 'Audio samples, videos, datasets, and graphics.',
- helpPath: '/lsf-objects',
- },
- value: 4800000,
- },
- {
- storageType: {
- id: 'packages',
- name: 'Packages',
- description: 'Code packages and container images.',
- helpPath: '/packages',
- detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/packages',
- },
- value: 3800000,
- },
- {
- storageType: {
- id: 'repository',
- name: 'Repository',
- description: 'Git repository.',
- helpPath: '/repository',
- detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/tree/master',
- },
- value: 3900000,
- },
- {
- storageType: {
- id: 'snippets',
- name: 'Snippets',
- description: 'Shared bits of code and text.',
- helpPath: '/snippets',
- detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/snippets',
- },
- value: 0,
- },
- {
- storageType: {
- id: 'wiki',
- name: 'Wiki',
- description: 'Wiki content.',
- helpPath: '/wiki',
- detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/wikis/pages',
- },
- value: 300000,
- },
- ],
- },
-};
-
-export const projectHelpLinks = {
- containerRegistry: '/container_registry',
- usageQuotas: '/usage-quotas',
- buildArtifacts: '/build-artifacts',
- lfsObjects: '/lsf-objects',
- packages: '/packages',
- repository: '/repository',
- snippets: '/snippets',
- wiki: '/wiki',
-};
-
export const defaultProjectProvideValues = {
projectPath: '/project-path',
- helpLinks: projectHelpLinks,
};
diff --git a/spec/frontend/usage_quotas/storage/utils_spec.js b/spec/frontend/usage_quotas/storage/utils_spec.js
index e3a271adc57..dd05e105c26 100644
--- a/spec/frontend/usage_quotas/storage/utils_spec.js
+++ b/spec/frontend/usage_quotas/storage/utils_spec.js
@@ -1,15 +1,9 @@
-import cloneDeep from 'lodash/cloneDeep';
import { PROJECT_STORAGE_TYPES } from '~/usage_quotas/storage/constants';
import {
- parseGetProjectStorageResults,
getStorageTypesFromProjectStatistics,
descendingStorageUsageSort,
} from '~/usage_quotas/storage/utils';
-import {
- mockGetProjectStorageStatisticsGraphQLResponse,
- defaultProjectProvideValues,
- projectData,
-} from './mock_data';
+import { mockGetProjectStorageStatisticsGraphQLResponse } from './mock_data';
describe('getStorageTypesFromProjectStatistics', () => {
const {
@@ -18,15 +12,18 @@ describe('getStorageTypesFromProjectStatistics', () => {
} = mockGetProjectStorageStatisticsGraphQLResponse.data.project;
describe('matches project statistics value with matching storage type', () => {
- const typesWithStats = getStorageTypesFromProjectStatistics(projectStatistics);
+ const typesWithStats = getStorageTypesFromProjectStatistics(
+ PROJECT_STORAGE_TYPES,
+ projectStatistics,
+ );
it.each(PROJECT_STORAGE_TYPES)('storage type: $id', ({ id }) => {
- expect(typesWithStats).toContainEqual({
- storageType: expect.objectContaining({
+ expect(typesWithStats).toContainEqual(
+ expect.objectContaining({
id,
+ value: projectStatistics[`${id}Size`],
}),
- value: projectStatistics[`${id}Size`],
- });
+ );
});
});
@@ -38,57 +35,31 @@ describe('getStorageTypesFromProjectStatistics', () => {
};
}, {});
- const typesWithStats = getStorageTypesFromProjectStatistics(projectStatistics, helpLinks);
+ const typesWithStats = getStorageTypesFromProjectStatistics(
+ PROJECT_STORAGE_TYPES,
+ projectStatistics,
+ {},
+ helpLinks,
+ );
typesWithStats.forEach((type) => {
- const key = type.storageType.id;
- expect(type.storageType.helpPath).toBe(helpLinks[key]);
+ expect(type.helpPath).toBe(helpLinks[type.id]);
});
});
it('adds details page path', () => {
const typesWithStats = getStorageTypesFromProjectStatistics(
+ PROJECT_STORAGE_TYPES,
projectStatistics,
- {},
statisticsDetailsPaths,
+ {},
);
typesWithStats.forEach((type) => {
- expect(type.storageType.detailsPath).toBe(statisticsDetailsPaths[type.storageType.id]);
+ expect(type.detailsPath).toBe(statisticsDetailsPaths[type.id]);
});
});
});
-describe('parseGetProjectStorageResults', () => {
- it('parses project statistics correctly', () => {
- expect(
- parseGetProjectStorageResults(
- mockGetProjectStorageStatisticsGraphQLResponse.data,
- defaultProjectProvideValues.helpLinks,
- ),
- ).toMatchObject(projectData);
- });
-
- it('includes storage type with size of 0 in returned value', () => {
- const mockedResponse = cloneDeep(mockGetProjectStorageStatisticsGraphQLResponse.data);
- // ensuring a specific storage type item has size of 0
- mockedResponse.project.statistics.repositorySize = 0;
-
- const response = parseGetProjectStorageResults(
- mockedResponse,
- defaultProjectProvideValues.helpLinks,
- );
-
- expect(response.storage.storageTypes).toEqual(
- expect.arrayContaining([
- {
- storageType: expect.any(Object),
- value: 0,
- },
- ]),
- );
- });
-});
-
describe('descendingStorageUsageSort', () => {
it('sorts items by a given key in descending order', () => {
const items = [{ k: 1 }, { k: 3 }, { k: 2 }];
diff --git a/spec/frontend/user_lists/components/edit_user_list_spec.js b/spec/frontend/user_lists/components/edit_user_list_spec.js
index 21a883aefe0..5656c5ebf60 100644
--- a/spec/frontend/user_lists/components/edit_user_list_spec.js
+++ b/spec/frontend/user_lists/components/edit_user_list_spec.js
@@ -1,6 +1,7 @@
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
diff --git a/spec/frontend/user_lists/components/new_user_list_spec.js b/spec/frontend/user_lists/components/new_user_list_spec.js
index 004cfb6ca07..f2c4d29d05a 100644
--- a/spec/frontend/user_lists/components/new_user_list_spec.js
+++ b/spec/frontend/user_lists/components/new_user_list_spec.js
@@ -1,6 +1,7 @@
import { GlAlert } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
diff --git a/spec/frontend/user_lists/components/user_list_spec.js b/spec/frontend/user_lists/components/user_list_spec.js
index e02862cad2b..286fb9fef5f 100644
--- a/spec/frontend/user_lists/components/user_list_spec.js
+++ b/spec/frontend/user_lists/components/user_list_spec.js
@@ -2,6 +2,7 @@ import { GlAlert, GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { uniq } from 'lodash';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Api from '~/api';
import UserList from '~/user_lists/components/user_list.vue';
diff --git a/spec/frontend/user_lists/components/user_lists_spec.js b/spec/frontend/user_lists/components/user_lists_spec.js
index 2da2eb0dd5f..ec892104a1c 100644
--- a/spec/frontend/user_lists/components/user_lists_spec.js
+++ b/spec/frontend/user_lists/components/user_lists_spec.js
@@ -2,6 +2,7 @@ import { GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
import { within } from '@testing-library/dom';
import { mount, createWrapper } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
@@ -19,7 +20,7 @@ describe('~/user_lists/components/user_lists.vue', () => {
const mockProvide = {
newUserListPath: '/user-lists/new',
featureFlagsHelpPagePath: '/help/feature-flags',
- errorStateSvgPath: '/assets/illustrations/feature_flag.svg',
+ errorStateSvgPath: '/assets/illustrations/empty-state/empty-feature-flag-md.svg',
};
const mockState = {
diff --git a/spec/frontend/users/profile/actions/components/user_actions_app_spec.js b/spec/frontend/users/profile/actions/components/user_actions_app_spec.js
index d27962440ee..a33474375e6 100644
--- a/spec/frontend/users/profile/actions/components/user_actions_app_spec.js
+++ b/spec/frontend/users/profile/actions/components/user_actions_app_spec.js
@@ -1,16 +1,19 @@
import { GlDisclosureDropdown } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import UserActionsApp from '~/users/profile/actions/components/user_actions_app.vue';
+import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
describe('User Actions App', () => {
let wrapper;
const USER_ID = 'test-id';
+ const DEFAULT_SUBSCRIPTION_PATH = '';
const createWrapper = (propsData = {}) => {
wrapper = mountExtended(UserActionsApp, {
propsData: {
userId: USER_ID,
+ rssSubscriptionPath: DEFAULT_SUBSCRIPTION_PATH,
...propsData,
},
});
@@ -19,15 +22,34 @@ describe('User Actions App', () => {
const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findActions = () => wrapper.findAllByTestId('disclosure-dropdown-item');
const findAction = (position = 0) => findActions().at(position);
+ const findSubscriptionLink = () => wrapper.findByTestId('user-profile-rss-subscription-link');
it('shows dropdown', () => {
createWrapper();
expect(findDropdown().exists()).toBe(true);
});
- it('shows actions correctly', () => {
- createWrapper();
- expect(findActions()).toHaveLength(1);
+ describe('shows user action items', () => {
+ it('should show items without RSS subscriptions', () => {
+ createWrapper();
+ expect(findActions()).toHaveLength(1);
+ });
+
+ it('should show items with RSS subscriptions', () => {
+ createWrapper({
+ rssSubscriptionPath: '/test/path',
+ });
+ expect(findActions()).toHaveLength(2);
+ });
+
+ it('should show items with report abuse', () => {
+ createWrapper({
+ rssSubscriptionPath: '/test/path',
+ reportedUserId: 1,
+ reportedFromUrl: '/report/path',
+ });
+ expect(findActions()).toHaveLength(3);
+ });
});
it('shows copy user id action', () => {
@@ -35,4 +57,34 @@ describe('User Actions App', () => {
expect(findAction().text()).toBe(`Copy user ID: ${USER_ID}`);
expect(findAction().findComponent('button').attributes('data-clipboard-text')).toBe(USER_ID);
});
+
+ it('shows subscription link when subscription url was presented', () => {
+ const testSubscriptionPath = '/test/path';
+
+ createWrapper({
+ rssSubscriptionPath: testSubscriptionPath,
+ });
+
+ const rssLink = findSubscriptionLink();
+ expect(rssLink.exists()).toBe(true);
+ expect(rssLink.attributes('href')).toBe(testSubscriptionPath);
+ expect(rssLink.text()).toBe('Subscribe');
+ });
+
+ it('shows report abuse action when reported user id was presented', () => {
+ const reportUrl = '/path/to/report';
+ const reportUserId = 1;
+ createWrapper({
+ rssSubscriptionPath: '/test/path',
+ reportedUserId: reportUserId,
+ reportedFromUrl: reportUrl,
+ });
+ const abuseCategorySelector = wrapper.findComponent(AbuseCategorySelector);
+ expect(abuseCategorySelector.exists()).toBe(true);
+ expect(abuseCategorySelector.props()).toEqual({
+ reportedUserId: reportUserId,
+ reportedFromUrl: reportUrl,
+ showDrawer: false,
+ });
+ });
});
diff --git a/spec/frontend/users/profile/components/report_abuse_button_spec.js b/spec/frontend/users/profile/components/report_abuse_button_spec.js
index 1ef856c9849..1ca944dce12 100644
--- a/spec/frontend/users/profile/components/report_abuse_button_spec.js
+++ b/spec/frontend/users/profile/components/report_abuse_button_spec.js
@@ -1,7 +1,7 @@
import { GlButton } from '@gitlab/ui';
+import { createWrapper } from '@vue/test-utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
-
import ReportAbuseButton from '~/users/profile/components/report_abuse_button.vue';
import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
@@ -41,7 +41,7 @@ describe('ReportAbuseButton', () => {
});
expect(findReportAbuseButton().attributes('aria-label')).toBe(
- wrapper.vm.$options.i18n.reportAbuse,
+ ReportAbuseButton.i18n.reportAbuse,
);
});
@@ -69,11 +69,11 @@ describe('ReportAbuseButton', () => {
describe('when user hovers out of the button', () => {
it(`should emit ${BV_HIDE_TOOLTIP} to close the tooltip`, () => {
- jest.spyOn(wrapper.vm.$root, '$emit');
+ const rootWrapper = createWrapper(wrapper.vm.$root);
findReportAbuseButton().vm.$emit('mouseout');
- expect(wrapper.vm.$root.$emit).toHaveBeenCalledWith(BV_HIDE_TOOLTIP);
+ expect(rootWrapper.emitted(BV_HIDE_TOOLTIP)).toHaveLength(1);
});
});
});
diff --git a/spec/frontend/vue3migration/compiler_spec.js b/spec/frontend/vue3migration/compiler_spec.js
index 3623f69fe07..bac905d3fd0 100644
--- a/spec/frontend/vue3migration/compiler_spec.js
+++ b/spec/frontend/vue3migration/compiler_spec.js
@@ -28,11 +28,11 @@ describe('Vue.js 3 compiler edge cases', () => {
it('treats empty slots with comments as empty', () => {
const wrapper = mount(SlotWithComment);
- expect(wrapper.html()).toBe('<div>Simple</div>');
+ expect(wrapper.html()).toBe('<div>SimpleComponent</div>');
});
it('treats empty default slot with comments as empty', () => {
const wrapper = mount(DefaultSlotWithComment);
- expect(wrapper.html()).toBe('<div>Simple</div>');
+ expect(wrapper.html()).toBe('<div>SimpleComponent</div>');
});
});
diff --git a/spec/frontend/vue3migration/components/default_slot_with_comment.vue b/spec/frontend/vue3migration/components/default_slot_with_comment.vue
index d2589104a5d..32cfa966e34 100644
--- a/spec/frontend/vue3migration/components/default_slot_with_comment.vue
+++ b/spec/frontend/vue3migration/components/default_slot_with_comment.vue
@@ -1,18 +1,18 @@
<script>
-import Simple from './simple.vue';
+import SimpleComponent from './simple_component.vue';
export default {
components: {
- Simple,
+ SimpleComponent,
},
};
</script>
<template>
- <simple>
+ <simple-component>
<!-- slot comment typical for gitlab-ui, for example -->
<!-- slot comment typical for gitlab-ui, for example -->
<slot></slot>
<!-- slot comment typical for gitlab-ui, for example -->
<!-- slot comment typical for gitlab-ui, for example -->
- </simple>
+ </simple-component>
</template>
diff --git a/spec/frontend/vue3migration/components/simple.vue b/spec/frontend/vue3migration/components/simple_component.vue
index 1d9854b5b4d..36ff341221f 100644
--- a/spec/frontend/vue3migration/components/simple.vue
+++ b/spec/frontend/vue3migration/components/simple_component.vue
@@ -1,6 +1,6 @@
<script>
export default {
- name: 'Simple',
+ name: 'SimpleComponent',
};
</script>
<template>
diff --git a/spec/frontend/vue3migration/components/slot_with_comment.vue b/spec/frontend/vue3migration/components/slot_with_comment.vue
index 56bb41e432f..af858deeb5e 100644
--- a/spec/frontend/vue3migration/components/slot_with_comment.vue
+++ b/spec/frontend/vue3migration/components/slot_with_comment.vue
@@ -1,14 +1,14 @@
<script>
-import Simple from './simple.vue';
+import SimpleComponent from './simple_component.vue';
export default {
components: {
- Simple,
+ SimpleComponent,
},
};
</script>
<template>
- <simple>
+ <simple-component>
<template #default>
<!-- slot comment typical for gitlab-ui, for example -->
<!-- slot comment typical for gitlab-ui, for example -->
@@ -16,5 +16,5 @@ export default {
<!-- slot comment typical for gitlab-ui, for example -->
<!-- slot comment typical for gitlab-ui, for example -->
</template>
- </simple>
+ </simple-component>
</template>
diff --git a/spec/frontend/vue3migration/components/slots_with_same_name.vue b/spec/frontend/vue3migration/components/slots_with_same_name.vue
index 37604cd9f6e..f3107f41d8b 100644
--- a/spec/frontend/vue3migration/components/slots_with_same_name.vue
+++ b/spec/frontend/vue3migration/components/slots_with_same_name.vue
@@ -1,14 +1,14 @@
<script>
-import Simple from './simple.vue';
+import SimpleComponent from './simple_component.vue';
export default {
name: 'SlotsWithSameName',
- components: { Simple },
+ components: { SimpleComponent },
};
</script>
<template>
- <simple>
+ <simple-component>
<template v-if="true" #default>{{ $options.name }}</template>
<template v-else #default>{{ $options.name }}</template>
- </simple>
+ </simple-component>
</template>
diff --git a/spec/frontend/vue_compat_test_setup.js b/spec/frontend/vue_compat_test_setup.js
index fe43f8f2617..ad1230f2ca9 100644
--- a/spec/frontend/vue_compat_test_setup.js
+++ b/spec/frontend/vue_compat_test_setup.js
@@ -21,6 +21,13 @@ function isLegacyExtendedComponent(component) {
function unwrapLegacyVueExtendComponent(selector) {
return isLegacyExtendedComponent(selector) ? selector.options : selector;
}
+function getStubProps(component) {
+ const stubProps = { ...component.props };
+ component.mixins?.forEach((mixin) => {
+ Object.assign(stubProps, unwrapLegacyVueExtendComponent(mixin).props);
+ });
+ return stubProps;
+}
if (global.document) {
const compatConfig = {
@@ -148,33 +155,27 @@ if (global.document) {
return true;
};
- VTU.config.plugins.createStubs = ({ name, component: rawComponent, registerStub }) => {
+ VTU.config.plugins.createStubs = ({ name, component: rawComponent, registerStub, stubs }) => {
const component = unwrapLegacyVueExtendComponent(rawComponent);
const hyphenatedName = name.replace(/\B([A-Z])/g, '-$1').toLowerCase();
+ const stubTag = stubs?.[name] ? name : hyphenatedName;
const stub = Vue.defineComponent({
name: getComponentName(component),
- props: component.props,
- model: component.model,
+ props: getStubProps(component),
+ model: component.model ?? component.mixins?.find((m) => m.model),
methods: Object.fromEntries(
Object.entries(component.methods ?? {}).map(([key]) => [key, noop]),
),
render() {
- const {
- $slots: slots = {},
- $scopedSlots: scopedSlots = {},
- $parent: parent,
- $vnode: vnode,
- } = this;
-
- const hasStaticDefaultSlot = 'default' in slots && !('default' in scopedSlots);
- const isTheOnlyChild = parent?.$.subTree === vnode;
- // this condition should be altered when https://github.com/vuejs/vue-test-utils/pull/2068 is merged
- // and our codebase will be updated to include it (@vue/test-utils@1.3.6 I assume)
- const shouldRenderAllSlots = !hasStaticDefaultSlot && isTheOnlyChild;
+ const { $scopedSlots: scopedSlots = {} } = this;
+
+ // eslint-disable-next-line no-underscore-dangle
+ const hasDefaultSlot = 'default' in scopedSlots && scopedSlots.default._ns;
+ const shouldRenderAllSlots = !component.functional && !hasDefaultSlot;
const renderSlotByName = (slotName) => {
- const slot = scopedSlots[slotName] || slots[slotName];
+ const slot = scopedSlots[slotName];
let result;
if (typeof slot === 'function') {
try {
@@ -189,16 +190,16 @@ if (global.document) {
};
const slotContents = shouldRenderAllSlots
- ? [...new Set([...Object.keys(slots), ...Object.keys(scopedSlots)])]
- .map(renderSlotByName)
- .filter(Boolean)
+ ? Object.keys(scopedSlots).map(renderSlotByName).filter(Boolean)
: renderSlotByName('default');
const props = Object.fromEntries(
- Object.entries(this.$props).filter(([prop]) => isPropertyValidOnDomNode(prop)),
+ Object.entries(this.$props)
+ .filter(([prop]) => isPropertyValidOnDomNode(prop))
+ .map(([key, value]) => [key, typeof value === 'function' ? '[Function]' : value]),
);
- return Vue.h(`${hyphenatedName || 'anonymous'}-stub`, props, slotContents);
+ return Vue.h(`${stubTag || 'anonymous'}-stub`, props, slotContents);
},
});
diff --git a/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
index 9516aacea0a..79cfd2fd1ed 100644
--- a/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
@@ -2,6 +2,7 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { TEST_HOST as FAKE_ENDPOINT } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
index 820e486c13f..a0064224b46 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
@@ -6,7 +6,7 @@ import { trimText } from 'helpers/text_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import MRWidgetPipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
-import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
+import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import { SUCCESS } from '~/vue_merge_request_widget/constants';
import mockData from '../mock_data';
@@ -28,6 +28,7 @@ describe('MRWidgetPipeline', () => {
const findCIErrorMessage = () => wrapper.findByTestId('ci-error-message');
const findPipelineID = () => wrapper.findByTestId('pipeline-id');
const findPipelineInfoContainer = () => wrapper.findByTestId('pipeline-info-container');
+ const findPipelineDetailsContainer = () => wrapper.findByTestId('pipeline-details-container');
const findCommitLink = () => wrapper.findByTestId('commit-link');
const findPipelineFinishedAt = () => wrapper.findByTestId('finished-at');
const findPipelineCoverage = () => wrapper.findByTestId('pipeline-coverage');
@@ -36,7 +37,7 @@ describe('MRWidgetPipeline', () => {
wrapper.findByTestId('pipeline-coverage-tooltip').text();
const findPipelineCoverageDeltaTooltipText = () =>
wrapper.findByTestId('pipeline-coverage-delta-tooltip').text();
- const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
+ const findLegacyPipelineMiniGraph = () => wrapper.findComponent(LegacyPipelineMiniGraph);
const findMonitoringPipelineMessage = () => wrapper.findByTestId('monitoring-pipeline-message');
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
@@ -100,8 +101,8 @@ describe('MRWidgetPipeline', () => {
it('should render pipeline graph', () => {
const stagesCount = mockData.pipeline.details.stages.length;
- expect(findPipelineMiniGraph().exists()).toBe(true);
- expect(findPipelineMiniGraph().props('stages')).toHaveLength(stagesCount);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(true);
+ expect(findLegacyPipelineMiniGraph().props('stages')).toHaveLength(stagesCount);
});
it('should render the latest downstream pipelines only', () => {
@@ -109,7 +110,7 @@ describe('MRWidgetPipeline', () => {
// because we retried the trigger job, so the mini pipeline graph will only
// render the newly created downstream pipeline instead
expect(mockData.pipeline.triggered).toHaveLength(2);
- expect(findPipelineMiniGraph().props('downstreamPipelines')).toHaveLength(1);
+ expect(findLegacyPipelineMiniGraph().props('downstreamPipelines')).toHaveLength(1);
});
describe('should render pipeline coverage information', () => {
@@ -181,8 +182,8 @@ describe('MRWidgetPipeline', () => {
it('should render pipeline graph', () => {
const stagesCount = mockData.pipeline.details.stages.length;
- expect(findPipelineMiniGraph().exists()).toBe(true);
- expect(findPipelineMiniGraph().props('stages')).toHaveLength(stagesCount);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(true);
+ expect(findLegacyPipelineMiniGraph().props('stages')).toHaveLength(stagesCount);
});
it('should render coverage information', () => {
@@ -214,7 +215,7 @@ describe('MRWidgetPipeline', () => {
});
it('should not render a pipeline graph', () => {
- expect(findPipelineMiniGraph().exists()).toBe(false);
+ expect(findLegacyPipelineMiniGraph().exists()).toBe(false);
});
});
@@ -238,43 +239,77 @@ describe('MRWidgetPipeline', () => {
};
describe('for a branch pipeline', () => {
- it('renders a pipeline widget that reads "Pipeline <ID> <status> for <SHA> on <branch>"', () => {
+ it('renders a pipeline widget that reads "Pipeline <ID> <status>"', () => {
pipeline.ref.branch = true;
factory();
- const expected = `Pipeline #${pipeline.id} ${pipeline.details.status.label} for ${pipeline.commit.short_id} on ${mockData.source_branch_link}`;
+ const expected = `Pipeline #${pipeline.id} ${pipeline.details.status.label}`;
const actual = trimText(findPipelineInfoContainer().text());
expect(actual).toBe(expected);
});
+
+ it('renders a pipeline widget that reads "Pipeline <status> for <SHA> on <branch>"', () => {
+ pipeline.ref.branch = true;
+
+ factory();
+
+ const expected = `Pipeline ${pipeline.details.status.label} for ${pipeline.commit.short_id} on ${mockData.source_branch_link}`;
+ const actual = trimText(findPipelineDetailsContainer().text());
+
+ expect(actual).toBe(expected);
+ });
});
describe('for a tag pipeline', () => {
- it('renders a pipeline widget that reads "Pipeline <ID> <status> for <SHA> on <branch>"', () => {
+ it('renders a pipeline widget that reads "Pipeline <ID> <status>"', () => {
pipeline.ref.tag = true;
factory();
- const expected = `Pipeline #${pipeline.id} ${pipeline.details.status.label} for ${pipeline.commit.short_id}`;
+ const expected = `Pipeline #${pipeline.id} ${pipeline.details.status.label}`;
const actual = trimText(findPipelineInfoContainer().text());
expect(actual).toBe(expected);
});
+
+ it('renders a pipeline widget that reads "Pipeline <status> for <SHA> on <branch>"', () => {
+ pipeline.ref.tag = true;
+
+ factory();
+
+ const expected = `Pipeline ${pipeline.details.status.label} for ${pipeline.commit.short_id}`;
+ const actual = trimText(findPipelineDetailsContainer().text());
+
+ expect(actual).toBe(expected);
+ });
});
describe('for a detached merge request pipeline', () => {
- it('renders a pipeline widget that reads "Merge request pipeline <ID> <status> for <SHA>"', () => {
+ it('renders a pipeline widget that reads "Merge request pipeline <ID> <status>"', () => {
pipeline.details.event_type_name = 'Merge request pipeline';
pipeline.merge_request_event_type = 'detached';
factory();
- const expected = `Merge request pipeline #${pipeline.id} ${pipeline.details.status.label} for ${pipeline.commit.short_id}`;
+ const expected = `Merge request pipeline #${pipeline.id} ${pipeline.details.status.label}`;
const actual = trimText(findPipelineInfoContainer().text());
expect(actual).toBe(expected);
});
+
+ it('renders a pipeline widget that reads "Merge request pipeline <status> for <SHA>"', () => {
+ pipeline.details.event_type_name = 'Merge request pipeline';
+ pipeline.merge_request_event_type = 'detached';
+
+ factory();
+
+ const expected = `Merge request pipeline ${pipeline.details.status.label} for ${pipeline.commit.short_id}`;
+ const actual = trimText(findPipelineDetailsContainer().text());
+
+ expect(actual).toBe(expected);
+ });
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
index 02d17b8dfd2..ce4bf11f16b 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
+++ b/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
@@ -7,7 +7,7 @@ exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue render
<div class=\\"gl-display-flex\\">
<div class=\\"gl-mb-2\\"><strong class=\\"gl-display-block\\">This is a header</strong><span class=\\"gl-display-block\\">This is a subheader</span></div>
<div class=\\"gl-ml-auto gl-display-flex gl-align-items-baseline\\">
- <help-popover-stub options=\\"[object Object]\\" icon=\\"information-o\\" class=\\"\\">
+ <help-popover-stub options=\\"[object Object]\\" icon=\\"information-o\\" triggerclass=\\"\\" class=\\"\\">
<p class=\\"gl-mb-0\\">Widget help popover content</p>
<!---->
</help-popover-stub>
@@ -18,15 +18,17 @@ exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue render
<status-icon-stub level=\\"2\\" name=\\"MyWidget\\" iconname=\\"success\\"></status-icon-stub>
<div class=\\"gl-w-full gl-display-flex\\">
<div class=\\"gl-display-flex gl-flex-grow-1\\">
- <div class=\\"gl-display-flex gl-flex-grow-1 gl-flex-direction-column\\">
- <p class=\\"gl-mb-0 gl-mr-1\\">Main text for the row</p>
- <gl-link-stub href=\\"https://gitlab.com\\">Optional link to display after text</gl-link-stub>
- <!---->
+ <div class=\\"gl-display-flex gl-flex-grow-1 gl-align-items-baseline\\">
+ <div>
+ <p class=\\"gl-mb-0 gl-mr-1\\">Main text for the row</p>
+ <gl-link-stub href=\\"https://gitlab.com\\">Optional link to display after text</gl-link-stub>
+ <!---->
+ </div>
<gl-badge-stub size=\\"md\\" variant=\\"info\\" iconsize=\\"md\\">
Badge is optional. Text to be displayed inside badge
</gl-badge-stub>
</div>
- <actions-stub widget=\\"MyWidget\\" tertiarybuttons=\\"\\" class=\\"gl-ml-auto gl-pl-3\\"></actions-stub>
+ <!---->
<p class=\\"gl-m-0 gl-font-sm\\">Optional: Smaller sub-text to be displayed below the main text</p>
</div>
<ul class=\\"gl-m-0 gl-p-0 gl-list-style-none\\">
@@ -44,13 +46,15 @@ exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue render
<!---->
<div class=\\"gl-w-full gl-display-flex\\">
<div class=\\"gl-display-flex gl-flex-grow-1\\">
- <div class=\\"gl-display-flex gl-flex-grow-1 gl-flex-direction-column\\">
- <p class=\\"gl-mb-0 gl-mr-1\\">This is recursive. It will be listed in level 3.</p>
- <!---->
- <!---->
+ <div class=\\"gl-display-flex gl-flex-grow-1 gl-align-items-baseline\\">
+ <div>
+ <p class=\\"gl-mb-0 gl-mr-1\\">This is recursive. It will be listed in level 3.</p>
+ <!---->
+ <!---->
+ </div>
<!---->
</div>
- <actions-stub widget=\\"MyWidget\\" tertiarybuttons=\\"\\" class=\\"gl-ml-auto gl-pl-3\\"></actions-stub>
+ <!---->
<!---->
</div>
<!---->
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
index 9343a3a5e90..18fdba32f52 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
@@ -121,14 +121,15 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
});
describe('fetch', () => {
- it('sets the data.collapsed property after a successfull call - multiPolling: false', async () => {
+ it('calls fetchCollapsedData properly when multiPolling is false', async () => {
const mockData = { headers: {}, status: HTTP_STATUS_OK, data: { vulnerabilities: [] } };
- createComponent({ propsData: { fetchCollapsedData: () => Promise.resolve(mockData) } });
+ const fetchCollapsedData = jest.fn().mockResolvedValue(mockData);
+ createComponent({ propsData: { fetchCollapsedData } });
await waitForPromises();
- expect(wrapper.emitted('input')[0][0]).toEqual({ collapsed: mockData.data, expanded: null });
+ expect(fetchCollapsedData).toHaveBeenCalledTimes(1);
});
- it('sets the data.collapsed property after a successfull call - multiPolling: true', async () => {
+ it('calls fetchCollapsedData properly when multiPolling is true', async () => {
const mockData1 = {
headers: {},
status: HTTP_STATUS_OK,
@@ -140,22 +141,22 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
data: { vulnerabilities: [{ vuln: 2 }] },
};
+ const fetchCollapsedData = [
+ jest.fn().mockResolvedValue(mockData1),
+ jest.fn().mockResolvedValue(mockData2),
+ ];
+
createComponent({
propsData: {
multiPolling: true,
- fetchCollapsedData: () => [
- () => Promise.resolve(mockData1),
- () => Promise.resolve(mockData2),
- ],
+ fetchCollapsedData: () => fetchCollapsedData,
},
});
await waitForPromises();
- expect(wrapper.emitted('input')[0][0]).toEqual({
- collapsed: [mockData1.data, mockData2.data],
- expanded: null,
- });
+ expect(fetchCollapsedData[0]).toHaveBeenCalledTimes(1);
+ expect(fetchCollapsedData[1]).toHaveBeenCalledTimes(1);
});
it('throws an error when the handler does not include headers or status objects', async () => {
@@ -328,11 +329,12 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
};
const fetchExpandedData = jest.fn().mockResolvedValue(mockDataExpanded);
+ const fetchCollapsedData = jest.fn().mockResolvedValue(mockDataCollapsed);
await createComponent({
propsData: {
isCollapsible: true,
- fetchCollapsedData: () => Promise.resolve(mockDataCollapsed),
+ fetchCollapsedData,
fetchExpandedData,
},
});
@@ -340,17 +342,8 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
findToggleButton().vm.$emit('click');
await waitForPromises();
- // First fetches the collapsed data
- expect(wrapper.emitted('input')[0][0]).toEqual({
- collapsed: mockDataCollapsed.data,
- expanded: null,
- });
-
- // Then fetches the expanded data
- expect(wrapper.emitted('input')[1][0]).toEqual({
- collapsed: null,
- expanded: mockDataExpanded.data,
- });
+ expect(fetchCollapsedData).toHaveBeenCalledTimes(1);
+ expect(fetchExpandedData).toHaveBeenCalledTimes(1);
// Triggering a click does not call the expanded data again
findToggleButton().vm.$emit('click');
@@ -371,14 +364,7 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
findToggleButton().vm.$emit('click');
await waitForPromises();
- // First fetches the collapsed data
- expect(wrapper.emitted('input')[0][0]).toEqual({
- collapsed: undefined,
- expanded: null,
- });
-
expect(fetchExpandedData).toHaveBeenCalledTimes(1);
- expect(wrapper.emitted('input')).toHaveLength(1); // Should not an emit an input call because request failed
findToggleButton().vm.$emit('click');
await waitForPromises();
diff --git a/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js b/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
index 8d3bf3dd3be..d5e04c666e0 100644
--- a/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
@@ -4,9 +4,7 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
-import extensionsContainer from '~/vue_merge_request_widget/components/extensions/container';
-import { registerExtension } from '~/vue_merge_request_widget/components/extensions';
-import codeQualityExtension from '~/vue_merge_request_widget/extensions/code_quality';
+import codeQualityExtension from '~/vue_merge_request_widget/extensions/code_quality/index.vue';
import {
HTTP_STATUS_INTERNAL_SERVER_ERROR,
HTTP_STATUS_NO_CONTENT,
@@ -26,10 +24,7 @@ import {
describe('Code Quality extension', () => {
let wrapper;
let mock;
-
- registerExtension(codeQualityExtension);
-
- const endpoint = '/root/repo/-/merge_requests/4/accessibility_reports.json';
+ const endpoint = '/root/repo/-/merge_requests/4/codequality_reports.json';
const mockApi = (statusCode, data) => {
mock.onGet(endpoint).reply(statusCode, data);
@@ -43,10 +38,11 @@ describe('Code Quality extension', () => {
const getSuccessIcon = () => wrapper.findByTestId('status-success-icon').exists();
const createComponent = () => {
- wrapper = mountExtended(extensionsContainer, {
+ wrapper = mountExtended(codeQualityExtension, {
propsData: {
mr: {
- codeQuality: endpoint,
+ codequality: endpoint,
+ codequalityReportsPath: endpoint,
blobPath: {
head_path: 'example/path',
base_path: 'example/path',
@@ -198,7 +194,7 @@ describe('Code Quality extension', () => {
"Minor - Parsing error: 'return' outside of function in index.js:12",
);
expect(text.resolvedError).toContain(
- "Minor - Parsing error: 'return' outside of function Fixed in index.js:12",
+ "Minor - Parsing error: 'return' outside of function in index.js:12 Fixed",
);
});
@@ -212,7 +208,7 @@ describe('Code Quality extension', () => {
'Minor - Rubocop/Metrics/ParameterLists - Avoid parameter lists longer than 5 parameters. [12/5] in main.rb:3',
);
expect(text.resolvedError).toContain(
- 'Minor - Rubocop/Metrics/ParameterLists - Avoid parameter lists longer than 5 parameters. [12/5] Fixed in main.rb:3',
+ 'Minor - Rubocop/Metrics/ParameterLists - Avoid parameter lists longer than 5 parameters. [12/5] in main.rb:3 Fixed',
);
});
diff --git a/spec/frontend/vue_merge_request_widget/mock_data.js b/spec/frontend/vue_merge_request_widget/mock_data.js
index 9da687c0ff8..5b3f533f34e 100644
--- a/spec/frontend/vue_merge_request_widget/mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/mock_data.js
@@ -332,7 +332,7 @@ export default {
base_path: 'blob_path',
head_path: 'blob_path',
},
- codequality_reports_path: 'codequality_reports.json',
+ codequality_reports_path: '',
codequality_help_path: 'code_quality.html',
target_branch_path: '/root/acets-app/branches/main',
source_branch_path: '/root/acets-app/branches/daaaa',
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
index 20f1796008a..60525a96907 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
@@ -41,9 +41,23 @@ describe('MRWidgetHowToMerge', () => {
expect(findTipLink().exists()).toBe(false);
});
- it('should render different instructions based on if the user can merge', () => {
- mountComponent({ props: { canMerge: true } });
- expect(findInstructionsFields().at(1).text()).toContain('git push origin');
+ it('should render instructions to push', () => {
+ mountComponent({ props: { sourceBranch: 'branch-of-user' } });
+ expect(findInstructionsFields().at(1).text()).toContain("git push origin 'branch-of-user'");
+ });
+
+ it('should render instructions to push to fork', () => {
+ mountComponent({
+ props: {
+ sourceProjectDefaultUrl: 'git@gitlab.com:contributor/Underscore.git',
+ sourceProjectPath: 'Underscore',
+ sourceBranch: 'branch-of-user',
+ isFork: true,
+ },
+ });
+ expect(findInstructionsFields().at(1).text()).toContain(
+ 'git push "git@gitlab.com:contributor/Underscore.git" \'Underscore-branch-of-user:branch-of-user\'',
+ );
});
it('escapes the source branch name shell-secure', () => {
diff --git a/spec/frontend/vue_merge_request_widget/stores/mr_widget_store_spec.js b/spec/frontend/vue_merge_request_widget/stores/mr_widget_store_spec.js
index 37df041210c..dda621b5db2 100644
--- a/spec/frontend/vue_merge_request_widget/stores/mr_widget_store_spec.js
+++ b/spec/frontend/vue_merge_request_widget/stores/mr_widget_store_spec.js
@@ -164,4 +164,30 @@ describe('MergeRequestStore', () => {
},
);
});
+
+ describe('preventMerge', () => {
+ it('is false when approvals are not available', () => {
+ store.hasApprovalsAvailable = false;
+
+ expect(store.preventMerge).toBe(false);
+ });
+
+ describe('when approvals are available', () => {
+ beforeEach(() => {
+ store.hasApprovalsAvailable = true;
+ });
+
+ it('is true when MR is not approved', () => {
+ store.setApprovals({ approved: false });
+
+ expect(store.preventMerge).toBe(true);
+ });
+
+ it('is false when MR is approved', () => {
+ store.setApprovals({ approved: true });
+
+ expect(store.preventMerge).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
index 2b89e36344d..62d75fbdc5f 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
@@ -12,6 +12,7 @@ exports[`SplitButton renders actionItems 1`] = `
menu-class=""
size="medium"
split="true"
+ splithref=""
text="professor"
variant="default"
>
diff --git a/spec/frontend/vue_shared/components/actions_button_spec.js b/spec/frontend/vue_shared/components/actions_button_spec.js
deleted file mode 100644
index 9f9a27c6997..00000000000
--- a/spec/frontend/vue_shared/components/actions_button_spec.js
+++ /dev/null
@@ -1,119 +0,0 @@
-import {
- GlDisclosureDropdown,
- GlDisclosureDropdownGroup,
- GlDisclosureDropdownItem,
-} from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import ActionsButton from '~/vue_shared/components/actions_button.vue';
-
-const TEST_ACTION = {
- key: 'action1',
- text: 'Sample',
- secondaryText: 'Lorem ipsum.',
- href: '/sample',
- attrs: {
- 'data-test': '123',
- category: 'secondary',
- href: '/sample',
- variant: 'default',
- },
- handle: jest.fn(),
-};
-const TEST_ACTION_2 = {
- key: 'action2',
- text: 'Sample 2',
- secondaryText: 'Dolar sit amit.',
- href: '#',
- attrs: { 'data-test': '456' },
- handle: jest.fn(),
-};
-
-describe('vue_shared/components/actions_button', () => {
- let wrapper;
-
- function createComponent({ props = {}, slots = {} } = {}) {
- wrapper = shallowMountExtended(ActionsButton, {
- propsData: { actions: [TEST_ACTION, TEST_ACTION_2], toggleText: 'Edit', ...props },
- stubs: {
- GlDisclosureDropdownItem,
- },
- slots,
- });
- }
- const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
-
- it('dropdown toggle displays provided toggleLabel', () => {
- createComponent();
-
- expect(findDropdown().props().toggleText).toBe('Edit');
- });
-
- it('dropdown has a fluid width', () => {
- createComponent();
-
- expect(findDropdown().props().fluidWidth).toBe(true);
- });
-
- it('provides a default slot', () => {
- const slotContent = 'default text';
-
- createComponent({
- slots: {
- default: slotContent,
- },
- });
-
- expect(findDropdown().text()).toContain(slotContent);
- });
-
- it('allows customizing variant and category', () => {
- const variant = 'confirm';
- const category = 'secondary';
-
- createComponent({ props: { variant, category } });
-
- expect(findDropdown().props()).toMatchObject({ category, variant });
- });
-
- it('displays a single dropdown group', () => {
- createComponent();
-
- expect(wrapper.findAllComponents(GlDisclosureDropdownGroup)).toHaveLength(1);
- });
-
- it('create dropdown items for every action', () => {
- createComponent();
-
- [TEST_ACTION, TEST_ACTION_2].forEach((action, index) => {
- const dropdownItem = wrapper.findAllComponents(GlDisclosureDropdownItem).at(index);
-
- expect(dropdownItem.props().item).toBe(action);
- expect(dropdownItem.attributes()).toMatchObject(action.attrs);
- expect(dropdownItem.text()).toContain(action.text);
- expect(dropdownItem.text()).toContain(action.secondaryText);
- });
- });
-
- describe('when clicking a dropdown item', () => {
- it("invokes the action's handle method", () => {
- createComponent();
-
- [TEST_ACTION, TEST_ACTION_2].forEach((action, index) => {
- const dropdownItem = wrapper.findAllComponents(GlDisclosureDropdownItem).at(index);
-
- dropdownItem.vm.$emit('action');
-
- expect(action.handle).toHaveBeenCalled();
- });
- });
- });
-
- it.each(['shown', 'hidden'])(
- 'bubbles up %s event from the disclosure dropdown component',
- (event) => {
- createComponent();
- findDropdown().vm.$emit(event);
- expect(wrapper.emitted(event)).toHaveLength(1);
- },
- );
-});
diff --git a/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap b/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap
new file mode 100644
index 00000000000..24b2c54f20b
--- /dev/null
+++ b/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap
@@ -0,0 +1,54 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Beta badge component renders the badge 1`] = `
+<div>
+ <gl-badge-stub
+ class="gl-cursor-pointer"
+ href="#"
+ iconsize="md"
+ size="md"
+ variant="neutral"
+ >
+ Beta
+ </gl-badge-stub>
+
+ <gl-popover-stub
+ cssclasses=""
+ data-testid="beta-badge"
+ showclosebutton="true"
+ target="[Function]"
+ title="What's Beta?"
+ triggers="hover focus click"
+ >
+ <p>
+ A Beta feature is not production-ready, but is unlikely to change drastically before it's released. We encourage users to try Beta features and provide feedback.
+ </p>
+
+ <p
+ class="gl-mb-0"
+ >
+ A Beta feature:
+ </p>
+
+ <ul
+ class="gl-pl-4"
+ >
+ <li>
+ May be unstable.
+ </li>
+
+ <li>
+ Should not cause data loss.
+ </li>
+
+ <li>
+ Is supported by a commercially reasonable effort.
+ </li>
+
+ <li>
+ Is complete or near completion.
+ </li>
+ </ul>
+ </gl-popover-stub>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/badges/beta_badge_spec.js b/spec/frontend/vue_shared/components/badges/beta_badge_spec.js
new file mode 100644
index 00000000000..c930c6d5708
--- /dev/null
+++ b/spec/frontend/vue_shared/components/badges/beta_badge_spec.js
@@ -0,0 +1,32 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlBadge } from '@gitlab/ui';
+import BetaBadge from '~/vue_shared/components/badges/beta_badge.vue';
+
+describe('Beta badge component', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const createWrapper = (props = {}) => {
+ wrapper = shallowMount(BetaBadge, {
+ propsData: { ...props },
+ });
+ };
+
+ it('renders the badge', () => {
+ createWrapper();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('passes default size to badge', () => {
+ createWrapper();
+
+ expect(findBadge().props('size')).toBe('md');
+ });
+
+ it('passes given size to badge', () => {
+ createWrapper({ size: 'sm' });
+
+ expect(findBadge().props('size')).toBe('sm');
+ });
+});
diff --git a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
index 1f3029435ee..fc8155bd381 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
@@ -3,8 +3,10 @@ import { shallowMount } from '@vue/test-utils';
import { handleBlobRichViewer } from '~/blob/viewer';
import RichViewer from '~/vue_shared/components/blob_viewers/rich_viewer.vue';
import MarkdownFieldView from '~/vue_shared/components/markdown/field_view.vue';
+import { handleLocationHash } from '~/lib/utils/common_utils';
jest.mock('~/blob/viewer');
+jest.mock('~/lib/utils/common_utils');
describe('Blob Rich Viewer component', () => {
let wrapper;
@@ -50,4 +52,8 @@ describe('Blob Rich Viewer component', () => {
it('is using Markdown View Field', () => {
expect(wrapper.findComponent(MarkdownFieldView).exists()).toBe(true);
});
+
+ it('scrolls to the hash location', () => {
+ expect(handleLocationHash).toHaveBeenCalled();
+ });
});
diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js
index 08a9c2a42d8..271c99be57a 100644
--- a/spec/frontend/vue_shared/components/clipboard_button_spec.js
+++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js
@@ -1,7 +1,8 @@
import { GlButton } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { mount, createWrapper as makeWrapper } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { BV_HIDE_TOOLTIP, BV_SHOW_TOOLTIP } from '~/lib/utils/constants';
import initCopyToClipboard, {
CLIPBOARD_SUCCESS_EVENT,
CLIPBOARD_ERROR_EVENT,
@@ -31,7 +32,7 @@ describe('clipboard button', () => {
title,
});
- wrapper.vm.$root.$emit = jest.fn();
+ const rootWrapper = makeWrapper(wrapper.vm.$root);
const button = findButton();
@@ -42,7 +43,7 @@ describe('clipboard button', () => {
await button.trigger(event);
- expect(wrapper.vm.$root.$emit).toHaveBeenCalledWith('bv::show::tooltip', 'clipboard-button-1');
+ expect(rootWrapper.emitted(BV_SHOW_TOOLTIP)[0]).toContain('clipboard-button-1');
expect(button.attributes()).toMatchObject({
title: message,
@@ -56,7 +57,7 @@ describe('clipboard button', () => {
title,
'aria-label': title,
});
- expect(wrapper.vm.$root.$emit).toHaveBeenCalledWith('bv::hide::tooltip', 'clipboard-button-1');
+ expect(rootWrapper.emitted(BV_HIDE_TOOLTIP)[0]).toContain('clipboard-button-1');
};
describe('without gfm', () => {
diff --git a/spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js b/spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js
index 0d536b23c45..2f165338577 100644
--- a/spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js
+++ b/spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js
@@ -1,5 +1,6 @@
import { shallowMount, mount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { GlAlert, GlLink, GlLoadingIcon } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
index 21a1303ccf3..ce8897027a4 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
@@ -110,6 +110,8 @@ describe('processFilters', () => {
{ type: 'foo', value: { data: 'foo', operator: '=' } },
{ type: 'bar', value: { data: 'bar1', operator: '=' } },
{ type: 'bar', value: { data: 'bar2', operator: '!=' } },
+ 'just a string',
+ 'and another',
]);
expect(result).toStrictEqual({
@@ -118,6 +120,10 @@ describe('processFilters', () => {
{ value: 'bar1', operator: '=' },
{ value: 'bar2', operator: '!=' },
],
+ 'filtered-search-term': [
+ { value: 'just a string', operator: undefined },
+ { value: 'and another', operator: undefined },
+ ],
});
});
@@ -208,6 +214,67 @@ describe('filterToQueryObject', () => {
expect(res).toEqual(result);
},
);
+
+ describe('with custom operators', () => {
+ it('does not handle filters without custom operators', () => {
+ const res = filterToQueryObject({
+ foo: [
+ { value: '100', operator: '>' },
+ { value: '200', operator: '<' },
+ ],
+ });
+ expect(res).toEqual({ foo: null, 'not[foo]': null });
+ });
+
+ it('handles filters with custom operators', () => {
+ const res = filterToQueryObject(
+ {
+ foo: [
+ { value: '100', operator: '>' },
+ { value: '200', operator: '<' },
+ ],
+ },
+ {
+ customOperators: [
+ {
+ operator: '>',
+ prefix: 'gt',
+ },
+ {
+ operator: '<',
+ prefix: 'lt',
+ },
+ ],
+ },
+ );
+ expect(res).toEqual({ foo: null, 'gt[foo]': ['100'], 'lt[foo]': ['200'], 'not[foo]': null });
+ });
+ });
+
+ it('when applyOnlyToKey is present, it only process custom operators for the given key', () => {
+ const res = filterToQueryObject(
+ {
+ foo: [{ value: '100', operator: '>' }],
+ bar: [{ value: '100', operator: '>' }],
+ },
+ {
+ customOperators: [
+ {
+ operator: '>',
+ prefix: 'gt',
+ applyOnlyToKey: 'foo',
+ },
+ ],
+ },
+ );
+ expect(res).toEqual({
+ bar: null,
+ 'not[bar]': null,
+ foo: null,
+ 'gt[foo]': ['100'],
+ 'not[foo]': null,
+ });
+ });
});
describe('urlQueryToFilter', () => {
@@ -275,28 +342,40 @@ describe('urlQueryToFilter', () => {
[
'search=my terms',
{
- [FILTERED_SEARCH_TERM]: [{ value: 'my' }, { value: 'terms' }],
+ [FILTERED_SEARCH_TERM]: [{ value: 'my terms' }],
},
{ filteredSearchTermKey: 'search' },
],
[
'search[]=my&search[]=terms',
{
- [FILTERED_SEARCH_TERM]: [{ value: 'my' }, { value: 'terms' }],
+ [FILTERED_SEARCH_TERM]: [{ value: 'my terms' }],
},
{ filteredSearchTermKey: 'search' },
],
[
'search=my+terms',
{
- [FILTERED_SEARCH_TERM]: [{ value: 'my' }, { value: 'terms' }],
+ [FILTERED_SEARCH_TERM]: [{ value: 'my terms' }],
},
{ filteredSearchTermKey: 'search' },
],
[
'search=my terms&foo=bar&nop=xxx',
{
- [FILTERED_SEARCH_TERM]: [{ value: 'my' }, { value: 'terms' }],
+ [FILTERED_SEARCH_TERM]: [{ value: 'my terms' }],
+ foo: { value: 'bar', operator: '=' },
+ },
+ { filteredSearchTermKey: 'search', filterNamesAllowList: ['foo'] },
+ ],
+ [
+ {
+ search: 'my terms',
+ foo: 'bar',
+ nop: 'xxx',
+ },
+ {
+ [FILTERED_SEARCH_TERM]: [{ value: 'my terms' }],
foo: { value: 'bar', operator: '=' },
},
{ filteredSearchTermKey: 'search', filterNamesAllowList: ['foo'] },
@@ -308,6 +387,20 @@ describe('urlQueryToFilter', () => {
expect(res).toEqual(result);
},
);
+
+ describe('custom operators', () => {
+ it('handles query param with custom operators', () => {
+ const res = urlQueryToFilter('gt[foo]=bar', {
+ customOperators: [{ operator: '>', prefix: 'gt' }],
+ });
+ expect(res).toEqual({ foo: { operator: '>', value: 'bar' } });
+ });
+
+ it('does not handle query param without custom operators', () => {
+ const res = urlQueryToFilter('gt[foo]=bar');
+ expect(res).toEqual({ 'gt[foo]': { operator: '=', value: 'bar' } });
+ });
+ });
});
describe('getRecentlyUsedSuggestions', () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js
new file mode 100644
index 00000000000..56a59790210
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js
@@ -0,0 +1,49 @@
+import { GlDatepicker, GlFilteredSearchToken } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import DateToken from '~/vue_shared/components/filtered_search_bar/tokens/date_token.vue';
+
+const propsData = {
+ active: true,
+ config: {},
+ value: { operator: '>', data: null },
+};
+
+function createComponent() {
+ return mount(DateToken, {
+ propsData,
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ termsAsTokens: () => false,
+ },
+ });
+}
+
+describe('DateToken', () => {
+ let wrapper;
+
+ const findGlFilteredSearchToken = () => wrapper.findComponent(GlFilteredSearchToken);
+ const findDatepicker = () => wrapper.findComponent(GlDatepicker);
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('renders GlDatepicker', () => {
+ expect(findDatepicker().exists()).toBe(true);
+ });
+
+ it('renders GlFilteredSearchToken', () => {
+ expect(findGlFilteredSearchToken().exists()).toBe(true);
+ });
+
+ it('emits `complete` and `select` with the formatted date when a value is selected', () => {
+ findDatepicker().vm.$emit('input', new Date('October 13, 2014 11:13:00'));
+ findDatepicker().vm.$emit('close');
+
+ expect(findGlFilteredSearchToken().emitted()).toEqual({
+ complete: [[]],
+ select: [['2014-10-13']],
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
index 5e675c10038..db116a31de7 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
@@ -122,7 +122,7 @@ describe('EmojiToken', () => {
it('calls `createAlert`', () => {
expect(createAlert).toHaveBeenCalledWith({
- message: 'There was a problem fetching emojis.',
+ message: 'There was a problem fetching emoji.',
});
});
diff --git a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
index 4f1603f93ba..eee85ce4fd3 100644
--- a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
+++ b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
@@ -1,26 +1,25 @@
-import { merge } from 'lodash';
+import { nextTick } from 'vue';
import { GlFormInputGroup } from '@gitlab/ui';
import InputCopyToggleVisibility from '~/vue_shared/components/form/input_copy_toggle_visibility.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { MOUSETRAP_COPY_KEYBOARD_SHORTCUT } from '~/lib/mousetrap';
describe('InputCopyToggleVisibility', () => {
let wrapper;
const valueProp = 'hR8x1fuJbzwu5uFKLf9e';
- const createComponent = (options = {}) => {
- wrapper = mountExtended(
- InputCopyToggleVisibility,
- merge({}, options, {
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
- }),
- );
+ const createComponent = ({ props, ...options } = {}) => {
+ wrapper = mountExtended(InputCopyToggleVisibility, {
+ propsData: props,
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ ...options,
+ });
};
const findFormInputGroup = () => wrapper.findComponent(GlFormInputGroup);
@@ -40,6 +39,18 @@ describe('InputCopyToggleVisibility', () => {
return event;
};
+ const triggerCopyShortcut = () => {
+ wrapper.vm.$options.mousetrap.trigger(MOUSETRAP_COPY_KEYBOARD_SHORTCUT);
+ };
+
+ function expectInputToBeMasked() {
+ expect(findFormInput().element.type).toBe('password');
+ }
+
+ function expectInputToBeRevealed() {
+ expect(findFormInput().element.type).toBe('text');
+ expect(findFormInput().element.value).toBe(valueProp);
+ }
const itDoesNotModifyCopyEvent = () => {
it('does not modify copy event', () => {
@@ -55,35 +66,61 @@ describe('InputCopyToggleVisibility', () => {
describe('when `value` prop is passed', () => {
beforeEach(() => {
createComponent({
- propsData: {
+ props: {
value: valueProp,
},
});
});
- it('displays value as hidden', () => {
- expect(findFormInput().element.value).toBe('********************');
+ it('hides the value with a password input', () => {
+ expectInputToBeMasked();
});
- it('saves actual value to clipboard when manually copied', () => {
- const event = createCopyEvent();
- findFormInput().element.dispatchEvent(event);
-
- expect(event.clipboardData.setData).toHaveBeenCalledWith('text/plain', valueProp);
- expect(event.preventDefault).toHaveBeenCalled();
- });
+ it('emits `copy` event and sets clipboard when copying token via keyboard shortcut', async () => {
+ const writeTextSpy = jest.spyOn(global.navigator.clipboard, 'writeText');
- it('emits `copy` event when manually copied the token', () => {
expect(wrapper.emitted('copy')).toBeUndefined();
- findFormInput().element.dispatchEvent(createCopyEvent());
+ triggerCopyShortcut();
+ await nextTick();
- expect(wrapper.emitted()).toHaveProperty('copy');
- expect(wrapper.emitted('copy')).toHaveLength(1);
expect(wrapper.emitted('copy')[0]).toEqual([]);
+ expect(writeTextSpy).toHaveBeenCalledWith(valueProp);
});
+ describe('copy button', () => {
+ it('renders button with correct props passed', () => {
+ expect(findCopyButton().props()).toMatchObject({
+ text: valueProp,
+ title: 'Copy',
+ });
+ });
+
+ describe('when clicked', () => {
+ beforeEach(async () => {
+ await findCopyButton().trigger('click');
+ });
+
+ it('emits `copy` event', () => {
+ expect(wrapper.emitted()).toHaveProperty('copy');
+ expect(wrapper.emitted('copy')).toHaveLength(1);
+ expect(wrapper.emitted('copy')[0]).toEqual([]);
+ });
+ });
+ });
+ });
+
+ describe('when input is readonly', () => {
describe('visibility toggle button', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ value: valueProp,
+ readonly: true,
+ },
+ });
+ });
+
it('renders a reveal button', () => {
const revealButton = findRevealButton();
@@ -103,7 +140,7 @@ describe('InputCopyToggleVisibility', () => {
});
it('displays value', () => {
- expect(findFormInput().element.value).toBe(valueProp);
+ expectInputToBeRevealed();
});
it('renders a hide button', () => {
@@ -127,78 +164,161 @@ describe('InputCopyToggleVisibility', () => {
});
});
- describe('copy button', () => {
- it('renders button with correct props passed', () => {
- expect(findCopyButton().props()).toMatchObject({
- text: valueProp,
- title: 'Copy',
+ describe('when `initialVisibility` prop is `true`', () => {
+ const label = 'My label';
+ beforeEach(() => {
+ createComponent({
+ props: {
+ value: valueProp,
+ initialVisibility: true,
+ readonly: true,
+ label,
+ 'label-for': 'my-input',
+ formInputGroupProps: {
+ id: 'my-input',
+ },
+ },
});
});
- describe('when clicked', () => {
- beforeEach(async () => {
- await findCopyButton().trigger('click');
+ it('displays value', () => {
+ expectInputToBeRevealed();
+ });
+
+ itDoesNotModifyCopyEvent();
+
+ describe('when input is clicked', () => {
+ it('selects input value', async () => {
+ const mockSelect = jest.fn();
+ findFormInput().element.select = mockSelect;
+ await findFormInput().trigger('click');
+
+ expect(mockSelect).toHaveBeenCalled();
});
+ });
- it('emits `copy` event', () => {
- expect(wrapper.emitted()).toHaveProperty('copy');
- expect(wrapper.emitted('copy')).toHaveLength(1);
- expect(wrapper.emitted('copy')[0]).toEqual([]);
+ describe('when label is clicked', () => {
+ it('selects input value', async () => {
+ const mockSelect = jest.fn();
+ findFormInput().element.select = mockSelect;
+ await wrapper.find('label').trigger('click');
+
+ expect(mockSelect).toHaveBeenCalled();
});
});
});
});
- describe('when `value` prop is not passed', () => {
- beforeEach(() => {
- createComponent();
- });
+ describe('when input is editable', () => {
+ describe('and no `value` prop is passed', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ value: '',
+ readonly: false,
+ },
+ });
+ });
- it('displays value as hidden with 20 asterisks', () => {
- expect(findFormInput().element.value).toBe('********************');
- });
- });
+ it('displays value', () => {
+ expect(findRevealButton().exists()).toBe(false);
+ expect(findHideButton().exists()).toBe(true);
- describe('when `initialVisibility` prop is `true`', () => {
- const label = 'My label';
+ const input = findFormInput();
+ input.element.value = valueProp;
+ input.trigger('input');
- beforeEach(() => {
- createComponent({
- propsData: {
- value: valueProp,
- initialVisibility: true,
- label,
- 'label-for': 'my-input',
- formInputGroupProps: {
- id: 'my-input',
- },
- },
+ expectInputToBeRevealed();
});
});
- it('displays value', () => {
- expect(findFormInput().element.value).toBe(valueProp);
- });
+ describe('and `value` prop is passed', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ value: valueProp,
+ readonly: false,
+ },
+ });
+ });
- itDoesNotModifyCopyEvent();
+ it('renders a reveal button', () => {
+ const revealButton = findRevealButton();
+
+ expect(revealButton.exists()).toBe(true);
- describe('when input is clicked', () => {
- it('selects input value', async () => {
- const mockSelect = jest.fn();
- wrapper.vm.$refs.input.$el.select = mockSelect;
- await wrapper.findByLabelText(label).trigger('click');
+ const tooltip = getBinding(revealButton.element, 'gl-tooltip');
- expect(mockSelect).toHaveBeenCalled();
+ expect(tooltip.value).toBe(InputCopyToggleVisibility.i18n.toggleVisibilityLabelReveal);
});
- });
- describe('when label is clicked', () => {
- it('selects input value', async () => {
- const mockSelect = jest.fn();
- wrapper.vm.$refs.input.$el.select = mockSelect;
- await wrapper.find('label').trigger('click');
+ it('renders a hide button once revealed', async () => {
+ const revealButton = findRevealButton();
+ await revealButton.trigger('click');
+ await nextTick();
+
+ const hideButton = findHideButton();
+ expect(hideButton.exists()).toBe(true);
+
+ const tooltip = getBinding(hideButton.element, 'gl-tooltip');
- expect(mockSelect).toHaveBeenCalled();
+ expect(tooltip.value).toBe(InputCopyToggleVisibility.i18n.toggleVisibilityLabelHide);
+ });
+
+ it('emits `input` event when editing', () => {
+ expect(wrapper.emitted('input')).toBeUndefined();
+ const newVal = 'ding!';
+
+ const input = findFormInput();
+ input.element.value = newVal;
+ input.trigger('input');
+
+ expect(wrapper.emitted()).toHaveProperty('input');
+ expect(wrapper.emitted('input')).toHaveLength(1);
+ expect(wrapper.emitted('input')[0][0]).toBe(newVal);
+ });
+
+ it('copies updated value to clipboard after editing', async () => {
+ const writeTextSpy = jest.spyOn(global.navigator.clipboard, 'writeText');
+
+ triggerCopyShortcut();
+ await nextTick();
+
+ expect(wrapper.emitted('copy')).toHaveLength(1);
+ expect(writeTextSpy).toHaveBeenCalledWith(valueProp);
+
+ const updatedValue = 'wow amazing';
+ wrapper.setProps({ value: updatedValue });
+ await nextTick();
+
+ triggerCopyShortcut();
+ await nextTick();
+
+ expect(wrapper.emitted('copy')).toHaveLength(2);
+ expect(writeTextSpy).toHaveBeenCalledWith(updatedValue);
+ });
+
+ describe('when input is clicked', () => {
+ it('shows the actual value', async () => {
+ const input = findFormInput();
+
+ expectInputToBeMasked();
+ await findFormInput().trigger('click');
+
+ expect(input.element.value).toBe(valueProp);
+ });
+
+ it('ensures the selection start/end are in the correct position once the actual value has been revealed', async () => {
+ const input = findFormInput();
+ const selectionStart = 2;
+ const selectionEnd = 4;
+
+ input.element.setSelectionRange(selectionStart, selectionEnd);
+ await input.trigger('click');
+
+ expect(input.element.selectionStart).toBe(selectionStart);
+ expect(input.element.selectionEnd).toBe(selectionEnd);
+ });
});
});
});
@@ -206,7 +326,7 @@ describe('InputCopyToggleVisibility', () => {
describe('when `showToggleVisibilityButton` is `false`', () => {
beforeEach(() => {
createComponent({
- propsData: {
+ props: {
value: valueProp,
showToggleVisibilityButton: false,
},
@@ -219,7 +339,7 @@ describe('InputCopyToggleVisibility', () => {
});
it('displays value', () => {
- expect(findFormInput().element.value).toBe(valueProp);
+ expectInputToBeRevealed();
});
itDoesNotModifyCopyEvent();
@@ -228,7 +348,7 @@ describe('InputCopyToggleVisibility', () => {
describe('when `showCopyButton` is `false`', () => {
beforeEach(() => {
createComponent({
- propsData: {
+ props: {
showCopyButton: false,
},
});
@@ -239,9 +359,23 @@ describe('InputCopyToggleVisibility', () => {
});
});
+ describe('when `size` is used', () => {
+ it('passes no `size` prop', () => {
+ createComponent();
+
+ expect(findFormInput().props('size')).toBe(null);
+ });
+
+ it('passes `size` prop to the input', () => {
+ createComponent({ props: { size: 'md' } });
+
+ expect(findFormInput().props('size')).toBe('md');
+ });
+ });
+
it('passes `formInputGroupProps` prop only to the input', () => {
createComponent({
- propsData: {
+ props: {
formInputGroupProps: {
name: 'Foo bar',
'data-qa-selector': 'Foo bar',
@@ -267,7 +401,7 @@ describe('InputCopyToggleVisibility', () => {
it('passes `copyButtonTitle` prop to `ClipboardButton`', () => {
createComponent({
- propsData: {
+ props: {
copyButtonTitle: 'Copy token',
},
});
diff --git a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
index 6dc018797a6..271214907fc 100644
--- a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
+++ b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
@@ -1,6 +1,7 @@
import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { BV_SHOW_MODAL, BV_HIDE_MODAL } from '~/lib/utils/constants';
import GlModalVuex from '~/vue_shared/components/gl_modal_vuex.vue';
diff --git a/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js b/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
new file mode 100644
index 00000000000..877de4f4695
--- /dev/null
+++ b/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
@@ -0,0 +1,182 @@
+import { GlAvatarLabeled, GlIcon } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import GroupsListItem from '~/vue_shared/components/groups_list/groups_list_item.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import {
+ VISIBILITY_TYPE_ICON,
+ VISIBILITY_LEVEL_INTERNAL_STRING,
+ GROUP_VISIBILITY_TYPE,
+} from '~/visibility_level/constants';
+import UserAccessRoleBadge from '~/vue_shared/components/user_access_role_badge.vue';
+import { ACCESS_LEVEL_LABELS } from '~/access_level/constants';
+import { groups } from './mock_data';
+
+describe('GroupsListItem', () => {
+ let wrapper;
+
+ const [group] = groups;
+
+ const defaultPropsData = { group };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = mountExtended(GroupsListItem, {
+ propsData: { ...defaultPropsData, ...propsData },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ });
+ };
+
+ const findAvatarLabeled = () => wrapper.findComponent(GlAvatarLabeled);
+ const findGroupDescription = () => wrapper.findByTestId('group-description');
+ const findVisibilityIcon = () => findAvatarLabeled().findComponent(GlIcon);
+
+ it('renders group avatar', () => {
+ createComponent();
+
+ const avatarLabeled = findAvatarLabeled();
+
+ expect(avatarLabeled.props()).toMatchObject({
+ label: group.fullName,
+ labelLink: group.webUrl,
+ });
+
+ expect(avatarLabeled.attributes()).toMatchObject({
+ 'entity-id': group.id.toString(),
+ 'entity-name': group.fullName,
+ shape: 'rect',
+ });
+ });
+
+ it('renders visibility icon with tooltip', () => {
+ createComponent();
+
+ const icon = findAvatarLabeled().findComponent(GlIcon);
+ const tooltip = getBinding(icon.element, 'gl-tooltip');
+
+ expect(icon.props('name')).toBe(VISIBILITY_TYPE_ICON[VISIBILITY_LEVEL_INTERNAL_STRING]);
+ expect(tooltip.value).toBe(GROUP_VISIBILITY_TYPE[VISIBILITY_LEVEL_INTERNAL_STRING]);
+ });
+
+ it('renders subgroup count', () => {
+ createComponent();
+
+ const countWrapper = wrapper.findByTestId('subgroups-count');
+ const tooltip = getBinding(countWrapper.element, 'gl-tooltip');
+
+ expect(tooltip.value).toBe(GroupsListItem.i18n.subgroups);
+ expect(countWrapper.text()).toBe(group.descendantGroupsCount.toString());
+ expect(countWrapper.findComponent(GlIcon).props('name')).toBe('subgroup');
+ });
+
+ it('renders projects count', () => {
+ createComponent();
+
+ const countWrapper = wrapper.findByTestId('projects-count');
+ const tooltip = getBinding(countWrapper.element, 'gl-tooltip');
+
+ expect(tooltip.value).toBe(GroupsListItem.i18n.projects);
+ expect(countWrapper.text()).toBe(group.projectsCount.toString());
+ expect(countWrapper.findComponent(GlIcon).props('name')).toBe('project');
+ });
+
+ it('renders members count', () => {
+ createComponent();
+
+ const countWrapper = wrapper.findByTestId('members-count');
+ const tooltip = getBinding(countWrapper.element, 'gl-tooltip');
+
+ expect(tooltip.value).toBe(GroupsListItem.i18n.directMembers);
+ expect(countWrapper.text()).toBe(group.groupMembersCount.toString());
+ expect(countWrapper.findComponent(GlIcon).props('name')).toBe('users');
+ });
+
+ describe('when visibility is not provided', () => {
+ it('does not render visibility icon', () => {
+ const { visibility, ...groupWithoutVisibility } = group;
+ createComponent({
+ propsData: {
+ group: groupWithoutVisibility,
+ },
+ });
+
+ expect(findVisibilityIcon().exists()).toBe(false);
+ });
+ });
+
+ it('renders access role badge', () => {
+ createComponent();
+
+ expect(findAvatarLabeled().findComponent(UserAccessRoleBadge).text()).toBe(
+ ACCESS_LEVEL_LABELS[group.accessLevel.integerValue],
+ );
+ });
+
+ describe('when group has a description', () => {
+ it('renders description', () => {
+ const descriptionHtml = '<p>Foo bar</p>';
+
+ createComponent({
+ propsData: {
+ group: {
+ ...group,
+ descriptionHtml,
+ },
+ },
+ });
+
+ expect(findGroupDescription().element.innerHTML).toBe(descriptionHtml);
+ });
+ });
+
+ describe('when group does not have a description', () => {
+ it('does not render description', () => {
+ createComponent({
+ propsData: {
+ group: {
+ ...group,
+ descriptionHtml: null,
+ },
+ },
+ });
+
+ expect(findGroupDescription().exists()).toBe(false);
+ });
+ });
+
+ describe('when `showGroupIcon` prop is `true`', () => {
+ describe('when `parent` attribute is `null`', () => {
+ it('shows group icon', () => {
+ createComponent({ propsData: { showGroupIcon: true } });
+
+ expect(wrapper.findByTestId('group-icon').exists()).toBe(true);
+ });
+ });
+
+ describe('when `parent` attribute is set', () => {
+ it('shows subgroup icon', () => {
+ createComponent({
+ propsData: {
+ showGroupIcon: true,
+ group: {
+ ...group,
+ parent: {
+ id: 'gid://gitlab/Group/35',
+ },
+ },
+ },
+ });
+
+ expect(wrapper.findByTestId('subgroup-icon').exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('when `showGroupIcon` prop is `false`', () => {
+ it('does not show group icon', () => {
+ createComponent();
+
+ expect(wrapper.findByTestId('group-icon').exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js b/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js
new file mode 100644
index 00000000000..c65aa347bcf
--- /dev/null
+++ b/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js
@@ -0,0 +1,34 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import GroupsList from '~/vue_shared/components/groups_list/groups_list.vue';
+import GroupsListItem from '~/vue_shared/components/groups_list/groups_list_item.vue';
+import { groups } from './mock_data';
+
+describe('GroupsList', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ groups,
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(GroupsList, {
+ propsData: defaultPropsData,
+ });
+ };
+
+ it('renders list with `GroupsListItem` component', () => {
+ createComponent();
+
+ const groupsListItemWrappers = wrapper.findAllComponents(GroupsListItem).wrappers;
+ const expectedProps = groupsListItemWrappers.map((groupsListItemWrapper) =>
+ groupsListItemWrapper.props(),
+ );
+
+ expect(expectedProps).toEqual(
+ defaultPropsData.groups.map((group) => ({
+ group,
+ showGroupIcon: false,
+ })),
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/groups_list/mock_data.js b/spec/frontend/vue_shared/components/groups_list/mock_data.js
new file mode 100644
index 00000000000..0dad27f8311
--- /dev/null
+++ b/spec/frontend/vue_shared/components/groups_list/mock_data.js
@@ -0,0 +1,35 @@
+export const groups = [
+ {
+ id: 1,
+ fullName: 'Gitlab Org',
+ parent: null,
+ webUrl: 'http://127.0.0.1:3000/groups/gitlab-org',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:64" dir="auto">Dolorem dolorem omnis impedit cupiditate pariatur officia velit. Fusce eget orci a ipsum tempus vehicula. Donec rhoncus ante sed lacus pharetra, vitae imperdiet felis lobortis. Donec maximus dapibus orci, sit amet euismod dolor rhoncus vel. In nec mauris nibh.</p>',
+ avatarUrl: null,
+ descendantGroupsCount: 1,
+ projectsCount: 1,
+ groupMembersCount: 2,
+ visibility: 'internal',
+ accessLevel: {
+ integerValue: 10,
+ },
+ },
+ {
+ id: 2,
+ fullName: 'Gitlab Org / test subgroup',
+ parent: {
+ id: 1,
+ },
+ webUrl: 'http://127.0.0.1:3000/groups/gitlab-org/test-subgroup',
+ descriptionHtml: '',
+ avatarUrl: null,
+ descendantGroupsCount: 4,
+ projectsCount: 4,
+ groupMembersCount: 4,
+ visibility: 'private',
+ accessLevel: {
+ integerValue: 20,
+ },
+ },
+];
diff --git a/spec/frontend/vue_shared/components/help_popover_spec.js b/spec/frontend/vue_shared/components/help_popover_spec.js
index 76e66d07fa0..e39061476b4 100644
--- a/spec/frontend/vue_shared/components/help_popover_spec.js
+++ b/spec/frontend/vue_shared/components/help_popover_spec.js
@@ -74,6 +74,22 @@ describe('HelpPopover', () => {
});
});
+ describe('with trigger classes', () => {
+ it.each`
+ triggerClass
+ ${'class-a class-b'}
+ ${['class-a', 'class-b']}
+ ${{ 'class-a': true, 'class-b': true }}
+ `('renders button with classes given $triggerClass', ({ triggerClass }) => {
+ createComponent({
+ props: { triggerClass },
+ });
+
+ expect(findQuestionButton().classes('class-a')).toBe(true);
+ expect(findQuestionButton().classes('class-b')).toBe(true);
+ });
+ });
+
describe('with other options', () => {
const placement = 'bottom';
diff --git a/spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js b/spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js
index b782a2b19da..141c3aa7da6 100644
--- a/spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js
+++ b/spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js
@@ -96,6 +96,15 @@ describe('ListboxInput', () => {
expect(findGlListbox().props('fluidWidth')).toBe(fluidWidth);
});
+ it.each(['class-a class-b', ['class-a', 'class-b'], { 'class-a': true, 'class-b': true }])(
+ 'passes %s class to listbox',
+ (toggleClass) => {
+ createComponent({ toggleClass });
+
+ expect(findGlListbox().props('toggleClass')).toBe(toggleClass);
+ },
+ );
+
it.each(['right', 'left'])("passes %s to the listbox's placement prop", (placement) => {
createComponent({ placement });
diff --git a/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js b/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
index 2bef6dd15df..cd9f27dccbd 100644
--- a/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
@@ -1,11 +1,18 @@
+import { GlCollapsibleListbox } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import savedRepliesResponse from 'test_fixtures/graphql/comment_templates/saved_replies.query.graphql.json';
+import { mockTracking } from 'helpers/tracking_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import CommentTemplatesDropdown from '~/vue_shared/components/markdown/comment_templates_dropdown.vue';
import savedRepliesQuery from '~/vue_shared/components/markdown/saved_replies.query.graphql';
+import {
+ TRACKING_SAVED_REPLIES_USE,
+ TRACKING_SAVED_REPLIES_USE_IN_MR,
+} from '~/vue_shared/components/markdown/constants';
let wrapper;
let savedRepliesResp;
@@ -31,19 +38,24 @@ function createComponent(options = {}) {
});
}
-describe('Comment templates dropdown', () => {
- it('fetches data when dropdown gets opened', async () => {
- const mockApollo = createMockApolloProvider(savedRepliesResponse);
- wrapper = createComponent({ mockApollo });
+function findDropdownComponent() {
+ return wrapper.findComponent(GlCollapsibleListbox);
+}
- wrapper.find('.js-comment-template-toggle').trigger('click');
+async function selectSavedReply() {
+ const dropdown = findDropdownComponent();
- await waitForPromises();
+ dropdown.vm.$emit('shown');
- expect(savedRepliesResp).toHaveBeenCalled();
- });
+ await waitForPromises();
+
+ dropdown.vm.$emit('select', savedRepliesResponse.data.currentUser.savedReplies.nodes[0].id);
+}
+
+useMockLocationHelper();
- it('adds emits a select event on selecting a comment', async () => {
+describe('Comment templates dropdown', () => {
+ it('fetches data when dropdown gets opened', async () => {
const mockApollo = createMockApolloProvider(savedRepliesResponse);
wrapper = createComponent({ mockApollo });
@@ -51,8 +63,67 @@ describe('Comment templates dropdown', () => {
await waitForPromises();
- wrapper.find('.gl-new-dropdown-item').trigger('click');
+ expect(savedRepliesResp).toHaveBeenCalled();
+ });
- expect(wrapper.emitted().select[0]).toEqual(['Saved Reply Content']);
+ describe('when selecting a comment', () => {
+ let trackingSpy;
+ let mockApollo;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking(undefined, window.document, jest.spyOn);
+ mockApollo = createMockApolloProvider(savedRepliesResponse);
+ wrapper = createComponent({ mockApollo });
+ });
+
+ it('emits a select event', async () => {
+ wrapper.find('.js-comment-template-toggle').trigger('click');
+
+ await waitForPromises();
+
+ wrapper.find('.gl-new-dropdown-item').trigger('click');
+
+ expect(wrapper.emitted().select[0]).toEqual(['Saved Reply Content']);
+ });
+
+ describe('tracking', () => {
+ it('tracks overall usage', async () => {
+ await selectSavedReply();
+
+ expect(trackingSpy).toHaveBeenCalledWith(
+ expect.any(String),
+ TRACKING_SAVED_REPLIES_USE,
+ expect.any(Object),
+ );
+ });
+
+ describe('MR-specific usage event', () => {
+ it('is sent when in an MR', async () => {
+ window.location.toString.mockReturnValue('this/looks/like/a/-/merge_requests/1');
+
+ await selectSavedReply();
+
+ expect(trackingSpy).toHaveBeenCalledWith(
+ expect.any(String),
+ TRACKING_SAVED_REPLIES_USE_IN_MR,
+ expect.any(Object),
+ );
+ expect(trackingSpy).toHaveBeenCalledTimes(2);
+ });
+
+ it('is not sent when not in an MR', async () => {
+ window.location.toString.mockReturnValue('this/looks/like/a/-/issues/1');
+
+ await selectSavedReply();
+
+ expect(trackingSpy).not.toHaveBeenCalledWith(
+ expect.any(String),
+ TRACKING_SAVED_REPLIES_USE_IN_MR,
+ expect.any(Object),
+ );
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/header_spec.js b/spec/frontend/vue_shared/components/markdown/header_spec.js
index eb728879fb7..40875ed5dbc 100644
--- a/spec/frontend/vue_shared/components/markdown/header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/header_spec.js
@@ -46,84 +46,39 @@ describe('Markdown field header component', () => {
createWrapper();
});
- describe('markdown header buttons', () => {
+ describe.each`
+ i | buttonTitle | nonMacTitle | buttonType
+ ${0} | ${'Insert suggestion'} | ${'Insert suggestion'} | ${'codeSuggestion'}
+ ${1} | ${'Add bold text (⌘B)'} | ${'Add bold text (Ctrl+B)'} | ${'bold'}
+ ${2} | ${'Add italic text (⌘I)'} | ${'Add italic text (Ctrl+I)'} | ${'italic'}
+ ${3} | ${'Add strikethrough text (⌘⇧X)'} | ${'Add strikethrough text (Ctrl+Shift+X)'} | ${'strike'}
+ ${4} | ${'Insert a quote'} | ${'Insert a quote'} | ${'blockquote'}
+ ${5} | ${'Insert code'} | ${'Insert code'} | ${'code'}
+ ${6} | ${'Add a link (⌘K)'} | ${'Add a link (Ctrl+K)'} | ${'link'}
+ ${7} | ${'Add a bullet list'} | ${'Add a bullet list'} | ${'bulletList'}
+ ${8} | ${'Add a numbered list'} | ${'Add a numbered list'} | ${'orderedList'}
+ ${9} | ${'Add a checklist'} | ${'Add a checklist'} | ${'taskList'}
+ ${10} | ${'Indent line (⌘])'} | ${'Indent line (Ctrl+])'} | ${'indent'}
+ ${11} | ${'Outdent line (⌘[)'} | ${'Outdent line (Ctrl+[)'} | ${'outdent'}
+ ${12} | ${'Add a collapsible section'} | ${'Add a collapsible section'} | ${'details'}
+ ${13} | ${'Add a table'} | ${'Add a table'} | ${'table'}
+ ${14} | ${'Attach a file or image'} | ${'Attach a file or image'} | ${'upload'}
+ ${15} | ${'Go full screen'} | ${'Go full screen'} | ${'fullScreen'}
+ `('markdown header buttons', ({ i, buttonTitle, nonMacTitle, buttonType }) => {
it('renders the buttons with the correct title', () => {
- const buttons = [
- 'Insert suggestion',
- 'Add bold text (⌘B)',
- 'Add italic text (⌘I)',
- 'Add strikethrough text (⌘⇧X)',
- 'Insert a quote',
- 'Insert code',
- 'Add a link (⌘K)',
- 'Add a bullet list',
- 'Add a numbered list',
- 'Add a checklist',
- 'Indent line (⌘])',
- 'Outdent line (⌘[)',
- 'Add a collapsible section',
- 'Add a table',
- 'Go full screen',
- ];
- const elements = findToolbarButtons();
-
- elements.wrappers.forEach((buttonEl, index) => {
- expect(buttonEl.props('buttonTitle')).toBe(buttons[index]);
- });
+ expect(findToolbarButtons().wrappers[i].props('buttonTitle')).toBe(buttonTitle);
});
it('renders correct title on non MacOS systems', () => {
- window.gl = {
- client: {
- isMac: false,
- },
- };
+ window.gl = { client: { isMac: false } };
createWrapper();
- const buttons = [
- 'Insert suggestion',
- 'Add bold text (Ctrl+B)',
- 'Add italic text (Ctrl+I)',
- 'Add strikethrough text (Ctrl+Shift+X)',
- 'Insert a quote',
- 'Insert code',
- 'Add a link (Ctrl+K)',
- 'Add a bullet list',
- 'Add a numbered list',
- 'Add a checklist',
- 'Indent line (Ctrl+])',
- 'Outdent line (Ctrl+[)',
- 'Add a collapsible section',
- 'Add a table',
- 'Go full screen',
- ];
- const elements = findToolbarButtons();
-
- elements.wrappers.forEach((buttonEl, index) => {
- expect(buttonEl.props('buttonTitle')).toBe(buttons[index]);
- });
- });
-
- it('renders "Attach a file or image" button using gl-button', () => {
- const button = wrapper.findByTestId('button-attach-file');
-
- expect(button.element.tagName).toBe('GL-BUTTON-STUB');
- expect(button.attributes('title')).toBe('Attach a file or image');
+ expect(findToolbarButtons().wrappers[i].props('buttonTitle')).toBe(nonMacTitle);
});
- describe('when the user is on a non-Mac', () => {
- beforeEach(() => {
- delete window.gl.client.isMac;
-
- createWrapper();
- });
-
- it('renders keyboard shortcuts with Ctrl+ instead of ⌘', () => {
- const boldButton = findToolbarButtonByProp('icon', 'bold');
-
- expect(boldButton.props('buttonTitle')).toBe('Add bold text (Ctrl+B)');
- });
+ it('passes button type to `trackingProperty` prop', () => {
+ expect(findToolbarButtons().wrappers[i].props('trackingProperty')).toBe(buttonType);
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
index 33e9d6add99..54510bf043d 100644
--- a/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
@@ -1,6 +1,10 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ToolbarButton from '~/vue_shared/components/markdown/toolbar_button.vue';
+import {
+ TOOLBAR_CONTROL_TRACKING_ACTION,
+ MARKDOWN_EDITOR_TRACKING_LABEL,
+} from '~/vue_shared/components/markdown/tracking';
describe('toolbar_button', () => {
let wrapper;
@@ -20,9 +24,8 @@ describe('toolbar_button', () => {
});
};
- const getButtonShortcutsAttr = () => {
- return wrapper.findComponent(GlButton).attributes('data-md-shortcuts');
- };
+ const findToolbarButton = () => wrapper.findComponent(GlButton);
+ const getButtonShortcutsAttr = () => findToolbarButton().attributes('data-md-shortcuts');
describe('keyboard shortcuts', () => {
it.each`
@@ -40,4 +43,24 @@ describe('toolbar_button', () => {
},
);
});
+
+ it('adds tracking attributes to the button when `trackingProperty` prop is defined', () => {
+ const buttonType = 'bold';
+
+ createComponent({ trackingProperty: buttonType });
+
+ expect(findToolbarButton().attributes('data-track-action')).toBe(
+ TOOLBAR_CONTROL_TRACKING_ACTION,
+ );
+ expect(findToolbarButton().attributes('data-track-label')).toBe(MARKDOWN_EDITOR_TRACKING_LABEL);
+ expect(findToolbarButton().attributes('data-track-property')).toBe(buttonType);
+ });
+
+ it('does not add tracking attributes to the button when `trackingProperty` prop is undefined', () => {
+ createComponent();
+
+ ['data-track-action', 'data-track-label', 'data-track-property'].forEach((dataAttribute) => {
+ expect(findToolbarButton().attributes(dataAttribute)).toBeUndefined();
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
index 5bf11ff2b26..90d8ce3b500 100644
--- a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
@@ -3,6 +3,7 @@ import Toolbar from '~/vue_shared/components/markdown/toolbar.vue';
import EditorModeSwitcher from '~/vue_shared/components/markdown/editor_mode_switcher.vue';
import { updateText } from '~/lib/utils/text_markdown';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { PROMO_URL } from 'jh_else_ce/lib/utils/url_utility';
jest.mock('~/lib/utils/text_markdown');
@@ -98,7 +99,7 @@ describe('toolbar', () => {
expect.objectContaining({
tag: `### Rich text editor
-Try out **styling** _your_ content right here or read the [direction](https://about.gitlab.com/direction/plan/knowledge/content_editor/).`,
+Try out **styling** _your_ content right here or read the [direction](${PROMO_URL}/direction/plan/knowledge/content_editor/).`,
textArea: document.querySelector('textarea'),
cursorOffset: 0,
wrap: false,
diff --git a/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js b/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
index 4b0b89fe1e7..36f5517decf 100644
--- a/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
@@ -2,6 +2,7 @@ import { GlFormInput, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import merge from 'lodash/merge';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import MetricImagesTable from '~/vue_shared/components/metric_images/metric_images_table.vue';
import MetricImagesTab from '~/vue_shared/components/metric_images/metric_images_tab.vue';
diff --git a/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js b/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
index 12dca95e9ba..ca141f53bf1 100644
--- a/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
@@ -2,6 +2,7 @@ import { GlLink, GlModal } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import Vue from 'vue';
import merge from 'lodash/merge';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import createStore from '~/vue_shared/components/metric_images/store';
import MetricsImageTable from '~/vue_shared/components/metric_images/metric_images_table.vue';
diff --git a/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js b/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
index 626f6fc735e..544466a22ca 100644
--- a/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import actionsFactory from '~/vue_shared/components/metric_images/store/actions';
import * as types from '~/vue_shared/components/metric_images/store/mutation_types';
diff --git a/spec/frontend/vue_shared/components/modal_copy_button_spec.js b/spec/frontend/vue_shared/components/modal_copy_button_spec.js
index 2f8f97c5b95..7f3cf9820db 100644
--- a/spec/frontend/vue_shared/components/modal_copy_button_spec.js
+++ b/spec/frontend/vue_shared/components/modal_copy_button_spec.js
@@ -27,16 +27,19 @@ describe('modal copy button', () => {
wrapper.trigger('click');
await nextTick();
- expect(wrapper.emitted().success).not.toBeEmpty();
+ expect(wrapper.emitted('error')).toBeUndefined();
+ expect(wrapper.emitted('success')).toHaveLength(1);
expect(document.execCommand).toHaveBeenCalledWith('copy');
expect(root.emitted(BV_HIDE_TOOLTIP)).toEqual([['test-id']]);
});
+
it("should propagate the clipboard error event if execCommand doesn't work", async () => {
document.execCommand = jest.fn(() => false);
wrapper.trigger('click');
await nextTick();
- expect(wrapper.emitted().error).not.toBeEmpty();
+ expect(wrapper.emitted('success')).toBeUndefined();
+ expect(wrapper.emitted('error')).toHaveLength(1);
expect(document.execCommand).toHaveBeenCalledWith('copy');
});
});
diff --git a/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js b/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js
index f04e1976a5f..7efc0e162b8 100644
--- a/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js
@@ -138,7 +138,7 @@ describe('NewResourceDropdown component', () => {
});
it('dropdown button is not a link', () => {
- expect(findDropdown().attributes('split-href')).toBeUndefined();
+ expect(findDropdown().props('splitHref')).toBe('');
});
it('displays default text on the dropdown button', () => {
@@ -162,7 +162,7 @@ describe('NewResourceDropdown component', () => {
it('dropdown button is a link', () => {
const href = joinPaths(project1.webUrl, DASH_SCOPE, expectedPath);
- expect(findDropdown().attributes('split-href')).toBe(href);
+ expect(findDropdown().props('splitHref')).toBe(href);
});
it('displays project name on the dropdown button', () => {
@@ -199,7 +199,7 @@ describe('NewResourceDropdown component', () => {
await nextTick();
const dropdown = findDropdown();
- expect(dropdown.attributes('split-href')).toBe(
+ expect(dropdown.props('splitHref')).toBe(
joinPaths(project1.webUrl, DASH_SCOPE, 'issues/new'),
);
expect(dropdown.props('text')).toBe(`New issue in ${project1.name}`);
@@ -217,7 +217,7 @@ describe('NewResourceDropdown component', () => {
await nextTick();
const dropdown = findDropdown();
- expect(dropdown.attributes('split-href')).toBe(
+ expect(dropdown.props('splitHref')).toBe(
joinPaths(project1.webUrl, DASH_SCOPE, 'issues/new'),
);
expect(dropdown.props('text')).toBe(`New issue in ${project1.name}`);
diff --git a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
index 7e669fb7c71..6d4745e8e3d 100644
--- a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import IssuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
import { userDataMock } from 'jest/notes/mock_data';
diff --git a/spec/frontend/vue_shared/components/page_size_selector_spec.js b/spec/frontend/vue_shared/components/page_size_selector_spec.js
index fce7ceee2fe..ecb25fa7468 100644
--- a/spec/frontend/vue_shared/components/page_size_selector_spec.js
+++ b/spec/frontend/vue_shared/components/page_size_selector_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import PageSizeSelector, { PAGE_SIZES } from '~/vue_shared/components/page_size_selector.vue';
@@ -11,30 +11,30 @@ describe('Page size selector component', () => {
});
};
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
- it.each(PAGE_SIZES)('shows expected text in the dropdown button for page size %s', (pageSize) => {
- createWrapper({ pageSize });
+ it.each(PAGE_SIZES)('shows expected text in the listbox button for page size %s', (pageSize) => {
+ createWrapper({ pageSize: pageSize.value });
- expect(findDropdown().props('text')).toBe(`Show ${pageSize} items`);
+ expect(findListbox().props('toggleText')).toBe(`Show ${pageSize.value} items`);
});
- it('shows the expected dropdown items', () => {
+ it('shows the expected listbox items', () => {
createWrapper();
+ const options = findListbox().props('items');
+
PAGE_SIZES.forEach((pageSize, index) => {
- expect(findDropdownItems().at(index).text()).toBe(`Show ${pageSize} items`);
+ expect(options[index].text).toBe(pageSize.text);
});
});
- it('will emit the new page size when a dropdown item is clicked', () => {
+ it('will emit the new page size when a listbox item is clicked', () => {
createWrapper();
- findDropdownItems().wrappers.forEach((itemWrapper, index) => {
- itemWrapper.vm.$emit('click');
-
- expect(wrapper.emitted('input')[index][0]).toBe(PAGE_SIZES[index]);
+ PAGE_SIZES.forEach((pageSize, index) => {
+ findListbox().vm.$emit('select', pageSize.value);
+ expect(wrapper.emitted('input')[index][0]).toBe(PAGE_SIZES[index].value);
});
});
});
diff --git a/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js b/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
index 0e387d1c139..2490422e4e8 100644
--- a/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
+++ b/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
@@ -1,7 +1,10 @@
-import { GlAvatarLabeled, GlBadge, GlIcon, GlPopover } from '@gitlab/ui';
+import { GlAvatarLabeled, GlBadge, GlIcon, GlPopover, GlDisclosureDropdown } from '@gitlab/ui';
+import uniqueId from 'lodash/uniqueId';
import projects from 'test_fixtures/api/users/projects/get.json';
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { __ } from '~/locale';
import ProjectsListItem from '~/vue_shared/components/projects_list/projects_list_item.vue';
+import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/projects_list/constants';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import {
@@ -13,8 +16,9 @@ import UserAccessRoleBadge from '~/vue_shared/components/user_access_role_badge.
import { ACCESS_LEVEL_LABELS } from '~/access_level/constants';
import { FEATURABLE_DISABLED, FEATURABLE_ENABLED } from '~/featurable/constants';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import DeleteModal from '~/projects/components/shared/delete_modal.vue';
-jest.mock('lodash/uniqueId', () => (prefix) => `${prefix}1`);
+jest.mock('lodash/uniqueId');
describe('ProjectsListItem', () => {
let wrapper;
@@ -40,6 +44,10 @@ describe('ProjectsListItem', () => {
const findProjectDescription = () => wrapper.findByTestId('project-description');
const findVisibilityIcon = () => findAvatarLabeled().findComponent(GlIcon);
+ beforeEach(() => {
+ uniqueId.mockImplementation(jest.requireActual('lodash/uniqueId'));
+ });
+
it('renders project avatar', () => {
createComponent();
@@ -207,6 +215,10 @@ describe('ProjectsListItem', () => {
});
describe('if project has topics', () => {
+ beforeEach(() => {
+ uniqueId.mockImplementation((prefix) => `${prefix}1`);
+ });
+
it('renders first three topics', () => {
createComponent();
@@ -306,4 +318,72 @@ describe('ProjectsListItem', () => {
expect(wrapper.findByTestId('project-icon').exists()).toBe(false);
});
});
+
+ describe('when project has actions', () => {
+ const editPath = '/foo/bar/edit';
+
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ project: {
+ ...project,
+ actions: [ACTION_EDIT, ACTION_DELETE],
+ isForked: true,
+ editPath,
+ },
+ },
+ });
+ });
+
+ it('displays actions dropdown', () => {
+ expect(wrapper.findComponent(GlDisclosureDropdown).props()).toMatchObject({
+ items: [
+ {
+ id: ACTION_EDIT,
+ text: __('Edit'),
+ href: editPath,
+ },
+ {
+ id: ACTION_DELETE,
+ text: __('Delete'),
+ extraAttrs: {
+ class: 'gl-text-red-500!',
+ },
+ action: expect.any(Function),
+ },
+ ],
+ });
+ });
+
+ describe('when delete action is fired', () => {
+ beforeEach(() => {
+ wrapper
+ .findComponent(GlDisclosureDropdown)
+ .props('items')
+ .find((item) => item.id === ACTION_DELETE)
+ .action();
+ });
+
+ it('displays confirmation modal with correct props', () => {
+ expect(wrapper.findComponent(DeleteModal).props()).toMatchObject({
+ visible: true,
+ confirmPhrase: project.name,
+ isFork: true,
+ issuesCount: '0',
+ forksCount: '0',
+ starsCount: '0',
+ });
+ });
+
+ describe('when deletion is confirmed', () => {
+ beforeEach(() => {
+ wrapper.findComponent(DeleteModal).vm.$emit('primary');
+ });
+
+ it('emits `delete` event', () => {
+ expect(wrapper.emitted('delete')).toMatchObject([[project]]);
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/projects_list/projects_list_spec.js b/spec/frontend/vue_shared/components/projects_list/projects_list_spec.js
index a0adbb89894..fb195dfe08e 100644
--- a/spec/frontend/vue_shared/components/projects_list/projects_list_spec.js
+++ b/spec/frontend/vue_shared/components/projects_list/projects_list_spec.js
@@ -32,4 +32,18 @@ describe('ProjectsList', () => {
})),
);
});
+
+ describe('when `ProjectListItem` emits `delete` event', () => {
+ const [firstProject] = defaultPropsData.projects;
+
+ beforeEach(() => {
+ createComponent();
+
+ wrapper.findComponent(ProjectsListItem).vm.$emit('delete', firstProject);
+ });
+
+ it('emits `delete` event', () => {
+ expect(wrapper.emitted('delete')).toEqual([[firstProject]]);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/registry/persisted_dropdown_selection_spec.js b/spec/frontend/vue_shared/components/registry/persisted_dropdown_selection_spec.js
index b93fa37546f..400be4ad131 100644
--- a/spec/frontend/vue_shared/components/registry/persisted_dropdown_selection_spec.js
+++ b/spec/frontend/vue_shared/components/registry/persisted_dropdown_selection_spec.js
@@ -1,5 +1,5 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import component from '~/vue_shared/components/registry/persisted_dropdown_selection.vue';
@@ -16,7 +16,7 @@ describe('Persisted dropdown selection', () => {
};
function createComponent({ props = {}, data = {} } = {}) {
- wrapper = shallowMount(component, {
+ wrapper = mount(component, {
propsData: {
...defaultProps,
...props,
@@ -28,8 +28,10 @@ describe('Persisted dropdown selection', () => {
}
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findGlCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findGlListboxItems = () => wrapper.findAllComponents(GlListboxItem);
+ const findGlListboxToggleText = () =>
+ findGlCollapsibleListbox().find('.gl-new-dropdown-button-text');
describe('local storage sync', () => {
it('uses the local storage sync component with the correct props', () => {
@@ -63,20 +65,22 @@ describe('Persisted dropdown selection', () => {
it('has a dropdown component', () => {
createComponent();
- expect(findDropdown().exists()).toBe(true);
+ expect(findGlCollapsibleListbox().exists()).toBe(true);
});
describe('dropdown text', () => {
it('when no selection shows the first', () => {
createComponent();
- expect(findDropdown().props('text')).toBe('Maven');
+ expect(findGlListboxToggleText().text()).toBe('Maven');
});
- it('when an option is selected, shows that option label', () => {
- createComponent({ data: { selected: defaultProps.options[1].value } });
+ it('when an option is selected, shows that option label', async () => {
+ createComponent();
+ findGlCollapsibleListbox().vm.$emit('select', defaultProps.options[1].value);
+ await nextTick();
- expect(findDropdown().props('text')).toBe('Gradle');
+ expect(findGlListboxToggleText().text()).toBe('Gradle');
});
});
@@ -84,34 +88,20 @@ describe('Persisted dropdown selection', () => {
it('has one item for each option', () => {
createComponent();
- expect(findDropdownItems()).toHaveLength(defaultProps.options.length);
- });
-
- it('binds the correct props', () => {
- createComponent({ data: { selected: defaultProps.options[0].value } });
-
- expect(findDropdownItems().at(0).props()).toMatchObject({
- isChecked: true,
- isCheckItem: true,
- });
-
- expect(findDropdownItems().at(1).props()).toMatchObject({
- isChecked: false,
- isCheckItem: true,
- });
+ expect(findGlListboxItems()).toHaveLength(defaultProps.options.length);
});
it('on click updates the data and emits event', async () => {
- createComponent({ data: { selected: defaultProps.options[0].value } });
- expect(findDropdownItems().at(0).props('isChecked')).toBe(true);
+ createComponent();
+ const selectedItem = 'gradle';
- findDropdownItems().at(1).vm.$emit('click');
+ expect(findGlCollapsibleListbox().props('selected')).toBe('maven');
+ findGlCollapsibleListbox().vm.$emit('select', selectedItem);
await nextTick();
- expect(wrapper.emitted('change')).toStrictEqual([['gradle']]);
- expect(findDropdownItems().at(0).props('isChecked')).toBe(false);
- expect(findDropdownItems().at(1).props('isChecked')).toBe(true);
+ expect(wrapper.emitted('change').at(-1)).toStrictEqual([selectedItem]);
+ expect(findGlCollapsibleListbox().props('selected')).toBe(selectedItem);
});
});
});
diff --git a/spec/frontend/vue_shared/components/registry/registry_search_spec.js b/spec/frontend/vue_shared/components/registry/registry_search_spec.js
index 59bb0646350..f86406d05cb 100644
--- a/spec/frontend/vue_shared/components/registry/registry_search_spec.js
+++ b/spec/frontend/vue_shared/components/registry/registry_search_spec.js
@@ -25,6 +25,8 @@ describe('Registry Search', () => {
orderBy: 'name',
search: [],
sort: 'asc',
+ after: null,
+ before: null,
};
const mountComponent = (propsData = defaultProps) => {
diff --git a/spec/frontend/vue_shared/components/registry/title_area_spec.js b/spec/frontend/vue_shared/components/registry/title_area_spec.js
index ec1451de470..138027be0cc 100644
--- a/spec/frontend/vue_shared/components/registry/title_area_spec.js
+++ b/spec/frontend/vue_shared/components/registry/title_area_spec.js
@@ -1,21 +1,16 @@
import { GlAvatar, GlSprintf, GlLink, GlSkeletonLoader } from '@gitlab/ui';
-import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import component from '~/vue_shared/components/registry/title_area.vue';
describe('title area', () => {
let wrapper;
- const DYNAMIC_SLOT = 'metadata-dynamic-slot';
-
const findSubHeaderSlot = () => wrapper.findByTestId('sub-header');
const findRightActionsSlot = () => wrapper.findByTestId('right-actions');
const findMetadataSlot = (name) => wrapper.findByTestId(name);
const findTitle = () => wrapper.findByTestId('title');
const findAvatar = () => wrapper.findComponent(GlAvatar);
const findInfoMessages = () => wrapper.findAllByTestId('info-message');
- const findDynamicSlot = () => wrapper.findByTestId(DYNAMIC_SLOT);
- const findSlotOrderElements = () => wrapper.findAll('[slot-test]');
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const mountComponent = ({ propsData = { title: 'foo' }, slots } = {}) => {
@@ -93,19 +88,17 @@ describe('title area', () => {
`('$slotNames metadata slots', ({ slotNames }) => {
const slots = generateSlotMocks(slotNames);
- it('exist when the slot is present', async () => {
+ it('exist when the slot is present', () => {
mountComponent({ slots });
- await nextTick();
slotNames.forEach((name) => {
expect(findMetadataSlot(name).exists()).toBe(true);
});
});
- it('is/are hidden when metadata-loading is true', async () => {
+ it('is/are hidden when metadata-loading is true', () => {
mountComponent({ slots, propsData: { title: 'foo', metadataLoading: true } });
- await nextTick();
slotNames.forEach((name) => {
expect(findMetadataSlot(name).exists()).toBe(false);
});
@@ -115,67 +108,19 @@ describe('title area', () => {
describe('metadata skeleton loader', () => {
const slots = generateSlotMocks(['metadata-foo']);
- it('is hidden when metadata loading is false', async () => {
+ it('is hidden when metadata loading is false', () => {
mountComponent({ slots });
- await nextTick();
-
expect(findSkeletonLoader().exists()).toBe(false);
});
- it('is shown when metadata loading is true', async () => {
+ it('is shown when metadata loading is true', () => {
mountComponent({ propsData: { metadataLoading: true }, slots });
- await nextTick();
-
expect(findSkeletonLoader().exists()).toBe(true);
});
});
- describe('dynamic slots', () => {
- const createDynamicSlot = () => {
- return wrapper.vm.$createElement('div', {
- attrs: {
- 'data-testid': DYNAMIC_SLOT,
- 'slot-test': true,
- },
- });
- };
-
- it('shows dynamic slots', async () => {
- mountComponent();
- // we manually add a new slot to simulate dynamic slots being evaluated after the initial mount
- wrapper.vm.$slots[DYNAMIC_SLOT] = createDynamicSlot();
-
- // updating the slots like we do on line 141 does not cause the updated lifecycle-hook to be triggered
- wrapper.vm.$forceUpdate();
- await nextTick();
-
- expect(findDynamicSlot().exists()).toBe(true);
- });
-
- it('preserve the order of the slots', async () => {
- mountComponent({
- slots: {
- 'metadata-foo': '<div slot-test data-testid="metadata-foo"></div>',
- },
- });
-
- // rewrite slot putting dynamic slot as first
- wrapper.vm.$slots = {
- 'metadata-dynamic-slot': createDynamicSlot(),
- 'metadata-foo': wrapper.vm.$slots['metadata-foo'],
- };
-
- // updating the slots like we do on line 159 does not cause the updated lifecycle-hook to be triggered
- wrapper.vm.$forceUpdate();
- await nextTick();
-
- expect(findSlotOrderElements().at(0).attributes('data-testid')).toBe(DYNAMIC_SLOT);
- expect(findSlotOrderElements().at(1).attributes('data-testid')).toBe('metadata-foo');
- });
- });
-
describe('info-messages', () => {
it('shows a message when the props contains one', () => {
mountComponent({ propsData: { infoMessages: [{ text: 'foo foo bar bar' }] } });
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
index 6b711b6b6b2..431ede17954 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
@@ -7,15 +7,22 @@ import LineHighlighter from '~/blob/line_highlighter';
import addBlobLinksTracking from '~/blob/blob_links_tracking';
import { BLOB_DATA_MOCK, CHUNK_1, CHUNK_2, LANGUAGE_MOCK } from './mock_data';
-jest.mock('~/blob/line_highlighter');
+const lineHighlighter = new LineHighlighter();
+jest.mock('~/blob/line_highlighter', () =>
+ jest.fn().mockReturnValue({
+ highlightHash: jest.fn(),
+ }),
+);
jest.mock('~/blob/blob_links_tracking');
describe('Source Viewer component', () => {
let wrapper;
const CHUNKS_MOCK = [CHUNK_1, CHUNK_2];
+ const hash = '#L142';
const createComponent = () => {
wrapper = shallowMountExtended(SourceViewer, {
+ mocks: { $route: { hash } },
propsData: { blob: BLOB_DATA_MOCK, chunks: CHUNKS_MOCK },
});
};
@@ -48,4 +55,10 @@ describe('Source Viewer component', () => {
expect(findChunks().at(1).props()).toMatchObject(CHUNK_2);
});
});
+
+ describe('hash highlighting', () => {
+ it('calls highlightHash with expected parameter', () => {
+ expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index 6b1d65c5a6a..a486d13a856 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -1,6 +1,8 @@
import hljs from 'highlight.js/lib/core';
import Vue from 'vue';
import VueRouter from 'vue-router';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
@@ -14,7 +16,9 @@ import {
LEGACY_FALLBACKS,
CODEOWNERS_FILE_NAME,
CODEOWNERS_LANGUAGE,
+ SVELTE_LANGUAGE,
} from '~/vue_shared/components/source_viewer/constants';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import waitForPromises from 'helpers/wait_for_promises';
import LineHighlighter from '~/blob/line_highlighter';
import eventHub from '~/notes/event_hub';
@@ -25,6 +29,7 @@ jest.mock('highlight.js/lib/core');
jest.mock('~/vue_shared/components/source_viewer/plugins/index');
Vue.use(VueRouter);
const router = new VueRouter();
+const mockAxios = new MockAdapter(axios);
const generateContent = (content, totalLines = 1, delimiter = '\n') => {
let generatedContent = '';
@@ -71,6 +76,42 @@ describe('Source Viewer component', () => {
return createComponent();
});
+ describe('Displaying LFS blob', () => {
+ const rawPath = '/org/project/-/raw/file.xml';
+ const externalStorageUrl = 'http://127.0.0.1:9000/lfs-objects/91/12/1341234';
+ const rawTextBlob = 'This is the external content';
+ const blob = {
+ storedExternally: true,
+ externalStorage: 'lfs',
+ simpleViewer: { fileType: 'text' },
+ rawPath,
+ };
+
+ afterEach(() => {
+ mockAxios.reset();
+ });
+
+ it('Uses externalStorageUrl to fetch content if present', async () => {
+ mockAxios.onGet(externalStorageUrl).replyOnce(HTTP_STATUS_OK, rawTextBlob);
+
+ await createComponent({ ...blob, externalStorageUrl });
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toBe(externalStorageUrl);
+ expect(wrapper.vm.$data.content).toBe(rawTextBlob);
+ });
+
+ it('Falls back to rawPath to fetch content', async () => {
+ mockAxios.onGet(rawPath).replyOnce(HTTP_STATUS_OK, rawTextBlob);
+
+ await createComponent(blob);
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toBe(rawPath);
+ expect(wrapper.vm.$data.content).toBe(rawTextBlob);
+ });
+ });
+
describe('event tracking', () => {
it('fires a tracking event when the component is created', () => {
const eventData = { label: EVENT_LABEL_VIEWER, property: language };
@@ -120,6 +161,33 @@ describe('Source Viewer component', () => {
);
});
+ describe('sub-languages', () => {
+ const languageDefinition = {
+ subLanguage: 'xml',
+ contains: [{ subLanguage: 'javascript' }, { subLanguage: 'typescript' }],
+ };
+
+ beforeEach(async () => {
+ jest.spyOn(hljs, 'getLanguage').mockReturnValue(languageDefinition);
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('registers the primary sub-language', () => {
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(
+ languageDefinition.subLanguage,
+ expect.any(Function),
+ );
+ });
+
+ it.each(languageDefinition.contains)(
+ 'registers the rest of the sub-languages',
+ ({ subLanguage }) => {
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(subLanguage, expect.any(Function));
+ },
+ );
+ });
+
it('registers json language definition if fileType is package_json', async () => {
await createComponent({ language: 'json', fileType: 'package_json' });
const languageDefinition = await import(`highlight.js/lib/languages/json`);
@@ -146,6 +214,18 @@ describe('Source Viewer component', () => {
);
});
+ it('registers svelte language definition if file name ends with .svelte', async () => {
+ await createComponent({ name: `component.${SVELTE_LANGUAGE}` });
+ const languageDefinition = await import(
+ '~/vue_shared/components/source_viewer/languages/svelte'
+ );
+
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(
+ SVELTE_LANGUAGE,
+ languageDefinition.default,
+ );
+ });
+
it('highlights the first chunk', () => {
expect(hljs.highlight).toHaveBeenCalledWith(chunk1.trim(), { language: mappedLanguage });
expect(findChunks().at(0).props('isFirstChunk')).toBe(true);
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index b6c22ceaa23..56d89d428f7 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -1,15 +1,20 @@
-import { GlModal } from '@gitlab/ui';
+import { GlModal, GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
+import { omit } from 'lodash';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import getWritableForksResponse from 'test_fixtures/graphql/vue_shared/components/web_ide/get_writable_forks.query.graphql_none.json';
-import ActionsButton from '~/vue_shared/components/actions_button.vue';
import WebIdeLink, { i18n } from '~/vue_shared/components/web_ide_link.vue';
import ConfirmForkModal from '~/vue_shared/components/web_ide/confirm_fork_modal.vue';
-import { stubComponent } from 'helpers/stub_component';
-import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { stubComponent } from 'helpers/stub_component';
+import { mockTracking } from 'helpers/tracking_helper';
+import {
+ shallowMountExtended,
+ mountExtended,
+ extendedWrapper,
+} from 'helpers/vue_test_utils_helper';
import { visitUrl } from '~/lib/utils/url_utility';
import getWritableForksQuery from '~/vue_shared/components/web_ide/get_writable_forks.query.graphql';
@@ -26,13 +31,15 @@ const forkPath = '/some/fork/path';
const ACTION_EDIT = {
href: TEST_EDIT_URL,
- key: 'edit',
+ handle: undefined,
text: 'Edit single file',
secondaryText: 'Edit this file only.',
attrs: {
- 'data-qa-selector': 'edit_button',
- 'data-track-action': 'click_consolidated_edit',
- 'data-track-label': 'edit',
+ 'data-qa-selector': 'edit_menu_item',
+ },
+ tracking: {
+ action: 'click_consolidated_edit',
+ label: 'single_file',
},
};
const ACTION_EDIT_CONFIRM_FORK = {
@@ -41,15 +48,17 @@ const ACTION_EDIT_CONFIRM_FORK = {
handle: expect.any(Function),
};
const ACTION_WEB_IDE = {
- key: 'webide',
secondaryText: i18n.webIdeText,
text: 'Web IDE',
attrs: {
- 'data-qa-selector': 'web_ide_button',
- 'data-track-action': 'click_consolidated_edit_ide',
- 'data-track-label': 'web_ide',
+ 'data-qa-selector': 'webide_menu_item',
},
+ href: undefined,
handle: expect.any(Function),
+ tracking: {
+ action: 'click_consolidated_edit',
+ label: 'web_ide',
+ },
};
const ACTION_WEB_IDE_CONFIRM_FORK = {
...ACTION_WEB_IDE,
@@ -58,11 +67,15 @@ const ACTION_WEB_IDE_CONFIRM_FORK = {
const ACTION_WEB_IDE_EDIT_FORK = { ...ACTION_WEB_IDE, text: 'Edit fork in Web IDE' };
const ACTION_GITPOD = {
href: TEST_GITPOD_URL,
- key: 'gitpod',
+ handle: undefined,
secondaryText: 'Launch a ready-to-code development environment for your project.',
text: 'Gitpod',
attrs: {
- 'data-qa-selector': 'gitpod_button',
+ 'data-qa-selector': 'gitpod_menu_item',
+ },
+ tracking: {
+ action: 'click_consolidated_edit',
+ label: 'gitpod',
},
};
const ACTION_GITPOD_ENABLE = {
@@ -72,11 +85,14 @@ const ACTION_GITPOD_ENABLE = {
};
const ACTION_PIPELINE_EDITOR = {
href: TEST_PIPELINE_EDITOR_URL,
- key: 'pipeline_editor',
secondaryText: 'Edit, lint, and visualize your pipeline.',
text: 'Edit in pipeline editor',
attrs: {
- 'data-qa-selector': 'pipeline_editor_button',
+ 'data-qa-selector': 'pipeline_editor_menu_item',
+ },
+ tracking: {
+ action: 'click_consolidated_edit',
+ label: 'pipeline_editor',
},
};
@@ -84,6 +100,7 @@ describe('vue_shared/components/web_ide_link', () => {
Vue.use(VueApollo);
let wrapper;
+ let trackingSpy;
function createComponent(props, { mountFn = shallowMountExtended, slots = {} } = {}) {
const fakeApollo = createMockApollo([
@@ -108,16 +125,37 @@ describe('vue_shared/components/web_ide_link', () => {
<slot name="modal-footer"></slot>
</div>`,
}),
+ GlDisclosureDropdownItem,
},
apolloProvider: fakeApollo,
});
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
}
- const findActionsButton = () => wrapper.findComponent(ActionsButton);
+ const findDisclosureDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findDisclosureDropdownItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
const findModal = () => wrapper.findComponent(GlModal);
const findForkConfirmModal = () => wrapper.findComponent(ConfirmForkModal);
+ const getDropdownItemsAsData = () =>
+ findDisclosureDropdownItems().wrappers.map((item) => {
+ const extendedWrapperItem = extendedWrapper(item);
+ const attributes = extendedWrapperItem.attributes();
+ const props = extendedWrapperItem.props();
+
+ return {
+ text: extendedWrapperItem.findByTestId('action-primary-text').text(),
+ secondaryText: extendedWrapperItem.findByTestId('action-secondary-text').text(),
+ href: props.item.href,
+ handle: props.item.handle,
+ attrs: {
+ 'data-qa-selector': attributes['data-qa-selector'],
+ },
+ };
+ });
+ const omitTrackingParams = (actions) => actions.map((action) => omit(action, 'tracking'));
- it.each([
+ describe.each([
{
props: {},
expectedActions: [ACTION_WEB_IDE, ACTION_EDIT],
@@ -207,10 +245,27 @@ describe('vue_shared/components/web_ide_link', () => {
props: { showEditButton: false },
expectedActions: [ACTION_WEB_IDE],
},
- ])('renders actions with appropriately for given props', ({ props, expectedActions }) => {
- createComponent(props);
+ ])('for a set of props', ({ props, expectedActions }) => {
+ beforeEach(() => {
+ createComponent(props);
+ });
+
+ it('renders the appropriate actions', () => {
+ // omit tracking property because it is not included in the dropdown item
+ expect(getDropdownItemsAsData()).toEqual(omitTrackingParams(expectedActions));
+ });
+
+ describe('when an action is clicked', () => {
+ it('tracks event', () => {
+ expectedActions.forEach((action, index) => {
+ findDisclosureDropdownItems().at(index).vm.$emit('action');
- expect(findActionsButton().props('actions')).toEqual(expectedActions);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, action.tracking.action, {
+ label: action.tracking.label,
+ });
+ });
+ });
+ });
});
it('bubbles up shown and hidden events triggered by actions button component', () => {
@@ -219,17 +274,17 @@ describe('vue_shared/components/web_ide_link', () => {
expect(wrapper.emitted('shown')).toBe(undefined);
expect(wrapper.emitted('hidden')).toBe(undefined);
- findActionsButton().vm.$emit('shown');
- findActionsButton().vm.$emit('hidden');
+ findDisclosureDropdown().vm.$emit('shown');
+ findDisclosureDropdown().vm.$emit('hidden');
expect(wrapper.emitted('shown')).toHaveLength(1);
expect(wrapper.emitted('hidden')).toHaveLength(1);
});
- it('exposes a default slot', () => {
- const slotContent = 'default slot content';
+ it.each(['before-actions', 'after-actions'])('exposes a %s slot', (slot) => {
+ const slotContent = 'slot content';
- createComponent({}, { slots: { default: slotContent } });
+ createComponent({}, { slots: { [slot]: slotContent } });
expect(wrapper.text()).toContain(slotContent);
});
@@ -248,13 +303,13 @@ describe('vue_shared/components/web_ide_link', () => {
});
it('displays Pipeline Editor as the first action', () => {
- expect(findActionsButton().props()).toMatchObject({
- actions: [ACTION_PIPELINE_EDITOR, ACTION_WEB_IDE, ACTION_GITPOD],
- });
+ expect(getDropdownItemsAsData()).toEqual(
+ omitTrackingParams([ACTION_PIPELINE_EDITOR, ACTION_WEB_IDE, ACTION_GITPOD]),
+ );
});
it('when web ide button is clicked it opens in a new tab', async () => {
- findActionsButton().props('actions')[1].handle();
+ findDisclosureDropdownItems().at(1).props().item.handle();
await nextTick();
expect(visitUrl).toHaveBeenCalledWith(TEST_WEB_IDE_URL, true);
});
@@ -289,7 +344,7 @@ describe('vue_shared/components/web_ide_link', () => {
({ props, expectedEventPayload }) => {
createComponent({ ...props, needsToFork: true, disableForkModal: true });
- findActionsButton().props('actions')[0].handle();
+ findDisclosureDropdownItems().at(0).props().item.handle();
expect(wrapper.emitted('edit')).toEqual([[expectedEventPayload]]);
},
@@ -309,7 +364,7 @@ describe('vue_shared/components/web_ide_link', () => {
it.each(testActions)('opens the modal when the button is clicked', async ({ props }) => {
createComponent({ ...props, needsToFork: true }, { mountFn: mountExtended });
- wrapper.findComponent(ActionsButton).props().actions[0].handle();
+ findDisclosureDropdownItems().at(0).props().item.handle();
await nextTick();
await wrapper.findByRole('button', { name: /Web IDE|Edit/im }).trigger('click');
diff --git a/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
index e983519d9fc..03f509a3fa3 100644
--- a/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
@@ -1,8 +1,13 @@
import { mount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import IssuableCreateRoot from '~/vue_shared/issuable/create/components/issuable_create_root.vue';
import IssuableForm from '~/vue_shared/issuable/create/components/issuable_form.vue';
+Vue.use(VueApollo);
+
const createComponent = ({
descriptionPreviewPath = '/gitlab-org/gitlab-shell/preview_markdown',
descriptionHelpPath = '/help/user/markdown',
@@ -16,6 +21,7 @@ const createComponent = ({
labelsFetchPath,
labelsManagePath,
},
+ apolloProvider: createMockApollo(),
slots: {
title: `
<h1 class="js-create-title">New Issuable</h1>
diff --git a/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
index ae2fd5ebffa..338dc80b43e 100644
--- a/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
+++ b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
@@ -2,8 +2,9 @@ import { GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import IssuableForm from '~/vue_shared/issuable/create/components/issuable_form.vue';
-import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
import LabelsSelect from '~/sidebar/components/labels/labels_select_vue/labels_select_root.vue';
+import { __ } from '~/locale';
const createComponent = ({
descriptionPreviewPath = '/gitlab-org/gitlab-shell/preview_markdown',
@@ -24,7 +25,7 @@ const createComponent = ({
`,
},
stubs: {
- MarkdownField,
+ MarkdownEditor,
},
});
};
@@ -71,18 +72,20 @@ describe('IssuableForm', () => {
expect(descriptionFieldEl.exists()).toBe(true);
expect(descriptionFieldEl.find('label').text()).toBe('Description');
- expect(descriptionFieldEl.findComponent(MarkdownField).exists()).toBe(true);
- expect(descriptionFieldEl.findComponent(MarkdownField).props()).toMatchObject({
- markdownPreviewPath: wrapper.vm.descriptionPreviewPath,
+ expect(descriptionFieldEl.findComponent(MarkdownEditor).exists()).toBe(true);
+ expect(descriptionFieldEl.findComponent(MarkdownEditor).props()).toMatchObject({
+ renderMarkdownPath: wrapper.vm.descriptionPreviewPath,
markdownDocsPath: wrapper.vm.descriptionHelpPath,
- addSpacingClasses: false,
- showSuggestPopover: true,
- textareaValue: '',
+ value: '',
+ formFieldProps: {
+ ariaLabel: __('Description'),
+ class: 'rspec-issuable-form-description',
+ placeholder: __('Write a comment or drag your files here…'),
+ dataQaSelector: 'issuable_form_description_field',
+ id: 'issuable-description',
+ name: 'issuable-description',
+ },
});
- expect(descriptionFieldEl.find('textarea').exists()).toBe(true);
- expect(descriptionFieldEl.find('textarea').attributes('placeholder')).toBe(
- 'Write a comment or drag your files here…',
- );
});
it('renders labels select field', () => {
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
index 502fa609ebc..77333a878d1 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
@@ -15,6 +15,8 @@ const createComponent = ({
showCheckbox = true,
slots = {},
showWorkItemTypeIcon = false,
+ isActive = false,
+ preventRedirect = false,
} = {}) =>
shallowMount(IssuableItem, {
propsData: {
@@ -24,6 +26,8 @@ const createComponent = ({
showDiscussions: true,
showCheckbox,
showWorkItemTypeIcon,
+ isActive,
+ preventRedirect,
},
slots,
stubs: {
@@ -43,6 +47,8 @@ describe('IssuableItem', () => {
const findTimestampWrapper = () => wrapper.find('[data-testid="issuable-timestamp"]');
const findWorkItemTypeIcon = () => wrapper.findComponent(WorkItemTypeIcon);
+ const findIssuableTitleLink = () => wrapper.findComponentByTestId('issuable-title-link');
+ const findIssuableItemWrapper = () => wrapper.findByTestId('issuable-item-wrapper');
beforeEach(() => {
gon.gitlab_url = MOCK_GITLAB_URL;
@@ -553,4 +559,35 @@ describe('IssuableItem', () => {
});
});
});
+
+ describe('when preventing redirect on clicking the link', () => {
+ it('emits an event on item click', () => {
+ const { iid, webUrl } = mockIssuable;
+
+ wrapper = createComponent({
+ preventRedirect: true,
+ });
+
+ findIssuableTitleLink().vm.$emit('click', new MouseEvent('click'));
+
+ expect(wrapper.emitted('select-issuable')).toEqual([[{ iid, webUrl }]]);
+ });
+
+ it('does not apply highlighted class when item is not active', () => {
+ wrapper = createComponent({
+ preventRedirect: true,
+ });
+
+ expect(findIssuableItemWrapper().classes('gl-bg-blue-50')).toBe(false);
+ });
+
+ it('applies highlighted class when item is active', () => {
+ wrapper = createComponent({
+ isActive: true,
+ preventRedirect: true,
+ });
+
+ expect(findIssuableItemWrapper().classes('gl-bg-blue-50')).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
index 68904603f40..51aae9b4512 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
@@ -530,4 +530,28 @@ describe('IssuableListRoot', () => {
expect(findIssuableGrid().exists()).toBe(true);
});
});
+
+ it('passes `isActive` prop as false if there is no active issuable', () => {
+ wrapper = createComponent({});
+
+ expect(findIssuableItem().props('isActive')).toBe(false);
+ });
+
+ it('passes `isActive` prop as true if active issuable matches issuable item', () => {
+ wrapper = createComponent({
+ props: {
+ activeIssuable: mockIssuableListProps.issuables[0],
+ },
+ });
+
+ expect(findIssuableItem().props('isActive')).toBe(true);
+ });
+
+ it('emits `select-issuable` event on emitting `select-issuable` from issuable item', () => {
+ const mockIssuable = mockIssuableListProps.issuables[0];
+ wrapper = createComponent({});
+ findIssuableItem().vm.$emit('select-issuable', mockIssuable);
+
+ expect(wrapper.emitted('select-issuable')).toEqual([[mockIssuable]]);
+ });
});
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
index d2b7b2e89c8..4d08ad54e58 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
@@ -1,195 +1,289 @@
-import { GlButton, GlBadge, GlIcon, GlAvatarLabeled, GlAvatarLink } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { TYPE_ISSUE, WORKSPACE_PROJECT } from '~/issues/constants';
+import { GlBadge, GlButton, GlIcon, GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import {
+ STATUS_CLOSED,
+ STATUS_OPEN,
+ STATUS_REOPENED,
+ TYPE_ISSUE,
+ WORKSPACE_PROJECT,
+} from '~/issues/constants';
+import { __ } from '~/locale';
import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import IssuableHeader from '~/vue_shared/issuable/show/components/issuable_header.vue';
-import { mockIssuableShowProps, mockIssuable } from '../mock_data';
+import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
+import { mockIssuable, mockIssuableShowProps } from '../mock_data';
-const issuableHeaderProps = {
- ...mockIssuable,
- ...mockIssuableShowProps,
- issuableType: TYPE_ISSUE,
- workspaceType: WORKSPACE_PROJECT,
-};
-
-describe('IssuableHeader', () => {
+describe('IssuableHeader component', () => {
let wrapper;
- const findAvatar = () => wrapper.findByTestId('avatar');
- const findTaskStatusEl = () => wrapper.findByTestId('task-status');
- const findButton = () => wrapper.findComponent(GlButton);
- const findGlAvatarLink = () => wrapper.findComponent(GlAvatarLink);
+ const findConfidentialityBadge = () => wrapper.findComponent(ConfidentialityBadge);
+ const findStatusBadge = () => wrapper.findComponent(GlBadge);
+ const findToggleButton = () => wrapper.findComponent(GlButton);
+ const findAuthorLink = () => wrapper.findComponent(GlLink);
+ const findTimeAgoTooltip = () => wrapper.findComponent(TimeAgoTooltip);
+ const findWorkItemTypeIcon = () => wrapper.findComponent(WorkItemTypeIcon);
+ const findGlIconWithName = (name) =>
+ wrapper.findAllComponents(GlIcon).filter((component) => component.props('name') === name);
+ const findIcon = (name) =>
+ findGlIconWithName(name).exists() ? findGlIconWithName(name).at(0) : undefined;
+ const findBlockedIcon = () => findIcon('lock');
+ const findHiddenIcon = () => findIcon('spam');
+ const findExternalLinkIcon = () => findIcon('external-link');
+ const findFirstContributionIcon = () => findIcon('first-contribution');
+ const findComponentTooltip = (component) => getBinding(component.element, 'gl-tooltip');
const createComponent = (props = {}, { stubs } = {}) => {
- wrapper = shallowMountExtended(IssuableHeader, {
+ wrapper = shallowMount(IssuableHeader, {
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
propsData: {
- ...issuableHeaderProps,
+ ...mockIssuable,
+ ...mockIssuableShowProps,
+ issuableState: STATUS_OPEN,
+ issuableType: TYPE_ISSUE,
+ workspaceType: WORKSPACE_PROJECT,
...props,
},
slots: {
- 'status-badge': 'Open',
- 'header-actions': `
- <button class="js-close">Close issuable</button>
- <a class="js-new" href="/gitlab-org/gitlab-shell/-/issues/new">New issuable</a>
- `,
+ 'header-actions': `Header actions slot`,
+ },
+ stubs: {
+ GlSprintf,
+ ...stubs,
},
- stubs,
});
};
- afterEach(() => {
- resetHTMLFixture();
- });
+ describe('status badge', () => {
+ describe('variant', () => {
+ it('is `success` when status is open', () => {
+ createComponent({ issuableState: STATUS_OPEN });
- describe('computed', () => {
- describe('authorId', () => {
- it('returns numeric ID from GraphQL ID of `author` prop', () => {
- createComponent();
- expect(findGlAvatarLink().attributes('data-user-id')).toBe('1');
+ expect(findStatusBadge().props('variant')).toBe('success');
+ });
+
+ it('is `success` when status is reopened', () => {
+ createComponent({ issuableState: STATUS_REOPENED });
+
+ expect(findStatusBadge().props('variant')).toBe('success');
+ });
+
+ it('is `info` when status is closed', () => {
+ createComponent({ issuableState: STATUS_CLOSED });
+
+ expect(findStatusBadge().props('variant')).toBe('info');
});
});
- });
- describe('handleRightSidebarToggleClick', () => {
- beforeEach(() => {
- setHTMLFixture('<button class="js-toggle-right-sidebar-button">Collapse sidebar</button>');
+ describe('icon', () => {
+ it('renders when statusIcon prop exists', () => {
+ createComponent({ statusIcon: 'issues' });
+
+ expect(findStatusBadge().findComponent(GlIcon).props('name')).toBe('issues');
+ });
+
+ it('does not render when statusIcon prop does not exist', () => {
+ createComponent({ statusIcon: '' });
+
+ expect(findStatusBadge().findComponent(GlIcon).exists()).toBe(false);
+ });
});
- it('emits a "toggle" event', () => {
+ it('renders status text', () => {
createComponent();
- findButton().vm.$emit('click');
+ expect(findStatusBadge().text()).toBe(__('Open'));
+ });
+ });
+
+ describe('confidential badge', () => {
+ it('renders when issuable is confidential', () => {
+ createComponent({ confidential: true });
+
+ expect(findConfidentialityBadge().props()).toEqual({
+ issuableType: 'issue',
+ workspaceType: 'project',
+ });
+ });
+
+ it('does not render when issuable is not confidential', () => {
+ createComponent({ confidential: false });
- expect(wrapper.emitted('toggle')).toEqual([[]]);
+ expect(findConfidentialityBadge().exists()).toBe(false);
});
+ });
- it('dispatches `click` event on sidebar toggle button', () => {
- createComponent();
- const toggleSidebarButtonEl = document.querySelector('.js-toggle-right-sidebar-button');
- const dispatchEvent = jest
- .spyOn(toggleSidebarButtonEl, 'dispatchEvent')
- .mockImplementation(jest.fn);
+ describe('blocked icon', () => {
+ it('renders when issuable is blocked', () => {
+ createComponent({ blocked: true });
- findButton().vm.$emit('click');
+ expect(findBlockedIcon().props('ariaLabel')).toBe('Blocked');
+ });
- expect(dispatchEvent).toHaveBeenCalledWith(
- expect.objectContaining({
- type: 'click',
- }),
+ it('has tooltip', () => {
+ createComponent({ blocked: true });
+
+ expect(findComponentTooltip(findBlockedIcon())).toBeDefined();
+ expect(findBlockedIcon().attributes('title')).toBe(
+ 'This issue is locked. Only project members can comment.',
);
});
+
+ it('does not render when issuable is not blocked', () => {
+ createComponent({ blocked: false });
+
+ expect(findBlockedIcon()).toBeUndefined();
+ });
});
- describe('template', () => {
- it('renders issuable status icon and text', () => {
- createComponent();
- const statusBoxEl = wrapper.findComponent(GlBadge);
- const statusIconEl = statusBoxEl.findComponent(GlIcon);
+ describe('hidden icon', () => {
+ it('renders when issuable is hidden', () => {
+ createComponent({ isHidden: true });
- expect(statusBoxEl.exists()).toBe(true);
- expect(statusIconEl.props('name')).toBe(mockIssuableShowProps.statusIcon);
- expect(statusIconEl.attributes('class')).toBe(mockIssuableShowProps.statusIconClass);
- expect(statusBoxEl.text()).toContain('Open');
+ expect(findHiddenIcon().props('ariaLabel')).toBe('Hidden');
});
- it('renders blocked icon when issuable is blocked', () => {
- createComponent({
- blocked: true,
- });
+ it('has tooltip', () => {
+ createComponent({ isHidden: true });
- const blockedEl = wrapper.findByTestId('blocked');
+ expect(findComponentTooltip(findHiddenIcon())).toBeDefined();
+ expect(findHiddenIcon().attributes('title')).toBe(
+ 'This issue is hidden because its author has been banned',
+ );
+ });
- expect(blockedEl.exists()).toBe(true);
- expect(blockedEl.findComponent(GlIcon).props('name')).toBe('lock');
+ it('does not render when issuable is not hidden', () => {
+ createComponent({ isHidden: false });
+
+ expect(findHiddenIcon()).toBeUndefined();
});
+ });
- it('renders confidential icon when issuable is confidential', () => {
- createComponent({ confidential: true });
+ describe('work item type icon', () => {
+ it('renders when showWorkItemTypeIcon=true and work item type exists', () => {
+ createComponent({ showWorkItemTypeIcon: true, issuableType: 'issue' });
- expect(wrapper.findComponent(ConfidentialityBadge).props()).toEqual({
- issuableType: 'issue',
- workspaceType: 'project',
+ expect(findWorkItemTypeIcon().props()).toMatchObject({
+ showText: true,
+ workItemType: 'ISSUE',
});
});
- it('renders issuable author avatar', () => {
+ it('does not render when showWorkItemTypeIcon=false', () => {
+ createComponent({ showWorkItemTypeIcon: false });
+
+ expect(findWorkItemTypeIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('timeago tooltip', () => {
+ it('renders', () => {
createComponent();
- const { username, name, webUrl, avatarUrl } = mockIssuable.author;
- const avatarElAttrs = {
+
+ expect(findTimeAgoTooltip().props('time')).toBe('2020-06-29T13:52:56Z');
+ });
+ });
+
+ describe('author', () => {
+ it('renders link', () => {
+ createComponent();
+
+ expect(findAuthorLink().text()).toContain('Administrator');
+ expect(findAuthorLink().attributes()).toMatchObject({
+ href: 'http://0.0.0.0:3000/root',
'data-user-id': '1',
- 'data-username': username,
- 'data-name': name,
- href: webUrl,
- target: '_blank',
- };
- const avatarEl = findAvatar();
- expect(avatarEl.exists()).toBe(true);
- expect(avatarEl.attributes()).toMatchObject(avatarElAttrs);
- expect(avatarEl.findComponent(GlAvatarLabeled).attributes()).toMatchObject({
- size: '24',
- src: avatarUrl,
- label: name,
});
- expect(avatarEl.findComponent(GlAvatarLabeled).findComponent(GlIcon).exists()).toBe(false);
+ expect(findAuthorLink().classes()).toContain('js-user-link');
+ });
+
+ describe('when author exists outside of GitLab', () => {
+ it('renders external link icon', () => {
+ createComponent({ author: { webUrl: 'https://example.com/test-user' } });
+
+ expect(findExternalLinkIcon().props('ariaLabel')).toBe('external link');
+ });
+ });
+ });
+
+ describe('first contribution icon', () => {
+ it('renders when isFirstContribution=true', () => {
+ createComponent({ isFirstContribution: true });
+
+ expect(findFirstContributionIcon().props('ariaLabel')).toBe('1st contribution!');
+ });
+
+ it('has tooltip', () => {
+ createComponent({ isFirstContribution: true });
+
+ expect(findComponentTooltip(findFirstContributionIcon())).toBeDefined();
+ expect(findFirstContributionIcon().attributes('title')).toBe('1st contribution!');
});
+ it('does not render when isFirstContribution=false', () => {
+ createComponent({ isFirstContribution: false });
+
+ expect(findFirstContributionIcon()).toBeUndefined();
+ });
+ });
+
+ describe('task status', () => {
it('renders task status text when `taskCompletionStatus` prop is defined', () => {
createComponent();
- expect(findTaskStatusEl().exists()).toBe(true);
- expect(findTaskStatusEl().text()).toContain('0 of 5 checklist items completed');
+ expect(wrapper.text()).toContain('0 of 5 checklist items completed');
});
it('does not render task status text when tasks count is 0', () => {
- createComponent({
- taskCompletionStatus: {
- count: 0,
- completedCount: 0,
- },
- });
+ createComponent({ taskCompletionStatus: { count: 0, completedCount: 0 } });
- expect(findTaskStatusEl().exists()).toBe(false);
+ expect(wrapper.text()).not.toContain('checklist item');
});
+ });
- it('renders sidebar toggle button', () => {
+ describe('sidebar toggle button', () => {
+ beforeEach(() => {
+ setHTMLFixture('<button class="js-toggle-right-sidebar-button">Collapse sidebar</button>');
createComponent();
- const toggleButtonEl = wrapper.findByTestId('sidebar-toggle');
-
- expect(toggleButtonEl.exists()).toBe(true);
- expect(toggleButtonEl.props('icon')).toBe('chevron-double-lg-left');
});
- it('renders header actions', () => {
- createComponent();
- const actionsEl = wrapper.findByTestId('header-actions');
+ afterEach(() => {
+ resetHTMLFixture();
+ });
- expect(actionsEl.find('button.js-close').exists()).toBe(true);
- expect(actionsEl.find('a.js-new').exists()).toBe(true);
+ it('renders', () => {
+ expect(findToggleButton().props('icon')).toBe('chevron-double-lg-left');
+ expect(findToggleButton().attributes('aria-label')).toBe('Expand sidebar');
});
- describe('when author exists outside of GitLab', () => {
- it("renders 'external-link' icon in avatar label", () => {
- createComponent(
- {
- author: {
- ...issuableHeaderProps.author,
- webUrl: 'https://jira.com/test-user/author.jpg',
- },
- },
- {
- stubs: {
- GlAvatarLabeled,
- },
- },
- );
-
- const avatarEl = wrapper.findComponent(GlAvatarLabeled);
- const icon = avatarEl.findComponent(GlIcon);
-
- expect(icon.exists()).toBe(true);
- expect(icon.props('name')).toBe('external-link');
+ describe('when clicked', () => {
+ it('emits a "toggle" event', () => {
+ findToggleButton().vm.$emit('click');
+
+ expect(wrapper.emitted('toggle')).toEqual([[]]);
+ });
+
+ it('dispatches `click` event on sidebar toggle button', () => {
+ const toggleSidebarButton = document.querySelector('.js-toggle-right-sidebar-button');
+ const dispatchEvent = jest
+ .spyOn(toggleSidebarButton, 'dispatchEvent')
+ .mockImplementation(jest.fn);
+
+ findToggleButton().vm.$emit('click');
+
+ expect(dispatchEvent).toHaveBeenCalledWith(expect.objectContaining({ type: 'click' }));
});
});
});
+
+ describe('header actions', () => {
+ it('renders slot', () => {
+ createComponent();
+
+ expect(wrapper.text()).toContain('Header actions slot');
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
index f976e0499f0..ad7afefff12 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
@@ -1,3 +1,4 @@
+import { GlBadge } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import IssuableBody from '~/vue_shared/issuable/show/components/issuable_body.vue';
@@ -72,7 +73,7 @@ describe('IssuableShowRoot', () => {
author,
taskCompletionStatus,
});
- expect(issuableHeader.find('.issuable-status-badge').text()).toContain('Open');
+ expect(issuableHeader.findComponent(GlBadge).text()).toBe('Open');
expect(issuableHeader.find('.detail-page-header-actions button.js-close').exists()).toBe(
true,
);
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index b74473b5494..c0954ac1133 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -1,6 +1,7 @@
import { GlDrawer, GlInfiniteScroll } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
diff --git a/spec/frontend/whats_new/components/feature_spec.js b/spec/frontend/whats_new/components/feature_spec.js
index d69ac2803df..a51bb507285 100644
--- a/spec/frontend/whats_new/components/feature_spec.js
+++ b/spec/frontend/whats_new/components/feature_spec.js
@@ -1,5 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import timezoneMock from 'timezone-mock';
import Feature from '~/whats_new/components/feature.vue';
+import { DOCS_URL_IN_EE_DIR } from 'jh_else_ce/lib/utils/url_utility';
describe("What's new single feature", () => {
/** @type {import("@vue/test-utils").Wrapper} */
@@ -13,10 +15,9 @@ describe("What's new single feature", () => {
'self-managed': true,
'gitlab-com': true,
available_in: ['Ultimate'],
- documentation_link:
- 'https://docs.gitlab.com/ee/user/project/settings/#compliance-pipeline-configuration',
+ documentation_link: `${DOCS_URL_IN_EE_DIR}/user/project/settings/#compliance-pipeline-configuration`,
image_url: 'https://img.youtube.com/vi/upLJ_equomw/hqdefault.jpg',
- published_at: '2021-04-22T00:00:00.000Z',
+ published_at: '2021-04-22',
release: '13.11',
};
@@ -53,6 +54,22 @@ describe("What's new single feature", () => {
});
});
+ describe('when the user is in a time zone West of UTC', () => {
+ beforeEach(() => {
+ timezoneMock.register('US/Pacific');
+ });
+
+ afterEach(() => {
+ timezoneMock.unregister();
+ });
+
+ it('renders the date', () => {
+ createWrapper({ feature: exampleFeature });
+
+ expect(findReleaseDate().text()).toBe('April 22, 2021');
+ });
+ });
+
describe('when image_url is null', () => {
it('does not render image link', () => {
createWrapper({ feature: { ...exampleFeature, image_url: null } });
diff --git a/spec/frontend/work_items/components/item_state_spec.js b/spec/frontend/work_items/components/item_state_spec.js
deleted file mode 100644
index c3bdbfe030e..00000000000
--- a/spec/frontend/work_items/components/item_state_spec.js
+++ /dev/null
@@ -1,66 +0,0 @@
-import { GlFormSelect } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import { STATE_OPEN, STATE_CLOSED } from '~/work_items/constants';
-import ItemState from '~/work_items/components/item_state.vue';
-
-describe('ItemState', () => {
- let wrapper;
-
- const findLabel = () => wrapper.find('label').text();
- const findFormSelect = () => wrapper.findComponent(GlFormSelect);
- const selectedValue = () => wrapper.find('option:checked').element.value;
-
- const clickOpen = () => wrapper.findAll('option').at(0).setSelected();
-
- const createComponent = ({ state = STATE_OPEN, disabled = false } = {}) => {
- wrapper = mount(ItemState, {
- propsData: {
- state,
- disabled,
- },
- });
- };
-
- it('renders label and dropdown', () => {
- createComponent();
-
- expect(findLabel()).toBe('Status');
- expect(selectedValue()).toBe(STATE_OPEN);
- });
-
- it('renders dropdown for closed', () => {
- createComponent({ state: STATE_CLOSED });
-
- expect(selectedValue()).toBe(STATE_CLOSED);
- });
-
- it('emits changed event', async () => {
- createComponent({ state: STATE_CLOSED });
-
- await clickOpen();
-
- expect(wrapper.emitted('changed')).toEqual([[STATE_OPEN]]);
- });
-
- it('does not emits changed event if clicking selected value', async () => {
- createComponent({ state: STATE_OPEN });
-
- await clickOpen();
-
- expect(wrapper.emitted('changed')).toBeUndefined();
- });
-
- describe('form select disabled prop', () => {
- describe.each`
- description | disabled | value
- ${'when not disabled'} | ${false} | ${undefined}
- ${'when disabled'} | ${true} | ${'disabled'}
- `('$description', ({ disabled, value }) => {
- it(`renders form select component with disabled=${value}`, () => {
- createComponent({ disabled });
-
- expect(findFormSelect().attributes('disabled')).toBe(value);
- });
- });
- });
-});
diff --git a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
index e6d20dcb0d9..4b1b7b27ad9 100644
--- a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
@@ -247,6 +247,14 @@ describe('Work item add note', () => {
expect(clearDraft).toHaveBeenCalledWith('gid://gitlab/WorkItem/1-comment');
});
+
+ it('emits error to parent when the comment form emits error', async () => {
+ await createComponent({ isEditing: true, signedIn: true });
+ const error = 'error';
+ findCommentForm().vm.$emit('error', error);
+
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
});
});
diff --git a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
index 6c00d52aac5..dd88f34ae4f 100644
--- a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
@@ -6,18 +6,11 @@ import { createMockDirective } from 'helpers/vue_mock_directive';
import waitForPromises from 'helpers/wait_for_promises';
import * as autosave from '~/lib/utils/autosave';
import { ESC_KEY, ENTER_KEY } from '~/lib/utils/keys';
-import {
- STATE_OPEN,
- STATE_CLOSED,
- STATE_EVENT_REOPEN,
- STATE_EVENT_CLOSE,
-} from '~/work_items/constants';
+import { STATE_OPEN } from '~/work_items/constants';
import * as confirmViaGlModal from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import WorkItemCommentForm from '~/work_items/components/notes/work_item_comment_form.vue';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
-import { updateWorkItemMutationResponse, workItemQueryResponse } from 'jest/work_items/mock_data';
+import WorkItemStateToggleButton from '~/work_items/components/work_item_state_toggle_button.vue';
Vue.use(VueApollo);
@@ -44,8 +37,7 @@ describe('Work item comment form component', () => {
const findConfirmButton = () => wrapper.find('[data-testid="confirm-button"]');
const findInternalNoteCheckbox = () => wrapper.findComponent(GlFormCheckbox);
const findInternalNoteTooltipIcon = () => wrapper.findComponent(GlIcon);
-
- const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
+ const findWorkItemToggleStateButton = () => wrapper.findComponent(WorkItemStateToggleButton);
const createComponent = ({
isSubmitting = false,
@@ -53,10 +45,8 @@ describe('Work item comment form component', () => {
isNewDiscussion = false,
workItemState = STATE_OPEN,
workItemType = 'Task',
- mutationHandler = mutationSuccessHandler,
} = {}) => {
wrapper = shallowMount(WorkItemCommentForm, {
- apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
propsData: {
workItemState,
workItemId,
@@ -205,61 +195,20 @@ describe('Work item comment form component', () => {
});
describe('when used as a top level/is a new discussion', () => {
- describe('cancel button text', () => {
- it.each`
- workItemState | workItemType | buttonText
- ${STATE_OPEN} | ${'Task'} | ${'Close task'}
- ${STATE_CLOSED} | ${'Task'} | ${'Reopen task'}
- ${STATE_OPEN} | ${'Objective'} | ${'Close objective'}
- ${STATE_CLOSED} | ${'Objective'} | ${'Reopen objective'}
- ${STATE_OPEN} | ${'Key result'} | ${'Close key result'}
- ${STATE_CLOSED} | ${'Key result'} | ${'Reopen key result'}
- `(
- 'is "$buttonText" when "$workItemType" state is "$workItemState"',
- ({ workItemState, workItemType, buttonText }) => {
- createComponent({ isNewDiscussion: true, workItemState, workItemType });
-
- expect(findCancelButton().text()).toBe(buttonText);
- },
- );
- });
-
- describe('Close/reopen button click', () => {
- it.each`
- workItemState | stateEvent
- ${STATE_OPEN} | ${STATE_EVENT_CLOSE}
- ${STATE_CLOSED} | ${STATE_EVENT_REOPEN}
- `(
- 'calls mutation with "$stateEvent" when workItemState is "$workItemState"',
- async ({ workItemState, stateEvent }) => {
- createComponent({ isNewDiscussion: true, workItemState });
-
- findCancelButton().vm.$emit('click');
-
- await waitForPromises();
-
- expect(mutationSuccessHandler).toHaveBeenCalledWith({
- input: {
- id: workItemQueryResponse.data.workItem.id,
- stateEvent,
- },
- });
- },
+ it('emits an error message when the mutation was unsuccessful', async () => {
+ createComponent({
+ isNewDiscussion: true,
+ });
+ findWorkItemToggleStateButton().vm.$emit(
+ 'error',
+ 'Something went wrong while updating the task. Please try again.',
);
- it('emits an error message when the mutation was unsuccessful', async () => {
- createComponent({
- isNewDiscussion: true,
- mutationHandler: jest.fn().mockRejectedValue('Error!'),
- });
- findCancelButton().vm.$emit('click');
-
- await waitForPromises();
+ await waitForPromises();
- expect(wrapper.emitted('error')).toEqual([
- ['Something went wrong while updating the task. Please try again.'],
- ]);
- });
+ expect(wrapper.emitted('error')).toEqual([
+ ['Something went wrong while updating the task. Please try again.'],
+ ]);
});
});
diff --git a/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js b/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js
new file mode 100644
index 00000000000..9a20e2ec98f
--- /dev/null
+++ b/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js
@@ -0,0 +1,179 @@
+import { GlLabel, GlIcon } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import WorkItemLinkChildMetadata from 'ee_else_ce/work_items/components/shared/work_item_link_child_metadata.vue';
+
+import { createAlert } from '~/alert';
+import RichTimestampTooltip from '~/vue_shared/components/rich_timestamp_tooltip.vue';
+
+import WorkItemLinkChildContents from '~/work_items/components/shared/work_item_link_child_contents.vue';
+import WorkItemLinksMenu from '~/work_items/components/shared/work_item_links_menu.vue';
+import { TASK_TYPE_NAME, WORK_ITEM_TYPE_VALUE_OBJECTIVE } from '~/work_items/constants';
+
+import {
+ workItemTask,
+ workItemObjectiveWithChild,
+ workItemObjectiveNoMetadata,
+ confidentialWorkItemTask,
+ closedWorkItemTask,
+ workItemObjectiveMetadataWidgets,
+} from '../../mock_data';
+
+jest.mock('~/alert');
+
+describe('WorkItemLinkChildContents', () => {
+ Vue.use(VueApollo);
+
+ const WORK_ITEM_ID = 'gid://gitlab/WorkItem/2';
+ let wrapper;
+ const { LABELS } = workItemObjectiveMetadataWidgets;
+ const mockLabels = LABELS.labels.nodes;
+ const mockFullPath = 'gitlab-org/gitlab-test';
+
+ const findStatusIconComponent = () =>
+ wrapper.findByTestId('item-status-icon').findComponent(GlIcon);
+ const findConfidentialIconComponent = () => wrapper.findByTestId('confidential-icon');
+ const findTitleEl = () => wrapper.findByTestId('item-title');
+ const findStatusTooltipComponent = () => wrapper.findComponent(RichTimestampTooltip);
+ const findMetadataComponent = () => wrapper.findComponent(WorkItemLinkChildMetadata);
+ const findAllLabels = () => wrapper.findAllComponents(GlLabel);
+ const findRegularLabel = () => findAllLabels().at(0);
+ const findScopedLabel = () => findAllLabels().at(1);
+ const findLinksMenuComponent = () => wrapper.findComponent(WorkItemLinksMenu);
+
+ const createComponent = ({
+ canUpdate = true,
+ parentWorkItemId = WORK_ITEM_ID,
+ childItem = workItemTask,
+ workItemType = TASK_TYPE_NAME,
+ } = {}) => {
+ wrapper = shallowMountExtended(WorkItemLinkChildContents, {
+ propsData: {
+ canUpdate,
+ parentWorkItemId,
+ childItem,
+ workItemType,
+ fullPath: mockFullPath,
+ childPath: '/gitlab-org/gitlab-test/-/work_items/4',
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createAlert.mockClear();
+ });
+
+ it.each`
+ status | childItem | statusIconName | statusIconColorClass | rawTimestamp | tooltipContents
+ ${'open'} | ${workItemTask} | ${'issue-open-m'} | ${'gl-text-green-500'} | ${workItemTask.createdAt} | ${'Created'}
+ ${'closed'} | ${closedWorkItemTask} | ${'issue-close'} | ${'gl-text-blue-500'} | ${closedWorkItemTask.closedAt} | ${'Closed'}
+ `(
+ 'renders item status icon and tooltip when item status is `$status`',
+ ({ childItem, statusIconName, statusIconColorClass, rawTimestamp, tooltipContents }) => {
+ createComponent({ childItem });
+
+ expect(findStatusIconComponent().props('name')).toBe(statusIconName);
+ expect(findStatusIconComponent().classes()).toContain(statusIconColorClass);
+ expect(findStatusTooltipComponent().props('rawTimestamp')).toBe(rawTimestamp);
+ expect(findStatusTooltipComponent().props('timestampTypeText')).toContain(tooltipContents);
+ },
+ );
+
+ it('renders confidential icon when item is confidential', () => {
+ createComponent({ childItem: confidentialWorkItemTask });
+
+ expect(findConfidentialIconComponent().props('name')).toBe('eye-slash');
+ expect(findConfidentialIconComponent().attributes('title')).toBe('Confidential');
+ });
+
+ describe('item title', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders item title', () => {
+ expect(findTitleEl().attributes('href')).toBe('/gitlab-org/gitlab-test/-/work_items/4');
+ expect(findTitleEl().text()).toBe(workItemTask.title);
+ });
+
+ it.each`
+ action | event | emittedEvent
+ ${'on mouseover'} | ${'mouseover'} | ${'mouseover'}
+ ${'on mouseout'} | ${'mouseout'} | ${'mouseout'}
+ `('$action item title emit `$emittedEvent` event', ({ event, emittedEvent }) => {
+ findTitleEl().vm.$emit(event);
+
+ expect(wrapper.emitted(emittedEvent)).toEqual([[]]);
+ });
+
+ it('emits click event with correct parameters on clicking title', () => {
+ const eventObj = {
+ preventDefault: jest.fn(),
+ };
+ findTitleEl().vm.$emit('click', eventObj);
+
+ expect(wrapper.emitted('click')).toEqual([[eventObj]]);
+ });
+ });
+
+ describe('item metadata', () => {
+ beforeEach(() => {
+ createComponent({
+ childItem: workItemObjectiveWithChild,
+ workItemType: WORK_ITEM_TYPE_VALUE_OBJECTIVE,
+ });
+ });
+
+ it('renders item metadata component when item has metadata present', () => {
+ expect(findMetadataComponent().props()).toMatchObject({
+ metadataWidgets: workItemObjectiveMetadataWidgets,
+ });
+ });
+
+ it('does not render item metadata component when item has no metadata present', () => {
+ createComponent({
+ childItem: workItemObjectiveNoMetadata,
+ workItemType: WORK_ITEM_TYPE_VALUE_OBJECTIVE,
+ });
+
+ expect(findMetadataComponent().exists()).toBe(false);
+ });
+
+ it('renders labels', () => {
+ const mockLabel = mockLabels[0];
+
+ expect(findAllLabels()).toHaveLength(mockLabels.length);
+ expect(findRegularLabel().props()).toMatchObject({
+ title: mockLabel.title,
+ backgroundColor: mockLabel.color,
+ description: mockLabel.description,
+ scoped: false,
+ });
+ expect(findScopedLabel().props('scoped')).toBe(true); // Second label is scoped
+ });
+ });
+
+ describe('item menu', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders work-item-links-menu', () => {
+ expect(findLinksMenuComponent().exists()).toBe(true);
+ });
+
+ it('does not render work-item-links-menu when canUpdate is false', () => {
+ createComponent({ canUpdate: false });
+
+ expect(findLinksMenuComponent().exists()).toBe(false);
+ });
+
+ it('removeChild event on menu triggers `click-remove-child` event', () => {
+ findLinksMenuComponent().vm.$emit('removeChild');
+
+ expect(wrapper.emitted('removeChild')).toEqual([[workItemTask]]);
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js b/spec/frontend/work_items/components/shared/work_item_link_child_metadata_spec.js
index 07efb1c5ac8..25ef0e69a40 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js
+++ b/spec/frontend/work_items/components/shared/work_item_link_child_metadata_spec.js
@@ -3,7 +3,7 @@ import { GlAvatarsInline } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ItemMilestone from '~/issuable/components/issue_milestone.vue';
-import WorkItemLinkChildMetadata from '~/work_items/components/work_item_links/work_item_link_child_metadata.vue';
+import WorkItemLinkChildMetadata from '~/work_items/components/shared/work_item_link_child_metadata.vue';
import { workItemObjectiveMetadataWidgets } from '../../mock_data';
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js b/spec/frontend/work_items/components/shared/work_item_links_menu_spec.js
index f02a9fbd021..721db6c3315 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
+++ b/spec/frontend/work_items/components/shared/work_item_links_menu_spec.js
@@ -1,7 +1,7 @@
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import WorkItemLinksMenu from '~/work_items/components/work_item_links/work_item_links_menu.vue';
+import WorkItemLinksMenu from '~/work_items/components/shared/work_item_links_menu.vue';
describe('WorkItemLinksMenu', () => {
let wrapper;
diff --git a/spec/frontend/work_items/components/work_item_actions_spec.js b/spec/frontend/work_items/components/work_item_actions_spec.js
index e03c6a7e28d..0fe517d7d74 100644
--- a/spec/frontend/work_items/components/work_item_actions_spec.js
+++ b/spec/frontend/work_items/components/work_item_actions_spec.js
@@ -22,6 +22,8 @@ import {
import updateWorkItemNotificationsMutation from '~/work_items/graphql/update_work_item_notifications.mutation.graphql';
import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
import convertWorkItemMutation from '~/work_items/graphql/work_item_convert.mutation.graphql';
+import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
+
import {
convertWorkItemMutationResponse,
projectWorkItemTypesQueryResponse,
@@ -38,6 +40,8 @@ describe('WorkItemActions component', () => {
let wrapper;
let mockApollo;
const mockWorkItemReference = 'gitlab-org/gitlab-test#1';
+ const mockWorkItemIid = '1';
+ const mockFullPath = 'gitlab-org/gitlab-test';
const mockWorkItemCreateNoteEmail =
'gitlab-incoming+gitlab-org-gitlab-test-2-ddpzuq0zd2wefzofcpcdr3dg7-issue-1@gmail.com';
@@ -74,6 +78,7 @@ describe('WorkItemActions component', () => {
const convertWorkItemMutationSuccessHandler = jest
.fn()
.mockResolvedValue(convertWorkItemMutationResponse);
+
const convertWorkItemMutationErrorHandler = jest
.fn()
.mockResolvedValue(convertWorkItemMutationErrorResponse);
@@ -90,6 +95,7 @@ describe('WorkItemActions component', () => {
workItemType = 'Task',
workItemReference = mockWorkItemReference,
workItemCreateNoteEmail = mockWorkItemCreateNoteEmail,
+ writeQueryCache = false,
} = {}) => {
const handlers = [notificationsMock];
mockApollo = createMockApollo([
@@ -97,6 +103,18 @@ describe('WorkItemActions component', () => {
[convertWorkItemMutation, convertWorkItemMutationHandler],
[projectWorkItemTypesQuery, typesQuerySuccessHandler],
]);
+
+ // Write the query cache only when required e.g., notification widget mutation is called
+ if (writeQueryCache) {
+ const workItemQueryResponse = workItemByIidResponseFactory({ canUpdate: true });
+
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: workItemByIidQuery,
+ variables: { fullPath: mockFullPath, iid: mockWorkItemIid },
+ data: workItemQueryResponse.data,
+ });
+ }
+
wrapper = shallowMountExtended(WorkItemActions, {
isLoggedIn: isLoggedIn(),
apolloProvider: mockApollo,
@@ -110,9 +128,10 @@ describe('WorkItemActions component', () => {
workItemType,
workItemReference,
workItemCreateNoteEmail,
+ workItemIid: '1',
},
provide: {
- fullPath: 'gitlab-org/gitlab',
+ fullPath: mockFullPath,
glFeatures: { workItemsMvc2: true },
},
mocks: {
@@ -233,45 +252,28 @@ describe('WorkItemActions component', () => {
describe('notifications action', () => {
const errorMessage = 'Failed to subscribe';
- const id = 'gid://gitlab/WorkItem/1';
const notificationToggledOffMessage = 'Notifications turned off.';
const notificationToggledOnMessage = 'Notifications turned on.';
- const inputVariablesOff = {
- id,
- notificationsWidget: {
- subscribed: false,
- },
- };
-
- const inputVariablesOn = {
- id,
- notificationsWidget: {
- subscribed: true,
- },
- };
-
- const notificationsOffExpectedResponse = workItemByIidResponseFactory({
- subscribed: false,
- });
-
const toggleNotificationsOffHandler = jest.fn().mockResolvedValue({
data: {
- workItemUpdate: {
- workItem: notificationsOffExpectedResponse.data.workspace.workItems.nodes[0],
+ updateWorkItemNotificationsSubscription: {
+ issue: {
+ id: 'gid://gitlab/WorkItem/1',
+ subscribed: false,
+ },
errors: [],
},
},
});
- const notificationsOnExpectedResponse = workItemByIidResponseFactory({
- subscribed: true,
- });
-
const toggleNotificationsOnHandler = jest.fn().mockResolvedValue({
data: {
- workItemUpdate: {
- workItem: notificationsOnExpectedResponse.data.workspace.workItems.nodes[0],
+ updateWorkItemNotificationsSubscription: {
+ issue: {
+ id: 'gid://gitlab/WorkItem/1',
+ subscribed: true,
+ },
errors: [],
},
},
@@ -292,7 +294,7 @@ describe('WorkItemActions component', () => {
];
beforeEach(() => {
- createComponent();
+ createComponent({ writeQueryCache: true });
isLoggedIn.mockReturnValue(true);
});
@@ -301,13 +303,13 @@ describe('WorkItemActions component', () => {
});
it.each`
- scenario | subscribedToNotifications | notificationsMock | inputVariables | toastMessage
- ${'turned off'} | ${false} | ${notificationsOffMock} | ${inputVariablesOff} | ${notificationToggledOffMessage}
- ${'turned on'} | ${true} | ${notificationsOnMock} | ${inputVariablesOn} | ${notificationToggledOnMessage}
+ scenario | subscribedToNotifications | notificationsMock | subscribedState | toastMessage
+ ${'turned off'} | ${false} | ${notificationsOffMock} | ${false} | ${notificationToggledOffMessage}
+ ${'turned on'} | ${true} | ${notificationsOnMock} | ${true} | ${notificationToggledOnMessage}
`(
'calls mutation and displays toast when notification toggle is $scenario',
- async ({ subscribedToNotifications, notificationsMock, inputVariables, toastMessage }) => {
- createComponent({ notificationsMock });
+ async ({ subscribedToNotifications, notificationsMock, subscribedState, toastMessage }) => {
+ createComponent({ notificationsMock, writeQueryCache: true });
await waitForPromises();
@@ -316,14 +318,18 @@ describe('WorkItemActions component', () => {
await waitForPromises();
expect(notificationsMock[1]).toHaveBeenCalledWith({
- input: inputVariables,
+ input: {
+ projectPath: mockFullPath,
+ iid: mockWorkItemIid,
+ subscribedState,
+ },
});
expect(toast).toHaveBeenCalledWith(toastMessage);
},
);
it('emits error when the update notification mutation fails', async () => {
- createComponent({ notificationsMock: notificationsFailureMock });
+ createComponent({ notificationsMock: notificationsFailureMock, writeQueryCache: true });
await waitForPromises();
@@ -359,6 +365,7 @@ describe('WorkItemActions component', () => {
expect(convertWorkItemMutationSuccessHandler).toHaveBeenCalled();
expect($toast.show).toHaveBeenCalledWith('Promoted to objective.');
+ expect(wrapper.emitted('promotedToObjective')).toEqual([[]]);
});
it('emits error when promote mutation fails', async () => {
diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js
index ff1998ab2ed..50a8847032e 100644
--- a/spec/frontend/work_items/components/work_item_assignees_spec.js
+++ b/spec/frontend/work_items/components/work_item_assignees_spec.js
@@ -107,7 +107,7 @@ describe('WorkItemAssignees component', () => {
it('container does not have shadow by default', () => {
createComponent();
- expect(findTokenSelector().props('containerClass')).toBe('gl-shadow-none!');
+ expect(findTokenSelector().props('containerClass')).toContain('gl-shadow-none!');
});
it('container has shadow after focusing token selector', async () => {
@@ -415,7 +415,7 @@ describe('WorkItemAssignees component', () => {
findTokenSelector().vm.$emit('input', dropdownItems);
await nextTick();
- expect(findTokenSelector().props('containerClass')).toBe('gl-shadow-none!');
+ expect(findTokenSelector().props('containerClass')).toContain('gl-shadow-none!');
});
it('calls the mutation for updating assignees with the correct input', async () => {
diff --git a/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js b/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
index ba9af7b2b68..8b7e04854af 100644
--- a/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
+++ b/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
@@ -1,7 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
import WorkItemDueDate from '~/work_items/components/work_item_due_date.vue';
-import WorkItemState from '~/work_items/components/work_item_state.vue';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
@@ -13,7 +12,6 @@ describe('WorkItemAttributesWrapper component', () => {
const workItemQueryResponse = workItemResponseFactory({ canUpdate: true, canDelete: true });
- const findWorkItemState = () => wrapper.findComponent(WorkItemState);
const findWorkItemDueDate = () => wrapper.findComponent(WorkItemDueDate);
const findWorkItemAssignees = () => wrapper.findComponent(WorkItemAssignees);
const findWorkItemLabels = () => wrapper.findComponent(WorkItemLabels);
@@ -40,14 +38,6 @@ describe('WorkItemAttributesWrapper component', () => {
});
};
- describe('work item state', () => {
- it('renders the work item state', () => {
- createComponent();
-
- expect(findWorkItemState().exists()).toBe(true);
- });
- });
-
describe('assignees widget', () => {
it('renders assignees component when widget is returned from the API', () => {
createComponent();
diff --git a/spec/frontend/work_items/components/work_item_created_updated_spec.js b/spec/frontend/work_items/components/work_item_created_updated_spec.js
index 68ede7d5bc0..f77c5481906 100644
--- a/spec/frontend/work_items/components/work_item_created_updated_spec.js
+++ b/spec/frontend/work_items/components/work_item_created_updated_spec.js
@@ -1,10 +1,12 @@
-import { GlAvatarLink, GlSprintf } from '@gitlab/ui';
+import { GlAvatarLink, GlSprintf, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemCreatedUpdated from '~/work_items/components/work_item_created_updated.vue';
+import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
+import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import { workItemByIidResponseFactory, mockAssignees } from '../mock_data';
@@ -18,11 +20,21 @@ describe('WorkItemCreatedUpdated component', () => {
const findUpdatedAt = () => wrapper.find('[data-testid="work-item-updated"]');
const findCreatedAtText = () => findCreatedAt().text().replace(/\s+/g, ' ');
-
- const createComponent = async ({ workItemIid = '1', author = null, updatedAt } = {}) => {
+ const findWorkItemTypeIcon = () => wrapper.findComponent(WorkItemTypeIcon);
+ const findConfidentialityBadge = () => wrapper.findComponent(ConfidentialityBadge);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ const createComponent = async ({
+ workItemIid = '1',
+ author = null,
+ updatedAt,
+ confidential = false,
+ updateInProgress = false,
+ } = {}) => {
const workItemQueryResponse = workItemByIidResponseFactory({
author,
updatedAt,
+ confidential,
});
successHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
@@ -32,7 +44,7 @@ describe('WorkItemCreatedUpdated component', () => {
provide: {
fullPath: '/some/project',
},
- propsData: { workItemIid },
+ propsData: { workItemIid, updateInProgress },
stubs: {
GlAvatarLink,
GlSprintf,
@@ -48,17 +60,31 @@ describe('WorkItemCreatedUpdated component', () => {
expect(successHandler).not.toHaveBeenCalled();
});
+ it('shows work item type metadata with type and icon', async () => {
+ await createComponent();
+
+ const {
+ data: { workspace: { workItems } = {} },
+ } = workItemByIidResponseFactory();
+
+ expect(findWorkItemTypeIcon().props()).toMatchObject({
+ showText: true,
+ workItemIconName: workItems.nodes[0].workItemType.iconName,
+ workItemType: workItems.nodes[0].workItemType.name,
+ });
+ });
+
it('shows author name and link', async () => {
const author = mockAssignees[0];
await createComponent({ author });
- expect(findCreatedAtText()).toBe(`Created by ${author.name}`);
+ expect(findCreatedAtText()).toBe(`created by ${author.name}`);
});
it('shows created time when author is null', async () => {
await createComponent({ author: null });
- expect(findCreatedAtText()).toBe('Created');
+ expect(findCreatedAtText()).toBe('created');
});
it('shows updated time', async () => {
@@ -72,4 +98,24 @@ describe('WorkItemCreatedUpdated component', () => {
expect(findUpdatedAt().exists()).toBe(false);
});
+
+ describe('confidential badge', () => {
+ it('renders badge when the work item is confidential', async () => {
+ await createComponent({ confidential: true });
+
+ expect(findConfidentialityBadge().exists()).toBe(true);
+ });
+
+ it('does not render badge when the work item is confidential', async () => {
+ await createComponent({ confidential: false });
+
+ expect(findConfidentialityBadge().exists()).toBe(false);
+ });
+
+ it('shows loading icon badge when the work item is confidential', async () => {
+ await createComponent({ updateInProgress: true });
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
index 7ceae935d2d..d3c7c9e2074 100644
--- a/spec/frontend/work_items/components/work_item_detail_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -1,7 +1,5 @@
import {
GlAlert,
- GlBadge,
- GlLoadingIcon,
GlSkeletonLoader,
GlButton,
GlEmptyState,
@@ -24,6 +22,8 @@ import WorkItemTitle from '~/work_items/components/work_item_title.vue';
import WorkItemTree from '~/work_items/components/work_item_links/work_item_tree.vue';
import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
+import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
+import WorkItemStateToggleButton from '~/work_items/components/work_item_state_toggle_button.vue';
import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
import WorkItemTodos from '~/work_items/components/work_item_todos.vue';
import { i18n } from '~/work_items/constants';
@@ -47,6 +47,10 @@ describe('WorkItemDetail component', () => {
Vue.use(VueApollo);
const workItemQueryResponse = workItemByIidResponseFactory({ canUpdate: true, canDelete: true });
+ const workItemQueryResponseWithCannotUpdate = workItemByIidResponseFactory({
+ canUpdate: false,
+ canDelete: false,
+ });
const workItemQueryResponseWithoutParent = workItemByIidResponseFactory({
parent: null,
canUpdate: true,
@@ -62,7 +66,6 @@ describe('WorkItemDetail component', () => {
const findAlert = () => wrapper.findComponent(GlAlert);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader);
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findWorkItemActions = () => wrapper.findComponent(WorkItemActions);
const findWorkItemTitle = () => wrapper.findComponent(WorkItemTitle);
const findCreatedUpdated = () => wrapper.findComponent(WorkItemCreatedUpdated);
@@ -82,6 +85,8 @@ describe('WorkItemDetail component', () => {
const findWorkItemTwoColumnViewContainer = () => wrapper.findByTestId('work-item-overview');
const findRightSidebar = () => wrapper.findByTestId('work-item-overview-right-sidebar');
const triggerPageScroll = () => findIntersectionObserver().vm.$emit('disappear');
+ const findWorkItemStateToggleButton = () => wrapper.findComponent(WorkItemStateToggleButton);
+ const findWorkItemTypeIcon = () => wrapper.findComponent(WorkItemTypeIcon);
const createComponent = ({
isModal = false,
@@ -194,6 +199,25 @@ describe('WorkItemDetail component', () => {
});
});
+ describe('work item state toggle button', () => {
+ describe.each`
+ description | canUpdate
+ ${'when user cannot update'} | ${false}
+ ${'when user can update'} | ${true}
+ `('$description', ({ canUpdate }) => {
+ it(`${canUpdate ? 'is rendered' : 'is not rendered'}`, async () => {
+ createComponent({
+ handler: canUpdate
+ ? jest.fn().mockResolvedValue(workItemQueryResponse)
+ : jest.fn().mockResolvedValue(workItemQueryResponseWithCannotUpdate),
+ });
+ await waitForPromises();
+
+ expect(findWorkItemStateToggleButton().exists()).toBe(canUpdate);
+ });
+ });
+ });
+
describe('close button', () => {
describe('when isModal prop is false', () => {
it('does not render', async () => {
@@ -289,27 +313,7 @@ describe('WorkItemDetail component', () => {
`(
'when work item has $context',
({ handlerMock, confidentialityMock, confidentialityFailureMock, inputVariables }) => {
- it('renders confidential badge when work item is confidential', async () => {
- createComponent({
- handler: jest.fn().mockResolvedValue(confidentialWorkItem),
- confidentialityMock,
- });
-
- await waitForPromises();
-
- const confidentialBadge = wrapper.findComponent(GlBadge);
- expect(confidentialBadge.exists()).toBe(true);
- expect(confidentialBadge.props()).toMatchObject({
- variant: 'warning',
- icon: 'eye-slash',
- });
- expect(confidentialBadge.attributes('title')).toBe(
- 'Only project members with at least the Reporter role, the author, and assignees can view or be notified about this task.',
- );
- expect(confidentialBadge.text()).toBe('Confidential');
- });
-
- it('renders gl-loading-icon while update mutation is in progress', async () => {
+ it('sends updateInProgress props to child component', async () => {
createComponent({
handler: handlerMock,
confidentialityMock,
@@ -321,10 +325,10 @@ describe('WorkItemDetail component', () => {
await nextTick();
- expect(findLoadingIcon().exists()).toBe(true);
+ expect(findCreatedUpdated().props('updateInProgress')).toBe(true);
});
- it('emits workItemUpdated and shows confidentiality badge when mutation is successful', async () => {
+ it('emits workItemUpdated when mutation is successful', async () => {
createComponent({
handler: handlerMock,
confidentialityMock,
@@ -339,7 +343,6 @@ describe('WorkItemDetail component', () => {
expect(confidentialityMock[1]).toHaveBeenCalledWith({
input: inputVariables,
});
- expect(findLoadingIcon().exists()).toBe(false);
});
it('shows an alert when mutation fails', async () => {
@@ -357,7 +360,6 @@ describe('WorkItemDetail component', () => {
expect(findAlert().exists()).toBe(true);
expect(findAlert().text()).toBe(errorMessage);
- expect(findLoadingIcon().exists()).toBe(false);
});
},
);
@@ -397,8 +399,8 @@ describe('WorkItemDetail component', () => {
createComponent({ handler: jest.fn().mockResolvedValue(workItemQueryResponseWithoutParent) });
await waitForPromises();
- expect(findWorkItemType().exists()).toBe(true);
- expect(findWorkItemType().text()).toBe('Task #1');
+ expect(findWorkItemTypeIcon().props('showText')).toBe(true);
+ expect(findWorkItemType().text()).toBe('#1');
});
describe('with parent', () => {
@@ -450,8 +452,8 @@ describe('WorkItemDetail component', () => {
});
it('shows work item type and iid', () => {
- const { iid, workItemType } = workItemQueryResponse.data.workspace.workItems.nodes[0];
- expect(findParent().text()).toContain(`${workItemType.name} #${iid}`);
+ const { iid } = workItemQueryResponse.data.workspace.workItems.nodes[0];
+ expect(findParent().text()).toContain(`#${iid}`);
});
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/okr_actions_split_button_spec.js b/spec/frontend/work_items/components/work_item_links/okr_actions_split_button_spec.js
index 688dccbda79..55d5b34ae70 100644
--- a/spec/frontend/work_items/components/work_item_links/okr_actions_split_button_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/okr_actions_split_button_spec.js
@@ -1,11 +1,17 @@
-import { GlDropdownSectionHeader } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlDisclosureDropdownGroup } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import OkrActionsSplitButton from '~/work_items/components/work_item_links/okr_actions_split_button.vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
const createComponent = () => {
- return extendedWrapper(shallowMount(OkrActionsSplitButton));
+ return extendedWrapper(
+ shallowMount(OkrActionsSplitButton, {
+ stubs: {
+ GlDisclosureDropdown,
+ },
+ }),
+ );
};
describe('RelatedItemsTree', () => {
@@ -18,11 +24,11 @@ describe('RelatedItemsTree', () => {
describe('OkrActionsSplitButton', () => {
describe('template', () => {
it('renders objective and key results sections', () => {
- expect(wrapper.findAllComponents(GlDropdownSectionHeader).at(0).text()).toContain(
+ expect(wrapper.findAllComponents(GlDisclosureDropdownGroup).at(0).props('group').name).toBe(
'Objective',
);
- expect(wrapper.findAllComponents(GlDropdownSectionHeader).at(1).text()).toContain(
+ expect(wrapper.findAllComponents(GlDisclosureDropdownGroup).at(1).props('group').name).toBe(
'Key result',
);
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
index 71d1a0e253f..803ff950cbe 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
@@ -1,20 +1,16 @@
-import { GlLabel, GlIcon } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import WorkItemLinkChildMetadata from 'ee_else_ce/work_items/components/work_item_links/work_item_link_child_metadata.vue';
-
import { createAlert } from '~/alert';
-import RichTimestampTooltip from '~/vue_shared/components/rich_timestamp_tooltip.vue';
import getWorkItemTreeQuery from '~/work_items/graphql/work_item_tree.query.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import WorkItemLinkChild from '~/work_items/components/work_item_links/work_item_link_child.vue';
-import WorkItemLinksMenu from '~/work_items/components/work_item_links/work_item_links_menu.vue';
import WorkItemTreeChildren from '~/work_items/components/work_item_links/work_item_tree_children.vue';
+import WorkItemLinkChildContents from '~/work_items/components/shared/work_item_link_child_contents.vue';
import {
WIDGET_TYPE_HIERARCHY,
TASK_TYPE_NAME,
@@ -24,12 +20,8 @@ import {
import {
workItemTask,
workItemObjectiveWithChild,
- workItemObjectiveNoMetadata,
- confidentialWorkItemTask,
- closedWorkItemTask,
workItemHierarchyTreeResponse,
workItemHierarchyTreeFailureResponse,
- workItemObjectiveMetadataWidgets,
changeIndirectWorkItemParentMutationResponse,
workItemUpdateFailureResponse,
} from '../../mock_data';
@@ -41,8 +33,6 @@ describe('WorkItemLinkChild', () => {
let wrapper;
let getWorkItemTreeQueryHandler;
let mutationChangeParentHandler;
- const { LABELS } = workItemObjectiveMetadataWidgets;
- const mockLabels = LABELS.labels.nodes;
const $toast = {
show: jest.fn(),
@@ -51,6 +41,8 @@ describe('WorkItemLinkChild', () => {
Vue.use(VueApollo);
+ const findWorkItemLinkChildContents = () => wrapper.findComponent(WorkItemLinkChildContents);
+
const createComponent = ({
canUpdate = true,
issuableGid = WORK_ITEM_ID,
@@ -89,87 +81,7 @@ describe('WorkItemLinkChild', () => {
createAlert.mockClear();
});
- it.each`
- status | childItem | statusIconName | statusIconColorClass | rawTimestamp | tooltipContents
- ${'open'} | ${workItemTask} | ${'issue-open-m'} | ${'gl-text-green-500'} | ${workItemTask.createdAt} | ${'Created'}
- ${'closed'} | ${closedWorkItemTask} | ${'issue-close'} | ${'gl-text-blue-500'} | ${closedWorkItemTask.closedAt} | ${'Closed'}
- `(
- 'renders item status icon and tooltip when item status is `$status`',
- ({ childItem, statusIconName, statusIconColorClass, rawTimestamp, tooltipContents }) => {
- createComponent({ childItem });
-
- const statusIcon = wrapper.findByTestId('item-status-icon').findComponent(GlIcon);
- const statusTooltip = wrapper.findComponent(RichTimestampTooltip);
-
- expect(statusIcon.props('name')).toBe(statusIconName);
- expect(statusIcon.classes()).toContain(statusIconColorClass);
- expect(statusTooltip.props('rawTimestamp')).toBe(rawTimestamp);
- expect(statusTooltip.props('timestampTypeText')).toContain(tooltipContents);
- },
- );
-
- it('renders confidential icon when item is confidential', () => {
- createComponent({ childItem: confidentialWorkItemTask });
-
- const confidentialIcon = wrapper.findByTestId('confidential-icon');
-
- expect(confidentialIcon.props('name')).toBe('eye-slash');
- expect(confidentialIcon.attributes('title')).toBe('Confidential');
- });
-
- describe('item title', () => {
- let titleEl;
-
- beforeEach(() => {
- createComponent();
-
- titleEl = wrapper.findByTestId('item-title');
- });
-
- it('renders item title', () => {
- expect(titleEl.attributes('href')).toBe('/gitlab-org/gitlab-test/-/work_items/4');
- expect(titleEl.text()).toBe(workItemTask.title);
- });
-
- describe('renders item title correctly for relative instance', () => {
- beforeEach(() => {
- window.gon = { relative_url_root: '/test' };
- createComponent();
- titleEl = wrapper.findByTestId('item-title');
- });
-
- it('renders item title with correct href', () => {
- expect(titleEl.attributes('href')).toBe('/test/gitlab-org/gitlab-test/-/work_items/4');
- });
-
- it('renders item title with correct text', () => {
- expect(titleEl.text()).toBe(workItemTask.title);
- });
- });
-
- it.each`
- action | event | emittedEvent
- ${'doing mouseover on'} | ${'mouseover'} | ${'mouseover'}
- ${'doing mouseout on'} | ${'mouseout'} | ${'mouseout'}
- `('$action item title emit `$emittedEvent` event', ({ event, emittedEvent }) => {
- titleEl.vm.$emit(event);
-
- expect(wrapper.emitted(emittedEvent)).toEqual([[]]);
- });
-
- it('emits click event with correct parameters on clicking title', () => {
- const eventObj = {
- preventDefault: jest.fn(),
- };
- titleEl.vm.$emit('click', eventObj);
-
- expect(wrapper.emitted('click')).toEqual([[eventObj]]);
- });
- });
-
- describe('item metadata', () => {
- const findMetadataComponent = () => wrapper.findComponent(WorkItemLinkChildMetadata);
-
+ describe('renders WorkItemLinkChildContents', () => {
beforeEach(() => {
createComponent({
childItem: workItemObjectiveWithChild,
@@ -177,67 +89,31 @@ describe('WorkItemLinkChild', () => {
});
});
- it('renders item metadata component when item has metadata present', () => {
- const metadataEl = findMetadataComponent();
- expect(metadataEl.exists()).toBe(true);
- expect(metadataEl.props()).toMatchObject({
- metadataWidgets: workItemObjectiveMetadataWidgets,
- });
- });
-
- it('does not render item metadata component when item has no metadata present', () => {
- createComponent({
- childItem: workItemObjectiveNoMetadata,
- workItemType: WORK_ITEM_TYPE_VALUE_OBJECTIVE,
+ it('with default props', () => {
+ expect(findWorkItemLinkChildContents().props()).toEqual({
+ childItem: workItemObjectiveWithChild,
+ canUpdate: true,
+ parentWorkItemId: 'gid://gitlab/WorkItem/2',
+ workItemType: 'Objective',
+ childPath: '/gitlab-org/gitlab-test/-/work_items/12',
});
-
- expect(findMetadataComponent().exists()).toBe(false);
});
- it('renders labels', () => {
- const labels = wrapper.findAllComponents(GlLabel);
- const mockLabel = mockLabels[0];
-
- expect(labels).toHaveLength(mockLabels.length);
- expect(labels.at(0).props()).toMatchObject({
- title: mockLabel.title,
- backgroundColor: mockLabel.color,
- description: mockLabel.description,
- scoped: false,
+ describe('with relative instance', () => {
+ beforeEach(() => {
+ window.gon = { relative_url_root: '/test' };
+ createComponent({
+ childItem: workItemObjectiveWithChild,
+ workItemType: WORK_ITEM_TYPE_VALUE_OBJECTIVE,
+ });
});
- expect(labels.at(1).props('scoped')).toBe(true); // Second label is scoped
- });
- });
-
- describe('item menu', () => {
- let itemMenuEl;
-
- beforeEach(() => {
- createComponent();
-
- itemMenuEl = wrapper.findComponent(WorkItemLinksMenu);
- });
- it('renders work-item-links-menu', () => {
- expect(itemMenuEl.exists()).toBe(true);
-
- expect(itemMenuEl.attributes()).toMatchObject({
- 'work-item-id': workItemTask.id,
- 'parent-work-item-id': WORK_ITEM_ID,
+ it('adds the relative url to child path value', () => {
+ expect(findWorkItemLinkChildContents().props('childPath')).toBe(
+ '/test/gitlab-org/gitlab-test/-/work_items/12',
+ );
});
});
-
- it('does not render work-item-links-menu when canUpdate is false', () => {
- createComponent({ canUpdate: false });
-
- expect(wrapper.findComponent(WorkItemLinksMenu).exists()).toBe(false);
- });
-
- it('removeChild event on menu triggers `click-remove-child` event', () => {
- itemMenuEl.vm.$emit('removeChild');
-
- expect(wrapper.emitted('removeChild')).toEqual([[workItemTask]]);
- });
});
describe('nested children', () => {
@@ -252,7 +128,6 @@ describe('WorkItemLinkChild', () => {
const findFirstItem = () => getChildrenNodes()[0];
beforeEach(() => {
- getWorkItemTreeQueryHandler.mockClear();
createComponent({
childItem: workItemObjectiveWithChild,
workItemType: WORK_ITEM_TYPE_VALUE_OBJECTIVE,
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
index 5f7f56d7063..8caacc2dc97 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
@@ -94,7 +94,7 @@ describe('WorkItemLinksForm', () => {
preventDefault: jest.fn(),
});
await waitForPromises();
- expect(wrapper.vm.childWorkItemType).toEqual('gid://gitlab/WorkItems::Type/3');
+
expect(createMutationResolver).toHaveBeenCalledWith({
input: {
title: 'Create task test',
@@ -106,6 +106,7 @@ describe('WorkItemLinksForm', () => {
confidential: false,
},
});
+ expect(wrapper.emitted('addChild')).toEqual([[]]);
});
it('creates child task in confidential parent', async () => {
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
index dd46505bd65..e24cfe27616 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
@@ -108,8 +108,8 @@ describe('WorkItemLinks', () => {
describe('add link form', () => {
it('displays add work item form on click add dropdown then add existing button and hides form on cancel', async () => {
await createComponent();
- findToggleFormDropdown().vm.$emit('click');
- findToggleAddFormButton().vm.$emit('click');
+ findToggleFormDropdown().vm.$emit('action');
+ findToggleAddFormButton().vm.$emit('action');
await nextTick();
expect(findAddLinksForm().exists()).toBe(true);
@@ -123,8 +123,8 @@ describe('WorkItemLinks', () => {
it('displays create work item form on click add dropdown then create button and hides form on cancel', async () => {
await createComponent();
- findToggleFormDropdown().vm.$emit('click');
- findToggleCreateFormButton().vm.$emit('click');
+ findToggleFormDropdown().vm.$emit('action');
+ findToggleCreateFormButton().vm.$emit('action');
await nextTick();
expect(findAddLinksForm().exists()).toBe(true);
@@ -195,8 +195,8 @@ describe('WorkItemLinks', () => {
.fn()
.mockResolvedValue(getIssueDetailsResponse({ confidential: true })),
});
- findToggleFormDropdown().vm.$emit('click');
- findToggleAddFormButton().vm.$emit('click');
+ findToggleFormDropdown().vm.$emit('action');
+ findToggleAddFormButton().vm.$emit('action');
await nextTick();
expect(findAddLinksForm().props('parentConfidential')).toBe(true);
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
index e90775a5240..01fa4591cde 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
@@ -118,4 +118,14 @@ describe('WorkItemTree', () => {
expect(findWorkItemLinkChildrenWrapper().props('canUpdate')).toBe(false);
});
});
+
+ it('emits `addChild` event when form emits `addChild` event', async () => {
+ createComponent();
+
+ findToggleFormSplitButton().vm.$emit('showCreateObjectiveForm');
+ await nextTick();
+ findForm().vm.$emit('addChild');
+
+ expect(wrapper.emitted('addChild')).toEqual([[]]);
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_state_badge_spec.js b/spec/frontend/work_items/components/work_item_state_badge_spec.js
new file mode 100644
index 00000000000..888d712cc5a
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_state_badge_spec.js
@@ -0,0 +1,32 @@
+import { GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { STATE_OPEN, STATE_CLOSED } from '~/work_items/constants';
+import WorkItemStateBadge from '~/work_items/components/work_item_state_badge.vue';
+
+describe('WorkItemStateBadge', () => {
+ let wrapper;
+
+ const createComponent = ({ workItemState = STATE_OPEN } = {}) => {
+ wrapper = shallowMount(WorkItemStateBadge, {
+ propsData: {
+ workItemState,
+ },
+ });
+ };
+ const findStatusBadge = () => wrapper.findComponent(GlBadge);
+
+ it.each`
+ state | icon | stateText | variant
+ ${STATE_OPEN} | ${'issue-open-m'} | ${'Open'} | ${'success'}
+ ${STATE_CLOSED} | ${'issue-close'} | ${'Closed'} | ${'info'}
+ `(
+ 'renders icon as "$icon" and text as "$stateText" when the work item state is "$state"',
+ ({ state, icon, stateText, variant }) => {
+ createComponent({ workItemState: state });
+
+ expect(findStatusBadge().props('icon')).toBe(icon);
+ expect(findStatusBadge().props('variant')).toBe(variant);
+ expect(findStatusBadge().text()).toBe(stateText);
+ },
+ );
+});
diff --git a/spec/frontend/work_items/components/work_item_state_spec.js b/spec/frontend/work_items/components/work_item_state_toggle_button_spec.js
index d1262057c73..c0b206e5da4 100644
--- a/spec/frontend/work_items/components/work_item_state_spec.js
+++ b/spec/frontend/work_items/components/work_item_state_toggle_button_spec.js
@@ -1,11 +1,11 @@
+import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import ItemState from '~/work_items/components/item_state.vue';
-import WorkItemState from '~/work_items/components/work_item_state.vue';
+import WorkItemStateToggleButton from '~/work_items/components/work_item_state_toggle_button.vue';
import {
STATE_OPEN,
STATE_CLOSED,
@@ -16,59 +16,58 @@ import {
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import { updateWorkItemMutationResponse, workItemQueryResponse } from '../mock_data';
-describe('WorkItemState component', () => {
+describe('Work Item State toggle button component', () => {
let wrapper;
Vue.use(VueApollo);
const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
- const findItemState = () => wrapper.findComponent(ItemState);
+ const findStateToggleButton = () => wrapper.findComponent(GlButton);
+
+ const { id } = workItemQueryResponse.data.workItem;
const createComponent = ({
- state = STATE_OPEN,
mutationHandler = mutationSuccessHandler,
canUpdate = true,
+ workItemState = STATE_OPEN,
+ workItemType = 'Task',
} = {}) => {
- const { id, workItemType } = workItemQueryResponse.data.workItem;
- wrapper = shallowMount(WorkItemState, {
+ wrapper = shallowMount(WorkItemStateToggleButton, {
apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
propsData: {
- workItem: {
- id,
- state,
- workItemType,
- },
+ workItemId: id,
+ workItemState,
+ workItemType,
canUpdate,
},
});
};
- it('renders state', () => {
- createComponent();
-
- expect(findItemState().props('state')).toBe(workItemQueryResponse.data.workItem.state);
- });
-
- describe('item state disabled prop', () => {
- describe.each`
- description | canUpdate | value
- ${'when cannot update'} | ${false} | ${true}
- ${'when can update'} | ${true} | ${false}
- `('$description', ({ canUpdate, value }) => {
- it(`renders item state component with disabled=${value}`, () => {
- createComponent({ canUpdate });
-
- expect(findItemState().props('disabled')).toBe(value);
- });
- });
+ describe('work item State button text', () => {
+ it.each`
+ workItemState | workItemType | buttonText
+ ${STATE_OPEN} | ${'Task'} | ${'Close task'}
+ ${STATE_CLOSED} | ${'Task'} | ${'Reopen task'}
+ ${STATE_OPEN} | ${'Objective'} | ${'Close objective'}
+ ${STATE_CLOSED} | ${'Objective'} | ${'Reopen objective'}
+ ${STATE_OPEN} | ${'Key result'} | ${'Close key result'}
+ ${STATE_CLOSED} | ${'Key result'} | ${'Reopen key result'}
+ `(
+ 'is "$buttonText" when "$workItemType" state is "$workItemState"',
+ ({ workItemState, workItemType, buttonText }) => {
+ createComponent({ workItemState, workItemType });
+
+ expect(findStateToggleButton().text()).toBe(buttonText);
+ },
+ );
});
describe('when updating the state', () => {
it('calls a mutation', () => {
createComponent();
- findItemState().vm.$emit('changed', STATE_CLOSED);
+ findStateToggleButton().vm.$emit('click');
expect(mutationSuccessHandler).toHaveBeenCalledWith({
input: {
@@ -80,10 +79,10 @@ describe('WorkItemState component', () => {
it('calls a mutation with REOPEN', () => {
createComponent({
- state: STATE_CLOSED,
+ workItemState: STATE_CLOSED,
});
- findItemState().vm.$emit('changed', STATE_OPEN);
+ findStateToggleButton().vm.$emit('click');
expect(mutationSuccessHandler).toHaveBeenCalledWith({
input: {
@@ -96,7 +95,7 @@ describe('WorkItemState component', () => {
it('emits an error message when the mutation was unsuccessful', async () => {
createComponent({ mutationHandler: jest.fn().mockRejectedValue('Error!') });
- findItemState().vm.$emit('changed', STATE_CLOSED);
+ findStateToggleButton().vm.$emit('click');
await waitForPromises();
expect(wrapper.emitted('error')).toEqual([
@@ -109,7 +108,7 @@ describe('WorkItemState component', () => {
createComponent();
- findItemState().vm.$emit('changed', STATE_CLOSED);
+ findStateToggleButton().vm.$emit('click');
await waitForPromises();
expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_state', {
diff --git a/spec/frontend/work_items/components/work_item_type_icon_spec.js b/spec/frontend/work_items/components/work_item_type_icon_spec.js
index a5e955c4dbf..7cf59cc04d1 100644
--- a/spec/frontend/work_items/components/work_item_type_icon_spec.js
+++ b/spec/frontend/work_items/components/work_item_type_icon_spec.js
@@ -27,6 +27,13 @@ describe('Work Item type component', () => {
${'INCIDENT'} | ${''} | ${'issue-type-incident'} | ${'Incident'} | ${false}
${'TEST_CASE'} | ${''} | ${'issue-type-test-case'} | ${'Test case'} | ${true}
${'random-issue-type'} | ${''} | ${'issue-type-issue'} | ${''} | ${true}
+ ${'Task'} | ${''} | ${'issue-type-task'} | ${'Task'} | ${false}
+ ${'Issue'} | ${''} | ${'issue-type-issue'} | ${'Issue'} | ${true}
+ ${'Requirements'} | ${''} | ${'issue-type-requirements'} | ${'Requirements'} | ${true}
+ ${'Incident'} | ${''} | ${'issue-type-incident'} | ${'Incident'} | ${false}
+ ${'Test_case'} | ${''} | ${'issue-type-test-case'} | ${'Test case'} | ${true}
+ ${'Objective'} | ${''} | ${'issue-type-objective'} | ${'Objective'} | ${true}
+ ${'Key Result'} | ${''} | ${'issue-type-keyresult'} | ${'Key result'} | ${true}
`(
'with workItemType set to "$workItemType" and workItemIconName set to "$workItemIconName"',
({ workItemType, workItemIconName, iconName, text, showTooltipOnHover }) => {
diff --git a/spec/frontend/work_items/list/components/work_items_list_app_spec.js b/spec/frontend/work_items/list/components/work_items_list_app_spec.js
new file mode 100644
index 00000000000..c92d092eb43
--- /dev/null
+++ b/spec/frontend/work_items/list/components/work_items_list_app_spec.js
@@ -0,0 +1,85 @@
+import * as Sentry from '@sentry/browser';
+import { shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { STATUS_OPEN } from '~/issues/constants';
+import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
+import WorkItemsListApp from '~/work_items/list/components/work_items_list_app.vue';
+import getWorkItemsQuery from '~/work_items/list/queries/get_work_items.query.graphql';
+import { groupWorkItemsQueryResponse } from '../../mock_data';
+
+jest.mock('@sentry/browser');
+
+describe('WorkItemsListApp component', () => {
+ let wrapper;
+
+ Vue.use(VueApollo);
+
+ const defaultQueryHandler = jest.fn().mockResolvedValue(groupWorkItemsQueryResponse);
+
+ const findIssuableList = () => wrapper.findComponent(IssuableList);
+
+ const mountComponent = ({ queryHandler = defaultQueryHandler } = {}) => {
+ wrapper = shallowMount(WorkItemsListApp, {
+ apolloProvider: createMockApollo([[getWorkItemsQuery, queryHandler]]),
+ provide: {
+ fullPath: 'full/path',
+ },
+ });
+ };
+
+ it('renders IssuableList component', () => {
+ mountComponent();
+
+ expect(findIssuableList().props()).toMatchObject({
+ currentTab: STATUS_OPEN,
+ error: '',
+ issuables: [],
+ namespace: 'work-items',
+ recentSearchesStorageKey: 'issues',
+ searchInputPlaceholder: 'Search or filter results...',
+ searchTokens: [],
+ showWorkItemTypeIcon: true,
+ sortOptions: [],
+ tabs: WorkItemsListApp.issuableListTabs,
+ });
+ });
+
+ it('renders work items', async () => {
+ mountComponent();
+ await waitForPromises();
+
+ expect(findIssuableList().props('issuables')).toEqual(
+ groupWorkItemsQueryResponse.data.group.workItems.nodes,
+ );
+ });
+
+ it('fetches work items', () => {
+ mountComponent();
+
+ expect(defaultQueryHandler).toHaveBeenCalledWith({ fullPath: 'full/path' });
+ });
+
+ describe('when there is an error fetching work items', () => {
+ beforeEach(async () => {
+ mountComponent({ queryHandler: jest.fn().mockRejectedValue(new Error('ERROR')) });
+ await waitForPromises();
+ });
+
+ it('renders an error message', () => {
+ const message = 'Something went wrong when fetching work items. Please try again.';
+
+ expect(findIssuableList().props('error')).toBe(message);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error('ERROR'));
+ });
+
+ it('clears error message when "dismiss-alert" event is emitted from IssuableList', async () => {
+ findIssuableList().vm.$emit('dismiss-alert');
+ await nextTick();
+
+ expect(findIssuableList().props('error')).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index f88e69a7ffe..05e83c0df3d 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -44,6 +44,7 @@ export const mockMilestone = {
expired: false,
startDate: '2022-10-17',
dueDate: '2022-10-24',
+ webPath: '123',
};
export const mockAwardEmojiThumbsUp = {
@@ -451,6 +452,7 @@ export const objectiveType = {
};
export const workItemResponseFactory = ({
+ iid = '1',
canUpdate = false,
canDelete = false,
canCreateNote = false,
@@ -482,14 +484,15 @@ export const workItemResponseFactory = ({
createdAt = '2022-08-03T12:41:54Z',
updatedAt = '2022-08-08T12:32:54Z',
awardEmoji = mockAwardsWidget,
+ state = 'OPEN',
} = {}) => ({
data: {
workItem: {
__typename: 'WorkItem',
id: 'gid://gitlab/WorkItem/1',
- iid: '1',
+ iid,
title: 'Updated title',
- state: 'OPEN',
+ state,
description: 'description',
confidential,
createdAt,
@@ -581,6 +584,7 @@ export const workItemResponseFactory = ({
__typename: 'WorkItemWidgetProgress',
type: 'PROGRESS',
progress: 0,
+ updatedAt: new Date(),
}
: { type: 'MOCK TYPE' },
milestoneWidgetPresent
@@ -1142,6 +1146,7 @@ export const workItemObjectiveMetadataWidgets = {
type: 'PROGRESS',
__typename: 'WorkItemWidgetProgress',
progress: 10,
+ updatedAt: new Date(),
},
};
@@ -1210,6 +1215,7 @@ export const workItemObjectiveNoMetadata = {
__typename: 'WorkItemWidgetProgress',
type: 'PROGRESS',
progress: null,
+ updatedAt: null,
},
{
__typename: 'WorkItemWidgetMilestone',
@@ -3298,3 +3304,63 @@ export const getTodosMutationResponse = (state) => {
},
};
};
+
+export const groupWorkItemsQueryResponse = {
+ data: {
+ group: {
+ id: 'gid://gitlab/Group/3',
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/58',
+ iid: '23',
+ author: {
+ id: 'gid://gitlab/User/9',
+ avatarUrl: 'author/avatar/url',
+ name: 'Arthur',
+ username: 'arthur',
+ webUrl: 'author/web/url',
+ },
+ closedAt: '',
+ confidential: true,
+ createdAt: '2020-01-23T12:34:56Z',
+ reference: 'javascriptjs/js#23',
+ state: 'OPEN',
+ title: 'a group level work item',
+ updatedAt: '',
+ webUrl: 'web/url',
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetAssignees',
+ assignees: {
+ nodes: mockAssignees,
+ },
+ type: 'ASSIGNEES',
+ },
+ {
+ __typename: 'WorkItemWidgetLabels',
+ allowsScopedLabels: false,
+ labels: {
+ nodes: [
+ {
+ __typename: 'Label',
+ id: 'gid://gitlab/Label/7',
+ color: '#f00',
+ description: '',
+ title: 'Label 7',
+ },
+ ],
+ },
+ type: 'LABELS',
+ },
+ ],
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ name: 'Issue',
+ },
+ },
+ ],
+ },
+ },
+ },
+};
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
index ac82037b7e2..622ccb86b2e 100644
--- a/spec/graphql/mutations/issues/update_spec.rb
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::Issues::Update do
+RSpec.describe Mutations::Issues::Update, feature_category: :team_planning do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:project_label) { create(:label, project: project) }
@@ -177,6 +177,17 @@ RSpec.describe Mutations::Issues::Update do
end
end
+ context 'when timeEstimate is negative' do
+ let(:time_estimate) { '-1h' }
+
+ it 'raises an argument error and changes are not applied' do
+ expect { mutation.ready?(time_estimate: time_estimate) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError,
+ 'timeEstimate must be greater than or equal to zero. Remember that every new timeEstimate overwrites the previous value.')
+ expect { subject }.not_to change { issue.time_estimate }
+ end
+ end
+
context 'when timeEstimate is 0' do
let(:time_estimate) { '0' }
diff --git a/spec/graphql/mutations/merge_requests/update_spec.rb b/spec/graphql/mutations/merge_requests/update_spec.rb
index 8a10f6cadd0..6ced71c5f4c 100644
--- a/spec/graphql/mutations/merge_requests/update_spec.rb
+++ b/spec/graphql/mutations/merge_requests/update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::MergeRequests::Update do
+RSpec.describe Mutations::MergeRequests::Update, feature_category: :team_planning do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
@@ -59,6 +59,18 @@ RSpec.describe Mutations::MergeRequests::Update do
end
end
+ context 'when timeEstimate is negative' do
+ let(:time_estimate) { '-1h' }
+
+ it 'raises an argument error and changes are not applied' do
+ expect { mutation.ready?(time_estimate: time_estimate) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError,
+ 'timeEstimate must be greater than or equal to zero. ' \
+ 'Remember that every new timeEstimate overwrites the previous value.')
+ expect { subject }.not_to change { merge_request.time_estimate }
+ end
+ end
+
context 'when timeEstimate is 0' do
let(:time_estimate) { '0' }
diff --git a/spec/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
index 576f514183f..b7f9eac3755 100644
--- a/spec/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -31,6 +31,8 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
maven_duplicate_exception_regex: 'SNAPSHOT',
generic_duplicates_allowed: true,
generic_duplicate_exception_regex: 'foo',
+ nuget_duplicates_allowed: true,
+ nuget_duplicate_exception_regex: 'foo',
maven_package_requests_forwarding: nil,
lock_maven_package_requests_forwarding: false,
npm_package_requests_forwarding: nil,
@@ -42,6 +44,8 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
maven_duplicate_exception_regex: 'RELEASE',
generic_duplicates_allowed: false,
generic_duplicate_exception_regex: 'bar',
+ nuget_duplicates_allowed: false,
+ nuget_duplicate_exception_regex: 'bar',
maven_package_requests_forwarding: true,
lock_maven_package_requests_forwarding: true,
npm_package_requests_forwarding: true,
@@ -69,6 +73,18 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
)
end
end
+
+ context 'when nuget_duplicates_option FF is disabled' do
+ let_it_be(:params) { { namespace_path: namespace.full_path, nuget_duplicates_allowed: false } }
+
+ before do
+ stub_feature_flags(nuget_duplicates_option: false)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable, /feature flag is disabled/)
+ end
+ end
end
RSpec.shared_examples 'denying access to namespace package setting' do
@@ -95,6 +111,8 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
maven_duplicate_exception_regex: 'RELEASE',
generic_duplicates_allowed: false,
generic_duplicate_exception_regex: 'bar',
+ nuget_duplicates_allowed: false,
+ nuget_duplicate_exception_regex: 'bar',
maven_package_requests_forwarding: true,
lock_maven_package_requests_forwarding: true,
npm_package_requests_forwarding: true,
diff --git a/spec/graphql/mutations/work_items/linked_items/base_spec.rb b/spec/graphql/mutations/work_items/linked_items/base_spec.rb
new file mode 100644
index 00000000000..7061c37abd3
--- /dev/null
+++ b/spec/graphql/mutations/work_items/linked_items/base_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::WorkItems::LinkedItems::Base, feature_category: :groups_and_projects do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project).tap { |group| group.add_maintainer(user) } }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+
+ it 'raises a NotImplementedError error if the update_links method is called on the base class' do
+ mutation = described_class.new(context: { current_user: user }, object: nil, field: nil)
+
+ expect { mutation.resolve(id: work_item.to_gid) }.to raise_error(NotImplementedError)
+ end
+end
diff --git a/spec/graphql/resolvers/group_labels_resolver_spec.rb b/spec/graphql/resolvers/group_labels_resolver_spec.rb
index 341448d7add..08e17cedfcc 100644
--- a/spec/graphql/resolvers/group_labels_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_labels_resolver_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Resolvers::GroupLabelsResolver do
end
it 'prevents N+1 queries' do
- control = Gitlab::WithRequestStore.with_request_store do
+ control = Gitlab::SafeRequestStore.ensure_request_store do
ActiveRecord::QueryRecorder.new { resolve_labels(group, params).to_a }
end
@@ -75,7 +75,7 @@ RSpec.describe Resolvers::GroupLabelsResolver do
create(:group_label, group: another_subgroup, name: 'another group feature')
expect do
- Gitlab::WithRequestStore.with_request_store do
+ Gitlab::SafeRequestStore.ensure_request_store do
resolve_labels(group, params).to_a
end
end.not_to exceed_query_limit(control.count)
diff --git a/spec/graphql/resolvers/labels_resolver_spec.rb b/spec/graphql/resolvers/labels_resolver_spec.rb
index 8196315dd7c..16cf2e73736 100644
--- a/spec/graphql/resolvers/labels_resolver_spec.rb
+++ b/spec/graphql/resolvers/labels_resolver_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Resolvers::LabelsResolver do
end
it 'prevents N+1 queries' do
- control = Gitlab::WithRequestStore.with_request_store do
+ control = Gitlab::SafeRequestStore.ensure_request_store do
ActiveRecord::QueryRecorder.new { resolve_labels(project, params).to_a }
end
@@ -75,7 +75,7 @@ RSpec.describe Resolvers::LabelsResolver do
create(:group_label, group: another_subgroup, name: 'another group feature')
expect do
- Gitlab::WithRequestStore.with_request_store do
+ Gitlab::SafeRequestStore.ensure_request_store do
resolve_labels(project, params).to_a
end
end.not_to exceed_query_limit(control.count)
diff --git a/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
index 2ca194d519c..75e0a816086 100644
--- a/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
+++ b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
@@ -7,13 +7,12 @@ RSpec.describe Resolvers::Metrics::Dashboards::AnnotationResolver, feature_categ
describe '#resolve' do
context 'user with developer access' do
- subject(:resolve_annotations) { resolve(described_class, obj: dashboard, args: args, ctx: { current_user: current_user }) }
+ subject(:resolve_annotations) { resolve(described_class, obj: nil, args: args, ctx: { current_user: current_user }) }
let_it_be(:current_user) { create(:user) }
let_it_be(:environment) { create(:environment) }
let_it_be(:path) { 'config/prometheus/common_metrics.yml' }
- let(:dashboard) { PerformanceMonitoring::PrometheusDashboard.new(path: path, environment: environment) }
let(:args) do
{
from: 10.minutes.ago,
@@ -30,36 +29,6 @@ RSpec.describe Resolvers::Metrics::Dashboards::AnnotationResolver, feature_categ
end
context 'with annotation records' do
- let_it_be(:annotation_1) { create(:metrics_dashboard_annotation, environment: environment, starting_at: 9.minutes.ago, dashboard_path: path) }
-
- it 'loads annotations with usage of finder class', :aggregate_failures do
- expect_next_instance_of(::Metrics::Dashboards::AnnotationsFinder, dashboard: dashboard, params: args) do |finder|
- expect(finder).to receive(:execute).and_return [annotation_1]
- end
-
- expect(resolve_annotations).to eql [annotation_1]
- end
-
- context 'dashboard is missing' do
- let(:dashboard) { nil }
-
- it 'returns empty array', :aggregate_failures do
- expect(::Metrics::Dashboards::AnnotationsFinder).not_to receive(:new)
-
- expect(resolve_annotations).to be_empty
- end
- end
-
- context 'there are no annotations records' do
- it 'returns empty array' do
- allow_next_instance_of(::Metrics::Dashboards::AnnotationsFinder) do |finder|
- allow(finder).to receive(:execute).and_return []
- end
-
- expect(resolve_annotations).to be_empty
- end
- end
-
context 'when metrics dashboard feature is unavailable' do
before do
stub_feature_flags(remove_monitor_metrics: true)
@@ -69,6 +38,10 @@ RSpec.describe Resolvers::Metrics::Dashboards::AnnotationResolver, feature_categ
expect(resolve_annotations).to be_nil
end
end
+
+ it 'returns [] all the time' do
+ expect(resolve_annotations).to be_empty
+ end
end
end
end
diff --git a/spec/graphql/types/access_levels/deploy_key_type_spec.rb b/spec/graphql/types/access_levels/deploy_key_type_spec.rb
new file mode 100644
index 00000000000..02f58ec4c15
--- /dev/null
+++ b/spec/graphql/types/access_levels/deploy_key_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['AccessLevelDeployKey'], feature_category: :source_code_management do
+ subject { described_class }
+
+ let(:fields) { %i[id title expires_at user] }
+
+ specify { is_expected.to require_graphql_authorizations(:read_deploy_key) }
+
+ specify { is_expected.to have_graphql_fields(fields).at_least }
+end
diff --git a/spec/graphql/types/access_levels/user_type_spec.rb b/spec/graphql/types/access_levels/user_type_spec.rb
new file mode 100644
index 00000000000..7a34f70e166
--- /dev/null
+++ b/spec/graphql/types/access_levels/user_type_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['AccessLevelUser'], feature_category: :source_code_management do
+ include GraphqlHelpers
+
+ describe 'config' do
+ subject { described_class }
+
+ let(:expected_fields) { %w[id username name publicEmail avatarUrl webUrl webPath] }
+
+ it { is_expected.to require_graphql_authorizations(:read_user) }
+ it { is_expected.to have_graphql_fields(expected_fields).only }
+ end
+
+ describe 'fields' do
+ let(:object) { instance_double(User) }
+ let(:current_user) { instance_double(User) }
+
+ before do
+ allow(described_class).to receive(:authorized?).and_return(true)
+ end
+
+ describe '#name' do
+ it 'calls User#redacted_name(current_user)' do
+ allow(object).to receive(:redacted_name).with(current_user)
+ resolve_field(:name, object, current_user: current_user)
+ expect(object).to have_received(:redacted_name).with(current_user).once
+ end
+ end
+
+ describe '#avatar_url' do
+ it 'calls User#avatar_url(only_path: false)' do
+ allow(object).to receive(:avatar_url).with(only_path: false)
+ resolve_field(:avatar_url, object, current_user: current_user)
+ expect(object).to have_received(:avatar_url).with(only_path: false).once
+ end
+ end
+ end
+end
diff --git a/spec/graphql/types/alert_management/alert_type_spec.rb b/spec/graphql/types/alert_management/alert_type_spec.rb
index 92e8104fc4d..7c7b4cde60b 100644
--- a/spec/graphql/types/alert_management/alert_type_spec.rb
+++ b/spec/graphql/types/alert_management/alert_type_spec.rb
@@ -34,6 +34,7 @@ RSpec.describe GitlabSchema.types['AlertManagementAlert'], feature_category: :in
runbook
todos
details_url
+ metrics_dashboard_url
prometheus_alert
environment
web_url
diff --git a/spec/graphql/types/branch_protections/merge_access_level_type_spec.rb b/spec/graphql/types/branch_protections/merge_access_level_type_spec.rb
index 8cc1005d97e..0586a643196 100644
--- a/spec/graphql/types/branch_protections/merge_access_level_type_spec.rb
+++ b/spec/graphql/types/branch_protections/merge_access_level_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['MergeAccessLevel'] do
+RSpec.describe GitlabSchema.types['MergeAccessLevel'], feature_category: :source_code_management do
subject { described_class }
let(:fields) { %i[access_level access_level_description] }
diff --git a/spec/graphql/types/branch_protections/push_access_level_type_spec.rb b/spec/graphql/types/branch_protections/push_access_level_type_spec.rb
index c78c0bda74c..ec5d42ac720 100644
--- a/spec/graphql/types/branch_protections/push_access_level_type_spec.rb
+++ b/spec/graphql/types/branch_protections/push_access_level_type_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['PushAccessLevel'] do
+RSpec.describe GitlabSchema.types['PushAccessLevel'], feature_category: :source_code_management do
subject { described_class }
- let(:fields) { %i[access_level access_level_description] }
+ let(:fields) { %i[access_level access_level_description deploy_key] }
specify { is_expected.to require_graphql_authorizations(:read_protected_branch) }
diff --git a/spec/graphql/types/branch_rules/branch_protection_type_spec.rb b/spec/graphql/types/branch_rules/branch_protection_type_spec.rb
index bbc92fd8fef..d74c76d3f94 100644
--- a/spec/graphql/types/branch_rules/branch_protection_type_spec.rb
+++ b/spec/graphql/types/branch_rules/branch_protection_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['BranchProtection'] do
+RSpec.describe GitlabSchema.types['BranchProtection'], feature_category: :source_code_management do
subject { described_class }
let(:fields) { %i[merge_access_levels push_access_levels allow_force_push] }
diff --git a/spec/graphql/types/ci/detailed_status_type_spec.rb b/spec/graphql/types/ci/detailed_status_type_spec.rb
index 69fb2bc43c0..81ab1b52552 100644
--- a/spec/graphql/types/ci/detailed_status_type_spec.rb
+++ b/spec/graphql/types/ci/detailed_status_type_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Types::Ci::DetailedStatusType do
include GraphqlHelpers
- let_it_be(:stage) { create(:ci_stage, status: :manual) }
+ let_it_be(:stage) { create(:ci_stage, status: :skipped) }
specify { expect(described_class.graphql_name).to eq('DetailedStatus') }
diff --git a/spec/graphql/types/ci/pipeline_trigger_type_spec.rb b/spec/graphql/types/ci/pipeline_trigger_type_spec.rb
new file mode 100644
index 00000000000..2d39118bad8
--- /dev/null
+++ b/spec/graphql/types/ci/pipeline_trigger_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PipelineTrigger'], feature_category: :continuous_integration do
+ specify do
+ expect(described_class).to have_graphql_fields(%i[
+ can_access_project
+ description
+ has_token_exposed
+ last_used
+ id
+ owner
+ token
+ ]).at_least
+ end
+end
diff --git a/spec/graphql/types/ci/runner_manager_type_spec.rb b/spec/graphql/types/ci/runner_manager_type_spec.rb
index 6f73171cd8f..ff7297b0a0e 100644
--- a/spec/graphql/types/ci/runner_manager_type_spec.rb
+++ b/spec/graphql/types/ci/runner_manager_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['CiRunnerManager'], feature_category: :runner_
it 'contains attributes related to a runner manager' do
expected_fields = %w[
- architecture_name contacted_at created_at executor_name id ip_address platform_name revision
+ architecture_name contacted_at created_at executor_name id ip_address job_execution_status platform_name revision
runner status system_id version
]
diff --git a/spec/graphql/types/commit_type_spec.rb b/spec/graphql/types/commit_type_spec.rb
index 561d165148b..6af5ea04dd2 100644
--- a/spec/graphql/types/commit_type_spec.rb
+++ b/spec/graphql/types/commit_type_spec.rb
@@ -12,8 +12,14 @@ RSpec.describe GitlabSchema.types['Commit'] do
it 'contains attributes related to commit' do
expect(described_class).to have_graphql_fields(
:id, :sha, :short_id, :title, :full_title, :full_title_html, :description, :description_html, :message, :title_html, :authored_date,
- :author_name, :author_email, :author_gravatar, :author, :web_url, :web_path,
- :pipelines, :signature_html, :signature
+ :author_name, :author_email, :author_gravatar, :author, :diffs, :web_url, :web_path,
+ :pipelines, :signature_html, :signature, :committer_name, :committer_email, :committed_date
)
end
+
+ describe 'diffs' do
+ it 'limits field call count' do
+ expect(described_class.fields['diffs'].extensions).to include(a_kind_of(::Gitlab::Graphql::Limit::FieldCallCount))
+ end
+ end
end
diff --git a/spec/graphql/types/custom_emoji_type_spec.rb b/spec/graphql/types/custom_emoji_type_spec.rb
index 7f3c99e4b63..17697321602 100644
--- a/spec/graphql/types/custom_emoji_type_spec.rb
+++ b/spec/graphql/types/custom_emoji_type_spec.rb
@@ -3,9 +3,20 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['CustomEmoji'] do
+ expected_fields = %w[
+ id
+ name
+ url
+ external
+ created_at
+ user_permissions
+ ]
+
specify { expect(described_class.graphql_name).to eq('CustomEmoji') }
specify { expect(described_class).to require_graphql_authorizations(:read_custom_emoji) }
- specify { expect(described_class).to have_graphql_fields(:id, :name, :url, :external) }
+ specify { expect(described_class).to have_graphql_fields(*expected_fields) }
+
+ specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::CustomEmoji) }
end
diff --git a/spec/graphql/types/diff_type_spec.rb b/spec/graphql/types/diff_type_spec.rb
new file mode 100644
index 00000000000..04f4ff9feed
--- /dev/null
+++ b/spec/graphql/types/diff_type_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Diff'], feature_category: :code_review_workflow do
+ include RepoHelpers
+ include GraphqlHelpers
+
+ specify { expect(described_class.graphql_name).to eq('Diff') }
+
+ it 'contains attributes related to diff' do
+ expect(described_class).to have_graphql_fields(
+ :a_mode, :b_mode, :deleted_file, :diff, :new_file, :new_path, :old_path, :renamed_file
+ )
+ end
+
+ describe '#diff' do
+ subject { resolve_field(:diff, diff, object_type: described_class) }
+
+ let(:merge_request_diff) { create(:merge_request).merge_request_diff }
+ let(:diff) { merge_request_diff.diffs.diffs.first }
+
+ it 'returns the diff of the passed commit' do
+ is_expected.to eq(diff.diff)
+ end
+ end
+end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index 0fbf50fe258..6622551f063 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe GitlabSchema.types['Group'] do
dependency_proxy_image_prefix dependency_proxy_image_ttl_policy
shared_runners_setting timelogs organization_state_counts organizations
contact_state_counts contacts work_item_types
- recent_issue_boards ci_variables releases
+ recent_issue_boards ci_variables releases environment_scopes work_items autocomplete_users
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -77,6 +77,13 @@ RSpec.describe GitlabSchema.types['Group'] do
it { is_expected.to have_graphql_resolver(Resolvers::GroupReleasesResolver) }
end
+ describe 'work_items field' do
+ subject { described_class.fields['workItems'] }
+
+ it { is_expected.to have_graphql_type(Types::WorkItemType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Namespaces::WorkItemsResolver) }
+ end
+
it_behaves_like 'a GraphQL type with labels' do
let(:labels_resolver_arguments) { [:search_term, :includeAncestorGroups, :includeDescendantGroups, :onlyGroupLabels] }
end
diff --git a/spec/graphql/types/issue_type_enum_spec.rb b/spec/graphql/types/issue_type_enum_spec.rb
index 33a3a9cf8ce..5b1bc9c3d9c 100644
--- a/spec/graphql/types/issue_type_enum_spec.rb
+++ b/spec/graphql/types/issue_type_enum_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Types::IssueTypeEnum, feature_category: :team_planning do
specify { expect(described_class.graphql_name).to eq('IssueType') }
- it 'exposes all the existing issue type values except key_result' do
+ it 'exposes all the existing issue type values except epic' do
expect(described_class.values.keys).to match_array(
%w[ISSUE INCIDENT TEST_CASE REQUIREMENT TASK OBJECTIVE KEY_RESULT]
)
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 7c4f2a06147..6c4e68fba6b 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
it 'has specific fields' do
fields = %i[id iid title description state reference author assignees updated_by participants labels milestone due_date
confidential hidden discussion_locked upvotes downvotes merge_requests_count user_notes_count user_discussions_count web_path web_url relative_position
- emails_disabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
+ emails_disabled emails_enabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
design_collection alert_management_alert alert_management_alerts severity current_user_todos moved moved_to
closed_as_duplicate_of create_note_email timelogs project_id customer_relations_contacts escalation_status]
diff --git a/spec/graphql/types/merge_request_state_enum_spec.rb b/spec/graphql/types/merge_request_state_enum_spec.rb
index 6fc5803a5d0..9c286c54e15 100644
--- a/spec/graphql/types/merge_request_state_enum_spec.rb
+++ b/spec/graphql/types/merge_request_state_enum_spec.rb
@@ -8,6 +8,6 @@ RSpec.describe GitlabSchema.types['MergeRequestState'] do
it_behaves_like 'issuable state'
it 'exposes all the existing merge request states' do
- expect(described_class.values.keys).to include('merged')
+ expect(described_class.values.keys).to include('merged', 'opened')
end
end
diff --git a/spec/graphql/types/namespace/package_settings_type_spec.rb b/spec/graphql/types/namespace/package_settings_type_spec.rb
index 40048b7dfa6..d823f2017b6 100644
--- a/spec/graphql/types/namespace/package_settings_type_spec.rb
+++ b/spec/graphql/types/namespace/package_settings_type_spec.rb
@@ -21,6 +21,8 @@ RSpec.describe GitlabSchema.types['PackageSettings'], feature_category: :package
maven_duplicate_exception_regex
generic_duplicates_allowed
generic_duplicate_exception_regex
+ nuget_duplicates_allowed
+ nuget_duplicate_exception_regex
maven_package_requests_forwarding
lock_maven_package_requests_forwarding
npm_package_requests_forwarding
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 262164a0821..cd9a0642ae6 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe GitlabSchema.types['Project'] do
ci_template timelogs merge_commit_template squash_commit_template work_item_types
recent_issue_boards ci_config_path_or_default packages_cleanup_policy ci_variables
timelog_categories fork_targets branch_rules ci_config_variables pipeline_schedules languages
- incident_management_timeline_event_tags visible_forks inherited_ci_variables
+ incident_management_timeline_event_tags visible_forks inherited_ci_variables autocomplete_users
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -291,7 +291,7 @@ RSpec.describe GitlabSchema.types['Project'] do
let_it_be(:project) { create(:project_empty_repo) }
it 'raises an error' do
- expect(subject['errors'][0]['message']).to eq('UF You must <a target="_blank" rel="noopener noreferrer" ' \
+ expect(subject['errors'][0]['message']).to eq('You must <a target="_blank" rel="noopener noreferrer" ' \
'href="http://localhost/help/user/project/repository/index.md#' \
'add-files-to-a-repository">add at least one file to the ' \
'repository</a> before using Security features.')
diff --git a/spec/graphql/types/projects/branch_rule_type_spec.rb b/spec/graphql/types/projects/branch_rule_type_spec.rb
index 54ea4f6857b..fc7bf4252f1 100644
--- a/spec/graphql/types/projects/branch_rule_type_spec.rb
+++ b/spec/graphql/types/projects/branch_rule_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['BranchRule'] do
+RSpec.describe GitlabSchema.types['BranchRule'], feature_category: :source_code_management do
include GraphqlHelpers
subject { described_class }
diff --git a/spec/graphql/types/users/autocompleted_user_type_spec.rb b/spec/graphql/types/users/autocompleted_user_type_spec.rb
new file mode 100644
index 00000000000..7b7af429765
--- /dev/null
+++ b/spec/graphql/types/users/autocompleted_user_type_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['AutocompletedUser'], feature_category: :team_planning do
+ it { expect(described_class).to require_graphql_authorizations(:read_user) }
+
+ describe '#merge_request_interaction' do
+ subject { described_class.fields['mergeRequestInteraction'] }
+
+ it 'returns the correct type' do
+ is_expected.to have_graphql_type(Types::UserMergeRequestInteractionType)
+ end
+
+ it 'has the correct arguments' do
+ expect(subject.arguments).to have_key('id')
+ end
+ end
+end
diff --git a/spec/graphql/types/work_items/linked_item_type_spec.rb b/spec/graphql/types/work_items/linked_item_type_spec.rb
new file mode 100644
index 00000000000..7d7fda45ce4
--- /dev/null
+++ b/spec/graphql/types/work_items/linked_item_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::LinkedItemType, feature_category: :portfolio_management do
+ specify { expect(described_class.graphql_name).to eq('LinkedWorkItemType') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[linkCreatedAt linkId linkType linkUpdatedAt workItem]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/work_items/related_link_type_enum_spec.rb b/spec/graphql/types/work_items/related_link_type_enum_spec.rb
new file mode 100644
index 00000000000..38c180b58d4
--- /dev/null
+++ b/spec/graphql/types/work_items/related_link_type_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::RelatedLinkTypeEnum, feature_category: :portfolio_management do
+ specify { expect(described_class.graphql_name).to eq('WorkItemRelatedLinkType') }
+
+ it 'exposes all the existing access levels' do
+ expected_fields = Gitlab.ee? ? %w[RELATED BLOCKS BLOCKED_BY] : %w[RELATED]
+
+ expect(described_class.values.keys).to match_array(expected_fields)
+ end
+end
diff --git a/spec/graphql/types/work_items/widget_interface_spec.rb b/spec/graphql/types/work_items/widget_interface_spec.rb
index d955ec5023e..645e63033c5 100644
--- a/spec/graphql/types/work_items/widget_interface_spec.rb
+++ b/spec/graphql/types/work_items/widget_interface_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::WorkItems::WidgetInterface do
+RSpec.describe Types::WorkItems::WidgetInterface, feature_category: :team_planning do
include GraphqlHelpers
it 'exposes the expected fields' do
@@ -23,6 +23,7 @@ RSpec.describe Types::WorkItems::WidgetInterface do
WorkItems::Widgets::Notifications | Types::WorkItems::Widgets::NotificationsType
WorkItems::Widgets::CurrentUserTodos | Types::WorkItems::Widgets::CurrentUserTodosType
WorkItems::Widgets::AwardEmoji | Types::WorkItems::Widgets::AwardEmojiType
+ WorkItems::Widgets::LinkedItems | Types::WorkItems::Widgets::LinkedItemsType
end
with_them do
diff --git a/spec/graphql/types/work_items/widgets/linked_items_type_spec.rb b/spec/graphql/types/work_items/widgets/linked_items_type_spec.rb
new file mode 100644
index 00000000000..db6f27ecf1f
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/linked_items_type_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::Widgets::LinkedItemsType, feature_category: :portfolio_management do
+ it 'exposes the expected fields' do
+ expected_fields = %i[type linkedItems]
+
+ expect(described_class.graphql_name).to eq('WorkItemWidgetLinkedItems')
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/helpers/admin/application_settings/settings_helper_spec.rb b/spec/helpers/admin/application_settings/settings_helper_spec.rb
index b008f52c0eb..9981e0d12bd 100644
--- a/spec/helpers/admin/application_settings/settings_helper_spec.rb
+++ b/spec/helpers/admin/application_settings/settings_helper_spec.rb
@@ -31,24 +31,4 @@ RSpec.describe Admin::ApplicationSettings::SettingsHelper do
})
end
end
-
- describe 'Code Suggestions for Self-Managed instances', feature_category: :code_suggestions do
- describe '#code_suggestions_description' do
- subject { helper.code_suggestions_description }
-
- it { is_expected.to include 'https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html' }
- end
-
- describe '#code_suggestions_token_explanation' do
- subject { helper.code_suggestions_token_explanation }
-
- it { is_expected.to include 'https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#create-a-personal-access-token' }
- end
-
- describe '#code_suggestions_agreement' do
- subject { helper.code_suggestions_agreement }
-
- it { is_expected.to include 'https://about.gitlab.com/handbook/legal/testing-agreement/' }
- end
- end
end
diff --git a/spec/helpers/broadcast_messages_helper_spec.rb b/spec/helpers/admin/broadcast_messages_helper_spec.rb
index 05e745e249e..434b79d5271 100644
--- a/spec/helpers/broadcast_messages_helper_spec.rb
+++ b/spec/helpers/admin/broadcast_messages_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BroadcastMessagesHelper, feature_category: :onboarding do
+RSpec.describe Admin::BroadcastMessagesHelper, feature_category: :onboarding do
include Gitlab::Routing.url_helpers
let_it_be(:user) { create(:user) }
@@ -102,7 +102,7 @@ RSpec.describe BroadcastMessagesHelper, feature_category: :onboarding do
end
describe '#broadcast_message' do
- let(:current_broadcast_message) { BroadcastMessage.new(message: 'Current Message') }
+ let(:current_broadcast_message) { System::BroadcastMessage.new(message: 'Current Message') }
it 'returns nil when no current message' do
expect(helper.broadcast_message(nil)).to be_nil
@@ -133,6 +133,36 @@ RSpec.describe BroadcastMessagesHelper, feature_category: :onboarding do
end
end
+ describe '#render_broadcast_message' do
+ context 'when message is banner' do
+ let_it_be(:broadcast_message) do
+ System::BroadcastMessage.new(message: 'Current Message', broadcast_type: :banner)
+ end.freeze
+
+ it 'renders broadcast message' do
+ expect(helper.render_broadcast_message(broadcast_message)).to eq("<p>Current Message</p>")
+ end
+ end
+
+ context 'when message is notification' do
+ let_it_be(:broadcast_message) do
+ System::BroadcastMessage.new(message: 'Current Message', broadcast_type: :notification)
+ end.freeze
+
+ it 'renders broadcast message' do
+ expect(helper.render_broadcast_message(broadcast_message)).to eq("<p>Current Message</p>")
+ end
+ end
+ end
+
+ describe '#target_access_levels_display' do
+ let_it_be(:access_levels) { [Gitlab::Access::REPORTER, Gitlab::Access::DEVELOPER] }.freeze
+
+ it 'joins access levels' do
+ expect(helper.target_access_levels_display(access_levels)).to eq("Reporter, Developer")
+ end
+ end
+
describe '#admin_broadcast_messages_data' do
let(:starts_at) { 1.day.ago }
let(:ends_at) { 1.day.from_now }
diff --git a/spec/helpers/admin/deploy_key_helper_spec.rb b/spec/helpers/admin/deploy_key_helper_spec.rb
index ca951ccf485..0c07ecf90ce 100644
--- a/spec/helpers/admin/deploy_key_helper_spec.rb
+++ b/spec/helpers/admin/deploy_key_helper_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Admin::DeployKeyHelper do
let_it_be(:edit_path) { '/admin/deploy_keys/:id/edit' }
let_it_be(:delete_path) { '/admin/deploy_keys/:id' }
let_it_be(:create_path) { '/admin/deploy_keys/new' }
- let_it_be(:empty_state_svg_path) { '/assets/illustrations/empty-state/empty-deploy-keys-lg.svg' }
+ let_it_be(:empty_state_svg_path) { '/assets/illustrations/empty-state/empty-access-token-md.svg' }
subject(:result) { helper.admin_deploy_keys_data }
@@ -15,7 +15,7 @@ RSpec.describe Admin::DeployKeyHelper do
expect(helper).to receive(:edit_admin_deploy_key_path).with(':id').and_return(edit_path)
expect(helper).to receive(:admin_deploy_key_path).with(':id').and_return(delete_path)
expect(helper).to receive(:new_admin_deploy_key_path).and_return(create_path)
- expect(helper).to receive(:image_path).with('illustrations/empty-state/empty-deploy-keys-lg.svg').and_return(empty_state_svg_path)
+ expect(helper).to receive(:image_path).with('illustrations/empty-state/empty-access-token-md.svg').and_return(empty_state_svg_path)
expect(result).to eq({
edit_path: edit_path,
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 6ef57f8e22c..ad81c125055 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -845,4 +845,50 @@ RSpec.describe ApplicationHelper do
end
end
end
+
+ describe '#hidden_resource_icon', feature_category: :insider_threat do
+ let_it_be(:mock_svg) { '<svg></svg>'.html_safe }
+
+ shared_examples 'returns icon with tooltip' do
+ before do
+ allow(helper).to receive(:sprite_icon).with('spam', css_class: 'gl-vertical-align-text-bottom').and_return(mock_svg)
+ end
+
+ it 'returns icon with tooltip' do
+ result = helper.hidden_resource_icon(resource)
+ expect(result).to eq("<span class=\"has-tooltip\" title=\"#{expected_title}\">#{mock_svg}</span>")
+ end
+ end
+
+ context 'when resource is an issue' do
+ let_it_be(:resource) { build(:issue) }
+ let(:expected_title) { 'This issue is hidden because its author has been banned' }
+
+ it_behaves_like 'returns icon with tooltip'
+ end
+
+ context 'when resource is a merge request' do
+ let_it_be(:resource) { build(:merge_request) }
+ let(:expected_title) { 'This merge request is hidden because its author has been banned' }
+
+ it_behaves_like 'returns icon with tooltip'
+ end
+
+ context 'when resource is a project' do
+ let_it_be(:resource) { build(:project) }
+ let(:expected_title) { 'This project is hidden because its creator has been banned' }
+
+ it_behaves_like 'returns icon with tooltip'
+ end
+
+ context 'when css_class is provided' do
+ let_it_be(:resource) { build(:issue) }
+
+ it 'passes the value to sprite_icon' do
+ expect(helper).to receive(:sprite_icon).with('spam', css_class: 'gl-vertical-align-text-bottom extra-class').and_return(mock_svg)
+
+ helper.hidden_resource_icon(resource, css_class: 'extra-class')
+ end
+ end
+ end
end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index f924704ab54..9d591164547 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -74,6 +74,10 @@ RSpec.describe ApplicationSettingsHelper do
expect(helper.visible_attributes).to include(*params)
end
+ it 'contains :namespace_aggregation_schedule_lease_duration_in_seconds' do
+ expect(helper.visible_attributes).to include(:namespace_aggregation_schedule_lease_duration_in_seconds)
+ end
+
context 'when on SaaS', :saas do
it 'does not contain :deactivate_dormant_users' do
expect(helper.visible_attributes).not_to include(:deactivate_dormant_users)
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index c170d7fae67..febdc3bab65 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -83,22 +83,9 @@ RSpec.describe Ci::RunnersHelper, feature_category: :runner_fleet do
end
describe '#admin_runners_data_attributes' do
- let_it_be(:admin) { create(:user, :admin) }
- let_it_be(:instance_runner) { create(:ci_runner, :instance) }
- let_it_be(:project_runner) { create(:ci_runner, :project) }
+ subject { helper.admin_runners_data_attributes }
- before do
- allow(helper).to receive(:current_user).and_return(admin)
- end
-
- it 'returns the data in format' do
- expect(helper.admin_runners_data_attributes).to include(
- runner_install_help_page: 'https://docs.gitlab.com/runner/install/',
- registration_token: Gitlab::CurrentSettings.runners_registration_token,
- online_contact_timeout_secs: 7200,
- stale_timeout_secs: 7889238
- )
- end
+ it_behaves_like 'admin_runners_data_attributes contains data'
end
describe '#group_shared_runners_settings_data' do
@@ -115,12 +102,19 @@ RSpec.describe Ci::RunnersHelper, feature_category: :runner_fleet do
}
end
+ before do
+ allow(helper).to receive(:can?).with(user, :admin_group, parent).and_return(true)
+ end
+
it 'returns group data for top level group' do
result = {
group_id: parent.id,
group_name: parent.name,
group_is_empty: 'false',
shared_runners_setting: Namespace::SR_ENABLED,
+
+ parent_name: nil,
+ parent_settings_path: nil,
parent_shared_runners_setting: nil
}.merge(runner_constants)
@@ -133,7 +127,27 @@ RSpec.describe Ci::RunnersHelper, feature_category: :runner_fleet do
group_name: group.name,
group_is_empty: 'true',
shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE,
- parent_shared_runners_setting: Namespace::SR_ENABLED
+
+ parent_shared_runners_setting: Namespace::SR_ENABLED,
+ parent_name: parent.name,
+ parent_settings_path: group_settings_ci_cd_path(group.parent, anchor: 'runners-settings')
+ }.merge(runner_constants)
+
+ expect(helper.group_shared_runners_settings_data(group)).to eq result
+ end
+
+ it 'returns groups data for child group with no access to parent' do
+ allow(helper).to receive(:can?).with(user, :admin_group, parent).and_return(false)
+
+ result = {
+ group_id: group.id,
+ group_name: group.name,
+ group_is_empty: 'true',
+ shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE,
+
+ parent_shared_runners_setting: Namespace::SR_ENABLED,
+ parent_name: nil,
+ parent_settings_path: nil
}.merge(runner_constants)
expect(helper.group_shared_runners_settings_data(group)).to eq result
@@ -145,7 +159,10 @@ RSpec.describe Ci::RunnersHelper, feature_category: :runner_fleet do
group_name: group_with_project.name,
group_is_empty: 'false',
shared_runners_setting: Namespace::SR_ENABLED,
- parent_shared_runners_setting: Namespace::SR_ENABLED
+
+ parent_shared_runners_setting: Namespace::SR_ENABLED,
+ parent_name: parent.name,
+ parent_settings_path: group_settings_ci_cd_path(group.parent, anchor: 'runners-settings')
}.merge(runner_constants)
expect(helper.group_shared_runners_settings_data(group_with_project)).to eq result
@@ -190,8 +207,28 @@ RSpec.describe Ci::RunnersHelper, feature_category: :runner_fleet do
context 'when project has runners' do
it 'returns the correct value for is_enabled' do
+ allow(helper).to receive(:can?).with(user, :admin_group, group).and_return(false)
+
+ data = helper.toggle_shared_runners_settings_data(project_with_runners)
+
+ expect(data).to include(
+ is_enabled: 'true',
+ group_name: nil,
+ group_settings_path: nil
+ )
+ end
+ end
+
+ context 'when group can be configured by user' do
+ it 'returns values to configure group' do
+ allow(helper).to receive(:can?).with(user, :admin_group, group).and_return(true)
+
data = helper.toggle_shared_runners_settings_data(project_with_runners)
- expect(data[:is_enabled]).to eq("true")
+
+ expect(data).to include(
+ group_name: group.name,
+ group_settings_path: group_settings_ci_cd_path(group, anchor: 'runners-settings')
+ )
end
end
@@ -218,9 +255,9 @@ RSpec.describe Ci::RunnersHelper, feature_category: :runner_fleet do
using RSpec::Parameterized::TableSyntax
where(:shared_runners_setting, :is_disabled_and_unoverridable) do
- :shared_runners_enabled | "false"
- :disabled_and_overridable | "false"
- :disabled_and_unoverridable | "true"
+ :shared_runners_enabled | "false"
+ :shared_runners_disabled_and_overridable | "false"
+ :shared_runners_disabled_and_unoverridable | "true"
end
with_them do
diff --git a/spec/helpers/ci/variables_helper_spec.rb b/spec/helpers/ci/variables_helper_spec.rb
index 9c3236ace72..13970dd95b4 100644
--- a/spec/helpers/ci/variables_helper_spec.rb
+++ b/spec/helpers/ci/variables_helper_spec.rb
@@ -3,9 +3,71 @@
require 'spec_helper'
RSpec.describe Ci::VariablesHelper, feature_category: :secrets_management do
+ describe '#create_deploy_token_path' do
+ let_it_be(:group) { build_stubbed(:group) }
+ let_it_be(:project) { build_stubbed(:project) }
+
+ it 'returns the project deploy token path' do
+ expect(helper.create_deploy_token_path(project)).to eq(
+ create_deploy_token_project_settings_repository_path(project, {})
+ )
+ end
+
+ it 'returns the group deploy token path' do
+ expect(helper.create_deploy_token_path(group)).to eq(
+ create_deploy_token_group_settings_repository_path(group, {})
+ )
+ end
+ end
+
+ describe '#ci_variable_protected?' do
+ let(:variable) { build_stubbed(:ci_variable, key: 'test_key', value: 'test_value', protected: true) }
+
+ context 'when variable is provided and only_key_value is false' do
+ it 'returns true' do
+ expect(helper.ci_variable_protected?(variable, false)).to eq(true)
+ end
+ end
+
+ context 'when only_key_value is true, regardless of whether variable is provided' do
+ it 'is equal to the value of ci_variable_protected_by_default?' do
+ expect(helper.ci_variable_protected?(nil, true)).to eq(
+ helper.ci_variable_protected_by_default?
+ )
+
+ expect(helper.ci_variable_protected?(variable, true)).to eq(
+ helper.ci_variable_protected_by_default?
+ )
+ end
+ end
+ end
+
+ describe '#ci_variable_masked?' do
+ let(:variable) { build_stubbed(:ci_variable, key: 'test_key', value: 'test_value', masked: true) }
+
+ context 'when variable is provided and only_key_value is false' do
+ it 'returns true' do
+ expect(helper.ci_variable_masked?(variable, false)).to eq(true)
+ end
+ end
+
+ context 'when only_key_value is true, regardless of whether variable is provided' do
+ it 'returns false' do
+ expect(helper.ci_variable_masked?(nil, true)).to eq(false)
+ expect(helper.ci_variable_masked?(variable, true)).to eq(false)
+ end
+ end
+ end
+
describe '#ci_variable_maskable_raw_regex' do
it 'converts to a javascript regex' do
expect(helper.ci_variable_maskable_raw_regex).to eq("^\\S{8,}$")
end
end
+
+ describe '#ci_variable_maskable_regex' do
+ it 'converts to a javascript regex' do
+ expect(helper.ci_variable_maskable_regex).to eq("^[a-zA-Z0-9_+=/@:.~-]{8,}$")
+ end
+ end
end
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 2d06f42dee4..49adba22ebe 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -91,22 +91,22 @@ RSpec.describe CommitsHelper do
let(:node) { Nokogiri::HTML.parse(helper.diff_mode_swap_button(keyword, 'abc')).at_css('a') }
context 'for rendered' do
- it 'renders the correct select-rendered button' do
+ it 'renders the correct select-rendered button', :aggregate_failures do
expect(node[:title]).to eq('Display rendered diff')
expect(node['data-file-hash']).to eq('abc')
expect(node['data-diff-toggle-entity']).to eq('renderedButton')
- expect(node.xpath("//a/svg")[0]["data-testid"]).to eq('doc-text-icon')
+ expect(node.xpath("//a/span/svg")[0]["data-testid"]).to eq('doc-text-icon')
end
end
context 'for raw' do
let(:keyword) { 'raw' }
- it 'renders the correct select-raw button' do
+ it 'renders the correct select-raw button', :aggregate_failures do
expect(node[:title]).to eq('Display raw diff')
expect(node['data-file-hash']).to eq('abc')
expect(node['data-diff-toggle-entity']).to eq('rawButton')
- expect(node.xpath("//a/svg")[0]["data-testid"]).to eq('doc-code-icon')
+ expect(node.xpath("//a/span/svg")[0]["data-testid"]).to eq('doc-code-icon')
end
end
end
@@ -357,4 +357,46 @@ RSpec.describe CommitsHelper do
it { is_expected.to eq(expected_path) }
end
+
+ describe '#local_committed_date' do
+ let(:commit) { build(:commit, committed_date: time) }
+ let(:user) { build(:user) }
+ let(:time) { Time.find_zone('UTC').parse('2023-01-01') }
+
+ subject { helper.local_committed_date(commit, user).to_s }
+
+ it { is_expected.to eq('2023-01-01') }
+
+ context 'when user has a custom timezone' do
+ let(:user) { build(:user, timezone: 'America/Mexico_City') }
+
+ it 'selects timezone of the user' do
+ is_expected.to eq('2022-12-31')
+ end
+ end
+
+ context "when user doesn't have a preferred timezone" do
+ let(:user) { build(:user, timezone: nil) }
+
+ it 'uses system timezone' do
+ is_expected.to eq('2023-01-01')
+ end
+ end
+
+ context 'when user timezone is not supported' do
+ let(:user) { build(:user, timezone: 'unknown') }
+
+ it 'uses system timezone' do
+ is_expected.to eq('2023-01-01')
+ end
+ end
+
+ context 'when user is missing' do
+ let(:user) { nil }
+
+ it 'uses system timezone' do
+ is_expected.to eq('2023-01-01')
+ end
+ end
+ end
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index b69d6022e70..c0c729f2b67 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -41,8 +41,7 @@ RSpec.describe EnvironmentsHelper, feature_category: :environment_management do
'custom_metrics_available' => 'true',
'custom_dashboard_base_path' => Gitlab::Metrics::Dashboard::RepoDashboardFinder::DASHBOARD_ROOT,
'operations_settings_path' => project_settings_operations_path(project),
- 'can_access_operations_settings' => 'true',
- 'panel_preview_endpoint' => project_metrics_dashboards_builder_path(project, format: :json)
+ 'can_access_operations_settings' => 'true'
)
end
diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb
index 39901047b0f..6ffca876361 100644
--- a/spec/helpers/events_helper_spec.rb
+++ b/spec/helpers/events_helper_spec.rb
@@ -2,10 +2,20 @@
require 'spec_helper'
-RSpec.describe EventsHelper do
+# Persisting records is required because Event#target's AR scope.
+# We are trying hard to minimize record creations by:
+# * Using `let_it_be`
+# * Factory defaults via `create_default` + `factory_default: :keep`
+#
+# rubocop:disable RSpec/FactoryBot/AvoidCreate
+RSpec.describe EventsHelper, factory_default: :keep, feature_category: :user_profile do
include Gitlab::Routing
include Banzai::Filter::OutputSafety
+ let_it_be(:project) { create_default(:project).freeze }
+ let_it_be(:project_with_repo) { create(:project, :public, :repository).freeze }
+ let_it_be(:user) { create_default(:user).freeze }
+
describe '#link_to_author' do
let(:user) { create(:user) }
let(:event) { create(:event, author: user) }
@@ -40,9 +50,8 @@ RSpec.describe EventsHelper do
end
context 'when target is not a work item' do
- let(:project) { create(:project) }
- let(:issue) { create(:issue, project: project) }
- let(:event) { create(:event, target: issue, project: project) }
+ let(:issue) { create(:issue) }
+ let(:event) { create(:event, target: issue) }
it { is_expected.to eq([project, issue]) }
end
@@ -51,7 +60,7 @@ RSpec.describe EventsHelper do
describe '#localized_action_name' do
it 'handles all valid design events' do
created, updated, destroyed = %i[created updated destroyed].map do |trait|
- event = build(:design_event, trait)
+ event = build_stubbed(:design_event, trait)
helper.localized_action_name(event)
end
@@ -60,44 +69,46 @@ RSpec.describe EventsHelper do
expect(destroyed).to eq(_('removed'))
end
- context 'handles correct base actions' do
+ describe 'handles correct base actions' do
using RSpec::Parameterized::TableSyntax
- where(:trait, :localized_action_name) do
- :created | s_('Event|created')
- :updated | s_('Event|opened')
- :closed | s_('Event|closed')
- :reopened | s_('Event|opened')
- :commented | s_('Event|commented on')
- :merged | s_('Event|accepted')
- :joined | s_('Event|joined')
- :left | s_('Event|left')
- :destroyed | s_('Event|destroyed')
- :expired | s_('Event|removed due to membership expiration from')
- :approved | s_('Event|approved')
+ where(:trait, :localized_action_key) do
+ :created | 'Event|created'
+ :updated | 'Event|opened'
+ :closed | 'Event|closed'
+ :reopened | 'Event|opened'
+ :commented | 'Event|commented on'
+ :merged | 'Event|accepted'
+ :joined | 'Event|joined'
+ :left | 'Event|left'
+ :destroyed | 'Event|destroyed'
+ :expired | 'Event|removed due to membership expiration from'
+ :approved | 'Event|approved'
end
with_them do
it 'with correct name and method' do
- event = build(:event, trait)
+ Gitlab::I18n.with_locale(:de) do
+ event = build_stubbed(:event, trait)
- expect(helper.localized_action_name(event)).to eq(localized_action_name)
+ expect(helper.localized_action_name(event)).to eq(s_(localized_action_key))
+ end
end
end
end
end
describe '#event_commit_title' do
- let(:message) { 'foo & bar ' + 'A' * 70 + '\n' + 'B' * 80 }
+ let(:message) { "foo & bar #{'A' * 70}\\n#{'B' * 80}" }
subject { helper.event_commit_title(message) }
it 'returns the first line, truncated to 70 chars' do
- is_expected.to eq(message[0..66] + "...")
+ is_expected.to eq("#{message[0..66]}...")
end
it 'is not html-safe' do
- is_expected.not_to be_a(ActiveSupport::SafeBuffer)
+ is_expected.not_to be_html_safe
end
it 'handles empty strings' do
@@ -115,9 +126,8 @@ RSpec.describe EventsHelper do
describe '#event_feed_url' do
let(:event) { create(:event).present }
- let(:project) { create(:project, :public, :repository) }
- context 'issue' do
+ context 'for issue' do
before do
event.target = create(:issue)
end
@@ -131,9 +141,9 @@ RSpec.describe EventsHelper do
end
end
- context 'merge request' do
+ context 'for merge request' do
before do
- event.target = create(:merge_request)
+ event.target = create(:merge_request, source_project: project_with_repo)
end
it 'returns the project merge request url' do
@@ -146,7 +156,7 @@ RSpec.describe EventsHelper do
end
it 'returns project commit url' do
- event.target = create(:note_on_commit, project: project)
+ event.target = create(:note_on_commit, project: project_with_repo)
expect(helper.event_feed_url(event)).to eq(project_commit_url(event.project, event.note_target))
end
@@ -158,7 +168,6 @@ RSpec.describe EventsHelper do
end
it 'returns project url' do
- event.project = project
event.action = 1
expect(helper.event_feed_url(event)).to eq(project_url(event.project))
@@ -173,7 +182,8 @@ RSpec.describe EventsHelper do
it 'returns nil for push event with multiple refs' do
event = create(:push_event)
- create(:push_event_payload, event: event, ref_count: 2, ref: nil, ref_type: :tag, commit_count: 0, action: :pushed)
+ create(:push_event_payload, event: event, ref_count: 2, ref: nil, ref_type: :tag, commit_count: 0,
+ action: :pushed)
expect(helper.event_feed_url(event)).to eq(nil)
end
@@ -229,8 +239,8 @@ RSpec.describe EventsHelper do
end
end
- describe 'event_wiki_page_target_url' do
- let(:project) { create(:project) }
+ describe '#event_wiki_page_target_url' do
+ let_it_be_with_reload(:project) { create(:project) }
let(:wiki_page) { create(:wiki_page, wiki: create(:project_wiki, project: project)) }
let(:event) { create(:wiki_page_event, project: project, wiki_page: wiki_page) }
@@ -240,7 +250,7 @@ RSpec.describe EventsHelper do
expect(helper.event_wiki_page_target_url(event)).to eq(url)
end
- context 'there is no canonical slug' do
+ context 'without canonical slug' do
let(:event) { create(:wiki_page_event, project: project) }
before do
@@ -274,14 +284,13 @@ RSpec.describe EventsHelper do
end
describe '#event_note_target_url' do
- let(:project) { create(:project, :public, :repository) }
- let(:event) { create(:event, project: project) }
+ let_it_be(:event) { create(:event) }
let(:project_base_url) { namespace_project_url(namespace_id: project.namespace, id: project) }
subject { helper.event_note_target_url(event) }
it 'returns a commit note url' do
- event.target = create(:note_on_commit, note: '+1 from me')
+ event.target = create(:note_on_commit, project: project_with_repo, note: '+1 from me')
expect(subject).to eq("#{project_base_url}/-/commit/#{event.target.commit_id}#note_#{event.target.id}")
end
@@ -289,7 +298,8 @@ RSpec.describe EventsHelper do
it 'returns a project snippet note url' do
event.target = create(:note_on_project_snippet, note: 'keep going')
- expect(subject).to eq("#{project_snippet_url(event.note_target.project, event.note_target)}#note_#{event.target.id}")
+ expect(subject).to eq("#{project_snippet_url(event.note_target.project,
+ event.note_target)}#note_#{event.target.id}")
end
it 'returns a personal snippet note url' do
@@ -311,7 +321,7 @@ RSpec.describe EventsHelper do
end
context 'for design note events' do
- let(:event) { create(:event, :for_design, project: project) }
+ let(:event) { create(:event, :for_design) }
it 'returns an appropriate URL' do
iid = event.note_target.issue.iid
@@ -326,54 +336,62 @@ RSpec.describe EventsHelper do
describe '#event_filter_visible' do
include DesignManagementTestHelpers
- let_it_be(:project) { create(:project) }
- let_it_be(:current_user) { create(:user) }
-
subject { helper.event_filter_visible(key) }
before do
enable_design_management
- project.add_reporter(current_user)
- allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:current_user).and_return(user)
end
- def disable_read_design_activity(object)
+ def can_read_design_activity(object, ability)
allow(Ability).to receive(:allowed?)
- .with(current_user, :read_design_activity, eq(object))
- .and_return(false)
+ .with(user, :read_design_activity, eq(object))
+ .and_return(ability)
end
context 'for :designs' do
let(:key) { :designs }
- context 'there is no relevant instance variable' do
+ context 'without relevant instance variable' do
it { is_expected.to be(true) }
end
- context 'a project has been assigned' do
+ context 'with assigned project' do
before do
assign(:project, project)
end
- it { is_expected.to be(true) }
+ context 'with permission' do
+ before do
+ can_read_design_activity(project, true)
+ end
+
+ it { is_expected.to be(true) }
+ end
- context 'the current user cannot read design activity' do
+ context 'without permission' do
before do
- disable_read_design_activity(project)
+ can_read_design_activity(project, false)
end
it { is_expected.to be(false) }
end
end
- context 'projects have been assigned' do
+ context 'with projects assigned' do
before do
- assign(:projects, Project.where(id: project.id))
+ assign(:projects, Project.id_in(project))
end
- it { is_expected.to be(true) }
+ context 'with permission' do
+ before do
+ can_read_design_activity(project, true)
+ end
+
+ it { is_expected.to be(true) }
+ end
- context 'the collection is empty' do
+ context 'with empty collection' do
before do
assign(:projects, Project.none)
end
@@ -381,36 +399,40 @@ RSpec.describe EventsHelper do
it { is_expected.to be(false) }
end
- context 'the current user cannot read design activity' do
+ context 'without permission' do
before do
- disable_read_design_activity(project)
+ can_read_design_activity(project, false)
end
it { is_expected.to be(false) }
end
end
- context 'a group has been assigned' do
+ context 'with group assigned' do
let_it_be(:group) { create(:group) }
before do
assign(:group, group)
end
- context 'there are no projects in the group' do
+ context 'without projects in the group' do
it { is_expected.to be(false) }
end
- context 'the group has at least one project' do
- before do
- create(:project_group_link, project: project, group: group)
- end
+ context 'with at least one project in the group' do
+ let_it_be(:group_link) { create(:project_group_link, group: group) }
- it { is_expected.to be(true) }
+ context 'with permission' do
+ before do
+ can_read_design_activity(group, true)
+ end
+
+ it { is_expected.to be(true) }
+ end
- context 'the current user cannot read design activity' do
+ context 'without permission' do
before do
- disable_read_design_activity(group)
+ can_read_design_activity(group, false)
end
it { is_expected.to be(false) }
@@ -420,3 +442,4 @@ RSpec.describe EventsHelper do
end
end
end
+# rubocop:enable RSpec/FactoryBot/AvoidCreate
diff --git a/spec/helpers/integrations_helper_spec.rb b/spec/helpers/integrations_helper_spec.rb
index f481611b2a2..6c5a489e664 100644
--- a/spec/helpers/integrations_helper_spec.rb
+++ b/spec/helpers/integrations_helper_spec.rb
@@ -271,6 +271,7 @@ RSpec.describe IntegrationsHelper, feature_category: :integrations do
"test_case" | _('Test case')
"requirement" | _('Requirement')
"task" | _('Task')
+ "ticket" | _('Service Desk Ticket')
end
with_them do
@@ -285,7 +286,7 @@ RSpec.describe IntegrationsHelper, feature_category: :integrations do
end
it "only consider these enumeration values are valid" do
- expected_valid_types = %w[issue incident test_case requirement task objective key_result]
+ expected_valid_types = %w[issue incident test_case requirement task objective key_result epic ticket]
expect(WorkItems::Type.base_types.keys).to contain_exactly(*expected_valid_types)
end
end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index a2b8ee061bb..7b5537c54cc 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -524,6 +524,64 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
end
+
+ describe '#duplicatedToIssueUrl' do
+ let(:issue) { create(:issue, author: user) }
+
+ before do
+ assign(:project, issue.project)
+ end
+
+ context 'when issue is duplicated' do
+ before do
+ allow(issue).to receive(:duplicated?).and_return(true)
+ allow(issue).to receive(:duplicated_to).and_return(issue)
+ end
+
+ it 'returns url' do
+ expect(helper.issuable_initial_data(issue)[:duplicatedToIssueUrl]).to be_truthy
+ end
+ end
+
+ context 'when issue is not duplicated' do
+ before do
+ allow(issue).to receive(:duplicated?).and_return(false)
+ end
+
+ it 'returns nil' do
+ expect(helper.issuable_initial_data(issue)[:duplicatedToIssueUrl]).to be_nil
+ end
+ end
+ end
+
+ describe '#movedToIssueUrl' do
+ let(:issue) { create(:issue, author: user) }
+
+ before do
+ assign(:project, issue.project)
+ end
+
+ context 'when issue is moved' do
+ before do
+ allow(issue).to receive(:moved?).and_return(true)
+ allow(issue).to receive(:moved_to).and_return(issue)
+ end
+
+ it 'returns url' do
+ expect(helper.issuable_initial_data(issue)[:movedToIssueUrl]).to be_truthy
+ end
+ end
+
+ context 'when issue is not moved' do
+ before do
+ allow(issue).to receive(:moved?).and_return(false)
+ end
+
+ it 'returns nil' do
+ expect(helper.issuable_initial_data(issue)[:movedToIssueUrl]).to be_nil
+ end
+ end
+ end
end
describe '#assignee_sidebar_data' do
@@ -674,30 +732,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
- describe '#hidden_issuable_icon', feature_category: :insider_threat do
- let_it_be(:mock_svg) { '<svg></svg>'.html_safe }
-
- before do
- allow(helper).to receive(:sprite_icon).and_return(mock_svg)
- end
-
- context 'when issuable is an issue' do
- let_it_be(:issuable) { build(:issue) }
-
- it 'returns icon with tooltip' do
- expect(helper.hidden_issuable_icon(issuable)).to eq("<span class=\"has-tooltip\" title=\"This issue is hidden because its author has been banned\">#{mock_svg}</span>")
- end
- end
-
- context 'when issuable is a merge request' do
- let_it_be(:issuable) { build(:merge_request) }
-
- it 'returns icon with tooltip' do
- expect(helper.hidden_issuable_icon(issuable)).to eq("<span class=\"has-tooltip\" title=\"This merge request is hidden because its author has been banned\">#{mock_svg}</span>")
- end
- end
- end
-
describe '#issuable_type_selector_data' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index ba323140720..0cde9aeac8d 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -486,23 +486,26 @@ RSpec.describe IssuesHelper do
end
describe '#hidden_issue_icon' do
- let_it_be(:banned_user) { build(:user, :banned) }
- let_it_be(:hidden_issue) { build(:issue, author: banned_user) }
let_it_be(:mock_svg) { '<svg></svg>'.html_safe }
before do
- allow(helper).to receive(:sprite_icon).and_return(mock_svg)
+ allow(helper).to receive(:hidden_resource_icon).with(resource).and_return(mock_svg)
end
context 'when issue is hidden' do
+ let_it_be(:banned_user) { build(:user, :banned) }
+ let_it_be(:resource) { build(:issue, author: banned_user) }
+
it 'returns icon with tooltip' do
- expect(helper.hidden_issue_icon(hidden_issue)).to eq("<span class=\"has-tooltip\" title=\"This issue is hidden because its author has been banned\">#{mock_svg}</span>")
+ expect(helper.hidden_issue_icon(resource)).to eq(mock_svg)
end
end
context 'when issue is not hidden' do
+ let_it_be(:resource) { issue }
+
it 'returns `nil`' do
- expect(helper.hidden_issue_icon(issue)).to be_nil
+ expect(helper.hidden_issue_icon(resource)).to be_nil
end
end
end
diff --git a/spec/helpers/labels_helper_spec.rb b/spec/helpers/labels_helper_spec.rb
index b4549630813..4877ab1ff03 100644
--- a/spec/helpers/labels_helper_spec.rb
+++ b/spec/helpers/labels_helper_spec.rb
@@ -275,9 +275,18 @@ RSpec.describe LabelsHelper do
let(:html) { '<img src="example.png">This is an image</img>' }
let(:label_with_html_content) { create(:label, title: 'test', description: html) }
- it 'removes HTML' do
- tooltip = label_tooltip_title(label_with_html_content)
- expect(tooltip).to eq('This is an image')
+ context 'tooltip shows description' do
+ it 'removes HTML' do
+ tooltip = label_tooltip_title(label_with_html_content)
+ expect(tooltip).to eq('This is an image')
+ end
+ end
+
+ context 'tooltip shows title' do
+ it 'shows title' do
+ tooltip = label_tooltip_title(label_with_html_content, tooltip_shows_title: true)
+ expect(tooltip).to eq('test')
+ end
end
end
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 562d6683d97..22d1113ee8c 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MarkupHelper do
+RSpec.describe MarkupHelper, feature_category: :team_planning do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) do
user = create(:user, username: 'gfm')
@@ -461,7 +461,7 @@ RSpec.describe MarkupHelper do
it 'displays the first line of a code block' do
object = create_object("```\nCode block\nwith two lines\n```")
- expected = %r{<pre.+><code><span class="line">Code block\.\.\.</span>\n</code></pre>}
+ expected = %r{<pre.+><code><span class="line">Code block\.\.\.</span></code></pre>}
expect(helper.first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)).to match(expected)
end
@@ -477,7 +477,7 @@ RSpec.describe MarkupHelper do
it 'preserves code color scheme' do
object = create_object("```ruby\ndef test\n 'hello world'\nend\n```")
expected = "\n<pre class=\"code highlight js-syntax-highlight language-ruby\">" \
- "<code><span class=\"line\"><span class=\"k\">def</span> <span class=\"nf\">test</span>...</span>\n" \
+ "<code><span class=\"line\"><span class=\"k\">def</span> <span class=\"nf\">test</span>...</span>" \
"</code></pre>\n"
expect(helper.first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)).to eq(expected)
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 4a02b184522..4b83561b265 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -169,15 +169,39 @@ RSpec.describe NavHelper, feature_category: :navigation do
end
end
- context 'when nil is provided' do
- specify { expect(helper.show_super_sidebar?(nil)).to eq false }
+ shared_examples 'anonymous show_super_sidebar is supposed to' do
+ before do
+ stub_feature_flags(super_sidebar_logged_out: feature_flag)
+ end
+
+ context 'when super_sidebar_logged_out feature flag is disabled' do
+ let(:feature_flag) { false }
+
+ specify { expect(subject).to eq false }
+ end
+
+ context 'when super_sidebar_logged_out feature flag is enabled' do
+ let(:feature_flag) { true }
+
+ specify { expect(subject).to eq true }
+ end
end
- context 'when no user is signed-in' do
- specify do
- allow(helper).to receive(:current_user).and_return(nil)
+ context 'without a user' do
+ context 'with current_user (nil) as a default' do
+ before do
+ allow(helper).to receive(:current_user).and_return(nil)
+ end
+
+ subject { helper.show_super_sidebar? }
+
+ it_behaves_like 'anonymous show_super_sidebar is supposed to'
+ end
+
+ context 'with nil provided as an argument' do
+ subject { helper.show_super_sidebar?(nil) }
- expect(helper.show_super_sidebar?).to eq false
+ it_behaves_like 'anonymous show_super_sidebar is supposed to'
end
end
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index 91635ffcdc0..62c0d1b1ff7 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -331,7 +331,9 @@ RSpec.describe NotesHelper, feature_category: :team_planning do
end
describe '#notes_data' do
- let(:issue) { create(:issue, project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ let(:notes_data) { helper.notes_data(issue) }
before do
@project = project
@@ -343,7 +345,14 @@ RSpec.describe NotesHelper, feature_category: :team_planning do
it 'includes the current notes filter for the user' do
guest.set_notes_filter(UserPreference::NOTES_FILTERS[:only_comments], issue)
- expect(helper.notes_data(issue)[:notesFilter]).to eq(UserPreference::NOTES_FILTERS[:only_comments])
+ expect(notes_data[:notesFilter]).to eq(UserPreference::NOTES_FILTERS[:only_comments])
+ end
+
+ it 'includes info about the noteable', :aggregate_failures do
+ expect(notes_data[:noteableType]).to eq('issue')
+ expect(notes_data[:noteableId]).to eq(issue.id)
+ expect(notes_data[:projectId]).to eq(project.id)
+ expect(notes_data[:groupId]).to be_nil
end
end
end
diff --git a/spec/helpers/profiles_helper_spec.rb b/spec/helpers/profiles_helper_spec.rb
index 4c43b1ec4cf..15ca5f61b51 100644
--- a/spec/helpers/profiles_helper_spec.rb
+++ b/spec/helpers/profiles_helper_spec.rb
@@ -124,6 +124,41 @@ RSpec.describe ProfilesHelper do
end
end
+ describe '#user_profile_data' do
+ let(:time) { 3.hours.ago }
+ let(:user) do
+ build_stubbed(:user, status: UserStatus.new(
+ message: 'Some message',
+ emoji: 'basketball',
+ availability: 'busy',
+ clear_status_at: time
+ ))
+ end
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ it 'returns user profile data' do
+ data = helper.user_profile_data(user)
+
+ expect(data[:profile_path]).to be_a(String)
+ expect(data[:profile_avatar_path]).to be_a(String)
+ expect(data[:avatar_url]).to be_http_url
+ expect(data[:has_avatar]).to be_a(String)
+ expect(data[:gravatar_enabled]).to be_a(String)
+ expect(Gitlab::Json.parse(data[:gravatar_link])).to match(hash_including('hostname' => Gitlab.config.gravatar.host, 'url' => a_valid_url))
+ expect(data[:brand_profile_image_guidelines]).to be_a(String)
+ expect(data[:cropper_css_path]).to eq(ActionController::Base.helpers.stylesheet_path('lazy_bundles/cropper.css'))
+ expect(data[:user_path]).to be_a(String)
+ expect(data[:current_emoji]).to eq('basketball')
+ expect(data[:current_message]).to eq('Some message')
+ expect(data[:current_availability]).to eq('busy')
+ expect(data[:current_clear_status_after]).to eq(time.to_fs(:iso8601))
+ expect(data[:default_emoji]).to eq(UserStatus::DEFAULT_EMOJI)
+ end
+ end
+
def stub_auth0_omniauth_provider
provider = OpenStruct.new(
'name' => example_omniauth_provider,
diff --git a/spec/helpers/projects/observability_helper_spec.rb b/spec/helpers/projects/observability_helper_spec.rb
index 65b6ddf04ec..0f47cdb8be2 100644
--- a/spec/helpers/projects/observability_helper_spec.rb
+++ b/spec/helpers/projects/observability_helper_spec.rb
@@ -4,10 +4,12 @@ require 'spec_helper'
require 'json'
RSpec.describe Projects::ObservabilityHelper, type: :helper, feature_category: :tracing do
- describe '#observability_tracing_view_model' do
- let_it_be(:group) { build_stubbed(:group) }
- let_it_be(:project) { build_stubbed(:project, group: group) }
+ include Gitlab::Routing.url_helpers
+
+ let_it_be(:group) { build_stubbed(:group) }
+ let_it_be(:project) { build_stubbed(:project, group: group) }
+ describe '#observability_tracing_view_model' do
it 'generates the correct JSON' do
expected_json = {
tracingUrl: Gitlab::Observability.tracing_url(project),
@@ -18,4 +20,18 @@ RSpec.describe Projects::ObservabilityHelper, type: :helper, feature_category: :
expect(helper.observability_tracing_view_model(project)).to eq(expected_json)
end
end
+
+ describe '#observability_tracing_details_model' do
+ it 'generates the correct JSON' do
+ expected_json = {
+ tracingIndexUrl: namespace_project_tracing_index_path(project.group, project),
+ traceId: "trace-id",
+ tracingUrl: Gitlab::Observability.tracing_url(project),
+ provisioningUrl: Gitlab::Observability.provisioning_url(project),
+ oauthUrl: Gitlab::Observability.oauth_url
+ }.to_json
+
+ expect(helper.observability_tracing_details_model(project, "trace-id")).to eq(expected_json)
+ end
+ end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 768038d8736..aa064a26ec4 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -993,6 +993,7 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
expect(settings).to include(
packagesEnabled: !!project.packages_enabled,
+ packageRegistryAllowAnyoneToPullOption: ::Gitlab::CurrentSettings.package_registry_allow_anyone_to_pull_option,
visibilityLevel: project.visibility_level,
requestAccessEnabled: !!project.request_access_enabled,
issuesAccessLevel: project.project_feature.issues_access_level,
@@ -1006,7 +1007,7 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
analyticsAccessLevel: project.project_feature.analytics_access_level,
containerRegistryEnabled: !!project.container_registry_enabled,
lfsEnabled: !!project.lfs_enabled,
- emailsDisabled: project.emails_disabled?,
+ emailsEnabled: project.emails_enabled?,
showDefaultAwardEmojis: project.show_default_award_emojis?,
securityAndComplianceAccessLevel: project.security_and_compliance_access_level,
containerRegistryAccessLevel: project.project_feature.container_registry_access_level,
@@ -1129,16 +1130,39 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
end
end
- describe '#fork_button_disabled_tooltip' do
+ describe '#fork_button_data_attributes' do
using RSpec::Parameterized::TableSyntax
- subject { helper.fork_button_disabled_tooltip(project) }
+ let_it_be(:project) { create(:project, :repository, :public) }
- where(:has_user, :can_fork_project, :can_create_fork, :expected) do
- false | false | false | nil
- true | true | true | nil
- true | false | true | 'You don\'t have permission to fork this project'
- true | true | false | 'You have reached your project limit'
+ project_path = '/project/path'
+ project_forks_path = '/project/forks'
+ project_new_fork_path = '/project/new/fork'
+ user_fork_url = '/user/fork'
+
+ common_data_attributes = {
+ forks_count: 4,
+ project_full_path: project_path,
+ project_forks_url: project_forks_path,
+ can_create_fork: "true",
+ can_fork_project: "true",
+ can_read_code: "true",
+ new_fork_url: project_new_fork_path
+ }
+
+ data_attributes_with_user_fork_url = common_data_attributes.merge({ user_fork_url: user_fork_url })
+ data_attributes_without_user_fork_url = common_data_attributes.merge({ user_fork_url: nil })
+
+ subject { helper.fork_button_data_attributes(project) }
+
+ # The stubs for the forkable namespaces seem not to make sense (they're just numbers),
+ # but they're set up that way because we don't really care about what the array contains, only about its length
+ where(:has_user, :project_already_forked, :forkable_namespaces, :expected) do
+ false | false | [] | nil
+ true | false | [0] | data_attributes_without_user_fork_url
+ true | false | [0, 1] | data_attributes_without_user_fork_url
+ true | true | [0] | data_attributes_with_user_fork_url
+ true | true | [0, 1] | data_attributes_without_user_fork_url
end
with_them do
@@ -1146,13 +1170,22 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
current_user = user if has_user
allow(helper).to receive(:current_user).and_return(current_user)
- allow(user).to receive(:can?).with(:fork_project, project).and_return(can_fork_project)
- allow(user).to receive(:can?).with(:create_fork).and_return(can_create_fork)
- end
+ allow(user).to receive(:can?).with(:fork_project, project).and_return(true)
+ allow(user).to receive(:can?).with(:create_fork).and_return(true)
+ allow(user).to receive(:can?).with(:create_projects, anything).and_return(true)
+ allow(user).to receive(:already_forked?).with(project).and_return(project_already_forked)
+ allow(user).to receive(:forkable_namespaces).and_return(forkable_namespaces)
- it 'returns tooltip text when user lacks privilege' do
- expect(subject).to eq(expected)
+ allow(project).to receive(:forks_count).and_return(4)
+ allow(project).to receive(:full_path).and_return(project_path)
+
+ user_fork_path = user_fork_url if project_already_forked
+ allow(helper).to receive(:namespace_project_path).with(user, anything).and_return(user_fork_path)
+ allow(helper).to receive(:new_project_fork_path).with(project).and_return(project_new_fork_path)
+ allow(helper).to receive(:project_forks_path).with(project).and_return(project_forks_path)
end
+
+ it { is_expected.to eq(expected) }
end
end
@@ -1614,4 +1647,62 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
it { is_expected.to eq(project_settings_repository_path(project, anchor: 'js-branch-rules')) }
end
+
+ describe '#visibility_level_content' do
+ shared_examples 'returns visibility level content_tag' do
+ let(:icon) { '<svg>fake visib level icon</svg>'.html_safe }
+ let(:description) { 'Fake visib desc' }
+
+ before do
+ allow(helper).to receive(:visibility_icon_description).and_return(description)
+ allow(helper).to receive(:visibility_level_icon).and_return(icon)
+ end
+
+ it 'returns visibility level content_tag' do
+ expected_result = "<span class=\"has-tooltip\" data-container=\"body\" data-placement=\"top\" title=\"#{description}\">#{icon}</span>"
+ expect(helper.visibility_level_content(project)).to eq(expected_result)
+ end
+
+ it 'returns visibility level content_tag with extra CSS classes' do
+ expected_result = "<span class=\"has-tooltip extra-class\" data-container=\"body\" data-placement=\"top\" title=\"#{description}\">#{icon}</span>"
+
+ expect(helper).to receive(:visibility_level_icon)
+ .with(anything, options: { class: 'extra-icon-class' })
+ .and_return(icon)
+ result = helper.visibility_level_content(project, css_class: 'extra-class', icon_css_class: 'extra-icon-class')
+ expect(result).to eq(expected_result)
+ end
+ end
+
+ it_behaves_like 'returns visibility level content_tag'
+
+ context 'when project creator is banned' do
+ let(:hidden_resource_icon) { '<svg>fake hidden resource icon</svg>' }
+
+ before do
+ allow(project).to receive(:created_and_owned_by_banned_user?).and_return(true)
+ allow(helper).to receive(:hidden_resource_icon).and_return(hidden_resource_icon)
+ end
+
+ it 'returns hidden resource icon' do
+ expect(helper.visibility_level_content(project)).to eq hidden_resource_icon
+ end
+ end
+
+ context 'with hide_projects_of_banned_users feature flag disabled' do
+ before do
+ stub_feature_flags(hide_projects_of_banned_users: false)
+ end
+
+ it_behaves_like 'returns visibility level content_tag'
+
+ context 'when project creator is banned' do
+ before do
+ allow(project).to receive(:created_and_owned_by_banned_user?).and_return(true)
+ end
+
+ it_behaves_like 'returns visibility level content_tag'
+ end
+ end
+ end
end
diff --git a/spec/helpers/sessions_helper_spec.rb b/spec/helpers/sessions_helper_spec.rb
index 5a46a20ce1a..366032100de 100644
--- a/spec/helpers/sessions_helper_spec.rb
+++ b/spec/helpers/sessions_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe SessionsHelper do
+RSpec.describe SessionsHelper, feature_category: :system_access do
describe '#recently_confirmed_com?' do
subject { helper.recently_confirmed_com? }
@@ -51,28 +51,66 @@ RSpec.describe SessionsHelper do
end
end
- describe '#send_rate_limited?' do
+ describe '#unconfirmed_verification_email?', :freeze_time do
+ using RSpec::Parameterized::TableSyntax
+
let(:user) { build_stubbed(:user) }
+ let(:token_valid_for) { ::Users::EmailVerification::ValidateTokenService::TOKEN_VALID_FOR_MINUTES }
+
+ subject { helper.unconfirmed_verification_email?(user) }
+
+ where(:reset_first_offer?, :unconfirmed_email_present?, :token_valid?, :result) do
+ true | true | true | true
+ false | true | true | false
+ true | false | true | false
+ true | true | false | false
+ end
+
+ with_them do
+ before do
+ user.email_reset_offered_at = 1.minute.ago unless reset_first_offer?
+ user.unconfirmed_email = 'unconfirmed@email' if unconfirmed_email_present?
+ user.confirmation_sent_at = (token_valid? ? token_valid_for - 1 : token_valid_for + 1).minutes.ago
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '#verification_email' do
+ let(:unconfirmed_email) { 'unconfirmed@email' }
+ let(:user) { build_stubbed(:user, unconfirmed_email: unconfirmed_email) }
+
+ subject { helper.verification_email(user) }
- subject { helper.send_rate_limited?(user) }
+ context 'when there is an unconfirmed verification email' do
+ before do
+ allow(helper).to receive(:unconfirmed_verification_email?).and_return(true)
+ end
- before do
- allow(::Gitlab::ApplicationRateLimiter)
- .to receive(:peek)
- .with(:email_verification_code_send, scope: user)
- .and_return(rate_limited)
+ it { is_expected.to eq(unconfirmed_email) }
end
- context 'when rate limited' do
- let(:rate_limited) { true }
+ context 'when there is no unconfirmed verification email' do
+ before do
+ allow(helper).to receive(:unconfirmed_verification_email?).and_return(false)
+ end
- it { is_expected.to eq(true) }
+ it { is_expected.to eq(user.email) }
end
+ end
- context 'when not rate limited' do
- let(:rate_limited) { false }
+ describe '#verification_data' do
+ let(:user) { build_stubbed(:user) }
- it { is_expected.to eq(false) }
+ it 'returns the expected data' do
+ expect(helper.verification_data(user)).to eq({
+ obfuscated_email: obfuscated_email(user.email),
+ verify_path: helper.session_path(:user),
+ resend_path: users_resend_verification_code_path,
+ offer_email_reset: user.email_reset_offered_at.nil?.to_s,
+ update_email_path: users_update_email_path
+ })
end
end
diff --git a/spec/helpers/sidebars_helper_spec.rb b/spec/helpers/sidebars_helper_spec.rb
index 8d8bbcd2737..4109eb01caa 100644
--- a/spec/helpers/sidebars_helper_spec.rb
+++ b/spec/helpers/sidebars_helper_spec.rb
@@ -91,15 +91,21 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
allow(user).to receive(:pinned_nav_items).and_return({ panel_type => %w[foo bar], 'another_panel' => %w[baz] })
end
+ # Tests for logged-out sidebar context
+ it_behaves_like 'logged-out super-sidebar context'
+
+ # Tests for logged-in sidebar context below
+ it_behaves_like 'shared super sidebar context'
+ it { is_expected.to include({ is_logged_in: true }) }
+
it 'returns sidebar values from user', :use_clean_rails_memory_store_caching do
expect(subject).to include({
- current_context_header: nil,
- current_menu_items: nil,
+ is_logged_in: true,
name: user.name,
username: user.username,
avatar_url: user.avatar_url,
has_link_to_profile: helper.current_user_menu?(:profile),
- link_to_profile: user_url(user),
+ link_to_profile: user_path(user),
status: {
can_update: helper.can?(user, :update_user_status, user),
busy: user.status&.busy?,
@@ -128,26 +134,11 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
todos_dashboard_path: dashboard_todos_path,
projects_path: dashboard_projects_path,
groups_path: dashboard_groups_path,
- support_path: helper.support_url,
- display_whats_new: helper.display_whats_new?,
- whats_new_most_recent_release_items_count: helper.whats_new_most_recent_release_items_count,
- whats_new_version_digest: helper.whats_new_version_digest,
- show_version_check: helper.show_version_check?,
- gitlab_version: Gitlab.version_info,
- gitlab_version_check: helper.gitlab_version_check,
gitlab_com_but_not_canary: Gitlab.com_but_not_canary?,
gitlab_com_and_canary: Gitlab.com_and_canary?,
canary_toggle_com_url: Gitlab::Saas.canary_toggle_com_url,
- search: {
- search_path: search_path,
- issues_path: issues_dashboard_path,
- mr_path: merge_requests_dashboard_path,
- autocomplete_path: search_autocomplete_path,
- search_context: helper.header_search_context
- },
pinned_items: %w[foo bar],
- panel_type: panel_type,
- update_pins_url: pins_url,
+ update_pins_url: pins_path,
shortcut_links: [
{
title: _('Milestones'),
@@ -383,11 +374,17 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
describe 'context switcher persistent links' do
let_it_be(:public_link) do
[
- { title: s_('Navigation|Your work'), link: '/', icon: 'work' },
{ title: s_('Navigation|Explore'), link: '/explore', icon: 'compass' }
]
end
+ let_it_be(:public_links_for_user) do
+ [
+ { title: s_('Navigation|Your work'), link: '/', icon: 'work' },
+ *public_link
+ ]
+ end
+
let_it_be(:admin_area_link) do
{ title: s_('Navigation|Admin Area'), link: '/admin', icon: 'admin' }
end
@@ -405,12 +402,20 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
helper.super_sidebar_context(user, group: nil, project: nil, panel: panel, panel_type: panel_type)
end
- context 'when user is not an admin' do
- it 'returns only the public links' do
+ context 'when user is not logged in' do
+ let(:user) { nil }
+
+ it 'returns only the public links for an anonymous user' do
expect(subject[:context_switcher_links]).to eq(public_link)
end
end
+ context 'when user is not an admin' do
+ it 'returns only the public links for a user' do
+ expect(subject[:context_switcher_links]).to eq(public_links_for_user)
+ end
+ end
+
context 'when user is an admin' do
before do
allow(user).to receive(:admin?).and_return(true)
@@ -429,7 +434,7 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
it 'returns public links, admin area and leave admin mode links' do
expect(subject[:context_switcher_links]).to eq([
- *public_link,
+ *public_links_for_user,
admin_area_link,
leave_admin_mode_link
])
@@ -439,7 +444,7 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
context 'when admin mode is off' do
it 'returns public links and enter admin mode link' do
expect(subject[:context_switcher_links]).to eq([
- *public_link,
+ *public_links_for_user,
enter_admin_mode_link
])
end
@@ -453,7 +458,7 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
it 'returns public links and admin area link' do
expect(subject[:context_switcher_links]).to eq([
- *public_link,
+ *public_links_for_user,
admin_area_link
])
end
@@ -471,8 +476,11 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
end
describe 'when impersonating' do
+ before do
+ session[:impersonator_id] = 5
+ end
+
it 'sets is_impersonating to `true`' do
- expect(helper).to receive(:session).and_return({ impersonator_id: 1 })
expect(subject[:is_impersonating]).to be(true)
end
end
diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb
index 43e663464c8..1e899396de4 100644
--- a/spec/helpers/snippets_helper_spec.rb
+++ b/spec/helpers/snippets_helper_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe SnippetsHelper do
end
def download_link(url)
- "<a class=\"gl-button btn btn-default\" target=\"_blank\" rel=\"noopener noreferrer\" title=\"Open raw\" href=\"#{url}\">#{external_snippet_icon('doc-code')}</a>"
+ "<a rel=\"noopener noreferrer\" title=\"Open raw\" class=\"gl-button btn btn-md btn-default \" target=\"_blank\" href=\"#{url}\"><span class=\"gl-button-text\">\n#{external_snippet_icon('doc-code')}\n</span>\n\n</a>"
end
end
@@ -60,7 +60,7 @@ RSpec.describe SnippetsHelper do
end
def download_link(url)
- "<a class=\"gl-button btn btn-default\" target=\"_blank\" title=\"Download\" rel=\"noopener noreferrer\" href=\"#{url}?inline=false\">#{external_snippet_icon('download')}</a>"
+ "<a rel=\"noopener noreferrer\" title=\"Download\" class=\"gl-button btn btn-md btn-default \" target=\"_blank\" href=\"#{url}?inline=false\"><span class=\"gl-button-text\">\n#{external_snippet_icon('download')}\n</span>\n\n</a>"
end
end
@@ -102,7 +102,7 @@ RSpec.describe SnippetsHelper do
end
def copy_button(blob_id)
- "<button class=\"gl-button btn btn-default copy-to-clipboard-btn\" title=\"Copy snippet contents\" onclick=\"copyToClipboard(&#39;.blob-content[data-blob-id=&quot;#{blob_id}&quot;] &gt; pre&#39;)\">#{external_snippet_icon('copy-to-clipboard')}</button>"
+ "<button title=\"Copy snippet contents\" onclick=\"copyToClipboard(&#39;.blob-content[data-blob-id=&quot;#{blob_id}&quot;] &gt; pre&#39;)\" type=\"button\" class=\"gl-button btn btn-md btn-default \"><span class=\"gl-button-text\">\n#{external_snippet_icon('copy-to-clipboard')}\n</span>\n\n</button>"
end
end
diff --git a/spec/helpers/time_helper_spec.rb b/spec/helpers/time_helper_spec.rb
index 3e406f5e74e..02e28b2ba05 100644
--- a/spec/helpers/time_helper_spec.rb
+++ b/spec/helpers/time_helper_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe TimeHelper do
100.32 => "1 minute and 40 seconds",
120 => "2 minutes",
121 => "2 minutes and 1 second",
- 3721 => "62 minutes and 1 second",
+ 3721 => "1 hour, 2 minutes, and 1 second",
0 => "0 seconds"
}
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index 9cbcca69dc8..dfb5cb995bc 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -370,6 +370,7 @@ RSpec.describe TodosHelper do
Todo::APPROVAL_REQUIRED | false | format(s_("Todos|set %{who} as an approver"), who: _('you'))
Todo::UNMERGEABLE | true | s_('Todos|Could not merge')
Todo::MERGE_TRAIN_REMOVED | true | s_("Todos|Removed from Merge Train")
+ Todo::REVIEW_SUBMITTED | false | s_('Todos|reviewed your merge request')
end
with_them do
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index 1ca5b8eb954..c94844eebbc 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -21,12 +21,14 @@ RSpec.describe TreeHelper do
describe '#vue_file_list_data' do
it 'returns a list of attributes related to the project' do
+ helper.instance_variable_set(:@ref_type, 'heads')
expect(helper.vue_file_list_data(project, sha)).to include(
project_path: project.full_path,
project_short_path: project.path,
ref: sha,
escaped_ref: sha,
- full_name: project.name_with_namespace
+ full_name: project.name_with_namespace,
+ ref_type: 'heads'
)
end
end
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index c0d3c31a36d..ad8aef276bb 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -150,6 +150,76 @@ RSpec.describe UsersHelper do
end
end
+ describe '#can_impersonate_user' do
+ let(:user) { create(:user) }
+ let(:impersonation_in_progress) { false }
+
+ subject { helper.can_impersonate_user(user, impersonation_in_progress) }
+
+ context 'when password is expired' do
+ let(:user) { create(:user, password_expires_at: 1.minute.ago) }
+
+ it { is_expected.to be false }
+ end
+
+ context 'when impersonation is in progress' do
+ let(:impersonation_in_progress) { true }
+
+ it { is_expected.to be false }
+ end
+
+ context 'when user is blocked' do
+ let(:user) { create(:user, :blocked) }
+
+ it { is_expected.to be false }
+ end
+
+ context 'when user is internal' do
+ let(:user) { create(:user, :bot) }
+
+ it { is_expected.to be false }
+ end
+
+ it { is_expected.to be true }
+ end
+
+ describe '#impersonation_error_text' do
+ let(:user) { create(:user) }
+ let(:impersonation_in_progress) { false }
+
+ subject { helper.impersonation_error_text(user, impersonation_in_progress) }
+
+ context 'when password is expired' do
+ let(:user) { create(:user, password_expires_at: 1.minute.ago) }
+
+ it { is_expected.to eq(_("You cannot impersonate a user with an expired password")) }
+ end
+
+ context 'when impersonation is in progress' do
+ let(:impersonation_in_progress) { true }
+
+ it { is_expected.to eq(_("You are already impersonating another user")) }
+ end
+
+ context 'when user is blocked' do
+ let(:user) { create(:user, :blocked) }
+
+ it { is_expected.to eq(_("You cannot impersonate a blocked user")) }
+ end
+
+ context 'when user is internal' do
+ let(:user) { create(:user, :bot) }
+
+ it { is_expected.to eq(_("You cannot impersonate an internal user")) }
+ end
+
+ context 'when user is inactive' do
+ let(:user) { create(:user, :deactivated) }
+
+ it { is_expected.to eq(_("You cannot impersonate a user who cannot log in")) }
+ end
+ end
+
describe '#user_badges_in_admin_section' do
before do
allow(helper).to receive(:current_user).and_return(user)
@@ -534,7 +604,7 @@ RSpec.describe UsersHelper do
describe '#load_max_project_member_accesses' do
let_it_be(:projects) { create_list(:project, 3) }
- before(:all) do
+ before_all do
projects.first.add_developer(user)
end
@@ -612,4 +682,58 @@ RSpec.describe UsersHelper do
it { is_expected.to eq('Active') }
end
end
+
+ describe '#user_profile_actions_data' do
+ let(:user_1) { create(:user) }
+ let(:user_2) { create(:user) }
+ let(:user_path) { '/users/root' }
+
+ subject { helper.user_profile_actions_data(user_1) }
+
+ before do
+ allow(helper).to receive(:user_path).and_return(user_path)
+ allow(helper).to receive(:user_url).and_return(user_path)
+ end
+
+ shared_examples 'user cannot report' do
+ it 'returns data without reporting related data' do
+ is_expected.to match({
+ user_id: user_1.id,
+ rss_subscription_path: user_path
+ })
+ end
+ end
+
+ context 'user is current user' do
+ before do
+ allow(helper).to receive(:current_user).and_return(user_1)
+ end
+
+ it_behaves_like 'user cannot report'
+ end
+
+ context 'user is not current user' do
+ before do
+ allow(helper).to receive(:current_user).and_return(user_2)
+ end
+
+ it 'returns data for reporting related data' do
+ is_expected.to match({
+ user_id: user_1.id,
+ rss_subscription_path: user_path,
+ report_abuse_path: add_category_abuse_reports_path,
+ reported_user_id: user_1.id,
+ reported_from_url: user_path
+ })
+ end
+ end
+
+ context 'when logged out' do
+ before do
+ allow(helper).to receive(:current_user).and_return(nil)
+ end
+
+ it_behaves_like 'user cannot report'
+ end
+ end
end
diff --git a/spec/initializers/6_validations_spec.rb b/spec/initializers/6_validations_spec.rb
index cdd96640933..4d317a7583e 100644
--- a/spec/initializers/6_validations_spec.rb
+++ b/spec/initializers/6_validations_spec.rb
@@ -7,7 +7,11 @@ RSpec.describe '6_validations' do
describe 'validate_storages_config' do
context 'with correct settings' do
before do
- mock_storages('foo' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c'), 'bar' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/d'))
+ mock_storages(
+ 'storage' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c'),
+ 'storage.with_VALID-chars01' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/d'),
+ 'gitaly.c.gitlab-prd-164c.internal' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/e')
+ )
end
it 'passes through' do
diff --git a/spec/initializers/action_cable_subscription_adapter_identifier_spec.rb b/spec/initializers/action_cable_subscription_adapter_identifier_spec.rb
index 94134ce44fd..cf82fd751dd 100644
--- a/spec/initializers/action_cable_subscription_adapter_identifier_spec.rb
+++ b/spec/initializers/action_cable_subscription_adapter_identifier_spec.rb
@@ -6,8 +6,13 @@ RSpec.describe 'ActionCableSubscriptionAdapterIdentifier override' do
describe '#identifier' do
let!(:original_config) { ::ActionCable::Server::Base.config.cable }
+ before do
+ ActionCable.server.restart
+ end
+
after do
::ActionCable::Server::Base.config.cable = original_config
+ ActionCable.server.restart
end
context 'when id key is nil on cable.yml' do
diff --git a/spec/initializers/postgresql_cte_spec.rb b/spec/initializers/postgresql_cte_spec.rb
new file mode 100644
index 00000000000..66dcb905491
--- /dev/null
+++ b/spec/initializers/postgresql_cte_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'ActiveRecord::Relation patch for PostgreSQL WITH statements', feature_category: :database do
+ describe 'ActiveRecord::Relation::WithChain#recursive' do
+ subject(:relation) { User.with.recursive }
+
+ it 'sets recursive value flag on the relation' do
+ expect(relation.recursive_value).to eq(true)
+ end
+
+ it 'raises an error when #update_all is called' do
+ expect { relation.update_all(attribute: 42) }.to raise_exception(ActiveRecord::ReadOnlyRecord)
+ end
+ end
+end
diff --git a/spec/lib/api/entities/nuget/metadatum_spec.rb b/spec/lib/api/entities/nuget/metadatum_spec.rb
index cb4e53a1960..2cf26d59279 100644
--- a/spec/lib/api/entities/nuget/metadatum_spec.rb
+++ b/spec/lib/api/entities/nuget/metadatum_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe API::Entities::Nuget::Metadatum, feature_category: :package_regis
let(:expected) do
{
'authors': 'Authors',
+ 'description': 'Description',
'summary': 'Description',
'projectUrl': 'http://sandbox.com/project',
'licenseUrl': 'http://sandbox.com/license',
@@ -50,8 +51,10 @@ RSpec.describe API::Entities::Nuget::Metadatum, feature_category: :package_regis
context 'with default value' do
let(:metadatum) { super().merge(description: nil) }
+ it { is_expected.to have_key(:description) }
it { is_expected.to have_key(:summary) }
- it { is_expected.to eq(expected.merge(summary: '')) }
+
+ it { is_expected.to eq(expected.merge(description: '', summary: '')) }
end
end
end
diff --git a/spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb b/spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb
index 2fad42f907b..b39456973ea 100644
--- a/spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb
+++ b/spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe API::Entities::Nuget::PackageMetadataCatalogEntry, feature_catego
'dependencyGroups': [],
'tags': 'tag1 tag2 tag3',
'packageContent': 'http://sandbox.com/archive/package',
+ 'description': 'Summary',
'summary': 'Summary',
'projectUrl': 'http://sandbox.com/project',
'licenseUrl': 'http://sandbox.com/license',
diff --git a/spec/lib/api/entities/nuget/search_result_spec.rb b/spec/lib/api/entities/nuget/search_result_spec.rb
index 5edff28824f..9de2719999e 100644
--- a/spec/lib/api/entities/nuget/search_result_spec.rb
+++ b/spec/lib/api/entities/nuget/search_result_spec.rb
@@ -34,6 +34,7 @@ RSpec.describe API::Entities::Nuget::SearchResult, feature_category: :package_re
'authors': 'Author',
'id': 'PackageTest',
'title': 'PackageTest',
+ 'description': 'Description',
'summary': 'Description',
'totalDownloads': 100,
'verified': true,
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
index 6475dcd7618..1d80aad2127 100644
--- a/spec/lib/api/entities/user_spec.rb
+++ b/spec/lib/api/entities/user_spec.rb
@@ -54,19 +54,7 @@ RSpec.describe API::Entities::User do
it_behaves_like 'exposes relationship'
end
- context 'when current user can read user profile and disable_follow_users is switched off' do
- let(:can_read_user_profile) { true }
-
- before do
- stub_feature_flags(disable_follow_users: false)
- user.enabled_following = false
- user.save!
- end
-
- it_behaves_like 'exposes relationship'
- end
-
- context 'when current user can read user profile, disable_follow_users is switched on and user disabled it for themself' do
+ context 'when current user can read user profile and user disabled it for themself' do
let(:can_read_user_profile) { true }
before do
@@ -77,7 +65,7 @@ RSpec.describe API::Entities::User do
it_behaves_like 'does not expose relationship'
end
- context 'when current user can read user profile, disable_follow_users is switched on and current user disabled it for themself' do
+ context 'when current user can read user profile and current user disabled it for themself' do
let(:can_read_user_profile) { true }
before do
diff --git a/spec/lib/api/ml/mlflow/api_helpers_spec.rb b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
new file mode 100644
index 00000000000..4f6a37c66c4
--- /dev/null
+++ b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
+ include described_class
+
+ describe '#packages_url' do
+ subject { packages_url }
+
+ let_it_be(:user_project) { build_stubbed(:project) }
+
+ context 'with an empty relative URL root' do
+ before do
+ allow(Gitlab::Application.routes).to receive(:default_url_options)
+ .and_return(protocol: 'http', host: 'localhost', script_name: '')
+ end
+
+ it { is_expected.to eql("http://localhost/api/v4/projects/#{user_project.id}/packages/generic") }
+ end
+
+ context 'with a forward slash relative URL root' do
+ before do
+ allow(Gitlab::Application.routes).to receive(:default_url_options)
+ .and_return(protocol: 'http', host: 'localhost', script_name: '/')
+ end
+
+ it { is_expected.to eql("http://localhost/api/v4/projects/#{user_project.id}/packages/generic") }
+ end
+
+ context 'with a relative URL root' do
+ before do
+ allow(Gitlab::Application.routes).to receive(:default_url_options)
+ .and_return(protocol: 'http', host: 'localhost', script_name: '/gitlab/root')
+ end
+
+ it { is_expected.to eql("http://localhost/gitlab/root/api/v4/projects/#{user_project.id}/packages/generic") }
+ end
+ end
+end
diff --git a/spec/lib/api/validations/validators/git_sha_spec.rb b/spec/lib/api/validations/validators/git_sha_spec.rb
index ae6be52a4c7..2ae3fca7a6e 100644
--- a/spec/lib/api/validations/validators/git_sha_spec.rb
+++ b/spec/lib/api/validations/validators/git_sha_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe API::Validations::Validators::GitSha do
let(:sha) { RepoHelpers.sample_commit.id }
let(:short_sha) { sha[0, Gitlab::Git::Commit::MIN_SHA_LENGTH] }
let(:too_short_sha) { sha[0, Gitlab::Git::Commit::MIN_SHA_LENGTH - 1] }
+ let(:too_long_sha) { "a" * (Gitlab::Git::Commit::MAX_SHA_LENGTH + 1) }
subject do
described_class.new(['test'], {}, false, scope.new)
@@ -29,7 +30,7 @@ RSpec.describe API::Validations::Validators::GitSha do
context 'invalid sha' do
it 'raises a validation error' do
- expect_validation_error('test' => "#{sha}2") # Sha length > 40
+ expect_validation_error('test' => too_long_sha) # too long SHA
expect_validation_error('test' => 'somestring')
expect_validation_error('test' => too_short_sha) # sha length < MIN_SHA_LENGTH (7)
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
index 57e0b67e9e6..45ede09c7bb 100644
--- a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
@@ -92,16 +92,6 @@ RSpec.describe Atlassian::JiraConnect::Serializers::DeploymentEntity, feature_ca
expect(subject.issue_keys).to contain_exactly('add a')
end
- context 'when `jira_deployment_issue_keys` flag is disabled' do
- before do
- stub_feature_flags(jira_deployment_issue_keys: false)
- end
-
- it 'does not extract issue keys from commits' do
- expect(subject.issue_keys).to be_empty
- end
- end
-
context 'when deploy happened at an older commit' do
before do
# SHA is from a commit between 1) and 2) in the commit list above.
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index dd8a4a14531..61e6c59a1a5 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
end
end
- before(:all) do
+ before(:all) do # rubocop:disable RSpec/BeforeAll
Rake::Task.define_task(:environment)
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/backup'
@@ -272,14 +272,13 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
end
context 'with PostgreSQL settings defined in the environment' do
- let(:cmd) { %W[#{Gem.ruby} -e] + ["$stderr.puts ENV.to_h.select { |k, _| k.start_with?('PG') }"] }
let(:config) { YAML.load_file(File.join(Rails.root, 'config', 'database.yml'))['test'] }
before do
- stub_const 'ENV', ENV.to_h.merge({
+ stub_env(ENV.to_h.merge({
'GITLAB_BACKUP_PGHOST' => 'test.example.com',
'PGPASSWORD' => 'donotchange'
- })
+ }))
end
it 'overrides default config values' do
@@ -289,12 +288,13 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
+ expect(ENV).to receive(:[]=).with('PGHOST', 'test.example.com')
+ expect(ENV).not_to receive(:[]=).with('PGPASSWORD', anything)
+
subject.restore(backup_dir)
- expect(output).to include(%("PGHOST"=>"test.example.com"))
- expect(output).to include(%("PGPASSWORD"=>"donotchange"))
- expect(output).to include(%("PGPORT"=>"#{config['port']}")) if config['port']
- expect(output).to include(%("PGUSER"=>"#{config['username']}")) if config['username']
+ expect(ENV['PGPORT']).to eq(config['port']) if config['port']
+ expect(ENV['PGUSER']).to eq(config['username']) if config['username']
end
end
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 172fc28dd3e..1105f39124b 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
let(:storage_parallelism) { nil }
let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
let(:backup_id) { '20220101' }
+ let(:server_side) { false }
let(:progress) do
Tempfile.new('progress').tap do |progress|
@@ -26,7 +27,14 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
progress.close
end
- subject { described_class.new(progress, max_parallelism: max_parallelism, storage_parallelism: storage_parallelism) }
+ subject do
+ described_class.new(
+ progress,
+ max_parallelism: max_parallelism,
+ storage_parallelism: storage_parallelism,
+ server_side: server_side
+ )
+ end
context 'unknown' do
it 'fails to start unknown' do
@@ -92,6 +100,17 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
end
end
+ context 'server-side option set' do
+ let(:server_side) { true }
+
+ it 'passes option through' do
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-server-side', '-id', backup_id).and_call_original
+
+ subject.start(:create, destination, backup_id: backup_id)
+ subject.finish!
+ end
+ end
+
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
@@ -132,11 +151,8 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
)
end
- before do
- stub_const('ENV', ssl_env)
- end
-
it 'passes through SSL envs' do
+ expect(subject).to receive(:current_env).and_return(ssl_env)
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-id', backup_id).and_call_original
subject.start(:create, destination, backup_id: backup_id)
@@ -146,21 +162,27 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
end
context 'restore' do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository, :design_repo) }
let_it_be(:personal_snippet) { create(:personal_snippet, author: project.first_owner) }
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) }
- def copy_bundle_to_backup_path(bundle_name, destination)
- FileUtils.mkdir_p(File.join(Gitlab.config.backup.path, 'repositories', File.dirname(destination)))
- FileUtils.cp(Rails.root.join('spec/fixtures/lib/backup', bundle_name), File.join(Gitlab.config.backup.path, 'repositories', destination))
+ def copy_fixture_to_backup_path(backup_name, repo_disk_path)
+ FileUtils.mkdir_p(File.join(Gitlab.config.backup.path, 'repositories', File.dirname(repo_disk_path)))
+
+ %w[.bundle .refs].each do |filetype|
+ FileUtils.cp(
+ Rails.root.join('spec/fixtures/lib/backup', backup_name + filetype),
+ File.join(Gitlab.config.backup.path, 'repositories', repo_disk_path + filetype)
+ )
+ end
end
it 'restores from repository bundles', :aggregate_failures do
- copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle')
- copy_bundle_to_backup_path('wiki_repo.bundle', project.disk_path + '.wiki.bundle')
- copy_bundle_to_backup_path('design_repo.bundle', project.disk_path + '.design.bundle')
- copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
- copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
+ copy_fixture_to_backup_path('project_repo', project.disk_path)
+ copy_fixture_to_backup_path('wiki_repo', project.wiki.disk_path)
+ copy_fixture_to_backup_path('design_repo', project.design_repository.disk_path)
+ copy_fixture_to_backup_path('personal_snippet_repo', personal_snippet.disk_path)
+ copy_fixture_to_backup_path('project_snippet_repo', project_snippet.disk_path)
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer').and_call_original
@@ -184,7 +206,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
it 'clears specified storages when remove_all_repositories is set' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer', '-remove-all-repositories', 'default').and_call_original
- copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle')
+ copy_fixture_to_backup_path('project_repo', project.disk_path)
subject.start(:restore, destination, backup_id: backup_id, remove_all_repositories: %w[default])
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish!
@@ -212,6 +234,35 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
end
end
+ context 'server-side option set' do
+ let(:server_side) { true }
+
+ it 'passes option through' do
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-server-side', '-id', backup_id).and_call_original
+
+ subject.start(:restore, destination, backup_id: backup_id)
+ subject.finish!
+ end
+
+ context 'missing backup_id' do
+ it 'wont set the option' do
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-server-side').and_call_original
+
+ subject.start(:restore, destination)
+ subject.finish!
+ end
+ end
+ end
+
+ context 'missing backup_id' do
+ it 'wont set the option' do
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer').and_call_original
+
+ subject.start(:restore, destination)
+ subject.finish!
+ end
+ end
+
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
diff --git a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
index e14b1362687..06bb0edc92c 100644
--- a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
+++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
@@ -173,6 +173,48 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor
expect(doc.css('a').last.text).to eq("#{issuable.title} (#{issuable.to_reference} - closed)")
end
+ context 'for references with fenced emoji' do
+ def issuable_link(issuable)
+ create_link(
+ issuable.to_reference,
+ "#{issuable_type}": issuable.id,
+ reference_type: issuable_type,
+ reference_format: '+'
+ )
+ end
+
+ it 'expands emoji for references with +' do
+ issuable = create_item(issuable_type, :opened, title: 'Some issue :eagle:')
+ doc = filter(issuable_link(issuable), context)
+
+ expect(/\p{Emoji_Presentation}/ =~ doc.css('a').last.text).not_to be_nil
+ expect(doc.css('a').last.text.scan(/\p{Emoji_Presentation}/)).to eq(["🦅"])
+ end
+
+ it 'expands when emoji is embedded at the beginning of a string' do
+ issuable = create_item(issuable_type, :opened, title: ':eagle: Some issue')
+ doc = filter(issuable_link(issuable), context)
+
+ expect(/\p{Emoji_Presentation}/ =~ doc.css('a').last.text).not_to be_nil
+ expect(doc.css('a').last.text.scan(/\p{Emoji_Presentation}/)).to eq(["🦅"])
+ end
+
+ it 'expands when emoji appears multiple times' do
+ issuable = create_item(issuable_type, :opened, title: ':eagle: Some issue :dog:')
+ doc = filter(issuable_link(issuable), context)
+
+ expect(/\p{Emoji_Presentation}/ =~ doc.css('a').last.text).not_to be_nil
+ expect(doc.css('a').last.text.scan(/\p{Emoji_Presentation}/)).to eq(["🦅", "🐶"])
+ end
+
+ it 'does not expand when emoji is embedded mid-string' do
+ issuable = create_item(issuable_type, :opened, title: 'Some:eagle:issue')
+ doc = filter(issuable_link(issuable), context)
+
+ expect(/\p{Emoji_Presentation}/ =~ doc.css('a').last.text).to be_nil
+ end
+ end
+
it 'shows title for references with +s' do
issuable = create_item(issuable_type, :opened, title: 'Some issue')
link = create_link(issuable.to_reference, "#{issuable_type}": issuable.id, reference_type: issuable_type,
diff --git a/spec/lib/banzai/filter/truncate_visible_filter_spec.rb b/spec/lib/banzai/filter/truncate_visible_filter_spec.rb
index 0d352850682..d55d54f766d 100644
--- a/spec/lib/banzai/filter/truncate_visible_filter_spec.rb
+++ b/spec/lib/banzai/filter/truncate_visible_filter_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Banzai::Filter::TruncateVisibleFilter, feature_category: :team_pl
describe 'truncates the first line of a code block' do
let(:markdown) { "```\nCode block\nwith two lines\n```" }
- let(:expected) { "Code block...</span>\n</code>" }
+ let(:expected) { "Code block...</span></code>" }
it_behaves_like 'truncates text'
end
diff --git a/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
index e3a7335a238..bcc2d6fd5ed 100644
--- a/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
+++ b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Common::Graphql::GetMembersQuery do
+RSpec.describe BulkImports::Common::Graphql::GetMembersQuery, feature_category: :importers do
let(:entity) { create(:bulk_import_entity, :group_entity) }
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
@@ -41,6 +41,7 @@ RSpec.describe BulkImports::Common::Graphql::GetMembersQuery do
it 'queries group & group members' do
expect(query.to_s).to include('group')
expect(query.to_s).to include('groupMembers')
+ expect(query.to_s).to include('SHARED_FROM_GROUPS')
end
end
@@ -50,6 +51,7 @@ RSpec.describe BulkImports::Common::Graphql::GetMembersQuery do
it 'queries project & project members' do
expect(query.to_s).to include('project')
expect(query.to_s).to include('projectMembers')
+ expect(query.to_s).to include('INVITED_GROUPS SHARED_INTO_ANCESTORS')
end
end
end
diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
index 9782f2aac27..69d5997cf96 100644
--- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer, fe
describe '#transform' do
let(:bulk_import) { build_stubbed(:bulk_import) }
let(:destination_group) { create(:group) }
- let(:destination_namespace) { destination_group.full_path }
+ let(:destination_namespace) { destination_group&.full_path }
let(:entity) do
build_stubbed(
@@ -178,52 +178,7 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer, fe
end
describe 'visibility level' do
- subject(:transformed_data) { described_class.new.transform(context, data) }
-
include_examples 'visibility level settings'
-
- context 'when destination is blank' do
- let(:destination_namespace) { '' }
-
- context 'when visibility level is public' do
- let(:data) { { 'visibility' => 'public' } }
-
- it 'sets visibility level to public' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PUBLIC)
- end
- end
-
- context 'when when visibility level is internal' do
- let(:data) { { 'visibility' => 'internal' } }
-
- it 'sets visibility level to internal' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
-
- context 'when private' do
- let(:data) { { 'visibility' => 'private' } }
-
- it 'sets visibility level to private' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
-
- context 'when visibility level is restricted' do
- let(:data) { { 'visibility' => 'internal' } }
-
- it 'sets visibility level to private' do
- stub_application_setting(
- restricted_visibility_levels: [
- Gitlab::VisibilityLevel::INTERNAL,
- Gitlab::VisibilityLevel::PUBLIC
- ]
- )
-
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
- end
end
end
end
diff --git a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb
index 0e3d8b36fb2..ac74f17cc21 100644
--- a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer
end
let(:destination_group) { create(:group) }
- let(:destination_namespace) { destination_group.full_path }
+ let(:destination_namespace) { destination_group&.full_path }
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:data) do
@@ -127,7 +127,7 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer
end
describe 'visibility level' do
- include_examples 'visibility level settings'
+ include_examples 'visibility level settings', true
end
end
end
diff --git a/spec/lib/click_house/bind_index_manager_spec.rb b/spec/lib/click_house/bind_index_manager_spec.rb
new file mode 100644
index 00000000000..1c659017c63
--- /dev/null
+++ b/spec/lib/click_house/bind_index_manager_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::BindIndexManager, feature_category: :database do
+ describe '#next_bind_str' do
+ context 'when initialized without a start index' do
+ let(:bind_manager) { described_class.new }
+
+ it 'starts from index 1 by default' do
+ expect(bind_manager.next_bind_str).to eq('$1')
+ end
+
+ it 'increments the bind string on subsequent calls' do
+ bind_manager.next_bind_str
+ expect(bind_manager.next_bind_str).to eq('$2')
+ end
+ end
+
+ context 'when initialized with a start index' do
+ let(:bind_manager) { described_class.new(2) }
+
+ it 'starts from the given index' do
+ expect(bind_manager.next_bind_str).to eq('$2')
+ end
+
+ it 'increments the bind string on subsequent calls' do
+ bind_manager.next_bind_str
+ expect(bind_manager.next_bind_str).to eq('$3')
+ end
+ end
+ end
+end
diff --git a/spec/lib/click_house/query_builder_spec.rb b/spec/lib/click_house/query_builder_spec.rb
new file mode 100644
index 00000000000..9e3f1118eeb
--- /dev/null
+++ b/spec/lib/click_house/query_builder_spec.rb
@@ -0,0 +1,334 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::QueryBuilder, feature_category: :database do
+ let(:table_name) { :test_table }
+ let(:builder) { described_class.new(table_name) }
+
+ shared_examples "generates correct sql on multiple calls to `to_sql`" do |method_name, argument1, argument2|
+ it 'returns the same SQL when called multiple times on the same builder' do
+ query_builder = builder.public_send(method_name, argument1)
+ first_sql = query_builder.to_sql
+ second_sql = query_builder.to_sql
+
+ expect(first_sql).to eq(second_sql)
+ end
+
+ it 'returns different SQL when called multiple times on different builders' do
+ query_builder = builder.public_send(method_name, argument1)
+ query_builder_2 = query_builder.public_send(method_name, argument2)
+
+ first_sql = query_builder.to_sql
+ second_sql = query_builder_2.to_sql
+
+ expect(first_sql).not_to eq(second_sql)
+ end
+ end
+
+ describe "#initialize" do
+ it 'initializes with correct table' do
+ expect(builder.table.name).to eq(table_name.to_s)
+ end
+ end
+
+ describe '#where' do
+ context 'with simple conditions' do
+ it 'builds correct where query' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" = 'value1'
+ AND "test_table"."column2" = 'value2'
+ SQL
+
+ sql = builder.where(column1: 'value1', column2: 'value2').to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+ end
+
+ context 'with array conditions' do
+ it 'builds correct where query' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" IN (1, 2, 3)
+ SQL
+
+ sql = builder.where(column1: [1, 2, 3]).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+ end
+
+ it_behaves_like "generates correct sql on multiple calls to `to_sql`", :where, { column1: 'value1' },
+ { column2: 'value2' }
+
+ context 'with supported arel nodes' do
+ it 'builds a query using the In node' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" IN ('value1', 'value2')
+ SQL
+
+ sql = builder.where(builder.table[:column1].in(%w[value1 value2])).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'builds a query using the Equality node' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" = 'value1'
+ SQL
+
+ sql = builder.where(builder.table[:column1].eq('value1')).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'builds a query using the LessThan node' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" < 5
+ SQL
+
+ sql = builder.where(builder.table[:column1].lt(5)).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'builds a query using the LessThanOrEqual node' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" <= 5
+ SQL
+
+ sql = builder.where(builder.table[:column1].lteq(5)).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'builds a query using the GreaterThan node' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" > 5
+ SQL
+
+ sql = builder.where(builder.table[:column1].gt(5)).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'builds a query using the GreaterThanOrEqual node' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" >= 5
+ SQL
+
+ sql = builder.where(builder.table[:column1].gteq(5)).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+ end
+
+ context 'with unsupported arel nodes' do
+ it 'raises an error for the unsupported node' do
+ expect do
+ builder.where(builder.table[:column1].not_eq('value1')).to_sql
+ end.to raise_error(ArgumentError, /Unsupported Arel node type for QueryBuilder:/)
+ end
+ end
+ end
+
+ describe '#select' do
+ it 'builds correct select query with single field' do
+ expected_sql = <<~SQL.chomp
+ SELECT "test_table"."column1" FROM "test_table"
+ SQL
+
+ sql = builder.select(:column1).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'builds correct select query with multiple fields' do
+ expected_sql = <<~SQL.chomp
+ SELECT "test_table"."column1", "test_table"."column2" FROM "test_table"
+ SQL
+
+ sql = builder.select(:column1, :column2).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'adds new fields on multiple calls without duplicating' do
+ expected_sql = <<~SQL.chomp
+ SELECT "test_table"."column1", "test_table"."column2" FROM "test_table"
+ SQL
+
+ sql = builder.select(:column1).select(:column2).select(:column1).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it_behaves_like "generates correct sql on multiple calls to `to_sql`", :select, :column1, :column2
+ end
+
+ describe '#order' do
+ it 'builds correct order query with direction :desc' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ ORDER BY "test_table"."column1" DESC
+ SQL
+
+ sql = builder.order(:column1, :desc).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'builds correct order query with default direction asc' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ ORDER BY "test_table"."column1" ASC
+ SQL
+
+ sql = builder.order(:column1).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'appends orderings on multiple calls' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ ORDER BY "test_table"."column1" DESC,
+ "test_table"."column2" ASC
+ SQL
+
+ sql = builder.order(:column1, :desc).order(:column2, :asc).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'appends orderings for the same column when ordered multiple times' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ ORDER BY "test_table"."column1" DESC,
+ "test_table"."column1" ASC
+ SQL
+
+ sql = builder.order(:column1, :desc).order(:column1, :asc).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'raises error for invalid direction' do
+ expect do
+ builder.order(:column1, :invalid)
+ end.to raise_error(ArgumentError, "Invalid order direction 'invalid'. Must be :asc or :desc")
+ end
+
+ it_behaves_like "generates correct sql on multiple calls to `to_sql`", :order, :column1, :column2
+ end
+
+ describe '#limit' do
+ it 'builds correct limit query' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ LIMIT 10
+ SQL
+
+ sql = builder.limit(10).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'overrides previous limit value when called multiple times' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ LIMIT 20
+ SQL
+
+ sql = builder.limit(10).limit(20).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+ end
+
+ describe '#offset' do
+ it 'builds correct offset query' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ OFFSET 5
+ SQL
+
+ sql = builder.offset(5).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+
+ it 'overrides previous offset value when called multiple times' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ OFFSET 10
+ SQL
+
+ sql = builder.offset(5).offset(10).to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+ end
+
+ describe '#to_sql' do
+ it 'delegates to the Arel::SelectManager' do
+ expect(builder.send(:manager)).to receive(:to_sql)
+
+ builder.to_sql
+ end
+ end
+
+ describe '#to_redacted_sql' do
+ it 'calls ::ClickHouse::Redactor correctly' do
+ expect(::ClickHouse::Redactor).to receive(:redact).with(builder)
+
+ builder.to_redacted_sql
+ end
+ end
+
+ describe '#apply_conditions!' do
+ it 'applies conditions to the manager' do
+ manager = builder.send(:manager)
+ condition = Arel::Nodes::Equality.new(builder.table[:column1], 'value1')
+ builder.conditions << condition
+
+ expect(manager).to receive(:where).with(condition)
+
+ builder.send(:apply_conditions!)
+ end
+ end
+
+ describe 'method chaining', :freeze_time do
+ it 'builds correct SQL query when methods are chained' do
+ expected_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT "test_table"."column1", "test_table"."column2"
+ FROM "test_table"
+ WHERE "test_table"."column1" = 'value1'
+ AND "test_table"."column2" = 'value2'
+ AND "test_table"."created_at" <= '#{Date.today}'
+ ORDER BY "test_table"."column1" DESC
+ LIMIT 10
+ OFFSET 5
+ SQL
+
+ sql = builder
+ .select(:column1, :column2)
+ .where(column1: 'value1', column2: 'value2')
+ .where(builder.table[:created_at].lteq(Date.today))
+ .order(:column1, 'desc')
+ .limit(10)
+ .offset(5)
+ .to_sql
+
+ expect(sql).to eq(expected_sql)
+ end
+ end
+end
diff --git a/spec/lib/click_house/redactor_spec.rb b/spec/lib/click_house/redactor_spec.rb
new file mode 100644
index 00000000000..d8354b6cbb9
--- /dev/null
+++ b/spec/lib/click_house/redactor_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::Redactor, feature_category: :database do
+ let(:builder) { ClickHouse::QueryBuilder.new(:test_table) }
+
+ describe '.redact' do
+ context 'when given simple conditions' do
+ let(:new_builder) { builder.where(column1: 'value1', column2: 'value2') }
+ let(:redacted_query) { described_class.redact(new_builder) }
+
+ it 'redacts equality conditions correctly' do
+ expected_redacted_sql = <<~SQL.chomp.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" = $1
+ AND "test_table"."column2" = $2
+ SQL
+
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+ end
+
+ context 'when given IN conditions' do
+ let(:new_builder) { builder.where(column1: %w[value1 value2 value3]) }
+ let(:redacted_query) { described_class.redact(new_builder) }
+
+ it 'redacts IN conditions correctly' do
+ expected_redacted_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" IN ($1, $2, $3)
+ SQL
+
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+ end
+
+ context 'with supported arel nodes' do
+ it 'redacts a query using the In node' do
+ new_builder = builder.where(builder.table[:column1].in(%w[value1 value2]))
+ redacted_query = described_class.redact(new_builder)
+
+ expected_redacted_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" IN ($1, $2)
+ SQL
+
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+
+ it 'redacts a query using the Equality node' do
+ new_builder = builder.where(builder.table[:column1].eq('value1'))
+ redacted_query = described_class.redact(new_builder)
+
+ expected_redacted_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" = $1
+ SQL
+
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+
+ it 'redacts a query using the LessThan node' do
+ new_builder = builder.where(builder.table[:column1].lt(5))
+ redacted_query = described_class.redact(new_builder)
+
+ expected_redacted_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" < $1
+ SQL
+
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+
+ it 'redacts a query using the LessThanOrEqual node' do
+ new_builder = builder.where(builder.table[:column1].lteq(5))
+ redacted_query = described_class.redact(new_builder)
+
+ expected_redacted_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" <= $1
+ SQL
+
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+
+ it 'redacts a query using the GreaterThan node' do
+ new_builder = builder.where(builder.table[:column1].gt(5))
+ redacted_query = described_class.redact(new_builder)
+
+ expected_redacted_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" > $1
+ SQL
+
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+
+ it 'redacts a query using the GreaterThanOrEqual node' do
+ new_builder = builder.where(builder.table[:column1].gteq(5))
+ redacted_query = described_class.redact(new_builder)
+
+ expected_redacted_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" >= $1
+ SQL
+
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+ end
+
+ context 'with unsupported arel nodes' do
+ let(:unsupported_node) { Arel::Nodes::NotEqual.new(Arel::Table.new(:test_table)[:column1], 'value1') }
+ let(:manager) do
+ instance_double(
+ 'Arel::SelectManager',
+ constraints: [],
+ where: true,
+ to_sql: "SELECT * FROM \"test_table\""
+ )
+ end
+
+ let(:mocked_builder) do
+ instance_double(
+ 'ClickHouse::QueryBuilder',
+ conditions: [unsupported_node],
+ manager: manager
+ )
+ end
+
+ it 'raises an error for the unsupported node' do
+ expect do
+ described_class.redact(mocked_builder)
+ end.to raise_error(ArgumentError, /Unsupported Arel node type for Redactor:/)
+ end
+ end
+
+ context 'when method chaining is used' do
+ let(:new_builder) do
+ builder.where(column1: 'value1').where(column2: 'value2').where(builder.table[:column3].gteq(5))
+ end
+
+ let(:redacted_query) { described_class.redact(new_builder) }
+
+ it 'redacts chained conditions correctly' do
+ expected_redacted_sql = <<~SQL.lines(chomp: true).join(' ')
+ SELECT * FROM "test_table"
+ WHERE "test_table"."column1" = $1
+ AND "test_table"."column2" = $2
+ AND "test_table"."column3" >= $3
+ SQL
+ expect(redacted_query).to eq(expected_redacted_sql)
+ end
+ end
+
+ context 'when calling .redact multiple times' do
+ let(:new_builder) { builder.where(column1: 'value1', column2: 'value2') }
+ let(:first_redacted_query) { described_class.redact(new_builder) }
+ let(:second_redacted_query) { described_class.redact(new_builder) }
+
+ it 'produces consistent redacted SQL' do
+ expect(first_redacted_query).to eq(second_redacted_query)
+ end
+ end
+ end
+end
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index ebc69201513..b53b5b44c2e 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -215,7 +215,6 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
describe '#tags' do
let(:path) { 'namespace/path/to/repository' }
let(:page_size) { 100 }
- let(:last) { nil }
let(:response) do
[
{
@@ -235,7 +234,10 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
]
end
- subject { client.tags(path, page_size: page_size, last: last) }
+ let(:previous_page_url) { 'http://sandbox.org/test?before=b' }
+ let(:next_page_url) { 'http://sandbox.org/test?last=b' }
+
+ subject { client.tags(path, page_size: page_size) }
context 'with valid parameters' do
let(:expected) do
@@ -252,8 +254,7 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
it { is_expected.to eq(expected) }
end
- context 'with a response with a link header' do
- let(:next_page_url) { 'http://sandbox.org/test?last=b' }
+ context 'with a response with a link header containing next page' do
let(:expected) do
{
pagination: { next: { uri: URI(next_page_url) } },
@@ -268,25 +269,38 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
it { is_expected.to eq(expected) }
end
- context 'with a large page size set' do
- let(:page_size) { described_class::MAX_TAGS_PAGE_SIZE + 1000 }
+ context 'with a response with a link header containing previous page' do
+ let(:expected) do
+ {
+ pagination: { previous: { uri: URI(previous_page_url) } },
+ response_body: ::Gitlab::Json.parse(response.to_json)
+ }
+ end
+ before do
+ stub_tags(path, page_size: page_size, previous_page_url: previous_page_url, respond_with: response)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
+ context 'with a response with a link header containing previous and next pages' do
let(:expected) do
{
- pagination: {},
+ pagination: { previous: { uri: URI(previous_page_url) }, next: { uri: URI(next_page_url) } },
response_body: ::Gitlab::Json.parse(response.to_json)
}
end
before do
- stub_tags(path, page_size: described_class::MAX_TAGS_PAGE_SIZE, respond_with: response)
+ stub_tags(path, page_size: page_size, previous_page_url: previous_page_url, next_page_url: next_page_url, respond_with: response)
end
it { is_expected.to eq(expected) }
end
- context 'with a last parameter set' do
- let(:last) { 'test' }
+ context 'with a large page size set' do
+ let(:page_size) { described_class::MAX_TAGS_PAGE_SIZE + 1000 }
let(:expected) do
{
@@ -296,12 +310,41 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
end
before do
- stub_tags(path, page_size: page_size, last: last, respond_with: response)
+ stub_tags(path, page_size: described_class::MAX_TAGS_PAGE_SIZE, respond_with: response)
end
it { is_expected.to eq(expected) }
end
+ context 'with pagination parameters set' do
+ subject do
+ client.tags(path, page_size: page_size, last: last, before: before, name: name, sort: sort)
+ end
+
+ where(:last, :before, :name, :sort, :input) do
+ 'test' | nil | nil | nil | { last: 'test' }
+ nil | 'test' | nil | nil | { before: 'test' }
+ nil | nil | 'test' | nil | { name: 'test' }
+ nil | nil | nil | 'asc' | { sort: 'asc' }
+ 'a' | 'b' | 'test' | 'desc' | { last: 'a', before: 'b', name: 'test', sort: 'desc' }
+ end
+
+ with_them do
+ let(:expected) do
+ {
+ pagination: {},
+ response_body: ::Gitlab::Json.parse(response.to_json)
+ }
+ end
+
+ before do
+ stub_tags(path, page_size: page_size, input: input, respond_with: response)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+ end
+
context 'with non successful response' do
before do
stub_tags(path, page_size: page_size, status_code: 404)
@@ -829,8 +872,14 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
.to_return(status: status_code, body: respond_with.to_json, headers: { 'Content-Type' => described_class::JSON_TYPE })
end
- def stub_tags(path, page_size: nil, last: nil, next_page_url: nil, status_code: 200, respond_with: {})
- params = { n: page_size, last: last }.compact
+ def stub_tags(path, page_size: nil, input: {}, previous_page_url: nil, next_page_url: nil, status_code: 200, respond_with: {})
+ params = {
+ n: page_size,
+ last: input[:last],
+ name: input[:name],
+ sort: input[:sort],
+ before: input[:before]
+ }.compact
url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/tags/list/"
@@ -842,8 +891,12 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
request_headers['Authorization'] = "bearer #{token}" if token
response_headers = { 'Content-Type' => described_class::JSON_TYPE }
- if next_page_url
- response_headers['Link'] = "<#{next_page_url}>; rel=\"next\""
+ if next_page_url || previous_page_url
+ previous_page_url = %(<#{previous_page_url}>; rel="previous") if previous_page_url
+ next_page_url = %(<#{next_page_url}>; rel="next") if next_page_url
+
+ link_header = [previous_page_url, next_page_url].compact.join(" ,")
+ response_headers['Link'] = link_header
end
stub_request(:get, url)
diff --git a/spec/lib/csv_builder_spec.rb b/spec/lib/csv_builder_spec.rb
deleted file mode 100644
index ec065ee6f7d..00000000000
--- a/spec/lib/csv_builder_spec.rb
+++ /dev/null
@@ -1,121 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe CsvBuilder do
- let(:object) { double(question: :answer) }
- let(:fake_relation) { FakeRelation.new([object]) }
- let(:subject) { described_class.new(fake_relation, 'Q & A' => :question, 'Reversed' => -> (o) { o.question.to_s.reverse }) }
- let(:csv_data) { subject.render }
-
- before do
- stub_const('FakeRelation', Array)
-
- FakeRelation.class_eval do
- def find_each(&block)
- each(&block)
- end
- end
- end
-
- it 'generates a csv' do
- expect(csv_data.scan(/(,|\n)/).join).to include ",\n,"
- end
-
- it 'uses a temporary file to reduce memory allocation' do
- expect(CSV).to receive(:new).with(instance_of(Tempfile)).and_call_original
-
- subject.render
- end
-
- it 'counts the number of rows' do
- subject.render
-
- expect(subject.rows_written).to eq 1
- end
-
- describe 'rows_expected' do
- it 'uses rows_written if CSV rendered successfully' do
- subject.render
-
- expect(fake_relation).not_to receive(:count)
- expect(subject.rows_expected).to eq 1
- end
-
- it 'falls back to calling .count before rendering begins' do
- expect(subject.rows_expected).to eq 1
- end
- end
-
- describe 'truncation' do
- let(:big_object) { double(question: 'Long' * 1024) }
- let(:row_size) { big_object.question.length * 2 }
- let(:fake_relation) { FakeRelation.new([big_object, big_object, big_object]) }
-
- it 'occurs after given number of bytes' do
- expect(subject.render(row_size * 2).length).to be_between(row_size * 2, row_size * 3)
- expect(subject).to be_truncated
- expect(subject.rows_written).to eq 2
- end
-
- it 'is ignored by default' do
- expect(subject.render.length).to be > row_size * 3
- expect(subject.rows_written).to eq 3
- end
-
- it 'causes rows_expected to fall back to .count' do
- subject.render(0)
-
- expect(fake_relation).to receive(:count).and_call_original
- expect(subject.rows_expected).to eq 3
- end
- end
-
- it 'avoids loading all data in a single query' do
- expect(fake_relation).to receive(:find_each)
-
- subject.render
- end
-
- it 'uses hash keys as headers' do
- expect(csv_data).to start_with 'Q & A'
- end
-
- it 'gets data by calling method provided as hash value' do
- expect(csv_data).to include 'answer'
- end
-
- it 'allows lamdas to look up more complicated data' do
- expect(csv_data).to include 'rewsna'
- end
-
- describe 'excel sanitization' do
- let(:dangerous_title) { double(title: "=cmd|' /C calc'!A0 title", description: "*safe_desc") }
- let(:dangerous_desc) { double(title: "*safe_title", description: "=cmd|' /C calc'!A0 desc") }
- let(:fake_relation) { FakeRelation.new([dangerous_title, dangerous_desc]) }
- let(:subject) { described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description') }
- let(:csv_data) { subject.render }
-
- it 'sanitizes dangerous characters at the beginning of a column' do
- expect(csv_data).to include "'=cmd|' /C calc'!A0 title"
- expect(csv_data).to include "'=cmd|' /C calc'!A0 desc"
- end
-
- it 'does not sanitize safe symbols at the beginning of a column' do
- expect(csv_data).not_to include "'*safe_desc"
- expect(csv_data).not_to include "'*safe_title"
- end
-
- context 'when dangerous characters are after a line break' do
- it 'does not append single quote to description' do
- fake_object = double(title: "Safe title", description: "With task list\n-[x] todo 1")
- fake_relation = FakeRelation.new([fake_object])
- builder = described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description')
-
- csv_data = builder.render
-
- expect(csv_data).to eq("Title,Description\nSafe title,\"With task list\n-[x] todo 1\"\n")
- end
- end
- end
-end
diff --git a/spec/lib/csv_builders/stream_spec.rb b/spec/lib/csv_builders/stream_spec.rb
deleted file mode 100644
index 7df55fe4230..00000000000
--- a/spec/lib/csv_builders/stream_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe CsvBuilders::Stream do
- let(:event_1) { double(title: 'Added salt', description: 'A teaspoon') }
- let(:event_2) { double(title: 'Added sugar', description: 'Just a pinch') }
- let(:fake_relation) { FakeRelation.new([event_1, event_2]) }
-
- subject(:builder) { described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description') }
-
- describe '#render' do
- before do
- stub_const('FakeRelation', Array)
-
- FakeRelation.class_eval do
- def find_each(&block)
- each(&block)
- end
- end
- end
-
- it 'returns a lazy enumerator' do
- expect(builder.render).to be_an(Enumerator::Lazy)
- end
-
- it 'returns all rows up to default max value' do
- expect(builder.render.to_a).to eq(
- [
- "Title,Description\n",
- "Added salt,A teaspoon\n",
- "Added sugar,Just a pinch\n"
- ])
- end
-
- it 'truncates to max rows' do
- expect(builder.render(1).to_a).to eq(
- [
- "Title,Description\n",
- "Added salt,A teaspoon\n"
- ])
- end
- end
-end
diff --git a/spec/lib/generators/batched_background_migration/expected_files/ee_my_batched_migration.txt b/spec/lib/generators/batched_background_migration/expected_files/ee_my_batched_migration.txt
index 004ae46ca5f..f88e3a5ee09 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/ee_my_batched_migration.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/ee_my_batched_migration.txt
@@ -13,7 +13,7 @@ module EE
extend ::Gitlab::Utils::Override
prepended do
- # operation_name :my_operation
+ # operation_name :my_operation # This is used as the key on collecting metrics
# scope_to ->(relation) { relation.where(column: "value") }
end
diff --git a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration.txt b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration.txt
index b2378b414b1..03908928fb8 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration.txt
@@ -8,7 +8,7 @@
module Gitlab
module BackgroundMigration
class MyBatchedMigration < BatchedMigrationJob
- # operation_name :my_operation
+ # operation_name :my_operation # This is used as the key on collecting metrics
# scope_to ->(relation) { relation.where(column: "value") }
feature_category :database
diff --git a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
index d9acd59aa71..e67e48d83a3 100644
--- a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
+++ b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
let(:section) { "analytics" }
let(:mr) { "https://gitlab.com/some-group/some-project/-/merge_requests/123" }
let(:event) { "view_analytics_dashboard" }
- let(:unique) { "user_id" }
+ let(:unique) { "user.id" }
let(:time_frames) { %w[7d] }
let(:include_default_identifiers) { 'yes' }
let(:options) do
@@ -31,7 +31,8 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
}.stringify_keys
end
- let(:key_path_7d) { "count_distinct_#{unique}_from_#{event}_7d" }
+ let(:key_path_without_time_frame) { "count_distinct_#{unique.sub('.', '_')}_from_#{event}" }
+ let(:key_path_7d) { "#{key_path_without_time_frame}_7d" }
let(:metric_definition_path_7d) { Dir.glob(File.join(temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first }
let(:metric_definition_7d) do
{
@@ -50,15 +51,17 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
"data_category" => "optional",
"instrumentation_class" => "RedisHLLMetric",
"distribution" => %w[ce ee],
- "tier" => %w[free premium ultimate]
+ "tier" => %w[free premium ultimate],
+ "options" => {
+ "events" => [event]
+ },
+ "events" => [{ "name" => event, "unique" => unique }]
}
end
before do
stub_const("#{described_class}::TOP_LEVEL_DIR_EE", ee_temp_dir)
stub_const("#{described_class}::TOP_LEVEL_DIR", temp_dir)
- stub_const("#{described_class}::KNOWN_EVENTS_PATH", tmpfile.path)
- stub_const("#{described_class}::KNOWN_EVENTS_PATH_EE", tmpfile.path)
# Stub version so that `milestone` key remains constant between releases to prevent flakiness.
stub_const('Gitlab::VERSION', '13.9.0')
@@ -83,7 +86,7 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
let(:identifiers) { %w[project user namespace] }
let(:event_definition) do
{
- "category" => "GitlabInternalEvents",
+ "category" => "InternalEventTracking",
"action" => event,
"description" => description,
"product_section" => section,
@@ -132,7 +135,7 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
context 'with duplicated event' do
- context 'in known_events files' do
+ context 'in known_events' do
before do
allow(::Gitlab::UsageDataCounters::HLLRedisCounter)
.to receive(:known_event?).with(event).and_return(true)
@@ -249,7 +252,7 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
context 'for multiple time frames' do
let(:time_frames) { %w[7d 28d] }
- let(:key_path_28d) { "count_distinct_#{unique}_from_#{event}_28d" }
+ let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
let(:metric_definition_28d) do
metric_definition_7d.merge(
@@ -268,7 +271,7 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
context 'with default time frames' do
let(:time_frames) { nil }
- let(:key_path_28d) { "count_distinct_#{unique}_from_#{event}_28d" }
+ let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
let(:metric_definition_28d) do
metric_definition_7d.merge(
@@ -285,32 +288,4 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
end
end
-
- describe 'Creating known event entry' do
- let(:time_frames) { %w[7d 28d] }
- let(:expected_known_events) { [{ "name" => event }] }
-
- it 'creates a metric definition file using the template' do
- described_class.new([], options).invoke_all
-
- expect(YAML.safe_load(File.read(tmpfile.path))).to match_array(expected_known_events)
- end
-
- context 'for ultimate only feature' do
- let(:ee_tmpfile) { Tempfile.new('test-metadata') }
-
- after do
- FileUtils.rm_rf(ee_tmpfile)
- end
-
- it 'creates a metric definition file using the template' do
- stub_const("#{described_class}::KNOWN_EVENTS_PATH_EE", ee_tmpfile.path)
-
- described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all
-
- expect(YAML.safe_load(File.read(tmpfile.path))).to be nil
- expect(YAML.safe_load(File.read(ee_tmpfile.path))).to match_array(expected_known_events)
- end
- end
- end
end
diff --git a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
index 6826006949e..62a52ee5fb9 100644
--- a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
+++ b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SnowplowEventDefinitionGenerator, :silence_stdout, feature_category: :product_analytics do
+RSpec.describe Gitlab::SnowplowEventDefinitionGenerator, :silence_stdout, feature_category: :product_analytics_data_management do
let(:ce_temp_dir) { Dir.mktmpdir }
let(:ee_temp_dir) { Dir.mktmpdir }
let(:timestamp) { Time.now.utc.strftime('%Y%m%d%H%M%S') }
diff --git a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
index 6a30bcd0e2c..f7a4bac39d7 100644
--- a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
@@ -26,11 +26,10 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do
# Stub version so that `milestone` key remains constant between releases to prevent flakiness.
before do
stub_const('Gitlab::VERSION', '13.9.0')
- allow(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('test metric name')
end
context 'without ee option' do
- let(:sample_filename) { 'sample_metric_with_name_suggestions.yml' }
+ let(:sample_filename) { 'sample_metric.yml' }
let(:metric_definition_path) { Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first }
it 'creates a metric definition file using the template' do
@@ -91,16 +90,6 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do
end
end
- describe 'Name suggestions' do
- it 'adds name key to metric definition' do
- expect(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('some name')
- described_class.new([key_path], { 'dir' => dir, 'class_name' => class_name }).invoke_all
- metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
-
- expect(YAML.safe_load(File.read(metric_definition_path))).to include("name" => "some name")
- end
- end
-
context 'with multiple file names' do
let(:key_paths) { ['counts_weekly.test_metric', 'counts_weekly.test1_metric'] }
diff --git a/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb b/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb
deleted file mode 100644
index d7184c89933..00000000000
--- a/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb
+++ /dev/null
@@ -1,153 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::AlertManagement::Payload::ManagedPrometheus do
- let_it_be(:project) { create(:project) }
-
- let(:raw_payload) { {} }
-
- let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) }
-
- it_behaves_like 'subclass has expected api'
-
- shared_context 'with gitlab alert' do
- let_it_be(:gitlab_alert) { create(:prometheus_alert, project: project) }
- let(:metric_id) { gitlab_alert.prometheus_metric_id.to_s }
- let(:alert_id) { gitlab_alert.id.to_s }
- end
-
- describe '#metric_id' do
- subject { parsed_payload.metric_id }
-
- it { is_expected.to be_nil }
-
- context 'with gitlab_alert_id' do
- let(:raw_payload) { { 'labels' => { 'gitlab_alert_id' => '12' } } }
-
- it { is_expected.to eq(12) }
- end
- end
-
- describe '#gitlab_prometheus_alert_id' do
- subject { parsed_payload.gitlab_prometheus_alert_id }
-
- it { is_expected.to be_nil }
-
- context 'with gitlab_alert_id' do
- let(:raw_payload) { { 'labels' => { 'gitlab_prometheus_alert_id' => '12' } } }
-
- it { is_expected.to eq(12) }
- end
- end
-
- describe '#gitlab_alert' do
- subject { parsed_payload.gitlab_alert }
-
- context 'without alert info in payload' do
- it { is_expected.to be_nil }
- end
-
- context 'with metric id in payload' do
- let(:raw_payload) { { 'labels' => { 'gitlab_alert_id' => metric_id } } }
- let(:metric_id) { '-1' }
-
- context 'without matching alert' do
- it { is_expected.to be_nil }
- end
-
- context 'with matching alert' do
- include_context 'with gitlab alert'
-
- it { is_expected.to eq(gitlab_alert) }
-
- context 'when unclear which alert applies' do
- # With multiple alerts for different environments,
- # we can't be sure which prometheus alert the payload
- # belongs to
- let_it_be(:another_alert) do
- create(:prometheus_alert,
- prometheus_metric: gitlab_alert.prometheus_metric,
- project: project)
- end
-
- it { is_expected.to be_nil }
- end
- end
- end
-
- context 'with alert id' do
- # gitlab_prometheus_alert_id is a stronger identifier,
- # but was added after gitlab_alert_id; we won't
- # see it without gitlab_alert_id also present
- let(:raw_payload) do
- {
- 'labels' => {
- 'gitlab_alert_id' => metric_id,
- 'gitlab_prometheus_alert_id' => alert_id
- }
- }
- end
-
- context 'without matching alert' do
- let(:alert_id) { '-1' }
- let(:metric_id) { '-1' }
-
- it { is_expected.to be_nil }
- end
-
- context 'with matching alerts' do
- include_context 'with gitlab alert'
-
- it { is_expected.to eq(gitlab_alert) }
- end
- end
- end
-
- describe '#full_query' do
- subject { parsed_payload.full_query }
-
- it { is_expected.to be_nil }
-
- context 'with gitlab alert' do
- include_context 'with gitlab alert'
-
- let(:raw_payload) { { 'labels' => { 'gitlab_alert_id' => metric_id } } }
-
- it { is_expected.to eq(gitlab_alert.full_query) }
- end
-
- context 'with sufficient fallback info' do
- let(:raw_payload) { { 'generatorURL' => 'http://localhost:9090/graph?g0.expr=vector%281%29' } }
-
- it { is_expected.to eq('vector(1)') }
- end
- end
-
- describe '#environment' do
- subject { parsed_payload.environment }
-
- context 'with gitlab alert' do
- include_context 'with gitlab alert'
-
- let(:raw_payload) { { 'labels' => { 'gitlab_alert_id' => metric_id } } }
-
- it { is_expected.to eq(gitlab_alert.environment) }
- end
-
- context 'with sufficient fallback info' do
- let_it_be(:environment) { create(:environment, project: project, name: 'production') }
-
- let(:raw_payload) do
- {
- 'labels' => {
- 'gitlab_alert_id' => '-1',
- 'gitlab_environment_name' => 'production'
- }
- }
- end
-
- it { is_expected.to eq(environment) }
- end
- end
-end
diff --git a/spec/lib/gitlab/alert_management/payload_spec.rb b/spec/lib/gitlab/alert_management/payload_spec.rb
index efde7ed3772..fe14e6ae53c 100644
--- a/spec/lib/gitlab/alert_management/payload_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload_spec.rb
@@ -19,12 +19,6 @@ RSpec.describe Gitlab::AlertManagement::Payload do
let(:payload) { { 'monitoring_tool' => 'Prometheus' } }
it { is_expected.to be_a Gitlab::AlertManagement::Payload::Prometheus }
-
- context 'with gitlab-managed attributes' do
- let(:payload) { { 'monitoring_tool' => 'Prometheus', 'labels' => { 'gitlab_alert_id' => '12' } } }
-
- it { is_expected.to be_a Gitlab::AlertManagement::Payload::ManagedPrometheus }
- end
end
context 'with the payload specifying an unknown tool' do
@@ -43,12 +37,6 @@ RSpec.describe Gitlab::AlertManagement::Payload do
context 'with an externally managed prometheus payload' do
it { is_expected.to be_a Gitlab::AlertManagement::Payload::Prometheus }
end
-
- context 'with a self-managed prometheus payload' do
- let(:payload) { { 'labels' => { 'gitlab_alert_id' => '14' } } }
-
- it { is_expected.to be_a Gitlab::AlertManagement::Payload::ManagedPrometheus }
- end
end
context 'as an unknown tool' do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
index e9a9dfeca82..276f797536b 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
@@ -117,7 +117,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
})
end
- before(:all) do
+ before_all do
issue1.metrics.update!(first_added_to_board_at: 3.days.ago, first_mentioned_in_commit_at: 2.days.ago)
issue2.metrics.update!(first_added_to_board_at: 3.days.ago, first_mentioned_in_commit_at: 2.days.ago)
issue3.metrics.update!(first_added_to_board_at: 3.days.ago, first_mentioned_in_commit_at: 2.days.ago)
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event_spec.rb
index 24248c557bd..df0e4fb92a0 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::StageEvent, feature_category: :product_analytics do
+RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::StageEvent, feature_category: :product_analytics_data_management do
let(:instance) { described_class.new({}) }
it { expect(described_class).to respond_to(:name) }
diff --git a/spec/lib/gitlab/audit/auditor_spec.rb b/spec/lib/gitlab/audit/auditor_spec.rb
index 386d4157e90..bde72a656b8 100644
--- a/spec/lib/gitlab/audit/auditor_spec.rb
+++ b/spec/lib/gitlab/audit/auditor_spec.rb
@@ -25,35 +25,33 @@ RSpec.describe Gitlab::Audit::Auditor, feature_category: :audit_events do
describe '.audit' do
let(:audit!) { auditor.audit(context) }
+ before do
+ allow(Gitlab::Audit::Type::Definition).to receive(:defined?).and_call_original
+ allow(Gitlab::Audit::Type::Definition).to receive(:defined?).with(name).and_return(true)
+ end
+
context 'when yaml definition is not defined' do
before do
- allow(Gitlab::Audit::Type::Definition).to receive(:defined?).and_return(false)
- allow(Gitlab::AppLogger).to receive(:warn).and_return(app_logger)
+ allow(Gitlab::Audit::Type::Definition).to receive(:defined?).and_call_original
+ allow(Gitlab::Audit::Type::Definition).to receive(:defined?).with(name).and_return(false)
end
- it 'logs a warning when YAML is not defined' do
- expected_warning = {
- message: 'Logging audit events without an event type definition will be deprecated soon ' \
- '(https://docs.gitlab.com/ee/development/audit_event_guide/#event-type-definitions)',
- event_type: name
- }
-
- audit!
+ it 'raises an error' do
+ expected_error = "Audit event type YML file is not defined for audit_operation. " \
+ "Please read https://docs.gitlab.com/ee/development/audit_event_guide/" \
+ "#how-to-instrument-new-audit-events for adding a new audit event"
- expect(Gitlab::AppLogger).to have_received(:warn).with(expected_warning)
+ expect { audit! }.to raise_error(StandardError, expected_error)
end
end
context 'when yaml definition is defined' do
before do
allow(Gitlab::Audit::Type::Definition).to receive(:defined?).and_return(true)
- allow(Gitlab::AppLogger).to receive(:warn).and_return(app_logger)
end
- it 'does not log a warning when YAML is defined' do
- audit!
-
- expect(Gitlab::AppLogger).not_to have_received(:warn)
+ it 'does not raise an error' do
+ expect { audit! }.not_to raise_error
end
end
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 1a1e165c50a..b0ec46a3a0e 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -516,17 +516,23 @@ RSpec.describe Gitlab::Auth::AuthFinders, feature_category: :system_access do
set_bearer_token(token_3.token)
end
- it 'revokes the latest rotated token' do
- expect(token_1).not_to be_revoked
+ context 'with url related to access tokens' do
+ before do
+ set_header('SCRIPT_NAME', "/personal_access_tokens/#{token_3.id}/rotate")
+ end
+
+ it 'revokes the latest rotated token' do
+ expect(token_1).not_to be_revoked
- expect { find_user_from_access_token }.to raise_error(Gitlab::Auth::RevokedError)
+ expect { find_user_from_access_token }.to raise_error(Gitlab::Auth::RevokedError)
- expect(token_1.reload).to be_revoked
+ expect(token_1.reload).to be_revoked
+ end
end
- context 'when the feature flag is disabled' do
+ context 'with url not related to access tokens' do
before do
- stub_feature_flags(pat_reuse_detection: false)
+ set_header('SCRIPT_NAME', '/epics/1')
end
it 'does not revoke the latest rotated token' do
diff --git a/spec/lib/gitlab/auth/saml/auth_hash_spec.rb b/spec/lib/gitlab/auth/saml/auth_hash_spec.rb
index f1fad946f35..5286e22abc9 100644
--- a/spec/lib/gitlab/auth/saml/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/saml/auth_hash_spec.rb
@@ -40,6 +40,32 @@ RSpec.describe Gitlab::Auth::Saml::AuthHash do
end
end
+ describe '#azure_group_overage_claim?' do
+ context 'when the claim is not present' do
+ let(:raw_info_attr) { {} }
+
+ it 'is false' do
+ expect(saml_auth_hash.azure_group_overage_claim?).to eq(false)
+ end
+ end
+
+ context 'when the claim is present' do
+ # The value of the claim is irrelevant, but it's still included
+ # in the test response to keep tests as real-world as possible.
+ # https://learn.microsoft.com/en-us/security/zero-trust/develop/configure-tokens-group-claims-app-roles#group-overages
+ let(:raw_info_attr) do
+ {
+ 'http://schemas.microsoft.com/claims/groups.link' =>
+ ['https://graph.windows.net/8c750e43/users/e631c82c/getMemberObjects']
+ }
+ end
+
+ it 'is true' do
+ expect(saml_auth_hash.azure_group_overage_claim?).to eq(true)
+ end
+ end
+ end
+
describe '#authn_context' do
let(:auth_hash_data) do
{
diff --git a/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb b/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
index 876c23a91bd..e0ef45d5621 100644
--- a/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
+++ b/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
@@ -5,10 +5,13 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::TwoFactorAuthVerifier do
using RSpec::Parameterized::TableSyntax
- subject(:verifier) { described_class.new(user) }
+ let(:request) { instance_double(ActionDispatch::Request, session: session) }
+ let(:session) { {} }
let(:user) { build_stubbed(:user, otp_grace_period_started_at: Time.zone.now) }
+ subject(:verifier) { described_class.new(user, request) }
+
describe '#two_factor_authentication_enforced?' do
subject { verifier.two_factor_authentication_enforced? }
@@ -34,25 +37,69 @@ RSpec.describe Gitlab::Auth::TwoFactorAuthVerifier do
describe '#two_factor_authentication_required?' do
subject { verifier.two_factor_authentication_required? }
- where(:instance_level_enabled, :group_level_enabled, :should_be_required) do
- true | false | true
- false | true | true
- false | false | false
+ where(:instance_level_enabled, :group_level_enabled, :should_be_required, :provider_2FA) do
+ true | false | true | false
+ false | true | false | true
+ false | true | true | false
+ false | false | false | true
end
with_them do
before do
stub_application_setting(require_two_factor_authentication: instance_level_enabled)
allow(user).to receive(:require_two_factor_authentication_from_group?).and_return(group_level_enabled)
+ session[:provider_2FA] = provider_2FA
end
it { is_expected.to eq(should_be_required) }
end
+
+ context 'when feature by_pass_two_factor_for_current_session is disabled' do
+ where(:instance_level_enabled, :group_level_enabled, :should_be_required, :provider_2FA) do
+ true | false | true | false
+ false | true | true | true
+ false | false | false | true
+ end
+
+ with_them do
+ before do
+ allow(request).to receive(:session).and_return(session)
+ stub_feature_flags(by_pass_two_factor_for_current_session: false)
+ stub_application_setting(require_two_factor_authentication: instance_level_enabled)
+ allow(user).to receive(:require_two_factor_authentication_from_group?).and_return(group_level_enabled)
+ session[:provider_2FA] = provider_2FA
+ end
+
+ it { is_expected.to eq(should_be_required) }
+ end
+ end
+
+ context 'when request is nil' do
+ let(:request) { nil }
+
+ where(:instance_level_enabled, :group_level_enabled, :should_be_required, :provider_2FA) do
+ true | false | true | false
+ false | true | true | true
+ false | false | false | true
+ end
+
+ with_them do
+ before do
+ allow(request).to receive(:session).and_return(session)
+ stub_feature_flags(bypass_two_factor: false)
+ stub_application_setting(require_two_factor_authentication: instance_level_enabled)
+ allow(user).to receive(:require_two_factor_authentication_from_group?).and_return(group_level_enabled)
+ session[:provider_2FA] = provider_2FA
+ end
+
+ it { is_expected.to eq(should_be_required) }
+ end
+ end
end
describe '#current_user_needs_to_setup_two_factor?' do
it 'returns false when current_user is nil' do
- expect(described_class.new(nil).current_user_needs_to_setup_two_factor?).to be_falsey
+ expect(described_class.new(nil, request).current_user_needs_to_setup_two_factor?).to be_falsey
end
it 'returns false when current_user does not have temp email' do
diff --git a/spec/lib/gitlab/avatar_cache_spec.rb b/spec/lib/gitlab/avatar_cache_spec.rb
index c959c5d80b2..65cde195a61 100644
--- a/spec/lib/gitlab/avatar_cache_spec.rb
+++ b/spec/lib/gitlab/avatar_cache_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::AvatarCache, :clean_gitlab_redis_cache do
it "finds the cached value in the request store and doesn't execute the block" do
expect(thing).to receive(:avatar_path).once
- Gitlab::WithRequestStore.with_request_store do
+ Gitlab::SafeRequestStore.ensure_request_store do
described_class.by_email("foo@bar.com", 20, 2, true) do
thing.avatar_path
end
diff --git a/spec/lib/gitlab/background_migration/backfill_default_branch_protection_namespace_setting_spec.rb b/spec/lib/gitlab/background_migration/backfill_default_branch_protection_namespace_setting_spec.rb
new file mode 100644
index 00000000000..62c9e240b7a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_default_branch_protection_namespace_setting_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillDefaultBranchProtectionNamespaceSetting,
+ schema: 20230724071541,
+ feature_category: :database do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:namespace_settings_table) { table(:namespace_settings) }
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: 1,
+ end_id: 30,
+ batch_table: :namespace_settings,
+ batch_column: :namespace_id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ before do
+ namespaces_table.create!(id: 1, name: 'group_namespace', path: 'path-1', type: 'Group',
+ default_branch_protection: 0)
+ namespaces_table.create!(id: 2, name: 'user_namespace', path: 'path-2', type: 'User', default_branch_protection: 1)
+ namespaces_table.create!(id: 3, name: 'user_three_namespace', path: 'path-3', type: 'User',
+ default_branch_protection: 2)
+ namespaces_table.create!(id: 4, name: 'group_four_namespace', path: 'path-4', type: 'Group',
+ default_branch_protection: 3)
+ namespaces_table.create!(id: 5, name: 'group_five_namespace', path: 'path-5', type: 'Group',
+ default_branch_protection: 4)
+
+ namespace_settings_table.create!(namespace_id: 1, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: 2, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: 3, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: 4, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: 5, default_branch_protection_defaults: {})
+ end
+
+ it 'updates default_branch_protection_defaults to a correct value', :aggregate_failures do
+ expect(ActiveRecord::QueryRecorder.new { perform_migration }.count).to eq(16)
+
+ expect(migrated_attribute(1)).to eq({ "allow_force_push" => true,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 30 }] })
+ expect(migrated_attribute(2)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 30 }] })
+ expect(migrated_attribute(3)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 40 }],
+ "allowed_to_push" => [{ "access_level" => 40 }] })
+ expect(migrated_attribute(4)).to eq({ "allow_force_push" => true,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 40 }] })
+ expect(migrated_attribute(5)).to eq({ "allow_force_push" => true,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 40 }],
+ "developer_can_initial_push" => true })
+ end
+
+ def migrated_attribute(namespace_id)
+ namespace_settings_table.find(namespace_id).default_branch_protection_defaults
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_without_pipeline_artifacts_size_job_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_without_pipeline_artifacts_size_job_spec.rb
new file mode 100644
index 00000000000..c85636f4998
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_without_pipeline_artifacts_size_job_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectStatisticsStorageSizeWithoutPipelineArtifactsSizeJob,
+ schema: 20230719083202,
+ feature_category: :consumables_cost_management do
+ include MigrationHelpers::ProjectStatisticsHelper
+
+ include_context 'when backfilling project statistics'
+
+ let(:default_pipeline_artifacts_size) { 5 }
+ let(:default_stats) do
+ {
+ repository_size: 1,
+ wiki_size: 1,
+ lfs_objects_size: 1,
+ build_artifacts_size: 1,
+ packages_size: 1,
+ snippets_size: 1,
+ uploads_size: 1,
+ pipeline_artifacts_size: default_pipeline_artifacts_size,
+ storage_size: default_storage_size
+ }
+ end
+
+ describe '#filter_batch' do
+ it 'filters out project_statistics with no artifacts size' do
+ project_statistics = generate_records(default_projects, project_statistics_table, default_stats)
+ project_statistics_table.create!(
+ project_id: proj5.id,
+ namespace_id: proj5.namespace_id,
+ repository_size: 1,
+ wiki_size: 1,
+ lfs_objects_size: 1,
+ build_artifacts_size: 1,
+ packages_size: 1,
+ snippets_size: 1,
+ pipeline_artifacts_size: 0,
+ uploads_size: 1,
+ storage_size: 7
+ )
+
+ expected = project_statistics.map(&:id)
+ actual = migration.filter_batch(project_statistics_table).pluck(:id)
+
+ expect(actual).to match_array(expected)
+ end
+ end
+
+ describe '#perform' do
+ subject(:perform_migration) { migration.perform }
+
+ context 'when project_statistics backfill runs' do
+ before do
+ generate_records(default_projects, project_statistics_table, default_stats)
+ end
+
+ context 'when storage_size includes pipeline_artifacts_size' do
+ it 'removes pipeline_artifacts_size from storage_size' do
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ expect(project_statistics_table.pluck(:storage_size).uniq).to match_array([default_storage_size])
+
+ perform_migration
+
+ expect(project_statistics_table.pluck(:storage_size).uniq).to match_array(
+ [default_storage_size - default_pipeline_artifacts_size]
+ )
+ expect(::Namespaces::ScheduleAggregationWorker).to have_received(:perform_async).exactly(4).times
+ end
+ end
+
+ context 'when storage_size does not include default_pipeline_artifacts_size' do
+ it 'does not update the record' do
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ proj_stat = project_statistics_table.last
+ expect(proj_stat.storage_size).to eq(default_storage_size)
+ proj_stat.storage_size = default_storage_size - default_pipeline_artifacts_size
+ proj_stat.save!
+
+ perform_migration
+
+ expect(project_statistics_table.pluck(:storage_size).uniq).to match_array(
+ [default_storage_size - default_pipeline_artifacts_size]
+ )
+ expect(::Namespaces::ScheduleAggregationWorker).to have_received(:perform_async).exactly(3).times
+ end
+ end
+ end
+
+ it 'coerces a null wiki_size to 0' do
+ project_statistics = create_project_stats(projects, namespaces, default_stats, { wiki_size: nil })
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ migration = create_migration(end_id: project_statistics.project_id)
+
+ migration.perform
+
+ project_statistics.reload
+ expect(project_statistics.storage_size).to eq(6)
+ end
+
+ it 'coerces a null snippets_size to 0' do
+ project_statistics = create_project_stats(projects, namespaces, default_stats, { snippets_size: nil })
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ migration = create_migration(end_id: project_statistics.project_id)
+
+ migration.perform
+
+ project_statistics.reload
+ expect(project_statistics.storage_size).to eq(6)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb b/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb
new file mode 100644
index 00000000000..5f5dcb35836
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::FixAllowDescendantsOverrideDisabledSharedRunners, schema: 20230802085923, feature_category: :runner_fleet do # rubocop:disable Layout/LineLength
+ let(:namespaces) { table(:namespaces) }
+
+ let!(:valid_enabled) do
+ namespaces.create!(name: 'valid_enabled', path: 'valid_enabled',
+ shared_runners_enabled: true,
+ allow_descendants_override_disabled_shared_runners: false)
+ end
+
+ let!(:invalid_enabled) do
+ namespaces.create!(name: 'invalid_enabled', path: 'invalid_enabled',
+ shared_runners_enabled: true,
+ allow_descendants_override_disabled_shared_runners: true)
+ end
+
+ let!(:disabled_and_overridable) do
+ namespaces.create!(name: 'disabled_and_overridable', path: 'disabled_and_overridable',
+ shared_runners_enabled: false,
+ allow_descendants_override_disabled_shared_runners: true)
+ end
+
+ let!(:disabled_and_unoverridable) do
+ namespaces.create!(name: 'disabled_and_unoverridable', path: 'disabled_and_unoverridable',
+ shared_runners_enabled: false,
+ allow_descendants_override_disabled_shared_runners: false)
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: namespaces.minimum(:id),
+ end_id: namespaces.maximum(:id),
+ batch_table: :namespaces,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ it 'fixes invalid allow_descendants_override_disabled_shared_runners and does not affect others' do
+ expect do
+ described_class.new(**migration_attrs).perform
+ end.to change { invalid_enabled.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
+ .and not_change { valid_enabled.reload.allow_descendants_override_disabled_shared_runners }.from(false)
+ .and not_change { disabled_and_overridable.reload.allow_descendants_override_disabled_shared_runners }.from(true)
+ .and not_change { disabled_and_unoverridable.reload.allow_descendants_override_disabled_shared_runners }
+ .from(false)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/redis/backfill_project_pipeline_status_ttl_spec.rb b/spec/lib/gitlab/background_migration/redis/backfill_project_pipeline_status_ttl_spec.rb
index e3b1b67cb40..c52d1b4c9f2 100644
--- a/spec/lib/gitlab/background_migration/redis/backfill_project_pipeline_status_ttl_spec.rb
+++ b/spec/lib/gitlab/background_migration/redis/backfill_project_pipeline_status_ttl_spec.rb
@@ -26,7 +26,16 @@ RSpec.describe Gitlab::BackgroundMigration::Redis::BackfillProjectPipelineStatus
describe '#scan_match_pattern' do
it "finds all the required keys only" do
- expect(redis.scan('0').second).to match_array(keys + invalid_keys)
+ cursor = '0'
+ scanned = []
+ loop do
+ # multiple scans are performed if it is a Redis cluster
+ cursor, result = redis.scan(cursor)
+ scanned.concat(result)
+ break if cursor == '0'
+ end
+
+ expect(scanned).to match_array(keys + invalid_keys)
expect(subject.redis.scan_each(match: subject.scan_match_pattern).to_a).to contain_exactly(*keys)
end
end
diff --git a/spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb b/spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb
index 582c0fe1b1b..af8b5240e40 100644
--- a/spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveBackfilledJobArtifactsExpireAt
describe '#perform' do
let(:job_artifact) { table(:ci_job_artifacts, database: :ci) }
+ let(:jobs) { table(:ci_builds, database: :ci) { |model| model.primary_key = :id } }
let(:test_worker) do
described_class.new(
@@ -85,7 +86,7 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveBackfilledJobArtifactsExpireAt
private
def create_job_artifact(id:, file_type:, expire_at:)
- job = table(:ci_builds, database: :ci).create!(id: id, partition_id: 100)
+ job = jobs.create!(partition_id: 100)
job_artifact.create!(
id: id, job_id: job.id, expire_at: expire_at, project_id: project.id,
file_type: file_type, partition_id: 100
diff --git a/spec/lib/gitlab/blame_spec.rb b/spec/lib/gitlab/blame_spec.rb
index f636ce283ae..bfe2b7d1360 100644
--- a/spec/lib/gitlab/blame_spec.rb
+++ b/spec/lib/gitlab/blame_spec.rb
@@ -33,12 +33,18 @@ RSpec.describe Gitlab::Blame do
expect(subject.count).to eq(18)
expect(subject[0][:commit].sha).to eq('913c66a37b4a45b9769037c55c2d238bd0942d2e')
expect(subject[0][:lines]).to eq(["require 'fileutils'", "require 'open3'", ""])
+ expect(subject[0][:span]).to eq(3)
+ expect(subject[0][:lineno]).to eq(1)
expect(subject[1][:commit].sha).to eq('874797c3a73b60d2187ed6e2fcabd289ff75171e')
expect(subject[1][:lines]).to eq(["module Popen", " extend self"])
+ expect(subject[1][:span]).to eq(2)
+ expect(subject[1][:lineno]).to eq(4)
expect(subject[-1][:commit].sha).to eq('913c66a37b4a45b9769037c55c2d238bd0942d2e')
expect(subject[-1][:lines]).to eq([" end", "end"])
+ expect(subject[-1][:span]).to eq(2)
+ expect(subject[-1][:lineno]).to eq(36)
end
context 'with a range 1..5' do
diff --git a/spec/lib/gitlab/cache/json_cache_spec.rb b/spec/lib/gitlab/cache/json_cache_spec.rb
index 05126319ef9..1904e42f937 100644
--- a/spec/lib/gitlab/cache/json_cache_spec.rb
+++ b/spec/lib/gitlab/cache/json_cache_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Cache::JsonCache, feature_category: :shared do
describe '#active?' do
context 'when backend respond to active? method' do
it 'delegates to the underlying cache implementation' do
- backend = instance_double(Gitlab::NullRequestStore, active?: false)
+ backend = instance_double(Gitlab::SafeRequestStore::NullStore, active?: false)
cache = described_class.new(namespace: namespace, backend: backend)
diff --git a/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb b/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb
index c4ec393c3ac..8afd5c2bfcd 100644
--- a/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb
+++ b/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe Gitlab::Cache::JsonCaches::JsonKeyed, feature_category: :shared d
current_cache = { '_other_revision_' => '_other_value_' }.merge(nested_cache_result).to_json
allow(backend).to receive(:read).with(expanded_key).and_return(current_cache)
- expect(cache.read(key, BroadcastMessage)).to eq(broadcast_message)
+ expect(cache.read(key, System::BroadcastMessage)).to eq(broadcast_message)
end
end
end
diff --git a/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb b/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb
index 6e98cdd74ce..f408bbf8d25 100644
--- a/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb
+++ b/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::Cache::JsonCaches::RedisKeyed, feature_category: :shared
allow(backend).to receive(:read).with(expanded_key).and_return(true)
expect(Gitlab::Json).to receive(:parse).with("true").and_call_original
- expect(cache.read(key, BroadcastMessage)).to eq(true)
+ expect(cache.read(key, System::BroadcastMessage)).to eq(true)
end
end
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::Cache::JsonCaches::RedisKeyed, feature_category: :shared
allow(backend).to receive(:read).with(expanded_key).and_return(false)
expect(Gitlab::Json).to receive(:parse).with("false").and_call_original
- expect(cache.read(key, BroadcastMessage)).to eq(false)
+ expect(cache.read(key, System::BroadcastMessage)).to eq(false)
end
end
end
diff --git a/spec/lib/gitlab/checks/branch_check_spec.rb b/spec/lib/gitlab/checks/branch_check_spec.rb
index 9950d4dbd12..c3d6b9510e5 100644
--- a/spec/lib/gitlab/checks/branch_check_spec.rb
+++ b/spec/lib/gitlab/checks/branch_check_spec.rb
@@ -38,6 +38,18 @@ RSpec.describe Gitlab::Checks::BranchCheck, feature_category: :source_code_manag
expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a branch with a 40-character hexadecimal branch name.")
end
+ it "prohibits 64-character hexadecimal branch names" do
+ allow(subject).to receive(:branch_name).and_return("09b9fd3ea68e9b95a51b693a29568c898e27d1476bbd83c825664f18467fc175")
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a branch with a 40-character hexadecimal branch name.")
+ end
+
+ it "prohibits 64-character hexadecimal branch names as the start of a path" do
+ allow(subject).to receive(:branch_name).and_return("09b9fd3ea68e9b95a51b693a29568c898e27d1476bbd83c825664f18467fc175/test")
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a branch with a 40-character hexadecimal branch name.")
+ end
+
it "doesn't prohibit a nested hexadecimal in a branch name" do
allow(subject).to receive(:branch_name).and_return("267208abfe40e546f5e847444276f7d43a39503e-fix")
diff --git a/spec/lib/gitlab/checks/file_size_check/allow_existing_oversized_blobs_spec.rb b/spec/lib/gitlab/checks/file_size_check/allow_existing_oversized_blobs_spec.rb
deleted file mode 100644
index 3b52d2e1364..00000000000
--- a/spec/lib/gitlab/checks/file_size_check/allow_existing_oversized_blobs_spec.rb
+++ /dev/null
@@ -1,86 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Checks::FileSizeCheck::AllowExistingOversizedBlobs, feature_category: :source_code_management do
- subject { checker.find }
-
- let_it_be(:project) { create(:project, :public, :repository) }
- let(:checker) do
- described_class.new(
- project: project,
- changes: changes,
- file_size_limit_megabytes: 1)
- end
-
- describe '#find' do
- let(:branch_name) { SecureRandom.uuid }
- let(:other_branch_name) { SecureRandom.uuid }
- let(:filename) { 'log.log' }
- let(:create_file) do
- project.repository.create_file(
- project.owner,
- filename,
- initial_contents,
- branch_name: branch_name,
- message: 'whatever'
- )
- end
-
- let(:changed_ref) do
- project.repository.update_file(
- project.owner,
- filename,
- changed_contents,
- branch_name: other_branch_name,
- start_branch_name: branch_name,
- message: 'whatever'
- )
- end
-
- let(:changes) { [oldrev: create_file, newrev: changed_ref] }
-
- before do
- # set up a branch
- create_file
-
- # branch off that branch
- changed_ref
-
- # delete stuff so it can be picked up by new_blobs
- project.repository.delete_branch(other_branch_name)
- end
-
- context 'when changing from valid to oversized' do
- let(:initial_contents) { 'a' }
- let(:changed_contents) { 'a' * ((2**20) + 1) } # 1 MB + 1 byte
-
- it 'returns an array with blobs that became oversized' do
- blob = subject.first
- expect(blob.path).to eq(filename)
- expect(subject).to contain_exactly(blob)
- end
- end
-
- context 'when changing from oversized to oversized' do
- let(:initial_contents) { 'a' * ((2**20) + 1) } # 1 MB + 1 byte
- let(:changed_contents) { 'a' * ((2**20) + 2) } # 1 MB + 1 byte
-
- it { is_expected.to be_blank }
- end
-
- context 'when changing from oversized to valid' do
- let(:initial_contents) { 'a' * ((2**20) + 1) } # 1 MB + 1 byte
- let(:changed_contents) { 'aa' }
-
- it { is_expected.to be_blank }
- end
-
- context 'when changing from valid to valid' do
- let(:initial_contents) { 'abc' }
- let(:changed_contents) { 'def' }
-
- it { is_expected.to be_blank }
- end
- end
-end
diff --git a/spec/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs_spec.rb b/spec/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs_spec.rb
new file mode 100644
index 00000000000..bea0c02cfb8
--- /dev/null
+++ b/spec/lib/gitlab/checks/file_size_check/hook_environment_aware_any_oversized_blobs_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs, feature_category: :source_code_management do
+ let_it_be(:project) { create(:project, :small_repo) }
+ let(:repository) { project.repository }
+ let(:file_size_limit) { 1 }
+ let(:any_quarantined_blobs) do
+ described_class.new(
+ project: project,
+ changes: changes,
+ file_size_limit_megabytes: file_size_limit)
+ end
+
+ let(:changes) { [{ newrev: 'master' }] }
+
+ describe '#find' do
+ subject { any_quarantined_blobs.find }
+
+ let(:stubbed_result) { 'stubbed' }
+
+ it 'returns the result from AnyOversizedBlobs' do
+ expect_next_instance_of(Gitlab::Checks::FileSizeCheck::AnyOversizedBlobs) do |instance|
+ expect(instance).to receive(:find).and_return(stubbed_result)
+ end
+
+ expect(subject).to eq(stubbed_result)
+ end
+
+ context 'with hook env' do
+ context 'with hook environment' do
+ let(:git_env) do
+ {
+ 'GIT_OBJECT_DIRECTORY_RELATIVE' => "objects",
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => ['/dir/one', '/dir/two']
+ }
+ end
+
+ before do
+ allow(Gitlab::Git::HookEnv).to receive(:all).with(repository.gl_repository).and_return(git_env)
+ end
+
+ it 'returns an emtpy array' do
+ expect(subject).to eq([])
+ end
+
+ context 'when the file is over the limit' do
+ let(:file_size_limit) { 0 }
+
+ context 'when the blob does not exist in the repo' do
+ before do
+ allow(repository.gitaly_commit_client).to receive(:object_existence_map).and_return(Hash.new { false })
+ end
+
+ it 'returns an array with the blobs that are over the limit' do
+ expect(subject.size).to eq(1)
+ expect(subject.first).to be_kind_of(Gitlab::Git::Blob)
+ end
+ end
+
+ context 'when the blob exists in the repo' do
+ before do
+ allow(repository.gitaly_commit_client).to receive(:object_existence_map).and_return(Hash.new { true })
+ end
+
+ it 'filters out the blobs in the repo' do
+ expect(subject).to eq([])
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/global_file_size_check_spec.rb b/spec/lib/gitlab/checks/global_file_size_check_spec.rb
index 9ea0c73b1c7..a2b3ee0f761 100644
--- a/spec/lib/gitlab/checks/global_file_size_check_spec.rb
+++ b/spec/lib/gitlab/checks/global_file_size_check_spec.rb
@@ -14,13 +14,13 @@ RSpec.describe Gitlab::Checks::GlobalFileSizeCheck, feature_category: :source_co
it 'does not log' do
expect(subject).not_to receive(:log_timed)
expect(Gitlab::AppJsonLogger).not_to receive(:info)
- expect(Gitlab::Checks::FileSizeCheck::AllowExistingOversizedBlobs).not_to receive(:new)
+ expect(Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs).not_to receive(:new)
subject.validate!
end
end
it 'checks for file sizes' do
- expect_next_instance_of(Gitlab::Checks::FileSizeCheck::AllowExistingOversizedBlobs,
+ expect_next_instance_of(Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs,
project: project,
changes: changes,
file_size_limit_megabytes: 100
@@ -32,5 +32,35 @@ RSpec.describe Gitlab::Checks::GlobalFileSizeCheck, feature_category: :source_co
expect(Gitlab::AppJsonLogger).to receive(:info).with('Checking for blobs over the file size limit')
subject.validate!
end
+
+ context 'when there are oversized blobs' do
+ let(:blob_double) { instance_double(Gitlab::Git::Blob, size: 10) }
+
+ before do
+ allow_next_instance_of(Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs,
+ project: project,
+ changes: changes,
+ file_size_limit_megabytes: 100
+ ) do |check|
+ allow(check).to receive(:find).and_return([blob_double])
+ end
+ end
+
+ it 'logs a message with blob size and raises an exception' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with('Checking for blobs over the file size limit')
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(message: 'Found blob over global limit', blob_sizes: [10])
+ expect { subject.validate! }.to raise_exception(Gitlab::GitAccess::ForbiddenError)
+ end
+
+ context 'when the enforce_global_file_size_limit feature flag is disabled' do
+ before do
+ stub_feature_flags(enforce_global_file_size_limit: false)
+ end
+
+ it 'does not raise an exception' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/artifacts/decompressed_artifact_size_validator_spec.rb b/spec/lib/gitlab/ci/artifacts/decompressed_artifact_size_validator_spec.rb
index ef39a431d63..47d91e2478e 100644
--- a/spec/lib/gitlab/ci/artifacts/decompressed_artifact_size_validator_spec.rb
+++ b/spec/lib/gitlab/ci/artifacts/decompressed_artifact_size_validator_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Ci::Artifacts::DecompressedArtifactSizeValidator, feature
let(:gzip_valid?) { true }
let(:validator) { instance_double(::Gitlab::Ci::DecompressedGzipSizeValidator, valid?: gzip_valid?) }
- before(:all) do
+ before_all do
Zlib::GzipWriter.open(file_path) do |gz|
gz.write('Hello World!')
end
diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb
index 511036efd37..f4bc706f9b4 100644
--- a/spec/lib/gitlab/ci/components/instance_path_spec.rb
+++ b/spec/lib/gitlab/ci/components/instance_path_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
create(:release, project: existing_project, sha: 'sha-1', released_at: Time.zone.now)
end
- before(:all) do
+ before_all do
# Previous release
create(:release, project: existing_project, sha: 'sha-2', released_at: Time.zone.now - 1.day)
end
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
index 10c1d92e209..dd15b049b9b 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -1,117 +1,132 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper' # Change this to fast spec helper when FF `ci_refactor_external_rules` is removed
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category: :pipeline_composition do
let(:factory) do
- Gitlab::Config::Entry::Factory.new(described_class)
- .value(config)
+ Gitlab::Config::Entry::Factory.new(described_class).value(config)
end
subject(:entry) { factory.create! }
- describe '.new' do
- shared_examples 'an invalid config' do |error_message|
- it { is_expected.not_to be_valid }
+ before do
+ entry.compose!
+ end
+
+ shared_examples 'a valid config' do
+ it { is_expected.to be_valid }
+
+ it 'returns the expected value' do
+ expect(entry.value).to eq(config.compact)
+ end
- it 'has errors' do
- expect(entry.errors).to include(error_message)
+ context 'when FF `ci_refactor_external_rules` is disabled' do
+ before do
+ stub_feature_flags(ci_refactor_external_rules: false)
end
+
+ it 'returns the expected value' do
+ expect(entry.value).to eq(config)
+ end
+ end
+ end
+
+ shared_examples 'an invalid config' do |error_message|
+ it { is_expected.not_to be_valid }
+
+ it 'has errors' do
+ expect(entry.errors).to include(error_message)
end
+ end
- context 'when specifying an if: clause' do
- let(:config) { { if: '$THIS || $THAT' } }
+ context 'when specifying an if: clause' do
+ let(:config) { { if: '$THIS || $THAT' } }
- it { is_expected.to be_valid }
+ it_behaves_like 'a valid config'
- context 'with when:' do
- let(:config) { { if: '$THIS || $THAT', when: 'never' } }
+ context 'with when:' do
+ let(:config) { { if: '$THIS || $THAT', when: 'never' } }
- it { is_expected.to be_valid }
- end
+ it_behaves_like 'a valid config'
end
- context 'when specifying an exists: clause' do
- let(:config) { { exists: './this.md' } }
+ context 'with when: <invalid string>' do
+ let(:config) { { if: '$THIS || $THAT', when: 'on_success' } }
- it { is_expected.to be_valid }
+ it_behaves_like 'an invalid config', /when unknown value: on_success/
end
- context 'using a list of multiple expressions' do
- let(:config) { { if: ['$MY_VAR == "this"', '$YOUR_VAR == "that"'] } }
+ context 'with when: null' do
+ let(:config) { { if: '$THIS || $THAT', when: nil } }
- it_behaves_like 'an invalid config', /invalid expression syntax/
+ it_behaves_like 'a valid config'
end
- context 'when specifying an invalid if: clause expression' do
- let(:config) { { if: ['$MY_VAR =='] } }
+ context 'when if: clause is invalid' do
+ let(:config) { { if: '$MY_VAR ==' } }
it_behaves_like 'an invalid config', /invalid expression syntax/
end
- context 'when specifying an if: clause expression with an invalid token' do
- let(:config) { { if: ['$MY_VAR == 123'] } }
+ context 'when if: clause has an integer operand' do
+ let(:config) { { if: '$MY_VAR == 123' } }
it_behaves_like 'an invalid config', /invalid expression syntax/
end
- context 'when using invalid regex in an if: clause' do
- let(:config) { { if: ['$MY_VAR =~ /some ( thing/'] } }
+ context 'when if: clause has invalid regex' do
+ let(:config) { { if: '$MY_VAR =~ /some ( thing/' } }
it_behaves_like 'an invalid config', /invalid expression syntax/
end
- context 'when using an if: clause with lookahead regex character "?"' do
+ context 'when if: clause has lookahead regex character "?"' do
let(:config) { { if: '$CI_COMMIT_REF =~ /^(?!master).+/' } }
it_behaves_like 'an invalid config', /invalid expression syntax/
end
- context 'when specifying unknown policy' do
- let(:config) { { invalid: :something } }
+ context 'when if: clause has array of expressions' do
+ let(:config) { { if: ['$MY_VAR == "this"', '$YOUR_VAR == "that"'] } }
- it_behaves_like 'an invalid config', /unknown keys: invalid/
+ it_behaves_like 'an invalid config', /invalid expression syntax/
end
+ end
+
+ context 'when specifying an exists: clause' do
+ let(:config) { { exists: './this.md' } }
- context 'when clause is empty' do
- let(:config) { {} }
+ it_behaves_like 'a valid config'
- it_behaves_like 'an invalid config', /can't be blank/
+ context 'when array' do
+ let(:config) { { exists: ['./this.md', './that.md'] } }
+
+ it_behaves_like 'a valid config'
end
- context 'when policy strategy does not match' do
- let(:config) { 'string strategy' }
+ context 'when null' do
+ let(:config) { { exists: nil } }
- it_behaves_like 'an invalid config', /should be a hash/
+ it_behaves_like 'a valid config'
end
end
- describe '#value' do
- subject(:value) { entry.value }
-
- context 'when specifying an if: clause' do
- let(:config) { { if: '$THIS || $THAT' } }
+ context 'when specifying an unknown keyword' do
+ let(:config) { { invalid: :something } }
- it 'returns the config' do
- expect(subject).to eq(if: '$THIS || $THAT')
- end
+ it_behaves_like 'an invalid config', /unknown keys: invalid/
+ end
- context 'with when:' do
- let(:config) { { if: '$THIS || $THAT', when: 'never' } }
+ context 'when config is blank' do
+ let(:config) { {} }
- it 'returns the config' do
- expect(subject).to eq(if: '$THIS || $THAT', when: 'never')
- end
- end
- end
+ it_behaves_like 'an invalid config', /can't be blank/
+ end
- context 'when specifying an exists: clause' do
- let(:config) { { exists: './test.md' } }
+ context 'when config type is invalid' do
+ let(:config) { 'invalid' }
- it 'returns the config' do
- expect(subject).to eq(exists: './test.md')
- end
- end
+ it_behaves_like 'an invalid config', /should be a hash/
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb
index d5988dbbb58..05db81abfc1 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper' # Change this to fast spec helper when FF `ci_refactor_external_rules` is removed
require_dependency 'active_model'
-RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules do
+RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pipeline_composition do
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.value(config)
@@ -77,23 +77,68 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules do
describe '#value' do
subject(:value) { entry.value }
- context 'with an "if"' do
- let(:config) do
- [{ if: '$THIS == "that"' }]
+ let(:config) do
+ [
+ { if: '$THIS == "that"' },
+ { if: '$SKIP', when: 'never' }
+ ]
+ end
+
+ it { is_expected.to eq([]) }
+
+ context 'when composed' do
+ before do
+ entry.compose!
end
- it { is_expected.to eq(config) }
+ it 'returns the composed entries value' do
+ expect(entry).to be_valid
+ is_expected.to eq(
+ [
+ { if: '$THIS == "that"' },
+ { if: '$SKIP', when: 'never' }
+ ]
+ )
+ end
+
+ context 'when invalid' do
+ let(:config) do
+ [
+ { if: '$THIS == "that"' },
+ { if: '$SKIP', invalid: 'invalid' }
+ ]
+ end
+
+ it 'returns the invalid config' do
+ expect(entry).not_to be_valid
+ is_expected.to eq(config)
+ end
+ end
end
- context 'with a list of two rules' do
- let(:config) do
- [
- { if: '$THIS == "that"' },
- { if: '$SKIP' }
- ]
+ context 'when FF `ci_refactor_external_rules` is disabled' do
+ before do
+ stub_feature_flags(ci_refactor_external_rules: false)
+ end
+
+ context 'with an "if"' do
+ let(:config) do
+ [{ if: '$THIS == "that"' }]
+ end
+
+ it { is_expected.to eq(config) }
end
- it { is_expected.to eq(config) }
+ context 'with a list of two rules' do
+ let(:config) do
+ [
+ { if: '$THIS == "that"' },
+ { if: '$SKIP' }
+ ]
+ end
+
+ it { is_expected.to eq(config) }
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/need_spec.rb b/spec/lib/gitlab/ci/config/entry/need_spec.rb
index ab2e8d4db78..eba9411560e 100644
--- a/spec/lib/gitlab/ci/config/entry/need_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/need_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::Ci::Config::Entry::Need do
+RSpec.describe ::Gitlab::Ci::Config::Entry::Need, feature_category: :pipeline_composition do
subject(:need) { described_class.new(config) }
shared_examples 'job type' do
@@ -219,6 +219,81 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do
it_behaves_like 'job type'
end
+
+ context 'when parallel:matrix has a value' do
+ before do
+ need.compose!
+ end
+
+ context 'and it is a string value' do
+ let(:config) do
+ { job: 'job_name', parallel: { matrix: [{ platform: 'p1', stack: 's1' }] } }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns job needs configuration' do
+ expect(need.value).to eq(
+ name: 'job_name',
+ artifacts: true,
+ optional: false,
+ parallel: { matrix: [{ "platform" => ['p1'], "stack" => ['s1'] }] }
+ )
+ end
+ end
+
+ it_behaves_like 'job type'
+ end
+
+ context 'and it is an array value' do
+ let(:config) do
+ { job: 'job_name', parallel: { matrix: [{ platform: %w[p1 p2], stack: %w[s1 s2] }] } }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns job needs configuration' do
+ expect(need.value).to eq(
+ name: 'job_name',
+ artifacts: true,
+ optional: false,
+ parallel: { matrix: [{ 'platform' => %w[p1 p2], 'stack' => %w[s1 s2] }] }
+ )
+ end
+ end
+
+ it_behaves_like 'job type'
+ end
+
+ context 'and it is a both an array and string value' do
+ let(:config) do
+ { job: 'job_name', parallel: { matrix: [{ platform: %w[p1 p2], stack: 's1' }] } }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns job needs configuration' do
+ expect(need.value).to eq(
+ name: 'job_name',
+ artifacts: true,
+ optional: false,
+ parallel: { matrix: [{ 'platform' => %w[p1 p2], 'stack' => ['s1'] }] }
+ )
+ end
+ end
+
+ it_behaves_like 'job type'
+ end
+ end
end
context 'with cross pipeline artifacts needs' do
diff --git a/spec/lib/gitlab/ci/config/entry/needs_spec.rb b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
index 489fbac68b2..d1a8a74ac06 100644
--- a/spec/lib/gitlab/ci/config/entry/needs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::Ci::Config::Entry::Needs do
+RSpec.describe ::Gitlab::Ci::Config::Entry::Needs, feature_category: :pipeline_composition do
subject(:needs) { described_class.new(config) }
before do
@@ -67,6 +67,141 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs do
end
end
+ context 'when needs value is a hash' do
+ context 'with a job value' do
+ let(:config) do
+ { job: 'job_name' }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+ end
+
+ context 'with a parallel value that is a numeric value' do
+ let(:config) do
+ { job: 'job_name', parallel: 2 }
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns errors about number values being invalid for needs:parallel' do
+ expect(needs.errors).to match_array(["needs config cannot use \"parallel: <number>\"."])
+ end
+ end
+ end
+ end
+
+ context 'when needs:parallel value is incorrect' do
+ context 'with a keyword that is not "matrix"' do
+ let(:config) do
+ [
+ { job: 'job_name', parallel: { not_matrix: [{ one: 'aaa', two: 'bbb' }] } }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns errors about incorrect matrix keyword' do
+ expect(needs.errors).to match_array([
+ 'need:parallel config contains unknown keys: not_matrix',
+ 'need:parallel config missing required keys: matrix'
+ ])
+ end
+ end
+ end
+
+ context 'with a number value' do
+ let(:config) { [{ job: 'job_name', parallel: 2 }] }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns errors about number values being invalid for needs:parallel' do
+ expect(needs.errors).to match_array(["needs config cannot use \"parallel: <number>\"."])
+ end
+ end
+ end
+ end
+
+ context 'when needs:parallel:matrix value is empty' do
+ let(:config) { [{ job: 'job_name', parallel: { matrix: {} } }] }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about incorrect type' do
+ expect(needs.errors).to contain_exactly(
+ 'need:parallel:matrix config should be an array of hashes')
+ end
+ end
+ end
+
+ context 'when needs:parallel:matrix value is incorrect' do
+ let(:config) { [{ job: 'job_name', parallel: { matrix: 'aaa' } }] }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about incorrect type' do
+ expect(needs.errors).to contain_exactly(
+ 'need:parallel:matrix config should be an array of hashes')
+ end
+ end
+ end
+
+ context 'when needs:parallel:matrix value is correct' do
+ context 'with a simple config' do
+ let(:config) do
+ [
+ { job: 'job_name', parallel: { matrix: [{ A: 'a1', B: 'b1' }] } }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+ end
+
+ context 'with a complex config' do
+ let(:config) do
+ [
+ {
+ job: 'job_name1',
+ artifacts: true,
+ parallel: { matrix: [{ A: %w[a1 a2], B: %w[b1 b2 b3], C: %w[c1 c2] }] }
+ },
+ {
+ job: 'job_name2',
+ parallel: {
+ matrix: [
+ { A: %w[a1 a2], D: %w[d1 d2] },
+ { E: %w[e1 e2], F: ['f1'] },
+ { C: %w[c1 c2 c3], G: %w[g1 g2], H: ['h1'] }
+ ]
+ }
+ }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+ end
+ end
+
context 'with too many cross pipeline dependencies' do
let(:limit) { described_class::NEEDS_CROSS_PIPELINE_DEPENDENCIES_LIMIT }
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 73bf2d422b7..d610c3ce2f6 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -48,6 +48,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports, feature_category: :pipeline_c
:terraform | 'tfplan.json'
:accessibility | 'gl-accessibility.json'
:cyclonedx | 'gl-sbom.cdx.zip'
+ :annotations | 'gl-annotations.json'
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb
index d917924f257..d8bd578be94 100644
--- a/spec/lib/gitlab/ci/config/external/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/context_spec.rb
@@ -57,6 +57,24 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
end
end
end
+
+ describe 'max_total_yaml_size_bytes' do
+ context 'when application setting `max_total_yaml_size_bytes` is requested and was never updated by the admin' do
+ it 'returns the default value `max_total_yaml_size_bytes`' do
+ expect(subject.max_total_yaml_size_bytes).to eq(157286400)
+ end
+ end
+
+ context 'when `max_total_yaml_size_bytes` was adjusted by the admin' do
+ before do
+ stub_application_setting(ci_max_total_yaml_size_bytes: 200000000)
+ end
+
+ it 'returns the updated value of application setting `max_total_yaml_size_bytes`' do
+ expect(subject.max_total_yaml_size_bytes).to eq(200000000)
+ end
+ end
+ end
end
describe '#set_deadline' do
diff --git a/spec/lib/gitlab/ci/config/external/file/base_spec.rb b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
index d6dd75f4b10..1415dbeb532 100644
--- a/spec/lib/gitlab/ci/config/external/file/base_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
@@ -254,7 +254,12 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe
describe '#load_and_validate_expanded_hash!' do
let(:location) { 'some/file/config.yml' }
let(:logger) { instance_double(::Gitlab::Ci::Pipeline::Logger, :instrument) }
- let(:context_params) { { sha: 'HEAD', variables: variables, project: project, logger: logger } }
+ let(:context_params) { { sha: 'HEAD', variables: variables, project: project, logger: logger, user: user } }
+ let(:user) { instance_double(User, id: 'test-user-id') }
+
+ before do
+ allow(logger).to receive(:instrument).and_yield
+ end
it 'includes instrumentation for loading and expanding the content' do
expect(logger).to receive(:instrument).once.ordered.with(:config_file_fetch_content_hash).and_yield
@@ -262,5 +267,26 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe
file.load_and_validate_expanded_hash!
end
+
+ context 'when the content is interpolated' do
+ let(:content) { "spec:\n inputs:\n website:\n---\nkey: value" }
+
+ subject(:file) { test_class.new({ inputs: { website: 'test' }, location: location, content: content }, ctx) }
+
+ it 'increments the ci_interpolation_users usage counter' do
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ .with('ci_interpolation_users', values: 'test-user-id')
+
+ file.load_and_validate_expanded_hash!
+ end
+ end
+
+ context 'when the content is not interpolated' do
+ it 'does not increment the ci_interpolation_users usage counter' do
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ file.load_and_validate_expanded_hash!
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/component_spec.rb b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
index 7e3406413d0..487690296b5 100644
--- a/spec/lib/gitlab/ci/config/external/file/component_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
@@ -41,14 +41,6 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
let(:params) { { component: 'some-value' } }
it { is_expected.to be_truthy }
-
- context 'when feature flag ci_include_components is disabled' do
- before do
- stub_feature_flags(ci_include_components: false)
- end
-
- it { is_expected.to be_falsey }
- end
end
context 'when component is not specified' do
diff --git a/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb
index 719c75dca80..cea65faccd7 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb
@@ -18,54 +18,26 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Matcher, feature_category:
describe '#process' do
subject(:process) { matcher.process(locations) }
- context 'with ci_include_components FF disabled' do
- before do
- stub_feature_flags(ci_include_components: false)
- end
-
- let(:locations) do
- [
- { local: 'file.yml' },
- { file: 'file.yml', project: 'namespace/project' },
- { remote: 'https://example.com/.gitlab-ci.yml' },
- { template: 'file.yml' },
- { artifact: 'generated.yml', job: 'test' }
- ]
- end
-
- it 'returns an array of file objects' do
- is_expected.to contain_exactly(
- an_instance_of(Gitlab::Ci::Config::External::File::Local),
- an_instance_of(Gitlab::Ci::Config::External::File::Project),
- an_instance_of(Gitlab::Ci::Config::External::File::Remote),
- an_instance_of(Gitlab::Ci::Config::External::File::Template),
- an_instance_of(Gitlab::Ci::Config::External::File::Artifact)
- )
- end
+ let(:locations) do
+ [
+ { local: 'file.yml' },
+ { file: 'file.yml', project: 'namespace/project' },
+ { component: 'gitlab.com/org/component@1.0' },
+ { remote: 'https://example.com/.gitlab-ci.yml' },
+ { template: 'file.yml' },
+ { artifact: 'generated.yml', job: 'test' }
+ ]
end
- context 'with ci_include_components FF enabled' do
- let(:locations) do
- [
- { local: 'file.yml' },
- { file: 'file.yml', project: 'namespace/project' },
- { component: 'gitlab.com/org/component@1.0' },
- { remote: 'https://example.com/.gitlab-ci.yml' },
- { template: 'file.yml' },
- { artifact: 'generated.yml', job: 'test' }
- ]
- end
-
- it 'returns an array of file objects' do
- is_expected.to contain_exactly(
- an_instance_of(Gitlab::Ci::Config::External::File::Local),
- an_instance_of(Gitlab::Ci::Config::External::File::Project),
- an_instance_of(Gitlab::Ci::Config::External::File::Component),
- an_instance_of(Gitlab::Ci::Config::External::File::Remote),
- an_instance_of(Gitlab::Ci::Config::External::File::Template),
- an_instance_of(Gitlab::Ci::Config::External::File::Artifact)
- )
- end
+ it 'returns an array of file objects' do
+ is_expected.to contain_exactly(
+ an_instance_of(Gitlab::Ci::Config::External::File::Local),
+ an_instance_of(Gitlab::Ci::Config::External::File::Project),
+ an_instance_of(Gitlab::Ci::Config::External::File::Component),
+ an_instance_of(Gitlab::Ci::Config::External::File::Remote),
+ an_instance_of(Gitlab::Ci::Config::External::File::Template),
+ an_instance_of(Gitlab::Ci::Config::External::File::Artifact)
+ )
end
context 'when a location is not valid' do
diff --git a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
index e7dd5bd5079..69b0524be9e 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
@@ -364,5 +364,77 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category:
end
end
end
+
+ describe '#verify_max_total_pipeline_size' do
+ let(:files) do
+ [
+ Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file1.yml' }, context),
+ Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file2.yml' }, context)
+ ]
+ end
+
+ let(:project_files) do
+ {
+ 'myfolder/file1.yml' => <<~YAML,
+ build:
+ script: echo Hello World
+ YAML
+ 'myfolder/file2.yml' => <<~YAML
+ include:
+ - local: myfolder/file1.yml
+ build:
+ script: echo Hello from the other file
+ YAML
+ }
+ end
+
+ context 'when pipeline tree size is within the limit' do
+ before do
+ stub_application_setting(ci_max_total_yaml_size_bytes: 10000)
+ end
+
+ it 'passes the verification' do
+ expect(process.all?(&:valid?)).to be_truthy
+ end
+ end
+
+ context 'when pipeline tree size is larger than the limit' do
+ before do
+ stub_application_setting(ci_max_total_yaml_size_bytes: 50)
+ end
+
+ let(:expected_error_class) { Gitlab::Ci::Config::External::Mapper::TooMuchDataInPipelineTreeError }
+
+ it 'raises a limit error' do
+ expect { process }.to raise_error(expected_error_class)
+ end
+ end
+
+ context 'when introduce_ci_max_total_yaml_size_bytes is disabled' do
+ before do
+ stub_feature_flags(introduce_ci_max_total_yaml_size_bytes: false)
+ end
+
+ context 'when pipeline tree size is within the limit' do
+ before do
+ stub_application_setting(ci_max_total_yaml_size_bytes: 10000)
+ end
+
+ it 'passes the verification' do
+ expect(process.all?(&:valid?)).to be_truthy
+ end
+ end
+
+ context 'when pipeline tree size is larger than the limit' do
+ before do
+ stub_application_setting(ci_max_total_yaml_size_bytes: 100)
+ end
+
+ it 'passes the verification' do
+ expect(process.all?(&:valid?)).to be_truthy
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 935b6989dd7..19113ce6a4e 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -425,17 +425,6 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel
output = processor.perform
expect(output.keys).to match_array([:image, :component_x_job])
end
-
- context 'when feature flag ci_include_components is disabled' do
- before do
- stub_feature_flags(ci_include_components: false)
- end
-
- it 'returns an error' do
- expect { processor.perform }
- .to raise_error(described_class::IncludeError, /does not have a valid subkey for include./)
- end
- end
end
context 'when a valid project file is defined' do
@@ -572,7 +561,17 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel
end
it 'raises IncludeError' do
- expect { subject }.to raise_error(described_class::IncludeError, /invalid include rule/)
+ expect { subject }.to raise_error(described_class::IncludeError, /contains unknown keys: changes/)
+ end
+
+ context 'when FF `ci_refactor_external_rules` is disabled' do
+ before do
+ stub_feature_flags(ci_refactor_external_rules: false)
+ end
+
+ it 'raises IncludeError' do
+ expect { subject }.to raise_error(described_class::IncludeError, /invalid include rule/)
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb
index 25b7998ef5e..8674af7ab65 100644
--- a/spec/lib/gitlab/ci/config/external/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb
@@ -76,8 +76,7 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
- 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}')
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /when unknown value: on_success/)
end
end
@@ -105,8 +104,7 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'on_success' }] }
it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
- 'invalid include rule: {:exists=>"Dockerfile", :when=>"on_success"}')
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /when unknown value: on_success/)
end
end
@@ -121,8 +119,94 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
let(:rule_hashes) { [{ changes: ['$MY_VAR'] }] }
it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
- 'invalid include rule: {:changes=>["$MY_VAR"]}')
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /contains unknown keys: changes/)
+ end
+ end
+
+ context 'when FF `ci_refactor_external_rules` is disabled' do
+ before do
+ stub_feature_flags(ci_refactor_external_rules: false)
+ end
+
+ context 'when there is no rule' do
+ let(:rule_hashes) {}
+
+ it { is_expected.to eq(true) }
+ end
+
+ it_behaves_like 'when there is a rule with if'
+
+ context 'when there is a rule with exists' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile' }] }
+
+ it_behaves_like 'when there is a rule with exists'
+ end
+
+ context 'when there is a rule with if and when' do
+ context 'with when: never' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'never' }] }
+
+ it_behaves_like 'when there is a rule with if', false, false
+ end
+
+ context 'with when: always' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'always' }] }
+
+ it_behaves_like 'when there is a rule with if'
+ end
+
+ context 'with when: <invalid string>' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}')
+ end
+ end
+
+ context 'with when: null' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: nil }] }
+
+ it_behaves_like 'when there is a rule with if'
+ end
+ end
+
+ context 'when there is a rule with exists and when' do
+ context 'with when: never' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'never' }] }
+
+ it_behaves_like 'when there is a rule with exists', false, false
+ end
+
+ context 'with when: always' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'always' }] }
+
+ it_behaves_like 'when there is a rule with exists'
+ end
+
+ context 'with when: <invalid string>' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'on_success' }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:exists=>"Dockerfile", :when=>"on_success"}')
+ end
+ end
+
+ context 'with when: null' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: nil }] }
+
+ it_behaves_like 'when there is a rule with exists'
+ end
+ end
+
+ context 'when there is a rule with changes' do
+ let(:rule_hashes) { [{ changes: ['$MY_VAR'] }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:changes=>["$MY_VAR"]}')
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/header/input_spec.rb b/spec/lib/gitlab/ci/config/header/input_spec.rb
index 73b5b8f9497..b5155dff6e8 100644
--- a/spec/lib/gitlab/ci/config/header/input_spec.rb
+++ b/spec/lib/gitlab/ci/config/header/input_spec.rb
@@ -46,12 +46,29 @@ RSpec.describe Gitlab::Ci::Config::Header::Input, feature_category: :pipeline_co
it_behaves_like 'a valid input'
end
- context 'when is a required required input' do
+ context 'when is a required input' do
let(:input_hash) { nil }
it_behaves_like 'a valid input'
end
+ context 'when given a valid type' do
+ where(:input_type) { ::Gitlab::Ci::Config::Interpolation::Inputs.input_types }
+
+ with_them do
+ let(:input_hash) { { type: input_type } }
+
+ it_behaves_like 'a valid input'
+ end
+ end
+
+ context 'when given an invalid type' do
+ let(:input_hash) { { type: 'datetime' } }
+ let(:expected_errors) { ['foo input type unknown value: datetime'] }
+
+ it_behaves_like 'an invalid input'
+ end
+
context 'when contains unknown keywords' do
let(:input_hash) { { test: 123 } }
let(:expected_errors) { ['foo config contains unknown keys: test'] }
diff --git a/spec/lib/gitlab/ci/interpolation/access_spec.rb b/spec/lib/gitlab/ci/config/interpolation/access_spec.rb
index f327377b7e3..ee414c209f7 100644
--- a/spec/lib/gitlab/ci/interpolation/access_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/access_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Gitlab::Ci::Interpolation::Access, feature_category: :pipeline_composition do
+RSpec.describe Gitlab::Ci::Config::Interpolation::Access, feature_category: :pipeline_composition do
subject { described_class.new(access, ctx) }
let(:access) do
@@ -46,4 +46,13 @@ RSpec.describe Gitlab::Ci::Interpolation::Access, feature_category: :pipeline_co
.to eq 'invalid interpolation access pattern'
end
end
+
+ context 'when a non-existent key is accessed' do
+ let(:access) { 'inputs.nonexistent' }
+
+ it 'returns an error' do
+ expect(subject).not_to be_valid
+ expect(subject.errors.first).to eq('unknown interpolation key: `nonexistent`')
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/interpolation/block_spec.rb b/spec/lib/gitlab/ci/config/interpolation/block_spec.rb
new file mode 100644
index 00000000000..bfaa4eb3e05
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/block_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::Block, feature_category: :pipeline_composition do
+ subject { described_class.new(block, data, ctx) }
+
+ let(:data) do
+ 'inputs.data'
+ end
+
+ let(:block) do
+ "$[[ #{data} ]]"
+ end
+
+ let(:ctx) do
+ { inputs: { data: 'abcdef' }, env: { 'ENV' => 'dev' } }
+ end
+
+ it 'knows its content' do
+ expect(subject.content).to eq 'inputs.data'
+ end
+
+ it 'properly evaluates the access pattern' do
+ expect(subject.value).to eq 'abcdef'
+ end
+
+ describe '.match' do
+ it 'matches each block in a string' do
+ expect { |b| described_class.match('$[[ access1 ]] $[[ access2 ]]', &b) }
+ .to yield_successive_args(['$[[ access1 ]]', 'access1'], ['$[[ access2 ]]', 'access2'])
+ end
+
+ it 'matches an empty block' do
+ expect { |b| described_class.match('$[[]]', &b) }
+ .to yield_with_args('$[[]]', '')
+ end
+
+ context 'when functions are specified in the block' do
+ it 'matches each block in a string' do
+ expect { |b| described_class.match('$[[ access1 | func1 ]] $[[ access2 | func1 | func2(0,1) ]]', &b) }
+ .to yield_successive_args(['$[[ access1 | func1 ]]', 'access1 | func1'],
+ ['$[[ access2 | func1 | func2(0,1) ]]', 'access2 | func1 | func2(0,1)'])
+ end
+ end
+ end
+
+ describe 'when functions are specified in the block' do
+ let(:function_string1) { 'truncate(1,5)' }
+ let(:data) { "inputs.data | #{function_string1}" }
+ let(:access_value) { 'abcdef' }
+
+ it 'returns the modified value' do
+ expect(subject).to be_valid
+ expect(subject.value).to eq('bcdef')
+ end
+
+ context 'when there is an access error' do
+ let(:data) { "inputs.undefined | #{function_string1}" }
+
+ it 'returns the access error' do
+ expect(subject).not_to be_valid
+ expect(subject.errors.first).to eq('unknown interpolation key: `undefined`')
+ end
+ end
+
+ context 'when there is a function error' do
+ let(:data) { 'inputs.data | undefined' }
+
+ it 'returns the function error' do
+ expect(subject).not_to be_valid
+ expect(subject.errors.first).to match(/no function matching `undefined`/)
+ end
+ end
+
+ context 'when multiple functions are specified' do
+ let(:function_string2) { 'truncate(2,2)' }
+ let(:data) { "inputs.data | #{function_string1} | #{function_string2}" }
+
+ it 'executes each function in the specified order' do
+ expect(subject.value).to eq('de')
+ end
+
+ context 'when the data has inconsistent spacing' do
+ let(:data) { "inputs.data|#{function_string1} | #{function_string2} " }
+
+ it 'executes each function in the specified order' do
+ expect(subject.value).to eq('de')
+ end
+ end
+
+ context 'when a stack of functions errors in the middle' do
+ let(:function_string2) { 'truncate(2)' }
+
+ it 'does not modify the value' do
+ expect(subject).not_to be_valid
+ expect(subject.errors.first).to match(/no function matching `truncate\(2\)`/)
+ expect(subject.instance_variable_get(:@value)).to be_nil
+ end
+ end
+
+ context 'when too many functions are specified' do
+ it 'returns error' do
+ stub_const('Gitlab::Ci::Config::Interpolation::Block::MAX_FUNCTIONS', 1)
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.first).to eq('too many functions in interpolation block')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/interpolation/config_spec.rb b/spec/lib/gitlab/ci/config/interpolation/config_spec.rb
new file mode 100644
index 00000000000..1731e954906
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/config_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::Config, feature_category: :pipeline_composition do
+ subject { described_class.new(YAML.safe_load(config)) }
+
+ let(:config) do
+ <<~CFG
+ test:
+ spec:
+ env: $[[ inputs.env ]]
+
+ $[[ inputs.key ]]:
+ name: $[[ inputs.key ]]
+ script: my-value
+ CFG
+ end
+
+ describe '.fabricate' do
+ subject { described_class.fabricate(config) }
+
+ context 'when given an Interpolation::Config' do
+ let(:config) { described_class.new(YAML.safe_load('yaml:')) }
+
+ it 'returns the given config' do
+ is_expected.to be(config)
+ end
+ end
+
+ context 'when given an unknown object' do
+ let(:config) { [] }
+
+ it 'raises an ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError, 'unknown interpolation config')
+ end
+ end
+ end
+
+ describe '#replace!' do
+ it 'replaces each of the nodes with a block return value' do
+ result = subject.replace! { |node| "abc#{node}cde" }
+
+ expect(result).to eq({
+ 'abctestcde' => { 'abcspeccde' => { 'abcenvcde' => 'abc$[[ inputs.env ]]cde' } },
+ 'abc$[[ inputs.key ]]cde' => {
+ 'abcnamecde' => 'abc$[[ inputs.key ]]cde',
+ 'abcscriptcde' => 'abcmy-valuecde'
+ }
+ })
+ expect(subject.to_h).to eq({
+ '$[[ inputs.key ]]' => { 'name' => '$[[ inputs.key ]]', 'script' => 'my-value' },
+ 'test' => { 'spec' => { 'env' => '$[[ inputs.env ]]' } }
+ })
+ end
+
+ context 'when config size is exceeded' do
+ before do
+ stub_const("#{described_class}::MAX_NODES", 7)
+ end
+
+ it 'returns a config size error' do
+ replaced = 0
+
+ subject.replace! { replaced += 1 }
+
+ expect(replaced).to eq 4
+ expect(subject.errors.size).to eq 1
+ expect(subject.errors.first).to eq 'config too large'
+ end
+ end
+
+ context 'when node size is exceeded' do
+ before do
+ stub_const("#{described_class}::MAX_NODE_SIZE", 1)
+ end
+
+ it 'returns a config size error' do
+ subject.replace! { |node| "abc#{node}cde" }
+
+ expect(subject.errors.size).to eq 1
+ expect(subject.errors.first).to eq 'config node too large'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/interpolation/context_spec.rb b/spec/lib/gitlab/ci/config/interpolation/context_spec.rb
index 2b126f4a8b3..c90866c986a 100644
--- a/spec/lib/gitlab/ci/interpolation/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/context_spec.rb
@@ -2,13 +2,27 @@
require 'fast_spec_helper'
-RSpec.describe Gitlab::Ci::Interpolation::Context, feature_category: :pipeline_composition do
+RSpec.describe Gitlab::Ci::Config::Interpolation::Context, feature_category: :pipeline_composition do
subject { described_class.new(ctx) }
let(:ctx) do
{ inputs: { key: 'abc' } }
end
+ describe '.fabricate' do
+ context 'when given an unexpected object' do
+ it 'raises an ArgumentError' do
+ expect { described_class.fabricate([]) }.to raise_error(ArgumentError, 'unknown interpolation context')
+ end
+ end
+ end
+
+ describe '#to_h' do
+ it 'returns the context hash' do
+ expect(subject.to_h).to eq(ctx)
+ end
+ end
+
describe '#depth' do
it 'returns a max depth of the hash' do
expect(subject.depth).to eq 2
diff --git a/spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb b/spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb
new file mode 100644
index 00000000000..c193e88dbe2
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::Functions::Base, feature_category: :pipeline_composition do
+ let(:custom_function_klass) do
+ Class.new(described_class) do
+ def self.function_expression_pattern
+ /.*/
+ end
+
+ def self.name
+ 'test_function'
+ end
+ end
+ end
+
+ it 'defines an expected interface for child classes' do
+ expect { described_class.function_expression_pattern }.to raise_error(NotImplementedError)
+ expect { described_class.name }.to raise_error(NotImplementedError)
+ expect { custom_function_klass.new('test').execute('input') }.to raise_error(NotImplementedError)
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb b/spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb
new file mode 100644
index 00000000000..c521eff9811
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::Functions::Truncate, feature_category: :pipeline_composition do
+ it 'matches exactly the truncate function with 2 numeric arguments' do
+ expect(described_class.matches?('truncate(1,2)')).to be_truthy
+ expect(described_class.matches?('truncate( 11 , 222 )')).to be_truthy
+ expect(described_class.matches?('truncate( string , 222 )')).to be_falsey
+ expect(described_class.matches?('truncate(222)')).to be_falsey
+ expect(described_class.matches?('unknown(1,2)')).to be_falsey
+ end
+
+ it 'truncates the given input' do
+ function = described_class.new('truncate(1,2)')
+
+ output = function.execute('test')
+
+ expect(function).to be_valid
+ expect(output).to eq('es')
+ end
+
+ context 'when given a non-string input' do
+ it 'returns an error' do
+ function = described_class.new('truncate(1,2)')
+
+ function.execute(100)
+
+ expect(function).not_to be_valid
+ expect(function.errors).to contain_exactly(
+ 'error in `truncate` function: invalid input type: truncate can only be used with string inputs'
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb b/spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb
new file mode 100644
index 00000000000..881f092c440
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::FunctionsStack, feature_category: :pipeline_composition do
+ let(:functions) { ['truncate(0,4)', 'truncate(1,2)'] }
+ let(:input_value) { 'test_input_value' }
+
+ subject { described_class.new(functions).evaluate(input_value) }
+
+ it 'modifies the given input value according to the function expressions' do
+ expect(subject).to be_success
+ expect(subject.value).to eq('es')
+ end
+
+ context 'when applying a function fails' do
+ let(:input_value) { 666 }
+
+ it 'returns the error given by the failure' do
+ expect(subject).not_to be_success
+ expect(subject.errors).to contain_exactly(
+ 'error in `truncate` function: invalid input type: truncate can only be used with string inputs'
+ )
+ end
+ end
+
+ context 'when function expressions do not match any function' do
+ let(:functions) { ['truncate(0)', 'unknown'] }
+
+ it 'returns an error' do
+ expect(subject).not_to be_success
+ expect(subject.errors).to contain_exactly(
+ 'no function matching `truncate(0)`: check that the function name, arguments, and types are correct',
+ 'no function matching `unknown`: check that the function name, arguments, and types are correct'
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb b/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb
new file mode 100644
index 00000000000..30036ee68ed
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::Inputs::BaseInput, feature_category: :pipeline_composition do
+ describe '.matches?' do
+ it 'is not implemented' do
+ expect { described_class.matches?(double) }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '.type_name' do
+ it 'is not implemented' do
+ expect { described_class.type_name }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#valid_value?' do
+ it 'is not implemented' do
+ expect do
+ described_class.new(
+ name: 'website', spec: { website: nil }, value: { website: 'example.com' }
+ ).valid_value?('test')
+ end.to raise_error(NotImplementedError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb b/spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb
new file mode 100644
index 00000000000..ea06f181fa4
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::Inputs, feature_category: :pipeline_composition do
+ let(:inputs) { described_class.new(specs, args) }
+ let(:specs) { { foo: { default: 'bar' } } }
+ let(:args) { {} }
+
+ context 'when inputs are valid' do
+ where(:specs, :args, :merged) do
+ [
+ [
+ { foo: { default: 'bar' } }, {},
+ { foo: 'bar' }
+ ],
+ [
+ { foo: { default: 'bar' } }, { foo: 'test' },
+ { foo: 'test' }
+ ],
+ [
+ { foo: nil }, { foo: 'bar' },
+ { foo: 'bar' }
+ ],
+ [
+ { foo: { type: 'string' } }, { foo: 'bar' },
+ { foo: 'bar' }
+ ],
+ [
+ { foo: { type: 'string', default: 'bar' } }, { foo: 'test' },
+ { foo: 'test' }
+ ],
+ [
+ { foo: { type: 'string', default: 'bar' } }, {},
+ { foo: 'bar' }
+ ],
+ [
+ { foo: { default: 'bar' }, baz: nil }, { baz: 'test' },
+ { foo: 'bar', baz: 'test' }
+ ],
+ [
+ { number_input: { type: 'number' } },
+ { number_input: 8 },
+ { number_input: 8 }
+ ],
+ [
+ { default_number_input: { default: 9, type: 'number' } },
+ {},
+ { default_number_input: 9 }
+ ],
+ [
+ { true_input: { type: 'boolean' }, false_input: { type: 'boolean' } },
+ { true_input: true, false_input: false },
+ { true_input: true, false_input: false }
+ ],
+ [
+ { default_boolean_input: { default: true, type: 'boolean' } },
+ {},
+ { default_boolean_input: true }
+ ]
+ ]
+ end
+
+ with_them do
+ it 'contains the merged inputs' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(merged)
+ end
+ end
+ end
+
+ context 'when inputs are invalid' do
+ where(:specs, :args, :errors) do
+ [
+ [
+ { foo: nil }, { foo: 'bar', test: 'bar' },
+ ['unknown input arguments: test']
+ ],
+ [
+ { foo: nil }, { test: 'bar', gitlab: '1' },
+ ['unknown input arguments: test, gitlab', '`foo` input: required value has not been provided']
+ ],
+ [
+ { foo: 123 }, {},
+ ['unknown input specification for `foo` (valid types: boolean, number, string)']
+ ],
+ [
+ { a: nil, foo: 123 }, { a: '123' },
+ ['unknown input specification for `foo` (valid types: boolean, number, string)']
+ ],
+ [
+ { foo: nil }, {},
+ ['`foo` input: required value has not been provided']
+ ],
+ [
+ { foo: { default: 123 } }, { foo: 'test' },
+ ['`foo` input: default value is not a string']
+ ],
+ [
+ { foo: { default: 'test' } }, { foo: 123 },
+ ['`foo` input: provided value is not a string']
+ ],
+ [
+ { foo: nil }, { foo: 123 },
+ ['`foo` input: provided value is not a string']
+ ],
+ [
+ { number_input: { type: 'number' } },
+ { number_input: 'NaN' },
+ ['`number_input` input: provided value is not a number']
+ ],
+ [
+ { default_number_input: { default: 'NaN', type: 'number' } },
+ {},
+ ['`default_number_input` input: default value is not a number']
+ ],
+ [
+ { boolean_input: { type: 'boolean' } },
+ { boolean_input: 'string' },
+ ['`boolean_input` input: provided value is not a boolean']
+ ],
+ [
+ { default_boolean_input: { default: 'string', type: 'boolean' } },
+ {},
+ ['`default_boolean_input` input: default value is not a boolean']
+ ]
+ ]
+ end
+
+ with_them do
+ it 'contains the expected errors', :aggregate_failures do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly(*errors)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb b/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb
index 888756a3eb1..7bb09d35064 100644
--- a/spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb
@@ -2,13 +2,12 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Yaml::Interpolator, feature_category: :pipeline_composition do
+RSpec.describe Gitlab::Ci::Config::Interpolation::Interpolator, feature_category: :pipeline_composition do
let_it_be(:project) { create(:project) }
- let(:current_user) { build(:user, id: 1234) }
let(:result) { ::Gitlab::Ci::Config::Yaml::Result.new(config: [header, content]) }
- subject { described_class.new(result, arguments, current_user: current_user) }
+ subject { described_class.new(result, arguments) }
context 'when input data is valid' do
let(:header) do
@@ -26,16 +25,10 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Interpolator, feature_category: :pipeli
it 'correctly interpolates the config' do
subject.interpolate!
+ expect(subject).to be_interpolated
expect(subject).to be_valid
expect(subject.to_hash).to eq({ test: 'deploy gitlab.com' })
end
-
- it 'tracks the event' do
- expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
- .with('ci_interpolation_users', { values: 1234 })
-
- subject.interpolate!
- end
end
context 'when config has a syntax error' do
@@ -54,6 +47,20 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Interpolator, feature_category: :pipeli
end
end
+ context 'when spec header is missing but inputs are specified' do
+ let(:header) { nil }
+ let(:content) { { test: 'echo' } }
+ let(:arguments) { { foo: 'bar' } }
+
+ it 'surfaces an error about invalid inputs' do
+ subject.interpolate!
+
+ expect(subject).not_to be_valid
+ expect(subject.error_message).to eq subject.errors.first
+ expect(subject.errors).to include('unknown input arguments')
+ end
+ end
+
context 'when spec header is invalid' do
let(:header) do
{ spec: { arguments: { website: nil } } }
@@ -76,47 +83,47 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Interpolator, feature_category: :pipeli
end
end
- context 'when interpolation block is invalid' do
+ context 'when provided interpolation argument is invalid' do
let(:header) do
{ spec: { inputs: { website: nil } } }
end
let(:content) do
- { test: 'deploy $[[ inputs.abc ]]' }
+ { test: 'deploy $[[ inputs.website ]]' }
end
let(:arguments) do
- { website: 'gitlab.com' }
+ { website: ['gitlab.com'] }
end
- it 'correctly interpolates the config' do
+ it 'returns an error' do
subject.interpolate!
expect(subject).not_to be_valid
- expect(subject.errors).to include 'unknown interpolation key: `abc`'
- expect(subject.error_message).to eq 'interpolation interrupted by errors, unknown interpolation key: `abc`'
+ expect(subject.error_message).to eq subject.errors.first
+ expect(subject.errors).to include '`website` input: provided value is not a string'
end
end
- context 'when provided interpolation argument is invalid' do
+ context 'when interpolation block is invalid' do
let(:header) do
{ spec: { inputs: { website: nil } } }
end
let(:content) do
- { test: 'deploy $[[ inputs.website ]]' }
+ { test: 'deploy $[[ inputs.abc ]]' }
end
let(:arguments) do
- { website: ['gitlab.com'] }
+ { website: 'gitlab.com' }
end
- it 'correctly interpolates the config' do
+ it 'returns an error' do
subject.interpolate!
expect(subject).not_to be_valid
- expect(subject.error_message).to eq subject.errors.first
- expect(subject.errors).to include 'unsupported value in input argument `website`'
+ expect(subject.errors).to include 'unknown interpolation key: `abc`'
+ expect(subject.error_message).to eq 'interpolation interrupted by errors, unknown interpolation key: `abc`'
end
end
@@ -133,11 +140,12 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Interpolator, feature_category: :pipeli
{ website: 'gitlab.com' }
end
- it 'correctly interpolates the config' do
+ it 'returns an error' do
subject.interpolate!
expect(subject).not_to be_valid
- expect(subject.error_message).to eq 'interpolation interrupted by errors, unknown interpolation key: `something`'
+ expect(subject.error_message)
+ .to eq 'interpolation interrupted by errors, unknown interpolation key: `something`'
end
end
diff --git a/spec/lib/gitlab/ci/interpolation/template_spec.rb b/spec/lib/gitlab/ci/config/interpolation/template_spec.rb
index a3ef1bb4445..c7d88822558 100644
--- a/spec/lib/gitlab/ci/interpolation/template_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/template_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Gitlab::Ci::Interpolation::Template, feature_category: :pipeline_composition do
+RSpec.describe Gitlab::Ci::Config::Interpolation::Template, feature_category: :pipeline_composition do
subject { described_class.new(YAML.safe_load(config), ctx) }
let(:config) do
@@ -67,7 +67,7 @@ RSpec.describe Gitlab::Ci::Interpolation::Template, feature_category: :pipeline_
context 'when template contains symbols that need interpolation' do
subject do
- described_class.new({ '$[[ inputs.key ]]'.to_sym => 'cde' }, ctx)
+ described_class.new({ '$[[ inputs.key ]]': 'cde' }, ctx)
end
it 'performs a valid interpolation' do
@@ -78,7 +78,7 @@ RSpec.describe Gitlab::Ci::Interpolation::Template, feature_category: :pipeline_
context 'when template is too large' do
before do
- stub_const('Gitlab::Ci::Interpolation::Config::MAX_NODES', 1)
+ stub_const('Gitlab::Ci::Config::Interpolation::Config::MAX_NODES', 1)
end
it 'returns an error' do
diff --git a/spec/lib/gitlab/ci/config/normalizer_spec.rb b/spec/lib/gitlab/ci/config/normalizer_spec.rb
index 96ca5d98a6e..cc549b38dc3 100644
--- a/spec/lib/gitlab/ci/config/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Normalizer do
let(:job_name) { :rspec }
@@ -103,6 +103,34 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
end
end
+ shared_examples 'needs:parallel:matrix' do
+ let(:expanded_needs_parallel_job_attributes) do
+ expanded_needs_parallel_job_names.map do |job_name|
+ { name: job_name }
+ end
+ end
+
+ context 'when job has needs:parallel:matrix on parallelized jobs' do
+ let(:config) do
+ {
+ job_name => job_config,
+ other_job: {
+ script: 'echo 1',
+ needs: {
+ job: [
+ { name: job_name.to_s, parallel: needs_parallel_config }
+ ]
+ }
+ }
+ }
+ end
+
+ it 'parallelizes and only keeps needs specified by needs:parallel:matrix' do
+ expect(subject.dig(:other_job, :needs, :job)).to eq(expanded_needs_parallel_job_attributes)
+ end
+ end
+ end
+
context 'with parallel config as integer' do
let(:variables_config) { {} }
let(:parallel_config) { 5 }
@@ -167,7 +195,7 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
it_behaves_like 'parallel needs'
end
- context 'with parallel matrix config' do
+ context 'with a simple parallel matrix config' do
let(:variables_config) do
{
USER_VARIABLE: 'user value'
@@ -192,6 +220,19 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
]
end
+ let(:needs_parallel_config) do
+ {
+ matrix: [
+ {
+ VAR_1: ['A'],
+ VAR_2: ['C']
+ }
+ ]
+ }
+ end
+
+ let(:expanded_needs_parallel_job_names) { ['rspec: [A, C]'] }
+
it 'does not have original job' do
is_expected.not_to include(job_name)
end
@@ -228,6 +269,66 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
it_behaves_like 'parallel dependencies'
it_behaves_like 'parallel needs'
+ it_behaves_like 'needs:parallel:matrix'
+ end
+
+ context 'with a complex parallel matrix config' do
+ let(:variables_config) { {} }
+ let(:parallel_config) do
+ {
+ matrix: [
+ {
+ PLATFORM: ['centos'],
+ STACK: %w[ruby python java],
+ DB: %w[postgresql mysql]
+ },
+ {
+ PLATFORM: ['ubuntu'],
+ PROVIDER: %w[aws gcp]
+ }
+ ]
+ }
+ end
+
+ let(:needs_parallel_config) do
+ {
+ matrix: [
+ {
+ PLATFORM: ['centos'],
+ STACK: %w[ruby python],
+ DB: ['postgresql']
+ },
+ {
+ PLATFORM: ['ubuntu'],
+ PROVIDER: ['aws']
+ }
+ ]
+ }
+ end
+
+ let(:expanded_needs_parallel_job_names) do
+ [
+ 'rspec: [centos, ruby, postgresql]',
+ 'rspec: [centos, python, postgresql]',
+ 'rspec: [ubuntu, aws]'
+ ]
+ end
+
+ let(:expanded_job_names) do
+ [
+ 'rspec: [centos, ruby, postgresql]',
+ 'rspec: [centos, ruby, mysql]',
+ 'rspec: [centos, python, postgresql]',
+ 'rspec: [centos, python, mysql]',
+ 'rspec: [centos, java, postgresql]',
+ 'rspec: [centos, java, mysql]',
+ 'rspec: [ubuntu, aws]',
+ 'rspec: [ubuntu, gcp]'
+ ]
+ end
+
+ it_behaves_like 'parallel needs'
+ it_behaves_like 'needs:parallel:matrix'
end
context 'when parallel config does not matches a factory' do
diff --git a/spec/lib/gitlab/ci/config/yaml/loader_spec.rb b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
index 4e6151677e6..57a9a47d699 100644
--- a/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
@@ -21,12 +21,13 @@ RSpec.describe ::Gitlab::Ci::Config::Yaml::Loader, feature_category: :pipeline_c
YAML
end
- subject(:result) { described_class.new(yaml, inputs: inputs, current_user: project.creator).load }
+ subject(:result) { described_class.new(yaml, inputs: inputs).load }
it 'loads and interpolates CI config YAML' do
expected_config = { test_job: { script: ['echo "hello test"'] } }
expect(result).to be_valid
+ expect(result).to be_interpolated
expect(result.content).to eq(expected_config)
end
diff --git a/spec/lib/gitlab/ci/config/yaml/result_spec.rb b/spec/lib/gitlab/ci/config/yaml/result_spec.rb
index d17e0609ef6..a66c630dfc9 100644
--- a/spec/lib/gitlab/ci/config/yaml/result_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/result_spec.rb
@@ -51,4 +51,14 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Result, feature_category: :pipeline_com
expect(result).not_to be_valid
expect(result.error).to be_a ArgumentError
end
+
+ describe '#interpolated?' do
+ it 'defaults to false' do
+ expect(described_class.new).not_to be_interpolated
+ end
+
+ it 'returns the value passed to the initializer' do
+ expect(described_class.new(interpolated: true)).to be_interpolated
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/yaml_spec.rb b/spec/lib/gitlab/ci/config/yaml_spec.rb
index 27d93d555f1..e30ddbb8033 100644
--- a/spec/lib/gitlab/ci/config/yaml_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml_spec.rb
@@ -36,17 +36,5 @@ RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition
.to raise_error ::Gitlab::Config::Loader::FormatError, /mapping values are not allowed in this context/
end
end
-
- context 'when given a user' do
- let(:user) { instance_double(User) }
-
- subject(:config) { described_class.load!(yaml, current_user: user) }
-
- it 'passes it to Loader' do
- expect(::Gitlab::Ci::Config::Yaml::Loader).to receive(:new).with(yaml, current_user: user).and_call_original
-
- config
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/decompressed_gzip_size_validator_spec.rb b/spec/lib/gitlab/ci/decompressed_gzip_size_validator_spec.rb
index dad5bd2548b..f1b10648f51 100644
--- a/spec/lib/gitlab/ci/decompressed_gzip_size_validator_spec.rb
+++ b/spec/lib/gitlab/ci/decompressed_gzip_size_validator_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::DecompressedGzipSizeValidator, feature_category: :importers do
let_it_be(:filepath) { File.join(Dir.tmpdir, 'decompressed_gzip_size_validator_spec.gz') }
- before(:all) do
+ before_all do
create_compressed_file
end
diff --git a/spec/lib/gitlab/ci/input/arguments/base_spec.rb b/spec/lib/gitlab/ci/input/arguments/base_spec.rb
deleted file mode 100644
index ed8e99b7257..00000000000
--- a/spec/lib/gitlab/ci/input/arguments/base_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Ci::Input::Arguments::Base, feature_category: :pipeline_composition do
- subject do
- Class.new(described_class) do
- def validate!; end
- def to_value; end
- end
- end
-
- it 'fabricates an invalid input argument if unknown value is provided' do
- argument = subject.new(:something, { spec: 123 }, [:a, :b])
-
- expect(argument).not_to be_valid
- expect(argument.errors.first).to eq 'unsupported value in input argument `something`'
- end
-end
diff --git a/spec/lib/gitlab/ci/input/arguments/default_spec.rb b/spec/lib/gitlab/ci/input/arguments/default_spec.rb
deleted file mode 100644
index bc0cee6ac4e..00000000000
--- a/spec/lib/gitlab/ci/input/arguments/default_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Ci::Input::Arguments::Default, feature_category: :pipeline_composition do
- it 'returns a user-provided value if it is present' do
- argument = described_class.new(:website, { default: 'https://gitlab.com' }, 'https://example.gitlab.com')
-
- expect(argument).to be_valid
- expect(argument.to_value).to eq 'https://example.gitlab.com'
- expect(argument.to_hash).to eq({ website: 'https://example.gitlab.com' })
- end
-
- it 'returns an empty value if user-provider input is empty' do
- argument = described_class.new(:website, { default: 'https://gitlab.com' }, '')
-
- expect(argument).to be_valid
- expect(argument.to_value).to eq ''
- expect(argument.to_hash).to eq({ website: '' })
- end
-
- it 'returns a default value if user-provider one is unknown' do
- argument = described_class.new(:website, { default: 'https://gitlab.com' }, nil)
-
- expect(argument).to be_valid
- expect(argument.to_value).to eq 'https://gitlab.com'
- expect(argument.to_hash).to eq({ website: 'https://gitlab.com' })
- end
-
- it 'returns an error if the default argument has not been recognized' do
- argument = described_class.new(:website, { default: ['gitlab.com'] }, 'abc')
-
- expect(argument).not_to be_valid
- end
-
- it 'returns an error if the argument has not been fabricated correctly' do
- argument = described_class.new(:website, { required: 'https://gitlab.com' }, 'https://example.gitlab.com')
-
- expect(argument).not_to be_valid
- end
-
- describe '.matches?' do
- it 'matches specs with default configuration' do
- expect(described_class.matches?({ default: 'abc' })).to be true
- end
-
- it 'does not match specs different configuration keyword' do
- expect(described_class.matches?({ options: %w[a b] })).to be false
- expect(described_class.matches?('a b c')).to be false
- expect(described_class.matches?(%w[default a])).to be false
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/input/arguments/options_spec.rb b/spec/lib/gitlab/ci/input/arguments/options_spec.rb
deleted file mode 100644
index 17e3469b294..00000000000
--- a/spec/lib/gitlab/ci/input/arguments/options_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Ci::Input::Arguments::Options, feature_category: :pipeline_composition do
- it 'returns a user-provided value if it is an allowed one' do
- argument = described_class.new(:run, { options: %w[opt1 opt2] }, 'opt1')
-
- expect(argument).to be_valid
- expect(argument.to_value).to eq 'opt1'
- expect(argument.to_hash).to eq({ run: 'opt1' })
- end
-
- it 'returns an error if user-provided value is not allowlisted' do
- argument = described_class.new(:run, { options: %w[opt1 opt2] }, 'opt3')
-
- expect(argument).not_to be_valid
- expect(argument.errors.first).to eq '`run` input: argument value opt3 not allowlisted'
- end
-
- it 'returns an error if specification is not correct' do
- argument = described_class.new(:website, { options: nil }, 'opt1')
-
- expect(argument).not_to be_valid
- expect(argument.errors.first).to eq '`website` input: argument specification invalid'
- end
-
- it 'returns an error if specification is using a hash' do
- argument = described_class.new(:website, { options: { a: 1 } }, 'opt1')
-
- expect(argument).not_to be_valid
- expect(argument.errors.first).to eq '`website` input: argument specification invalid'
- end
-
- it 'returns an empty value if it is allowlisted' do
- argument = described_class.new(:run, { options: ['opt1', ''] }, '')
-
- expect(argument).to be_valid
- expect(argument.to_value).to be_empty
- expect(argument.to_hash).to eq({ run: '' })
- end
-
- describe '.matches?' do
- it 'matches specs with options configuration' do
- expect(described_class.matches?({ options: %w[a b] })).to be true
- end
-
- it 'does not match specs different configuration keyword' do
- expect(described_class.matches?({ default: 'abc' })).to be false
- expect(described_class.matches?(['options'])).to be false
- expect(described_class.matches?('options')).to be false
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/input/arguments/required_spec.rb b/spec/lib/gitlab/ci/input/arguments/required_spec.rb
deleted file mode 100644
index 847272998c2..00000000000
--- a/spec/lib/gitlab/ci/input/arguments/required_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Ci::Input::Arguments::Required, feature_category: :pipeline_composition do
- it 'returns a user-provided value if it is present' do
- argument = described_class.new(:website, nil, 'https://example.gitlab.com')
-
- expect(argument).to be_valid
- expect(argument.to_value).to eq 'https://example.gitlab.com'
- expect(argument.to_hash).to eq({ website: 'https://example.gitlab.com' })
- end
-
- it 'returns an empty value if user-provider value is empty' do
- argument = described_class.new(:website, nil, '')
-
- expect(argument).to be_valid
- expect(argument.to_hash).to eq(website: '')
- end
-
- it 'returns an error if user-provided value is unspecified' do
- argument = described_class.new(:website, nil, nil)
-
- expect(argument).not_to be_valid
- expect(argument.errors.first).to eq '`website` input: required value has not been provided'
- end
-
- describe '.matches?' do
- it 'matches specs without configuration' do
- expect(described_class.matches?(nil)).to be true
- end
-
- it 'matches specs with empty configuration' do
- expect(described_class.matches?('')).to be true
- end
-
- it 'matches specs with an empty hash configuration' do
- expect(described_class.matches?({})).to be true
- end
-
- it 'does not match specs with configuration' do
- expect(described_class.matches?({ options: %w[a b] })).to be false
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/input/arguments/unknown_spec.rb b/spec/lib/gitlab/ci/input/arguments/unknown_spec.rb
deleted file mode 100644
index 1270423ac72..00000000000
--- a/spec/lib/gitlab/ci/input/arguments/unknown_spec.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Ci::Input::Arguments::Unknown, feature_category: :pipeline_composition do
- it 'raises an error when someone tries to evaluate the value' do
- argument = described_class.new(:website, nil, 'https://example.gitlab.com')
-
- expect(argument).not_to be_valid
- expect { argument.to_value }.to raise_error ArgumentError
- end
-
- describe '.matches?' do
- it 'always matches' do
- expect(described_class.matches?('abc')).to be true
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/input/inputs_spec.rb b/spec/lib/gitlab/ci/input/inputs_spec.rb
deleted file mode 100644
index 5d2d5192299..00000000000
--- a/spec/lib/gitlab/ci/input/inputs_spec.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Ci::Input::Inputs, feature_category: :pipeline_composition do
- describe '#valid?' do
- let(:spec) { { website: nil } }
-
- it 'describes user-provided inputs' do
- inputs = described_class.new(spec, { website: 'http://example.gitlab.com' })
-
- expect(inputs).to be_valid
- end
- end
-
- context 'when proper specification has been provided' do
- let(:spec) do
- {
- website: nil,
- env: { default: 'development' },
- run: { options: %w[tests spec e2e] }
- }
- end
-
- let(:args) { { website: 'https://gitlab.com', run: 'tests' } }
-
- it 'fabricates desired input arguments' do
- inputs = described_class.new(spec, args)
-
- expect(inputs).to be_valid
- expect(inputs.count).to eq 3
- expect(inputs.to_hash).to eq(args.merge(env: 'development'))
- end
- end
-
- context 'when inputs and args are empty' do
- it 'is a valid use-case' do
- inputs = described_class.new({}, {})
-
- expect(inputs).to be_valid
- expect(inputs.to_hash).to be_empty
- end
- end
-
- context 'when there are arguments recoincilation errors present' do
- context 'when required argument is missing' do
- let(:spec) { { website: nil } }
-
- it 'returns an error' do
- inputs = described_class.new(spec, {})
-
- expect(inputs).not_to be_valid
- expect(inputs.errors.first).to eq '`website` input: required value has not been provided'
- end
- end
-
- context 'when argument is not present but configured as allowlist' do
- let(:spec) do
- { run: { options: %w[opt1 opt2] } }
- end
-
- it 'returns an error' do
- inputs = described_class.new(spec, {})
-
- expect(inputs).not_to be_valid
- expect(inputs.errors.first).to eq '`run` input: argument not provided'
- end
- end
- end
-
- context 'when unknown specification argument has been used' do
- let(:spec) do
- {
- website: nil,
- env: { default: 'development' },
- run: { options: %w[tests spec e2e] },
- test: { unknown: 'something' }
- }
- end
-
- let(:args) { { website: 'https://gitlab.com', run: 'tests' } }
-
- it 'fabricates an unknown argument entry and returns an error' do
- inputs = described_class.new(spec, args)
-
- expect(inputs).not_to be_valid
- expect(inputs.count).to eq 4
- expect(inputs.errors.first).to eq '`test` input: unrecognized input argument specification: `unknown`'
- end
- end
-
- context 'when unknown arguments are being passed by a user' do
- let(:spec) do
- { env: { default: 'development' } }
- end
-
- let(:args) { { website: 'https://gitlab.com', run: 'tests' } }
-
- it 'returns an error with a list of unknown arguments' do
- inputs = described_class.new(spec, args)
-
- expect(inputs).not_to be_valid
- expect(inputs.errors.first).to eq 'unknown input arguments: [:website, :run]'
- end
- end
-
- context 'when composite specification is being used' do
- let(:spec) do
- {
- env: {
- default: 'dev',
- options: %w[test dev prod]
- }
- }
- end
-
- let(:args) { { env: 'dev' } }
-
- it 'returns an error describing an unknown specification' do
- inputs = described_class.new(spec, args)
-
- expect(inputs).not_to be_valid
- expect(inputs.errors.first).to eq '`env` input: unrecognized input argument definition'
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/interpolation/block_spec.rb b/spec/lib/gitlab/ci/interpolation/block_spec.rb
deleted file mode 100644
index 4a8709df3dc..00000000000
--- a/spec/lib/gitlab/ci/interpolation/block_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Ci::Interpolation::Block, feature_category: :pipeline_composition do
- subject { described_class.new(block, data, ctx) }
-
- let(:data) do
- 'inputs.data'
- end
-
- let(:block) do
- "$[[ #{data} ]]"
- end
-
- let(:ctx) do
- { inputs: { data: 'abc' }, env: { 'ENV' => 'dev' } }
- end
-
- it 'knows its content' do
- expect(subject.content).to eq 'inputs.data'
- end
-
- it 'properly evaluates the access pattern' do
- expect(subject.value).to eq 'abc'
- end
-
- describe '.match' do
- it 'matches each block in a string' do
- expect { |b| described_class.match('$[[ access1 ]] $[[ access2 ]]', &b) }
- .to yield_successive_args(['$[[ access1 ]]', 'access1'], ['$[[ access2 ]]', 'access2'])
- end
-
- it 'matches an empty block' do
- expect { |b| described_class.match('$[[]]', &b) }
- .to yield_with_args('$[[]]', '')
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/interpolation/config_spec.rb b/spec/lib/gitlab/ci/interpolation/config_spec.rb
deleted file mode 100644
index e745269d8c0..00000000000
--- a/spec/lib/gitlab/ci/interpolation/config_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Ci::Interpolation::Config, feature_category: :pipeline_composition do
- subject { described_class.new(YAML.safe_load(config)) }
-
- let(:config) do
- <<~CFG
- test:
- spec:
- env: $[[ inputs.env ]]
-
- $[[ inputs.key ]]:
- name: $[[ inputs.key ]]
- script: my-value
- CFG
- end
-
- describe '#replace!' do
- it 'replaces each od the nodes with a block return value' do
- result = subject.replace! { |node| "abc#{node}cde" }
-
- expect(result).to eq({
- 'abctestcde' => { 'abcspeccde' => { 'abcenvcde' => 'abc$[[ inputs.env ]]cde' } },
- 'abc$[[ inputs.key ]]cde' => {
- 'abcnamecde' => 'abc$[[ inputs.key ]]cde',
- 'abcscriptcde' => 'abcmy-valuecde'
- }
- })
- end
- end
-
- context 'when config size is exceeded' do
- before do
- stub_const("#{described_class}::MAX_NODES", 7)
- end
-
- it 'returns a config size error' do
- replaced = 0
-
- subject.replace! { replaced += 1 }
-
- expect(replaced).to eq 4
- expect(subject.errors.size).to eq 1
- expect(subject.errors.first).to eq 'config too large'
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/jwt_v2/claim_mapper/repository_spec.rb b/spec/lib/gitlab/ci/jwt_v2/claim_mapper/repository_spec.rb
new file mode 100644
index 00000000000..0dd0d2fcf0d
--- /dev/null
+++ b/spec/lib/gitlab/ci/jwt_v2/claim_mapper/repository_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::JwtV2::ClaimMapper::Repository, feature_category: :continuous_integration do
+ let_it_be(:sha) { '35fa264414ee3ed7d0b8a6f5da40751c8600a772' }
+ let_it_be(:pipeline) { build_stubbed(:ci_pipeline, ref: 'test-branch-for-claim-mapper', sha: sha) }
+
+ let(:url) { 'gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml' }
+ let(:project_config) { instance_double(Gitlab::Ci::ProjectConfig, url: url) }
+
+ subject(:mapper) { described_class.new(project_config, pipeline) }
+
+ describe '#to_h' do
+ it 'returns expected claims' do
+ expect(mapper.to_h).to eq({
+ ci_config_ref_uri: 'gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml@refs/heads/test-branch-for-claim-mapper',
+ ci_config_sha: sha
+ })
+ end
+
+ context 'when ref is a tag' do
+ let_it_be(:tag) { 'test-tag-for-claim-mapper' }
+ let_it_be(:pipeline) { build_stubbed(:ci_pipeline, tag: tag, ref: tag, sha: sha) }
+
+ it 'returns expected claims' do
+ expect(mapper.to_h).to eq({
+ ci_config_ref_uri: 'gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml@refs/tags/test-tag-for-claim-mapper',
+ ci_config_sha: sha
+ })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/jwt_v2/claim_mapper_spec.rb b/spec/lib/gitlab/ci/jwt_v2/claim_mapper_spec.rb
new file mode 100644
index 00000000000..b7a73c938a3
--- /dev/null
+++ b/spec/lib/gitlab/ci/jwt_v2/claim_mapper_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::JwtV2::ClaimMapper, feature_category: :continuous_integration do
+ let_it_be(:pipeline) { build_stubbed(:ci_pipeline) }
+
+ let(:source) { :unknown_source }
+ let(:url) { 'gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml' }
+ let(:project_config) { instance_double(Gitlab::Ci::ProjectConfig, url: url, source: source) }
+
+ subject(:mapper) { described_class.new(project_config, pipeline) }
+
+ describe '#to_h' do
+ it 'returns an empty hash when source is not implemented' do
+ expect(mapper.to_h).to eq({})
+ end
+
+ context 'when mapper for source is implemented' do
+ where(:source) { described_class::MAPPER_FOR_CONFIG_SOURCE.keys }
+ let(:result) do
+ {
+ ci_config_ref_uri: 'ci_config_ref_uri',
+ ci_config_sha: 'ci_config_sha'
+ }
+ end
+
+ with_them do
+ it 'uses mapper' do
+ mapper_class = described_class::MAPPER_FOR_CONFIG_SOURCE[source]
+ expect_next_instance_of(mapper_class, project_config, pipeline) do |instance|
+ expect(instance).to receive(:to_h).and_return(result)
+ end
+
+ expect(mapper.to_h).to eq(result)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/jwt_v2_spec.rb b/spec/lib/gitlab/ci/jwt_v2_spec.rb
index 575f174f737..d45d8cacb88 100644
--- a/spec/lib/gitlab/ci/jwt_v2_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_v2_spec.rb
@@ -129,75 +129,39 @@ RSpec.describe Gitlab::Ci::JwtV2, feature_category: :continuous_integration do
end
end
- describe 'ci_config_ref_uri' do
- it 'joins project_config.url and pipeline.source_ref_path with @' do
- expect(payload[:ci_config_ref_uri]).to eq('gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml' \
- '@refs/heads/auto-deploy-2020-03-19')
- end
-
- context 'when project config is nil' do
- before do
- allow(Gitlab::Ci::ProjectConfig).to receive(:new).and_return(nil)
- end
-
- it 'is nil' do
- expect(payload[:ci_config_ref_uri]).to be_nil
- end
- end
-
- context 'when ProjectConfig#url raises an error' do
- before do
- allow(project_config).to receive(:url).and_raise(RuntimeError)
- end
+ describe 'claims delegated to mapper' do
+ let(:ci_config_ref_uri) { 'ci_config_ref_uri' }
+ let(:ci_config_sha) { 'ci_config_sha' }
- it 'raises the same error' do
- expect { payload }.to raise_error(RuntimeError)
+ it 'delegates claims to Gitlab::Ci::JwtV2::ClaimMapper' do
+ expect_next_instance_of(Gitlab::Ci::JwtV2::ClaimMapper, project_config, pipeline) do |mapper|
+ expect(mapper).to receive(:to_h).and_return({
+ ci_config_ref_uri: ci_config_ref_uri,
+ ci_config_sha: ci_config_sha
+ })
end
- context 'in production' do
- before do
- stub_rails_env('production')
- end
-
- it 'is nil' do
- expect(payload[:ci_config_ref_uri]).to be_nil
- end
- end
- end
-
- context 'when config source is not repository' do
- before do
- allow(project_config).to receive(:source).and_return(:auto_devops_source)
- end
-
- it 'is nil' do
- expect(payload[:ci_config_ref_uri]).to be_nil
- end
+ expect(payload[:ci_config_ref_uri]).to eq(ci_config_ref_uri)
+ expect(payload[:ci_config_sha]).to eq(ci_config_sha)
end
end
- describe 'ci_config_sha' do
- it 'is the SHA of the pipeline' do
- expect(payload[:ci_config_sha]).to eq(pipeline.sha)
- end
+ describe 'project_visibility' do
+ using RSpec::Parameterized::TableSyntax
- context 'when project config is nil' do
- before do
- allow(Gitlab::Ci::ProjectConfig).to receive(:new).and_return(nil)
- end
-
- it 'is nil' do
- expect(payload[:ci_config_sha]).to be_nil
- end
+ where(:visibility_level, :visibility_level_string) do
+ Project::PUBLIC | 'public'
+ Project::INTERNAL | 'internal'
+ Project::PRIVATE | 'private'
end
- context 'when config source is not repository' do
+ with_them do
before do
- allow(project_config).to receive(:source).and_return(:auto_devops_source)
+ project.visibility_level = visibility_level
end
- it 'is nil' do
- expect(payload[:ci_config_sha]).to be_nil
+ it 'is a string representation of the project visibility_level' do
+ expect(payload[:project_visibility]).to eq(visibility_level_string)
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
index 9c268d9039e..66e4b987ac1 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
@@ -42,9 +42,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Config::Content, feature_category: :
before do
expect(project.repository)
- .to receive(:gitlab_ci_yml_for)
+ .to receive(:blob_at)
.with(pipeline.sha, ci_config_path)
- .and_return('the-content')
+ .and_return(instance_double(Blob, empty?: false))
end
it 'builds root config including the local custom file' do
@@ -132,9 +132,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Config::Content, feature_category: :
before do
expect(project.repository)
- .to receive(:gitlab_ci_yml_for)
+ .to receive(:blob_at)
.with(pipeline.sha, '.gitlab-ci.yml')
- .and_return('the-content')
+ .and_return(instance_double(Blob, empty?: false))
end
it 'builds root config including the canonical CI config file' do
diff --git a/spec/lib/gitlab/ci/project_config/repository_spec.rb b/spec/lib/gitlab/ci/project_config/repository_spec.rb
index e8a997a7e43..bd95eefe821 100644
--- a/spec/lib/gitlab/ci/project_config/repository_spec.rb
+++ b/spec/lib/gitlab/ci/project_config/repository_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig::Repository, feature_category: :continu
context 'when Gitaly raises error' do
before do
- allow(project.repository).to receive(:gitlab_ci_yml_for).and_raise(GRPC::Internal)
+ allow(project.repository).to receive(:blob_at).and_raise(GRPC::Internal)
end
it { is_expected.to be_nil }
diff --git a/spec/lib/gitlab/ci/project_config_spec.rb b/spec/lib/gitlab/ci/project_config_spec.rb
index 13ef0939ddd..6a4af3c61bf 100644
--- a/spec/lib/gitlab/ci/project_config_spec.rb
+++ b/spec/lib/gitlab/ci/project_config_spec.rb
@@ -45,9 +45,9 @@ RSpec.describe Gitlab::Ci::ProjectConfig, feature_category: :pipeline_compositio
before do
allow(project.repository)
- .to receive(:gitlab_ci_yml_for)
+ .to receive(:blob_at)
.with(sha, ci_config_path)
- .and_return('the-content')
+ .and_return(instance_double(Blob, empty?: false))
end
it 'returns root config including the local custom file' do
@@ -122,9 +122,9 @@ RSpec.describe Gitlab::Ci::ProjectConfig, feature_category: :pipeline_compositio
before do
allow(project.repository)
- .to receive(:gitlab_ci_yml_for)
+ .to receive(:blob_at)
.with(sha, '.gitlab-ci.yml')
- .and_return('the-content')
+ .and_return(instance_double(Blob, empty?: false))
end
it 'returns root config including the canonical CI config file' do
diff --git a/spec/lib/gitlab/ci/queue/metrics_spec.rb b/spec/lib/gitlab/ci/queue/metrics_spec.rb
new file mode 100644
index 00000000000..2fb4226ba5a
--- /dev/null
+++ b/spec/lib/gitlab/ci/queue/metrics_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Queue::Metrics, feature_category: :continuous_integration do
+ let(:metrics) { described_class.new(build(:ci_runner)) }
+
+ describe '#observe_queue_depth' do
+ subject { metrics.observe_queue_depth(:found, 1) }
+
+ it { is_expected.not_to be_nil }
+
+ context 'with feature flag gitlab_ci_builds_queueing_metrics disabled' do
+ before do
+ stub_feature_flags(gitlab_ci_builds_queuing_metrics: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#observe_queue_size' do
+ subject { metrics.observe_queue_size(-> { 0 }, :some_runner_type) }
+
+ it { is_expected.not_to be_nil }
+
+ context 'with feature flag gitlab_ci_builds_queueing_metrics disabled' do
+ before do
+ stub_feature_flags(gitlab_ci_builds_queuing_metrics: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#observe_queue_time' do
+ subject { metrics.observe_queue_time(:process, :some_runner_type) { 1 } }
+
+ specify do
+ expect(described_class).to receive(:queue_iteration_duration_seconds).and_call_original
+
+ subject
+ end
+
+ context 'with feature flag gitlab_ci_builds_queueing_metrics disabled' do
+ before do
+ stub_feature_flags(gitlab_ci_builds_queuing_metrics: false)
+ end
+
+ specify do
+ expect(described_class).not_to receive(:queue_iteration_duration_seconds)
+
+ subject
+ end
+ end
+
+ describe '.observe_active_runners' do
+ subject { described_class.observe_active_runners(-> { 0 }) }
+
+ it { is_expected.not_to be_nil }
+
+ context 'with feature flag gitlab_ci_builds_queueing_metrics disabled' do
+ before do
+ stub_feature_flags(gitlab_ci_builds_queuing_metrics: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
index 5dbcc1991d4..d62d25aeefe 100644
--- a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
+++ b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
@@ -27,6 +27,154 @@ RSpec.describe Gitlab::Ci::Reports::Sbom::Component, feature_category: :dependen
)
end
+ describe '#name' do
+ subject { component.name }
+
+ it { is_expected.to eq(name) }
+
+ context 'with namespace' do
+ let(:purl) do
+ 'pkg:maven/org.NameSpace/Name@v0.0.1'
+ end
+
+ it { is_expected.to eq('org.NameSpace/Name') }
+
+ context 'when needing normalization' do
+ let(:purl) do
+ 'pkg:pypi/org.NameSpace/Name@v0.0.1'
+ end
+
+ it { is_expected.to eq('org.namespace/name') }
+ end
+ end
+ end
+
+ describe '#<=>' do
+ where do
+ {
+ 'equal' => {
+ a_name: 'component-a',
+ b_name: 'component-a',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: 'pkg:npm/component-a@1.0.0',
+ b_purl: 'pkg:npm/component-a@1.0.0',
+ a_version: '1.0.0',
+ b_version: '1.0.0',
+ expected: 0
+ },
+ 'name lesser' => {
+ a_name: 'component-a',
+ b_name: 'component-b',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: 'pkg:npm/component-a@1.0.0',
+ b_purl: 'pkg:npm/component-b@1.0.0',
+ a_version: '1.0.0',
+ b_version: '1.0.0',
+ expected: -1
+ },
+ 'name greater' => {
+ a_name: 'component-b',
+ b_name: 'component-a',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: 'pkg:npm/component-b@1.0.0',
+ b_purl: 'pkg:npm/component-a@1.0.0',
+ a_version: '1.0.0',
+ b_version: '1.0.0',
+ expected: 1
+ },
+ 'purl type lesser' => {
+ a_name: 'component-a',
+ b_name: 'component-a',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: 'pkg:composer/component-a@1.0.0',
+ b_purl: 'pkg:npm/component-a@1.0.0',
+ a_version: '1.0.0',
+ b_version: '1.0.0',
+ expected: -1
+ },
+ 'purl type greater' => {
+ a_name: 'component-a',
+ b_name: 'component-a',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: 'pkg:npm/component-a@1.0.0',
+ b_purl: 'pkg:composer/component-a@1.0.0',
+ a_version: '1.0.0',
+ b_version: '1.0.0',
+ expected: 1
+ },
+ 'purl type nulls first' => {
+ a_name: 'component-a',
+ b_name: 'component-a',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: nil,
+ b_purl: 'pkg:npm/component-a@1.0.0',
+ a_version: '1.0.0',
+ b_version: '1.0.0',
+ expected: -1
+ },
+ 'version lesser' => {
+ a_name: 'component-a',
+ b_name: 'component-a',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: 'pkg:npm/component-a@1.0.0',
+ b_purl: 'pkg:npm/component-a@1.0.0',
+ a_version: '1.0.0',
+ b_version: '2.0.0',
+ expected: -1
+ },
+ 'version greater' => {
+ a_name: 'component-a',
+ b_name: 'component-a',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: 'pkg:npm/component-a@1.0.0',
+ b_purl: 'pkg:npm/component-a@1.0.0',
+ a_version: '2.0.0',
+ b_version: '1.0.0',
+ expected: 1
+ },
+ 'version nulls first' => {
+ a_name: 'component-a',
+ b_name: 'component-a',
+ a_type: 'library',
+ b_type: 'library',
+ a_purl: 'pkg:npm/component-a@1.0.0',
+ b_purl: 'pkg:npm/component-a@1.0.0',
+ a_version: nil,
+ b_version: '1.0.0',
+ expected: -1
+ }
+ }
+ end
+
+ with_them do
+ specify do
+ a = described_class.new(
+ name: a_name,
+ type: a_type,
+ purl: a_purl,
+ version: a_version
+ )
+
+ b = described_class.new(
+ name: b_name,
+ type: b_type,
+ purl: b_purl,
+ version: b_version
+ )
+
+ expect(a <=> b).to eq(expected)
+ end
+ end
+ end
+
describe '#ingestible?' do
subject { component.ingestible? }
diff --git a/spec/lib/gitlab/ci/status/stage/factory_spec.rb b/spec/lib/gitlab/ci/status/stage/factory_spec.rb
index 702341a7ea7..34e430202c9 100644
--- a/spec/lib/gitlab/ci/status/stage/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/stage/factory_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::Factory, feature_category: :continuous
end
context 'when stage has manual builds' do
- Ci::HasStatus::BLOCKED_STATUS.each do |core_status|
+ (Ci::HasStatus::BLOCKED_STATUS + ['skipped']).each do |core_status|
context "when status is #{core_status}" do
let(:stage) { create(:ci_stage, pipeline: pipeline, status: core_status) }
diff --git a/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb b/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
index e23645c106b..fc52b7bf9d4 100644
--- a/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
+++ b/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::Ci::Status::Stage::PlayManual, feature_category: :continu
context 'when stage is skipped' do
let(:stage) { create(:ci_stage, status: :skipped) }
- it { is_expected.to be_falsy }
+ it { is_expected.to be_truthy }
end
context 'when stage is manual' do
diff --git a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
index b72a818c16c..460ecbb05d0 100644
--- a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
+++ b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
subject(:service) { described_class.new(statuses) }
describe 'gem version' do
- let(:acceptable_version) { '9.0.0' }
+ let(:acceptable_version) { '9.0.1' }
let(:error_message) do
<<~MESSAGE
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
index e5324560944..0880c556523 100644
--- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
CI_PIPELINE_IID
CI_PIPELINE_SOURCE
CI_PIPELINE_CREATED_AT
+ CI_PIPELINE_NAME
CI_COMMIT_SHA
CI_COMMIT_SHORT_SHA
CI_COMMIT_BEFORE_SHA
@@ -43,6 +44,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
CI_PIPELINE_IID
CI_PIPELINE_SOURCE
CI_PIPELINE_CREATED_AT
+ CI_PIPELINE_NAME
CI_COMMIT_SHA
CI_COMMIT_SHORT_SHA
CI_COMMIT_BEFORE_SHA
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 28c9bdc4c4b..3411426fcdb 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -111,6 +111,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
value: pipeline.source },
{ key: 'CI_PIPELINE_CREATED_AT',
value: pipeline.created_at.iso8601 },
+ { key: 'CI_PIPELINE_NAME',
+ value: pipeline.name },
{ key: 'CI_COMMIT_SHA',
value: job.sha },
{ key: 'CI_COMMIT_SHORT_SHA',
diff --git a/spec/lib/gitlab/ci/variables/downstream/expandable_variable_generator_spec.rb b/spec/lib/gitlab/ci/variables/downstream/expandable_variable_generator_spec.rb
index 5b33527e06c..95d0f089f6d 100644
--- a/spec/lib/gitlab/ci/variables/downstream/expandable_variable_generator_spec.rb
+++ b/spec/lib/gitlab/ci/variables/downstream/expandable_variable_generator_spec.rb
@@ -7,13 +7,19 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::ExpandableVariableGenerator, f
Gitlab::Ci::Variables::Collection.fabricate(
[
{ key: 'REF1', value: 'ref 1' },
- { key: 'REF2', value: 'ref 2' }
+ { key: 'REF2', value: 'ref 2' },
+ { key: 'NESTED_REF1', value: 'nested $REF1' }
]
)
end
+ let(:expand_file_refs) { false }
+
let(:context) do
- Gitlab::Ci::Variables::Downstream::Generator::Context.new(all_bridge_variables: all_bridge_variables)
+ Gitlab::Ci::Variables::Downstream::Generator::Context.new(
+ all_bridge_variables: all_bridge_variables,
+ expand_file_refs: expand_file_refs
+ )
end
subject(:generator) { described_class.new(context) }
@@ -34,5 +40,54 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::ExpandableVariableGenerator, f
expect(generator.for(var)).to match_array([{ key: 'VAR1', value: 'ref 1 ref 2 ' }])
end
end
+
+ context 'when given a variable with nested interpolation' do
+ it 'returns an array containing the expanded variables' do
+ var = Gitlab::Ci::Variables::Collection::Item.fabricate({ key: 'VAR1', value: '$REF1 $REF2 $NESTED_REF1' })
+
+ expect(generator.for(var)).to match_array([{ key: 'VAR1', value: 'ref 1 ref 2 nested $REF1' }])
+ end
+ end
+
+ context 'when given a variable with expansion on a file variable' do
+ let(:all_bridge_variables) do
+ Gitlab::Ci::Variables::Collection.fabricate(
+ [
+ { key: 'REF1', value: 'ref 1' },
+ { key: 'FILE_REF2', value: 'ref 2', file: true },
+ { key: 'NESTED_REF3', value: 'ref 3 $REF1 and $FILE_REF2', file: true }
+ ]
+ )
+ end
+
+ context 'when expand_file_refs is false' do
+ let(:expand_file_refs) { false }
+
+ it 'returns an array containing the unexpanded variable and the file variable dependency' do
+ var = { key: 'VAR1', value: '$REF1 $FILE_REF2 $FILE_REF3 $NESTED_REF3' }
+ var = Gitlab::Ci::Variables::Collection::Item.fabricate(var)
+
+ expected = [
+ { key: 'VAR1', value: 'ref 1 $FILE_REF2 $NESTED_REF3' },
+ { key: 'FILE_REF2', value: 'ref 2', variable_type: :file },
+ { key: 'NESTED_REF3', value: 'ref 3 $REF1 and $FILE_REF2', variable_type: :file }
+ ]
+
+ expect(generator.for(var)).to match_array(expected)
+ end
+ end
+
+ context 'when expand_file_refs is true' do
+ let(:expand_file_refs) { true }
+
+ it 'returns an array containing the expanded variables' do
+ var = { key: 'VAR1', value: '$REF1 $FILE_REF2 $FILE_REF3 $NESTED_REF3' }
+ var = Gitlab::Ci::Variables::Collection::Item.fabricate(var)
+
+ expected = { key: 'VAR1', value: 'ref 1 ref 2 ref 3 $REF1 and $FILE_REF2' }
+ expect(generator.for(var)).to contain_exactly(expected)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb b/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
index 61e8b9a8c4a..cd68b0cdf2b 100644
--- a/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
+++ b/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
@@ -45,6 +45,7 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
variables: bridge_variables,
forward_yaml_variables?: true,
forward_pipeline_variables?: true,
+ expand_file_refs?: false,
yaml_variables: yaml_variables,
pipeline_variables: pipeline_variables,
pipeline_schedule_variables: pipeline_schedule_variables
@@ -81,5 +82,61 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
expect(generator.calculate).to be_empty
end
+
+ context 'with file variable interpolation' do
+ let(:bridge_variables) do
+ Gitlab::Ci::Variables::Collection.fabricate(
+ [
+ { key: 'REF1', value: 'ref 1' },
+ { key: 'FILE_REF3', value: 'ref 3', file: true }
+ ]
+ )
+ end
+
+ let(:yaml_variables) do
+ [{ key: 'INTERPOLATION_VAR', value: 'interpolate $REF1 $REF2 $FILE_REF3 $FILE_REF4' }]
+ end
+
+ let(:pipeline_variables) do
+ [{ key: 'PIPELINE_INTERPOLATION_VAR', value: 'interpolate $REF1 $REF2 $FILE_REF3 $FILE_REF4' }]
+ end
+
+ let(:pipeline_schedule_variables) do
+ [{ key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate $REF1 $REF2 $FILE_REF3 $FILE_REF4' }]
+ end
+
+ context 'when expand_file_refs is true' do
+ before do
+ allow(bridge).to receive(:expand_file_refs?).and_return(true)
+ end
+
+ it 'expands file variables' do
+ expected = [
+ { key: 'INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' },
+ { key: 'PIPELINE_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' },
+ { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' }
+ ]
+
+ expect(generator.calculate).to contain_exactly(*expected)
+ end
+ end
+
+ context 'when expand_file_refs is false' do
+ before do
+ allow(bridge).to receive(:expand_file_refs?).and_return(false)
+ end
+
+ it 'does not expand file variables and adds file variables' do
+ expected = [
+ { key: 'INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
+ { key: 'PIPELINE_INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
+ { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
+ { key: 'FILE_REF3', value: 'ref 3', variable_type: :file }
+ ]
+
+ expect(generator.calculate).to contain_exactly(*expected)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index c4e27d0e420..f8f1d71e773 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -2675,6 +2675,42 @@ module Gitlab
it_behaves_like 'returns errors', 'jobs:test1 dependencies should be an array of strings'
end
+
+ context 'needs with parallel:matrix' do
+ let(:config) do
+ {
+ build1: {
+ stage: 'build',
+ script: 'build',
+ parallel: { matrix: [{ 'PROVIDER': ['aws'], 'STACK': %w[monitoring app1 app2] }] }
+ },
+ test1: {
+ stage: 'test',
+ script: 'test',
+ needs: [{ job: 'build1', parallel: { matrix: [{ 'PROVIDER': ['aws'], 'STACK': ['app1'] }] } }]
+ }
+ }
+ end
+
+ it "does create jobs with valid specification" do
+ expect(subject.builds.size).to eq(4)
+ expect(subject.builds[3]).to eq(
+ stage: "test",
+ stage_idx: 2,
+ name: "test1",
+ only: { refs: %w[branches tags] },
+ options: { script: ["test"] },
+ needs_attributes: [
+ { name: "build1: [aws, app1]", artifacts: true, optional: false }
+ ],
+ when: "on_success",
+ allow_failure: false,
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :dag
+ )
+ end
+ end
end
context 'with when/rules' do
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb
index d03d4f64a0f..56745759c5a 100644
--- a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb
@@ -23,26 +23,8 @@ RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFilesBatch do
expect(batch.artifact_files.count).to eq(2)
expect(batch.lost_and_found.count).to eq(1)
expect(batch.lost_and_found.first.artifact_id).to eq(orphan_artifact.id)
- end
-
- it 'does not mix up job ID and artifact ID' do
- # take maximum ID of both tables to avoid any collision
- max_id = [Ci::Build.maximum(:id), Ci::JobArtifact.maximum(:id)].compact.max.to_i
- job_a = create(:ci_build, id: max_id + 1)
- job_b = create(:ci_build, id: max_id + 2)
- # reuse the build IDs for the job artifact IDs, but swap them
- job_artifact_b = create(:ci_job_artifact, :archive, job: job_b, id: max_id + 1)
- job_artifact_a = create(:ci_job_artifact, :archive, job: job_a, id: max_id + 2)
-
- batch << artifact_path(job_artifact_a)
- batch << artifact_path(job_artifact_b)
-
- job_artifact_b.delete
-
- batch.clean!
-
- expect(File.exist?(job_artifact_a.file.path)).to be_truthy
- expect(File.exist?(job_artifact_b.file.path)).to be_falsey
+ expect(File.exist?(job_artifact.file.path)).to be_truthy
+ expect(File.exist?(orphan_artifact.file.path)).to be_falsey
end
end
diff --git a/spec/lib/gitlab/config/entry/validators_spec.rb b/spec/lib/gitlab/config/entry/validators_spec.rb
index abf3dbacb3d..6fa9f9d0767 100644
--- a/spec/lib/gitlab/config/entry/validators_spec.rb
+++ b/spec/lib/gitlab/config/entry/validators_spec.rb
@@ -102,4 +102,37 @@ RSpec.describe Gitlab::Config::Entry::Validators, feature_category: :pipeline_co
end
end
end
+
+ describe described_class::OnlyOneOfKeysValidator do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:config, :valid_result) do
+ { foo: '1' } | true
+ { foo: '1', bar: '2', baz: '3' } | false
+ { bar: '2' } | true
+ { foo: '1' } | true
+ {} | false
+ { baz: '3' } | false
+ end
+
+ with_them do
+ before do
+ klass.instance_eval do
+ validates :config, only_one_of_keys: %i[foo bar]
+ end
+
+ allow(instance).to receive(:config).and_return(config)
+ end
+
+ it 'validates the instance' do
+ expect(instance.valid?).to be(valid_result)
+
+ unless valid_result
+ expect(instance.errors.messages_for(:config)).to(
+ include "must use exactly one of these keys: foo, bar"
+ )
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/container_repository/tags/cache_spec.rb b/spec/lib/gitlab/container_repository/tags/cache_spec.rb
index 4b8c843eb3a..fcfc8e7a348 100644
--- a/spec/lib/gitlab/container_repository/tags/cache_spec.rb
+++ b/spec/lib/gitlab/container_repository/tags/cache_spec.rb
@@ -81,9 +81,7 @@ RSpec.describe ::Gitlab::ContainerRepository::Tags::Cache, :clean_gitlab_redis_c
::Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:pipelined).and_call_original
- times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
-
- expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
expect(pipeline)
.to receive(:set)
.with(cache_key(tag), rfc3339(tag.created_at), ex: ttl.to_i)
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index b40829d72a0..dd633820ad9 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -2,8 +2,11 @@
require 'spec_helper'
-RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
+RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader, feature_category: :shared do
let(:policy) { ActionDispatch::ContentSecurityPolicy.new }
+ let(:lfs_enabled) { false }
+ let(:proxy_download) { false }
+
let(:csp_config) do
{
enabled: true,
@@ -20,6 +23,32 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
}
end
+ let(:lfs_config) do
+ {
+ enabled: lfs_enabled,
+ remote_directory: 'lfs-objects',
+ connection: object_store_connection_config,
+ direct_upload: false,
+ proxy_download: proxy_download,
+ storage_options: {}
+ }
+ end
+
+ let(:object_store_connection_config) do
+ {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
+ }
+ end
+
+ before do
+ stub_lfs_setting(enabled: lfs_enabled)
+ allow(LfsObjectUploader)
+ .to receive(:object_store_options)
+ .and_return(GitlabSettings::Options.build(lfs_config))
+ end
+
describe '.default_enabled' do
let(:enabled) { described_class.default_enabled }
@@ -29,7 +58,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
context 'when in production' do
before do
- allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+ stub_rails_env('production')
end
it 'is disabled' do
@@ -40,6 +69,16 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
describe '.default_directives' do
let(:directives) { described_class.default_directives }
+ let(:child_src) { directives['child_src'] }
+ let(:connect_src) { directives['connect_src'] }
+ let(:font_src) { directives['font_src'] }
+ let(:frame_src) { directives['frame_src'] }
+ let(:img_src) { directives['img_src'] }
+ let(:media_src) { directives['media_src'] }
+ let(:report_uri) { directives['report_uri'] }
+ let(:script_src) { directives['script_src'] }
+ let(:style_src) { directives['style_src'] }
+ let(:worker_src) { directives['worker_src'] }
it 'returns default directives' do
directive_names = (described_class::DIRECTIVES - ['report_uri'])
@@ -49,68 +88,231 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
expect(directives.has_key?('report_uri')).to be_truthy
- expect(directives['report_uri']).to be_nil
- expect(directives['child_src']).to eq("#{directives['frame_src']} #{directives['worker_src']}")
+ expect(report_uri).to be_nil
+ expect(child_src).to eq("#{frame_src} #{worker_src}")
end
describe 'the images-src directive' do
it 'can be loaded from anywhere' do
- expect(directives['img_src']).to include('http: https:')
+ expect(img_src).to include('http: https:')
end
end
describe 'the media-src directive' do
it 'can be loaded from anywhere' do
- expect(directives['media_src']).to include('http: https:')
+ expect(media_src).to include('http: https:')
end
end
- context 'adds all websocket origins to support Safari' do
+ describe 'Webpack dev server websocket connections' do
+ let(:webpack_dev_server_host) { 'webpack-dev-server.com' }
+ let(:webpack_dev_server_port) { '9999' }
+ let(:webpack_dev_server_https) { true }
+
+ before do
+ stub_config_setting(
+ webpack: { dev_server: {
+ host: webpack_dev_server_host,
+ webpack_dev_server_port: webpack_dev_server_port,
+ https: webpack_dev_server_https
+ } }
+ )
+ end
+
+ context 'when in production' do
+ before do
+ stub_rails_env('production')
+ end
+
+ context 'with secure domain' do
+ it 'does not include webpack dev server in connect-src' do
+ expect(connect_src).not_to include(webpack_dev_server_host)
+ expect(connect_src).not_to include(webpack_dev_server_port)
+ end
+ end
+
+ context 'with insecure domain' do
+ let(:webpack_dev_server_https) { false }
+
+ it 'does not include webpack dev server in connect-src' do
+ expect(connect_src).not_to include(webpack_dev_server_host)
+ expect(connect_src).not_to include(webpack_dev_server_port)
+ end
+ end
+ end
+
+ context 'when in development' do
+ before do
+ stub_rails_env('development')
+ end
+
+ context 'with secure domain' do
+ before do
+ stub_config_setting(host: webpack_dev_server_host, port: webpack_dev_server_port, https: true)
+ end
+
+ it 'includes secure websocket url for webpack dev server in connect-src' do
+ expect(connect_src).to include("wss://#{webpack_dev_server_host}:#{webpack_dev_server_port}")
+ expect(connect_src).not_to include("ws://#{webpack_dev_server_host}:#{webpack_dev_server_port}")
+ end
+ end
+
+ context 'with insecure domain' do
+ before do
+ stub_config_setting(host: webpack_dev_server_host, port: webpack_dev_server_port, https: false)
+ end
+
+ it 'includes insecure websocket url for webpack dev server in connect-src' do
+ expect(connect_src).not_to include("wss://#{webpack_dev_server_host}:#{webpack_dev_server_port}")
+ expect(connect_src).to include("ws://#{webpack_dev_server_host}:#{webpack_dev_server_port}")
+ end
+ end
+ end
+ end
+
+ describe 'Websocket connections' do
it 'with insecure domain' do
stub_config_setting(host: 'example.com', https: false)
- expect(directives['connect_src']).to eq("'self' ws://example.com")
+ expect(connect_src).to eq("'self' ws://example.com")
end
it 'with secure domain' do
stub_config_setting(host: 'example.com', https: true)
- expect(directives['connect_src']).to eq("'self' wss://example.com")
+ expect(connect_src).to eq("'self' wss://example.com")
end
it 'with custom port' do
stub_config_setting(host: 'example.com', port: '1234')
- expect(directives['connect_src']).to eq("'self' ws://example.com:1234")
+ expect(connect_src).to eq("'self' ws://example.com:1234")
end
it 'with custom port and secure domain' do
stub_config_setting(host: 'example.com', https: true, port: '1234')
- expect(directives['connect_src']).to eq("'self' wss://example.com:1234")
+ expect(connect_src).to eq("'self' wss://example.com:1234")
+ end
+
+ it 'when port is included in HTTP_PORTS' do
+ described_class::HTTP_PORTS.each do |port|
+ stub_config_setting(host: 'example.com', https: true, port: port)
+ expect(connect_src).to eq("'self' wss://example.com")
+ end
end
end
- context 'when CDN host is defined' do
+ describe 'LFS connect-src headers' do
+ let(:url_for_provider) { described_class.send(:build_lfs_url) }
+
+ context 'when LFS is enabled' do
+ let(:lfs_enabled) { true }
+
+ context 'and direct downloads are enabled' do
+ let(:provider) { LfsObjectUploader.object_store_options.connection.provider }
+
+ context 'when provider is AWS' do
+ it { expect(provider).to eq('AWS') }
+
+ it { expect(url_for_provider).to be_present }
+
+ it { expect(directives['connect_src']).to include(url_for_provider) }
+ end
+
+ context 'when provider is AzureRM' do
+ let(:object_store_connection_config) do
+ {
+ provider: 'AzureRM',
+ azure_storage_account_name: 'azuretest',
+ azure_storage_access_key: 'ABCD1234'
+ }
+ end
+
+ it { expect(provider).to eq('AzureRM') }
+
+ it { expect(url_for_provider).to be_present }
+
+ it { expect(directives['connect_src']).to include(url_for_provider) }
+ end
+
+ context 'when provider is Google' do
+ let(:object_store_connection_config) do
+ {
+ provider: 'Google',
+ google_project: 'GOOGLE_PROJECT',
+ google_application_default: true
+ }
+ end
+
+ it { expect(provider).to eq('Google') }
+
+ it { expect(url_for_provider).to be_present }
+
+ it { expect(directives['connect_src']).to include(url_for_provider) }
+ end
+ end
+
+ context 'but direct downloads are disabled' do
+ let(:proxy_download) { true }
+
+ it { expect(directives['connect_src']).not_to include(url_for_provider) }
+ end
+ end
+
+ context 'when LFS is disabled' do
+ let(:proxy_download) { true }
+
+ it { expect(directives['connect_src']).not_to include(url_for_provider) }
+ end
+ end
+
+ describe 'CDN connections' do
before do
- stub_config_setting(cdn_host: 'https://cdn.example.com')
+ allow(described_class).to receive(:allow_letter_opener)
+ allow(described_class).to receive(:allow_zuora)
+ allow(described_class).to receive(:allow_framed_gitlab_paths)
+ allow(described_class).to receive(:allow_customersdot)
+ allow(described_class).to receive(:csp_level_3_backport)
+ end
+
+ context 'when CDN host is defined' do
+ let(:cdn_host) { 'https://cdn.example.com' }
+
+ before do
+ stub_config_setting(cdn_host: cdn_host)
+ end
+
+ it 'adds CDN host to CSP' do
+ expect(script_src).to include(cdn_host)
+ expect(style_src).to include(cdn_host)
+ expect(font_src).to include(cdn_host)
+ expect(worker_src).to include(cdn_host)
+ expect(frame_src).to include(cdn_host)
+ end
end
- it 'adds CDN host to CSP' do
- expect(directives['script_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.script_src + " https://cdn.example.com")
- expect(directives['style_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.style_src + " https://cdn.example.com")
- expect(directives['font_src']).to eq("'self' https://cdn.example.com")
- expect(directives['worker_src']).to eq('http://localhost/assets/ blob: data: https://cdn.example.com')
- expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " https://cdn.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/")
+ context 'when CDN host is undefined' do
+ before do
+ stub_config_setting(cdn_host: nil)
+ end
+
+ it 'does not include CDN host in CSP' do
+ expect(script_src).to eq(::Gitlab::ContentSecurityPolicy::Directives.script_src)
+ expect(style_src).to eq(::Gitlab::ContentSecurityPolicy::Directives.style_src)
+ expect(font_src).to eq("'self'")
+ expect(worker_src).to eq("http://localhost/assets/ blob: data:")
+ expect(frame_src).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src)
+ end
end
end
describe 'Zuora directives' do
context 'when on SaaS', :saas do
it 'adds Zuora host to CSP' do
- expect(directives['frame_src']).to include('https://*.zuora.com/apps/PublicHostedPageLite.do')
+ expect(frame_src).to include('https://*.zuora.com/apps/PublicHostedPageLite.do')
end
end
context 'when is not Gitlab.com?' do
it 'does not add Zuora host to CSP' do
- expect(directives['frame_src']).not_to include('https://*.zuora.com/apps/PublicHostedPageLite.do')
+ expect(frame_src).not_to include('https://*.zuora.com/apps/PublicHostedPageLite.do')
end
end
end
@@ -131,7 +333,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'adds legacy sentry path to CSP' do
- expect(directives['connect_src']).to eq("'self' ws://gitlab.example.com dummy://legacy-sentry.example.com")
+ expect(connect_src).to eq("'self' ws://gitlab.example.com dummy://legacy-sentry.example.com")
end
end
@@ -143,7 +345,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'adds new sentry path to CSP' do
- expect(directives['connect_src']).to eq("'self' ws://gitlab.example.com dummy://sentry.example.com")
+ expect(connect_src).to eq("'self' ws://gitlab.example.com dummy://sentry.example.com")
end
end
@@ -159,11 +361,22 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'config is backwards compatible, does not add sentry path to CSP' do
- expect(directives['connect_src']).to eq("'self' ws://gitlab.example.com")
+ expect(connect_src).to eq("'self' ws://gitlab.example.com")
end
end
context 'when legacy sentry and sentry are both configured' do
+ let(:connect_src_expectation) do
+ # rubocop:disable Lint/PercentStringArray
+ %w[
+ 'self'
+ ws://gitlab.example.com
+ dummy://legacy-sentry.example.com
+ dummy://sentry.example.com
+ ].join(' ')
+ # rubocop:enable Lint/PercentStringArray
+ end
+
before do
allow(Gitlab.config.sentry).to receive(:enabled).and_return(true)
allow(Gitlab.config.sentry).to receive(:clientside_dsn).and_return(legacy_dsn)
@@ -173,24 +386,57 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'adds both sentry paths to CSP' do
- expect(directives['connect_src']).to eq("'self' ws://gitlab.example.com dummy://legacy-sentry.example.com dummy://sentry.example.com")
+ expect(connect_src).to eq(connect_src_expectation)
end
end
end
- context 'when CUSTOMER_PORTAL_URL is set' do
- let(:customer_portal_url) { 'https://customers.example.com' }
+ describe 'Customer portal frames' do
+ context 'when CUSTOMER_PORTAL_URL is set' do
+ let(:customer_portal_url) { 'https://customers.example.com' }
+ let(:frame_src_expectation) do
+ [
+ ::Gitlab::ContentSecurityPolicy::Directives.frame_src,
+ 'http://localhost/admin/',
+ 'http://localhost/assets/',
+ 'http://localhost/-/speedscope/index.html',
+ 'http://localhost/-/sandbox/',
+ customer_portal_url
+ ].join(' ')
+ end
- before do
- stub_env('CUSTOMER_PORTAL_URL', customer_portal_url)
+ before do
+ stub_env('CUSTOMER_PORTAL_URL', customer_portal_url)
+ end
+
+ it 'adds CUSTOMER_PORTAL_URL to CSP' do
+ expect(frame_src).to eq(frame_src_expectation)
+ end
end
- it 'adds CUSTOMER_PORTAL_URL to CSP' do
- expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/ #{customer_portal_url}")
+ context 'when CUSTOMER_PORTAL_URL is blank' do
+ let(:customer_portal_url) { '' }
+ let(:frame_src_expectation) do
+ [
+ ::Gitlab::ContentSecurityPolicy::Directives.frame_src,
+ 'http://localhost/admin/',
+ 'http://localhost/assets/',
+ 'http://localhost/-/speedscope/index.html',
+ 'http://localhost/-/sandbox/'
+ ].join(' ')
+ end
+
+ before do
+ stub_env('CUSTOMER_PORTAL_URL', customer_portal_url)
+ end
+
+ it 'adds CUSTOMER_PORTAL_URL to CSP' do
+ expect(frame_src).to eq(frame_src_expectation)
+ end
end
end
- context 'letter_opener application URL' do
+ describe 'letter_opener application URL' do
let(:gitlab_url) { 'http://gitlab.example.com' }
let(:letter_opener_url) { "#{gitlab_url}/rails/letter_opener/" }
@@ -200,21 +446,21 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
context 'when in production' do
before do
- allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+ stub_rails_env('production')
end
it 'does not add letter_opener to CSP' do
- expect(directives['frame_src']).not_to include(letter_opener_url)
+ expect(frame_src).not_to include(letter_opener_url)
end
end
context 'when in development' do
before do
- allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
+ stub_rails_env('development')
end
it 'adds letter_opener to CSP' do
- expect(directives['frame_src']).to include(letter_opener_url)
+ expect(frame_src).to include(letter_opener_url)
end
end
end
@@ -234,7 +480,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'does not add Snowplow Micro URL to connect-src' do
- expect(directives['connect_src']).not_to include(snowplow_micro_url)
+ expect(connect_src).not_to include(snowplow_micro_url)
end
end
@@ -244,7 +490,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'adds Snowplow Micro URL with trailing slash to connect-src' do
- expect(directives['connect_src']).to match(Regexp.new(snowplow_micro_url))
+ expect(connect_src).to match(Regexp.new(snowplow_micro_url))
end
context 'when not enabled using config' do
@@ -253,7 +499,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'does not add Snowplow Micro URL to connect-src' do
- expect(directives['connect_src']).not_to include(snowplow_micro_url)
+ expect(connect_src).not_to include(snowplow_micro_url)
end
end
@@ -262,8 +508,18 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
stub_env('REVIEW_APPS_ENABLED', 'true')
end
- it 'adds gitlab-org/gitlab merge requests API endpoint to CSP' do
- expect(directives['connect_src']).to include('https://gitlab.com/api/v4/projects/278964/merge_requests/')
+ it "includes review app's merge requests API endpoint in the CSP" do
+ expect(connect_src).to include('https://gitlab.com/api/v4/projects/278964/merge_requests/')
+ end
+ end
+
+ context 'when REVIEW_APPS_ENABLED is blank' do
+ before do
+ stub_env('REVIEW_APPS_ENABLED', '')
+ end
+
+ it "does not include review app's merge requests API endpoint in the CSP" do
+ expect(connect_src).not_to include('https://gitlab.com/api/v4/projects/278964/merge_requests/')
end
end
end
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index 7cd0af0dcec..66890315ee8 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -53,7 +53,9 @@ RSpec.describe Gitlab::DataBuilder::Build, feature_category: :integrations do
it { expect(data[:runner][:description]).to eq(ci_build.runner.description) }
it { expect(data[:runner][:runner_type]).to eq(ci_build.runner.runner_type) }
it { expect(data[:runner][:is_shared]).to eq(ci_build.runner.instance_type?) }
+ it { expect(data[:project]).to eq(ci_build.project.hook_attrs(backward: false)) }
it { expect(data[:environment]).to be_nil }
+ it { expect(data[:source_pipeline]).to be_nil }
it 'does not exceed number of expected queries' do
ci_build # Make sure the Ci::Build model is created before recording.
@@ -63,7 +65,7 @@ RSpec.describe Gitlab::DataBuilder::Build, feature_category: :integrations do
described_class.build(b) # Don't use ci_build variable here since it has all associations loaded into memory
end
- expect(control.count).to eq(14)
+ expect(control.count).to eq(16)
end
context 'commit author_url' do
@@ -98,5 +100,33 @@ RSpec.describe Gitlab::DataBuilder::Build, feature_category: :integrations do
it { expect(data[:environment][:action]).to eq(ci_build.environment_action) }
end
end
+
+ context 'when the build job has an upstream' do
+ let(:source_pipeline_attrs) { data[:source_pipeline] }
+
+ shared_examples 'source pipeline attributes' do
+ it 'has source pipeline attributes', :aggregate_failures do
+ expect(source_pipeline_attrs[:pipeline_id]).to eq upstream_pipeline.id
+ expect(source_pipeline_attrs[:job_id]).to eq pipeline.reload.source_bridge.id
+ expect(source_pipeline_attrs[:project][:id]).to eq upstream_pipeline.project.id
+ expect(source_pipeline_attrs[:project][:web_url]).to eq upstream_pipeline.project.web_url
+ expect(source_pipeline_attrs[:project][:path_with_namespace]).to eq upstream_pipeline.project.full_path
+ end
+ end
+
+ context 'in same project' do
+ let_it_be(:upstream_pipeline) { create(:ci_pipeline, upstream_of: pipeline, project: ci_build.project) }
+
+ it_behaves_like 'source pipeline attributes'
+ end
+
+ context 'in different project' do
+ let_it_be(:upstream_pipeline) { create(:ci_pipeline, upstream_of: pipeline) }
+
+ it_behaves_like 'source pipeline attributes'
+
+ it { expect(source_pipeline_attrs[:project][:id]).not_to eq pipeline.project.id }
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index 82ec3e791a4..bbcfa1973ea 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment, feature_category: :continuous_de
subject(:data) { described_class.build(deployment, 'created', Time.current) }
- before(:all) do
+ before_all do
project.repository.remove
end
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment, feature_category: :continuous_de
subject(:data) { described_class.build(deployment, 'created', Time.current) }
- before(:all) do
+ before_all do
deployment.user = nil
end
diff --git a/spec/lib/gitlab/data_builder/issuable_spec.rb b/spec/lib/gitlab/data_builder/issuable_spec.rb
index 455800a3f7d..22c0eb1c7f9 100644
--- a/spec/lib/gitlab/data_builder/issuable_spec.rb
+++ b/spec/lib/gitlab/data_builder/issuable_spec.rb
@@ -4,6 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::DataBuilder::Issuable do
let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:reusable_project) { create(:project, :repository, group: group) }
# This shared example requires a `builder` and `user` variable
shared_examples 'issuable hook data' do |kind, hook_data_issuable_builder_class|
@@ -96,17 +98,17 @@ RSpec.describe Gitlab::DataBuilder::Issuable do
describe '#build' do
it_behaves_like 'issuable hook data', 'issue', Gitlab::HookData::IssueBuilder do
- let(:issuable) { create(:issue, description: 'A description') }
+ let_it_be(:issuable) { create(:issue, description: 'A description', project: reusable_project) }
let(:builder) { described_class.new(issuable) }
end
it_behaves_like 'issuable hook data', 'merge_request', Gitlab::HookData::MergeRequestBuilder do
- let(:issuable) { create(:merge_request, description: 'A description') }
+ let_it_be(:issuable) { create(:merge_request, description: 'A description', source_project: reusable_project) }
let(:builder) { described_class.new(issuable) }
end
context 'issue is assigned' do
- let(:issue) { create(:issue, assignees: [user]) }
+ let(:issue) { create(:issue, assignees: [user], project: reusable_project) }
let(:data) { described_class.new(issue).build(user: user) }
it 'returns correct hook data' do
@@ -117,8 +119,21 @@ RSpec.describe Gitlab::DataBuilder::Issuable do
end
end
+ context 'when issuable is a group level work item' do
+ let(:work_item) { create(:work_item, namespace: group, description: 'work item description') }
+
+ it 'returns correct hook data', :aggregate_failures do
+ data = described_class.new(work_item).build(user: user)
+
+ expect(data[:object_kind]).to eq('work_item')
+ expect(data[:event_type]).to eq('work_item')
+ expect(data.dig(:object_attributes, :id)).to eq(work_item.id)
+ expect(data.dig(:object_attributes, :iid)).to eq(work_item.iid)
+ end
+ end
+
context 'merge_request is assigned' do
- let(:merge_request) { create(:merge_request, assignees: [user]) }
+ let(:merge_request) { create(:merge_request, assignees: [user], source_project: reusable_project) }
let(:data) { described_class.new(merge_request).build(user: user) }
it 'returns correct hook data' do
@@ -129,7 +144,7 @@ RSpec.describe Gitlab::DataBuilder::Issuable do
end
context 'merge_request is assigned reviewers' do
- let(:merge_request) { create(:merge_request, reviewers: [user]) }
+ let(:merge_request) { create(:merge_request, reviewers: [user], source_project: reusable_project) }
let(:data) { described_class.new(merge_request).build(user: user) }
it 'returns correct hook data' do
@@ -139,7 +154,7 @@ RSpec.describe Gitlab::DataBuilder::Issuable do
end
context 'when merge_request does not have reviewers and assignees' do
- let(:merge_request) { create(:merge_request) }
+ let(:merge_request) { create(:merge_request, source_project: reusable_project) }
let(:data) { described_class.new(merge_request).build(user: user) }
it 'returns correct hook data' do
diff --git a/spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb b/spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb
index 52fbf6d2f9b..02b84085cc4 100644
--- a/spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb
+++ b/spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb
@@ -80,12 +80,16 @@ RSpec.describe Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValida
it { expect(described_class.constraint_type_exists?).to be_truthy }
it 'always asks the database' do
- control = ActiveRecord::QueryRecorder.new(skip_schema_queries: false) do
+ control1 = ActiveRecord::QueryRecorder.new(skip_schema_queries: false) do
described_class.constraint_type_exists?
end
- expect(control.count).to be >= 1
- expect { described_class.constraint_type_exists? }.to issue_same_number_of_queries_as(control)
+ control2 = ActiveRecord::QueryRecorder.new(skip_schema_queries: false) do
+ described_class.constraint_type_exists?
+ end
+
+ expect(control1.count).to eq(1)
+ expect(control2.count).to eq(1)
end
end
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 53f8fe3dcd2..89652b81fde 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::BatchCount do
let_it_be(:fallback) { ::Gitlab::Database::BatchCounter::FALLBACK }
let_it_be(:small_batch_size) { calculate_batch_size(::Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE) }
+ let_it_be(:max_allowed_loops) { ::Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS }
let(:model) { Issue }
let(:column) { :author_id }
@@ -34,7 +35,7 @@ RSpec.describe Gitlab::Database::BatchCount do
end
it 'returns fallback if loops more than allowed' do
- large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * default_batch_size + 1
+ large_finish = max_allowed_loops * default_batch_size + 1
expect(described_class.public_send(method, *args, start: 1, finish: large_finish)).to eq(fallback)
end
@@ -81,6 +82,7 @@ RSpec.describe Gitlab::Database::BatchCount do
relation: model.table_name,
operation: operation,
operation_args: operation_args,
+ max_allowed_loops: max_allowed_loops,
start: 0,
mode: mode,
query: batch_count_query,
diff --git a/spec/lib/gitlab/database/bump_sequences_spec.rb b/spec/lib/gitlab/database/bump_sequences_spec.rb
new file mode 100644
index 00000000000..db420123350
--- /dev/null
+++ b/spec/lib/gitlab/database/bump_sequences_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BumpSequences, feature_category: :cell, query_analyzers: false do
+ let!(:gitlab_schema) { :gitlab_main_cell }
+ let!(:increment_by) { 1000 }
+
+ let!(:main_cell_sequence_name) { 'namespaces_id_seq' }
+ let!(:main_sequence_name) { 'vulnerabilities_id_seq' }
+ let!(:main_clusterwide_sequence_name) { 'users_id_seq' }
+ let!(:ci_sequence_name) { 'ci_build_needs_id_seq' }
+
+ # This is just to make sure that all of the sequences start with `is_called=True`
+ # which means that the next call to nextval() is going to increment the sequence.
+ # To give predictable test results.
+ before do
+ ApplicationRecord.connection.select_value("select nextval($1)", nil, [main_cell_sequence_name])
+ ApplicationRecord.connection.select_value("select nextval($1)", nil, [main_sequence_name])
+ ApplicationRecord.connection.select_value("select nextval($1)", nil, [main_clusterwide_sequence_name])
+ ApplicationRecord.connection.select_value("select nextval($1)", nil, [ci_sequence_name])
+ end
+
+ describe '#execute' do
+ subject { described_class.new(gitlab_schema, increment_by).execute }
+
+ context 'when bumping the sequences' do
+ it 'changes sequences by the passed argument `increase_by` value on the main database' do
+ expect do
+ subject
+ end.to change {
+ last_value_of_sequence(ApplicationRecord.connection, main_cell_sequence_name)
+ }.by(1001) # the +1 is because the sequence has is_called = true
+ end
+
+ it 'will still increase the value of sequences that have is_called = False' do
+ # see `is_called`: https://www.postgresql.org/docs/12/functions-sequence.html
+ # choosing a new arbitrary value for the sequence
+ new_value = last_value_of_sequence(ApplicationRecord.connection, main_cell_sequence_name) + 1000
+ ApplicationRecord.connection.select_value(
+ "select setval($1, $2, false)", nil, [main_cell_sequence_name, new_value]
+ )
+ expect do
+ subject
+ end.to change {
+ last_value_of_sequence(ApplicationRecord.connection, main_cell_sequence_name)
+ }.by(1000)
+ end
+
+ it 'resets the INCREMENT value of the sequences back to 1 for the following calls to nextval()' do
+ subject
+ value_1 = ApplicationRecord.connection.select_value("select nextval($1)", nil, [main_cell_sequence_name])
+ value_2 = ApplicationRecord.connection.select_value("select nextval($1)", nil, [main_cell_sequence_name])
+ expect(value_2 - value_1).to eq(1)
+ end
+
+ it 'increments the sequence of the tables in the given schema, but not in other schemas' do
+ expect do
+ subject
+ end.to change {
+ last_value_of_sequence(ApplicationRecord.connection, main_cell_sequence_name)
+ }.by(1001)
+ .and change {
+ last_value_of_sequence(ApplicationRecord.connection, main_sequence_name)
+ }.by(0)
+ .and change {
+ last_value_of_sequence(ApplicationRecord.connection, main_clusterwide_sequence_name)
+ }.by(0)
+ .and change {
+ last_value_of_sequence(ApplicationRecord.connection, ci_sequence_name)
+ }.by(0)
+ end
+ end
+ end
+
+ private
+
+ def last_value_of_sequence(connection, sequence_name)
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/408220') do
+ connection.select_value("select last_value from #{sequence_name}")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/click_house_client_spec.rb b/spec/lib/gitlab/database/click_house_client_spec.rb
index 502d879bf6a..50086795b2b 100644
--- a/spec/lib/gitlab/database/click_house_client_spec.rb
+++ b/spec/lib/gitlab/database/click_house_client_spec.rb
@@ -12,25 +12,12 @@ RSpec.describe 'ClickHouse::Client', feature_category: :database do
end
describe 'when click_house spec tag is added', :click_house do
- around do |example|
- with_net_connect_allowed do
- example.run
- end
- end
-
it 'has a ClickHouse database configured' do
databases = ClickHouse::Client.configuration.databases
expect(databases).not_to be_empty
end
- it 'returns data from the DB via `select` method' do
- result = ClickHouse::Client.select("SELECT 1 AS value", :main)
-
- # returns JSON if successful. Otherwise error
- expect(result).to eq([{ 'value' => 1 }])
- end
-
it 'does not return data via `execute` method' do
result = ClickHouse::Client.execute("SELECT 1 AS value", :main)
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 1c864239ae6..14ff1a462e3 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -148,7 +148,7 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
subject { described_class.table_schemas!(tables) }
it 'returns the matched schemas' do
- expect(subject).to match_array %i[gitlab_main gitlab_ci].to_set
+ expect(subject).to match_array %i[gitlab_main_cell gitlab_main gitlab_ci].to_set
end
context 'when one of the tables does not have a matching table schema' do
diff --git a/spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb b/spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb
index e0e3a0a7c23..9382074f584 100644
--- a/spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb
@@ -3,150 +3,27 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::HealthStatus::Indicators::PatroniApdex, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength
- let(:schema) { :main }
- let(:connection) { Gitlab::Database.database_base_models[schema].connection }
-
- around do |example|
- Gitlab::Database::SharedModel.using_connection(connection) do
- example.run
- end
- end
-
- describe '#evaluate' do
- let(:prometheus_url) { 'http://thanos:9090' }
- let(:prometheus_config) { [prometheus_url, { allow_local_requests: true, verify: true }] }
-
- let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) }
-
- let(:context) do
- Gitlab::Database::HealthStatus::Context.new(
- described_class,
- connection,
- ['users'],
- gitlab_schema
- )
- end
-
- let(:gitlab_schema) { "gitlab_#{schema}" }
- let(:client_ready) { true }
- let(:database_apdex_sli_query_main) { 'Apdex query for main' }
- let(:database_apdex_sli_query_ci) { 'Apdex query for ci' }
- let(:database_apdex_slo_main) { 0.99 }
- let(:database_apdex_slo_ci) { 0.95 }
- let(:database_apdex_settings) do
+ it_behaves_like 'Prometheus Alert based health indicator' do
+ let(:feature_flag) { :batched_migrations_health_status_patroni_apdex }
+ let(:sli_query_main) { 'Apdex query for main' }
+ let(:sli_query_ci) { 'Apdex query for ci' }
+ let(:slo_main) { 0.99 }
+ let(:slo_ci) { 0.95 }
+ let(:sli_with_good_condition) { { main: 0.991, ci: 0.951 } }
+ let(:sli_with_bad_condition) { { main: 0.989, ci: 0.949 } }
+
+ let(:prometheus_alert_db_indicators_settings) do
{
prometheus_api_url: prometheus_url,
apdex_sli_query: {
- main: database_apdex_sli_query_main,
- ci: database_apdex_sli_query_ci
+ main: sli_query_main,
+ ci: sli_query_ci
},
apdex_slo: {
- main: database_apdex_slo_main,
- ci: database_apdex_slo_ci
+ main: slo_main,
+ ci: slo_ci
}
}
end
-
- subject(:evaluate) { described_class.new(context).evaluate }
-
- before do
- stub_application_setting(database_apdex_settings: database_apdex_settings)
-
- allow(Gitlab::PrometheusClient).to receive(:new).with(*prometheus_config).and_return(prometheus_client)
- allow(prometheus_client).to receive(:ready?).and_return(client_ready)
- end
-
- shared_examples 'Patroni Apdex Evaluator' do |schema|
- context "with #{schema} schema" do
- let(:schema) { schema }
- let(:apdex_slo_above_sli) { { main: 0.991, ci: 0.951 } }
- let(:apdex_slo_below_sli) { { main: 0.989, ci: 0.949 } }
-
- it 'returns NoSignal signal in case the feature flag is disabled' do
- stub_feature_flags(batched_migrations_health_status_patroni_apdex: false)
-
- expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
- expect(evaluate.reason).to include('indicator disabled')
- end
-
- context 'without database_apdex_settings' do
- let(:database_apdex_settings) { nil }
-
- it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
- expect(evaluate.reason).to include('Patroni Apdex Settings not configured')
- end
- end
-
- context 'when Prometheus client is not ready' do
- let(:client_ready) { false }
-
- it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
- expect(evaluate.reason).to include('Prometheus client is not ready')
- end
- end
-
- context 'when apdex SLI query is not configured' do
- let(:"database_apdex_sli_query_#{schema}") { nil }
-
- it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
- expect(evaluate.reason).to include('Apdex SLI query is not configured')
- end
- end
-
- context 'when slo is not configured' do
- let(:"database_apdex_slo_#{schema}") { nil }
-
- it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
- expect(evaluate.reason).to include('Apdex SLO is not configured')
- end
- end
-
- it 'returns Normal signal when Patroni apdex SLI is above SLO' do
- expect(prometheus_client).to receive(:query)
- .with(send("database_apdex_sli_query_#{schema}"))
- .and_return([{ "value" => [1662423310.878, apdex_slo_above_sli[schema]] }])
- expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Normal)
- expect(evaluate.reason).to include('Patroni service apdex is above SLO')
- end
-
- it 'returns Stop signal when Patroni apdex is below SLO' do
- expect(prometheus_client).to receive(:query)
- .with(send("database_apdex_sli_query_#{schema}"))
- .and_return([{ "value" => [1662423310.878, apdex_slo_below_sli[schema]] }])
- expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
- expect(evaluate.reason).to include('Patroni service apdex is below SLO')
- end
-
- context 'when Patroni apdex can not be calculated' do
- where(:result) do
- [
- nil,
- [],
- [{}],
- [{ 'value' => 1 }],
- [{ 'value' => [1] }]
- ]
- end
-
- with_them do
- it 'returns Unknown signal' do
- expect(prometheus_client).to receive(:query).and_return(result)
- expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
- expect(evaluate.reason).to include('Patroni service apdex can not be calculated')
- end
- end
- end
- end
- end
-
- Gitlab::Database.database_base_models.each do |database_base_model, connection|
- next unless connection.present?
-
- it_behaves_like 'Patroni Apdex Evaluator', database_base_model.to_sym
- end
end
end
diff --git a/spec/lib/gitlab/database/health_status/indicators/prometheus_alert_indicator_spec.rb b/spec/lib/gitlab/database/health_status/indicators/prometheus_alert_indicator_spec.rb
new file mode 100644
index 00000000000..393bbf6beff
--- /dev/null
+++ b/spec/lib/gitlab/database/health_status/indicators/prometheus_alert_indicator_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::HealthStatus::Indicators::PrometheusAlertIndicator, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+
+ let(:context) do
+ Gitlab::Database::HealthStatus::Context.new(
+ described_class,
+ connection,
+ ['users'],
+ :gitlab_main
+ )
+ end
+
+ let(:invalid_indicator) do
+ Class.new(described_class).new(context)
+ end
+
+ let(:valid_indicator) do
+ Class.new(described_class) do
+ def enabled?
+ true
+ end
+
+ def slo_key
+ :test_indicator_slo
+ end
+
+ def sli_key
+ :test_indicator_sli
+ end
+ end.new(context)
+ end
+
+ describe '#enabled?' do
+ it 'throws NotImplementedError for invalid indicator' do
+ expect { invalid_indicator.send(:enabled?) }.to raise_error(NotImplementedError)
+ end
+
+ it 'returns the defined value for valid indicator' do
+ expect(valid_indicator.send(:enabled?)).to eq(true)
+ end
+ end
+
+ describe '#slo_key' do
+ it 'throws NotImplementedError for invalid indicator' do
+ expect { invalid_indicator.send(:slo_key) }.to raise_error(NotImplementedError)
+ end
+
+ it 'returns the defined value for valid indicator' do
+ expect(valid_indicator.send(:slo_key)).to eq(:test_indicator_slo)
+ end
+ end
+
+ describe '#sli_key' do
+ it 'throws NotImplementedError for invalid indicator' do
+ expect { invalid_indicator.send(:sli_key) }.to raise_error(NotImplementedError)
+ end
+
+ it 'returns the defined value for valid indicator' do
+ expect(valid_indicator.send(:sli_key)).to eq(:test_indicator_sli)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/health_status/indicators/wal_rate_spec.rb b/spec/lib/gitlab/database/health_status/indicators/wal_rate_spec.rb
new file mode 100644
index 00000000000..d6fe7f0cead
--- /dev/null
+++ b/spec/lib/gitlab/database/health_status/indicators/wal_rate_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::HealthStatus::Indicators::WalRate, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength
+ it_behaves_like 'Prometheus Alert based health indicator' do
+ let(:feature_flag) { :db_health_check_wal_rate }
+ let(:sli_query_main) { 'WAL rate query for main' }
+ let(:sli_query_ci) { 'WAL rate query for ci' }
+ let(:slo_main) { 100 }
+ let(:slo_ci) { 100 }
+ let(:sli_with_good_condition) { { main: 70, ci: 70 } }
+ let(:sli_with_bad_condition) { { main: 120, ci: 120 } }
+
+ let(:prometheus_alert_db_indicators_settings) do
+ {
+ prometheus_api_url: prometheus_url,
+ wal_rate_sli_query: {
+ main: sli_query_main,
+ ci: sli_query_ci
+ },
+ wal_rate_slo: {
+ main: slo_main,
+ ci: slo_ci
+ }
+ }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/health_status_spec.rb b/spec/lib/gitlab/database/health_status_spec.rb
index 4a2b9eee45a..95f74929b84 100644
--- a/spec/lib/gitlab/database/health_status_spec.rb
+++ b/spec/lib/gitlab/database/health_status_spec.rb
@@ -21,9 +21,11 @@ RSpec.describe Gitlab::Database::HealthStatus, feature_category: :database do
let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable }
let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog }
let(:patroni_apdex_indicator_class) { health_status::Indicators::PatroniApdex }
+ let(:wal_rate_indicator_class) { health_status::Indicators::WalRate }
let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) }
let(:wal_indicator) { instance_double(wal_indicator_class) }
let(:patroni_apdex_indicator) { instance_double(patroni_apdex_indicator_class) }
+ let(:wal_rate_indicator) { instance_double(wal_rate_indicator_class) }
before do
allow(autovacuum_indicator_class).to receive(:new).with(health_context).and_return(autovacuum_indicator)
@@ -39,11 +41,17 @@ RSpec.describe Gitlab::Database::HealthStatus, feature_category: :database do
expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal)
expect(wal_indicator_class).to receive(:new).with(health_context).and_return(wal_indicator)
expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal)
- expect(patroni_apdex_indicator_class).to receive(:new).with(health_context)
- .and_return(patroni_apdex_indicator)
+ expect(patroni_apdex_indicator_class).to receive(:new).with(health_context).and_return(patroni_apdex_indicator)
expect(patroni_apdex_indicator).to receive(:evaluate).and_return(not_available_signal)
-
- expect(evaluate).to contain_exactly(normal_signal, not_available_signal, not_available_signal)
+ expect(wal_rate_indicator_class).to receive(:new).with(health_context).and_return(wal_rate_indicator)
+ expect(wal_rate_indicator).to receive(:evaluate).and_return(not_available_signal)
+
+ expect(evaluate).to contain_exactly(
+ normal_signal,
+ not_available_signal,
+ not_available_signal,
+ not_available_signal
+ )
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb b/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb
index cee5f54bd6a..1ff157b51d4 100644
--- a/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb
@@ -3,7 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::MigrationHelpers::ConvertToBigint, feature_category: :database do
- describe 'com_or_dev_or_test_but_not_jh?' do
+ let(:migration) do
+ Class
+ .new
+ .include(described_class)
+ .include(Gitlab::Database::MigrationHelpers)
+ .new
+ end
+
+ describe '#com_or_dev_or_test_but_not_jh?' do
using RSpec::Parameterized::TableSyntax
where(:dot_com, :dev_or_test, :jh, :expectation) do
@@ -23,13 +31,46 @@ RSpec.describe Gitlab::Database::MigrationHelpers::ConvertToBigint, feature_cate
allow(Gitlab).to receive(:dev_or_test_env?).and_return(dev_or_test)
allow(Gitlab).to receive(:jh?).and_return(jh)
- migration = Class
- .new
- .include(Gitlab::Database::MigrationHelpers::ConvertToBigint)
- .new
-
expect(migration.com_or_dev_or_test_but_not_jh?).to eq(expectation)
end
end
end
+
+ describe '#temp_column_removed?' do
+ it 'return true when column is not present' do
+ expect(migration).to receive(:column_exists?).with('test_table', 'id_convert_to_bigint').and_return(false)
+
+ expect(migration.temp_column_removed?(:test_table, :id)).to eq(true)
+ end
+
+ it 'return false when column present' do
+ expect(migration).to receive(:column_exists?).with('test_table', 'id_convert_to_bigint').and_return(true)
+
+ expect(migration.temp_column_removed?(:test_table, :id)).to eq(false)
+ end
+ end
+
+ describe '#columns_swapped?' do
+ it 'returns true if columns are already swapped' do
+ columns = [
+ Struct.new(:name, :sql_type).new('id', 'bigint'),
+ Struct.new(:name, :sql_type).new('id_convert_to_bigint', 'integer')
+ ]
+
+ expect(migration).to receive(:columns).with('test_table').and_return(columns)
+
+ expect(migration.columns_swapped?(:test_table, :id)).to eq(true)
+ end
+
+ it 'returns false if columns are not yet swapped' do
+ columns = [
+ Struct.new(:name, :sql_type).new('id', 'integer'),
+ Struct.new(:name, :sql_type).new('id_convert_to_bigint', 'bigint')
+ ]
+
+ expect(migration).to receive(:columns).with('test_table').and_return(columns)
+
+ expect(migration.columns_swapped?(:test_table, :id)).to eq(false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index b1e8301d69f..f3c181db3aa 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -2867,4 +2867,43 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
it { is_expected.to be_falsey }
end
end
+
+ describe '#remove_column_default' do
+ let(:test_table) { :_test_defaults_table }
+ let(:drop_default_statement) do
+ /ALTER TABLE "#{test_table}" ALTER COLUMN "#{column_name}" SET DEFAULT NULL/
+ end
+
+ subject(:recorder) do
+ ActiveRecord::QueryRecorder.new do
+ model.remove_column_default(test_table, column_name)
+ end
+ end
+
+ before do
+ model.create_table(test_table) do |t|
+ t.integer :int_with_default, default: 100
+ t.integer :int_with_default_function, default: -> { 'ceil(random () * 100)::int' }
+ t.integer :int_without_default
+ end
+ end
+
+ context 'with default values' do
+ let(:column_name) { :int_with_default }
+
+ it { expect(recorder.log).to include(drop_default_statement) }
+ end
+
+ context 'with default functions' do
+ let(:column_name) { :int_with_default_function }
+
+ it { expect(recorder.log).to include(drop_default_statement) }
+ end
+
+ context 'without any defaults' do
+ let(:column_name) { :int_without_default }
+
+ it { expect(recorder.log).to be_empty }
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index 82f77d2bb19..158497b1fef 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -473,7 +473,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
"\n\n" \
"For more information, check the documentation" \
"\n\n" \
- "\thttps://docs.gitlab.com/ee/user/admin_area/monitoring/background_migrations.html#database-migrations-failing-because-of-batched-background-migration-not-finished"
+ "\thttps://docs.gitlab.com/ee/update/background_migrations.html#database-migrations-failing-because-of-batched-background-migration-not-finished"
end
it 'does not raise error when migration exists and is marked as finished' do
diff --git a/spec/lib/gitlab/database/migrations/squasher_spec.rb b/spec/lib/gitlab/database/migrations/squasher_spec.rb
new file mode 100644
index 00000000000..e7ab5873f73
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/squasher_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+RSpec.describe Gitlab::Database::Migrations::Squasher, feature_category: :database do
+ let(:git_output) do
+ <<~FILES
+ db/migrate/misplaced.txt
+ db/migrate/20221003041700_init_schema.rb
+ db/migrate/20221003041800_foo_migrate.rb
+ db/migrate/20221003041900_foo_migrate_two.rb
+ db/migrate/20221003042000_add_name_to_widgets.rb
+ db/migrate/20221003042200_add_enterprise.rb
+ db/post_migrate/20221003042100_post_migrate.rb
+ FILES
+ end
+
+ let(:spec_files) do
+ [
+ 'spec/migrations/add_name_to_widgets_spec.rb',
+ 'spec/migrations/20221003041800_foo_migrate_spec.rb',
+ 'spec/migrations/foo_migrate_three_spec.rb',
+ 'spec/migrations/foo_migrate_two_spec.rb',
+ 'spec/migrations/post_migrate_spec.rb'
+ ]
+ end
+
+ let(:ee_spec_files) do
+ [
+ 'ee/spec/migrations/add_enterprise_spec.rb'
+ ]
+ end
+
+ let(:expected_list) do
+ [
+ 'db/migrate/20221003041800_foo_migrate.rb',
+ 'db/migrate/20221003041900_foo_migrate_two.rb',
+ 'db/migrate/20221003042000_add_name_to_widgets.rb',
+ 'spec/migrations/add_name_to_widgets_spec.rb',
+ 'spec/migrations/20221003041800_foo_migrate_spec.rb',
+ 'spec/migrations/foo_migrate_two_spec.rb',
+ 'db/schema_migrations/20221003041800',
+ 'db/schema_migrations/20221003041900',
+ 'db/schema_migrations/20221003042000',
+ 'db/schema_migrations/20221003042100',
+ 'db/schema_migrations/20221003042200',
+ 'db/post_migrate/20221003042100_post_migrate.rb',
+ 'spec/migrations/post_migrate_spec.rb',
+ 'ee/spec/migrations/add_enterprise_spec.rb',
+ 'db/migrate/20221003042200_add_enterprise.rb'
+ ]
+ end
+
+ describe "#files_to_delete" do
+ before do
+ allow(Dir).to receive(:glob).with(Rails.root.join('spec/migrations/*.rb')).and_return(spec_files)
+ allow(Dir).to receive(:glob).with(Rails.root.join('ee/spec/migrations/*.rb')).and_return(ee_spec_files)
+ end
+
+ let(:squasher) { described_class.new(git_output) }
+
+ it 'only deletes the files we\'re expecting' do
+ expect(squasher.files_to_delete).to match_array expected_list
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
index 7899c1588b2..6cac7abb703 100644
--- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
@@ -3,12 +3,27 @@
require 'spec_helper'
RSpec.describe 'cross-database foreign keys' do
- # Since we don't expect to have any cross-database foreign keys
- # this is empty. If we will have an entry like
- # `ci_daily_build_group_report_results.project_id`
- # should be added.
- let(:allowed_cross_database_foreign_keys) do
- %w[].freeze
+ # While we are building out Cells, we will be moving tables from gitlab_main schema
+ # to either gitlab_main_clusterwide schema or gitlab_main_cell schema.
+ # During this transition phase, cross database foreign keys need
+ # to be temporarily allowed to exist, until we can work on converting these columns to loose foreign keys.
+ # The issue corresponding to the loose foreign key conversion
+ # should be added as a comment along with the name of the column.
+ let!(:allowed_cross_database_foreign_keys) do
+ [
+ 'gitlab_subscriptions.hosted_plan_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422012
+ 'group_import_states.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421210
+ 'identities.saml_provider_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422010
+ 'project_authorizations.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422044
+ 'merge_requests.assignee_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
+ 'merge_requests.updated_by_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
+ 'merge_requests.merge_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
+ 'merge_requests.author_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
+ 'projects.creator_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421844
+ 'projects.marked_for_deletion_by_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421844
+ 'routes.namespace_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/420869
+ 'user_group_callouts.user_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/421287
+ ]
end
def foreign_keys_for(table_name)
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
index 399fcae2fa0..3650ca1d904 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin, :reestablished_active_record_base do
+RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin, :delete, :reestablished_active_record_base do
describe 'checking in a connection to the pool' do
let(:model) do
Class.new(ActiveRecord::Base) do
@@ -32,14 +32,29 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin, :r
let(:timer) { connection.force_disconnect_timer }
context 'when the timer is expired' do
- it 'disconnects from the database' do
+ before do
allow(timer).to receive(:expired?).and_return(true)
+ end
+ it 'disconnects from the database' do
expect(connection).to receive(:disconnect!).and_call_original
expect(timer).to receive(:reset!).and_call_original
connection.force_disconnect_if_old!
end
+
+ context 'when the connection has an open transaction' do
+ it 'does not disconnect from the database' do
+ connection.begin_transaction
+
+ expect(connection).not_to receive(:disconnect!)
+ expect(timer).not_to receive(:reset!)
+
+ connection.force_disconnect_if_old!
+
+ connection.rollback_transaction
+ end
+ end
end
context 'when the timer is not expired' do
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
index b5e08f58608..f325060e592 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
model: ApplicationRecord,
sql: "SELECT 1 FROM projects",
expectations: {
- gitlab_schemas: "gitlab_main",
+ gitlab_schemas: "gitlab_main_cell",
db_config_name: "main"
}
},
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
model: ApplicationRecord,
sql: "SELECT 1 FROM projects LEFT JOIN ci_builds ON ci_builds.project_id=projects.id",
expectations: {
- gitlab_schemas: "gitlab_ci,gitlab_main",
+ gitlab_schemas: "gitlab_ci,gitlab_main_cell",
db_config_name: "main"
}
},
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
model: ApplicationRecord,
sql: "SELECT 1 FROM ci_builds LEFT JOIN projects ON ci_builds.project_id=projects.id",
expectations: {
- gitlab_schemas: "gitlab_ci,gitlab_main",
+ gitlab_schemas: "gitlab_ci,gitlab_main_cell",
db_config_name: "main"
}
},
diff --git a/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb b/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
deleted file mode 100644
index 22ff66ff55e..00000000000
--- a/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::QueryAnalyzers::QueryRecorder, feature_category: :database, query_analyzers: false do
- # We keep only the QueryRecorder analyzer running
- around do |example|
- described_class.with_suppressed(false) do
- example.run
- end
- end
-
- context 'with query analyzer' do
- let(:log_path) { Rails.root.join(described_class::LOG_PATH) }
- let(:log_file) { described_class.log_file }
-
- after do
- ::Gitlab::Database::QueryAnalyzer.instance.end!([described_class])
- end
-
- shared_examples_for 'an enabled query recorder' do
- using RSpec::Parameterized::TableSyntax
-
- normalized_query = <<~SQL.strip.tr("\n", ' ')
- SELECT \\\\"projects\\\\".\\\\"id\\\\"
- FROM \\\\"projects\\\\"
- WHERE \\\\"projects\\\\".\\\\"namespace_id\\\\" = \\?
- AND \\\\"projects\\\\".\\\\"id\\\\" IN \\(\\?,\\?,\\?\\);
- SQL
-
- where(:list_parameter, :bind_parameters) do
- '$2, $3' | [1, 2, 3]
- '$2, $3, $4' | [1, 2, 3, 4]
- '$2 ,$3 ,$4 ,$5' | [1, 2, 3, 4, 5]
- '$2 , $3 , $4 , $5, $6' | [1, 2, 3, 4, 5, 6]
- '$2, $3 ,$4 , $5,$6,$7' | [1, 2, 3, 4, 5, 6, 7]
- '$2,$3,$4,$5,$6,$7,$8' | [1, 2, 3, 4, 5, 6, 7, 8]
- end
-
- with_them do
- before do
- allow(described_class).to receive(:analyze).and_call_original
- allow(FileUtils).to receive(:mkdir_p)
- .with(log_path)
- end
-
- it 'logs normalized queries to a file' do
- expect(File).to receive(:write)
- .with(log_file, /^{"normalized":"#{normalized_query}/, mode: 'a')
-
- expect do
- ApplicationRecord.connection.exec_query(<<~SQL.strip.tr("\n", ' '), 'SQL', bind_parameters)
- SELECT "projects"."id"
- FROM "projects"
- WHERE "projects"."namespace_id" = $1
- AND "projects"."id" IN (#{list_parameter});
- SQL
- end.not_to raise_error
- end
- end
- end
-
- context 'on default branch' do
- before do
- stub_env('CI_MERGE_REQUEST_LABELS', nil)
- stub_env('CI_DEFAULT_BRANCH', 'default_branch_name')
- stub_env('CI_COMMIT_REF_NAME', 'default_branch_name')
-
- # This is needed to be able to stub_env the CI variable
- ::Gitlab::Database::QueryAnalyzer.instance.begin!([described_class])
- end
-
- it_behaves_like 'an enabled query recorder'
- end
-
- context 'on database merge requests' do
- before do
- stub_env('CI_MERGE_REQUEST_LABELS', 'database')
-
- # This is needed to be able to stub_env the CI variable
- ::Gitlab::Database::QueryAnalyzer.instance.begin!([described_class])
- end
-
- it_behaves_like 'an enabled query recorder'
- end
- end
-
- describe '.log_file' do
- let(:folder) { 'query_recorder' }
- let(:extension) { 'ndjson' }
- let(:default_name) { 'rspec' }
- let(:job_name) { 'test-job-1' }
-
- subject { described_class.log_file.to_s }
-
- context 'when in CI' do
- before do
- stub_env('CI_JOB_NAME_SLUG', job_name)
- end
-
- it { is_expected.to include("#{folder}/#{job_name}.#{extension}") }
- it { is_expected.not_to include("#{folder}/#{default_name}.#{extension}") }
- end
-
- context 'when not in CI' do
- before do
- stub_env('CI_JOB_NAME_SLUG', nil)
- end
-
- it { is_expected.to include("#{folder}/#{default_name}.#{extension}") }
- it { is_expected.not_to include("#{folder}/#{job_name}.#{extension}") }
- end
- end
-end
diff --git a/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb
deleted file mode 100644
index fbaf8474f22..00000000000
--- a/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::SchemaValidation::SchemaInconsistency, type: :model, feature_category: :database do
- it { is_expected.to be_a ApplicationRecord }
-
- describe 'associations' do
- it { is_expected.to belong_to(:issue) }
- end
-
- describe "Validations" do
- it { is_expected.to validate_presence_of(:object_name) }
- it { is_expected.to validate_presence_of(:valitador_name) }
- it { is_expected.to validate_presence_of(:table_name) }
- it { is_expected.to validate_presence_of(:diff) }
- end
-
- describe 'scopes' do
- describe '.with_open_issues' do
- subject(:inconsistencies) { described_class.with_open_issues }
-
- let(:closed_issue) { create(:issue, :closed) }
- let(:open_issue) { create(:issue, :opened) }
-
- let!(:schema_inconsistency_with_issue_closed) do
- create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
- valitador_name: 'different_definition_indexes', issue: closed_issue)
- end
-
- let!(:schema_inconsistency_with_issue_opened) do
- create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
- valitador_name: 'different_definition_indexes', issue: open_issue)
- end
-
- it 'returns only schema inconsistencies with GitLab issues open' do
- expect(inconsistencies).to eq([schema_inconsistency_with_issue_opened])
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb b/spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb
index aa25590ed58..70352775fe5 100644
--- a/spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb
@@ -2,11 +2,12 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::TablesSortedByForeignKeys do
- let(:connection) { ApplicationRecord.connection }
+RSpec.describe Gitlab::Database::TablesSortedByForeignKeys, feature_category: :cell do
+ let(:connection) { Ci::ApplicationRecord.connection }
let(:tables) do
%w[_test_gitlab_main_items _test_gitlab_main_references _test_gitlab_partition_parent
- gitlab_partitions_dynamic._test_gitlab_partition_20220101]
+ gitlab_partitions_dynamic._test_gitlab_partition_20220101
+ gitlab_partitions_dynamic._test_gitlab_partition_20220102]
end
subject do
@@ -35,7 +36,18 @@ RSpec.describe Gitlab::Database::TablesSortedByForeignKeys do
PARTITION OF _test_gitlab_partition_parent
FOR VALUES FROM ('20220101') TO ('20220131');
+ CREATE TABLE gitlab_partitions_dynamic._test_gitlab_partition_20220102
+ PARTITION OF _test_gitlab_partition_parent
+ FOR VALUES FROM ('20220201') TO ('20220228');
+
ALTER TABLE _test_gitlab_partition_parent DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_partition_20220101;
+ ALTER TABLE _test_gitlab_partition_parent DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_partition_20220102;
+
+ /* For some reason FK is now created in gitlab_partitions_dynamic */
+ ALTER TABLE gitlab_partitions_dynamic._test_gitlab_partition_20220101
+ DROP CONSTRAINT fk_constrained_1;
+ ALTER TABLE gitlab_partitions_dynamic._test_gitlab_partition_20220101
+ ADD CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id);
SQL
connection.execute(statement)
end
@@ -47,6 +59,7 @@ RSpec.describe Gitlab::Database::TablesSortedByForeignKeys do
['_test_gitlab_main_references'],
['_test_gitlab_partition_parent'],
['gitlab_partitions_dynamic._test_gitlab_partition_20220101'],
+ ['gitlab_partitions_dynamic._test_gitlab_partition_20220102'],
['_test_gitlab_main_items']
])
end
@@ -62,6 +75,7 @@ RSpec.describe Gitlab::Database::TablesSortedByForeignKeys do
[
['_test_gitlab_partition_parent'],
['gitlab_partitions_dynamic._test_gitlab_partition_20220101'],
+ ['gitlab_partitions_dynamic._test_gitlab_partition_20220102'],
%w[_test_gitlab_main_items _test_gitlab_main_references]
])
end
diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb
index ef76c9b8da3..04bec50088d 100644
--- a/spec/lib/gitlab/database/tables_truncate_spec.rb
+++ b/spec/lib/gitlab/database/tables_truncate_spec.rb
@@ -155,6 +155,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
"_test_gitlab_shared_items" => :gitlab_shared,
"_test_gitlab_geo_items" => :gitlab_geo,
"detached_partitions" => :gitlab_shared,
+ "postgres_foreign_keys" => :gitlab_shared,
"postgres_partitions" => :gitlab_shared
}
)
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index d51319d462b..0d8fa4dad6d 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -344,6 +344,33 @@ RSpec.describe Gitlab::Database, feature_category: :database do
end
end
+ describe '.db_config_share_with' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:db_config_name, :db_config_attributes, :expected_db_config_share_with) do
+ 'main' | { database_tasks: true } | nil
+ 'main' | { database_tasks: false } | nil
+ 'ci' | { database_tasks: true } | nil
+ 'ci' | { database_tasks: false } | 'main'
+ 'main_clusterwide' | { database_tasks: true } | nil
+ 'main_clusterwide' | { database_tasks: false } | 'main'
+ '_test_unknown' | { database_tasks: true } | nil
+ '_test_unknown' | { database_tasks: false } | 'main'
+ end
+
+ with_them do
+ it 'returns the expected result' do
+ db_config = ActiveRecord::DatabaseConfigurations::HashConfig.new(
+ Rails.env,
+ db_config_name,
+ db_config_attributes
+ )
+
+ expect(described_class.db_config_share_with(db_config)).to eq(expected_db_config_share_with)
+ end
+ end
+ end
+
describe '.gitlab_schemas_for_connection' do
it 'does return a valid schema depending on a base model used', :request_store do
expect(described_class.gitlab_schemas_for_connection(Project.connection)).to include(:gitlab_main, :gitlab_shared)
diff --git a/spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb b/spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb
index 8068fa30367..7f6b3b86799 100644
--- a/spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Gitlab::DependencyLinker::CargoTomlLinker do
# Default dependencies format with fixed version and version range
chrono = "0.4.7"
xml-rs = ">=0.8.0"
+ indicatif = { version = "0.17.5", features = ["rayon"] }
[dependencies.memchr]
# Specific dependency with optional info
@@ -45,6 +46,24 @@ RSpec.describe Gitlab::DependencyLinker::CargoTomlLinker do
[build-dependencies]
# Build dependency with version wildcard
thread_local = "0.3.*"
+
+ # Dependencies with a custom location should be ignored
+ path-ignored = { path = "local" }
+ git-ignored = { git = "https://example.com/.git" }
+ registry-ignored = { registry = "custom-registry" }
+
+ [build-dependencies.bracked-ignored]
+ path = "local"
+
+ # Unless they specify a version and no registry
+ [build-dependencies.rand]
+ version = "0.8.5"
+ path = "../rand"
+
+ [build-dependencies.custom-rand]
+ version = "0.8.5"
+ path = "../custom-rand"
+ registry = "custom-registry"
CONTENT
end
@@ -62,8 +81,27 @@ RSpec.describe Gitlab::DependencyLinker::CargoTomlLinker do
expect(subject).to include(link('thread_local', 'https://crates.io/crates/thread_local'))
end
+ it 'links dependencies that use an inline table' do
+ expect(subject).to include(link('indicatif', 'https://crates.io/crates/indicatif'))
+ end
+
+ it 'links dependencies that include a version but no registry' do
+ expect(subject).to include(link('rand', 'https://crates.io/crates/rand'))
+ end
+
it 'does not contain metadata identified as package' do
expect(subject).not_to include(link('version', 'https://crates.io/crates/version'))
end
+
+ it 'does not link dependencies without a version' do
+ expect(subject).not_to include(link('path-ignored', 'https://crates.io/crates/path-ignored'))
+ expect(subject).not_to include(link('git-ignored', 'https://crates.io/crates/git-ignored'))
+ expect(subject).not_to include(link('bracked-ignored', 'https://crates.io/crates/bracked-ignored'))
+ end
+
+ it 'does not link dependencies with a custom registry' do
+ expect(subject).not_to include(link('registry-ignored', 'https://crates.io/crates/registry-ignored'))
+ expect(subject).not_to include(link('custom-rand', 'https://crates.io/crates/custom-rand'))
+ end
end
end
diff --git a/spec/lib/gitlab/exclusive_lease_spec.rb b/spec/lib/gitlab/exclusive_lease_spec.rb
index 968d26e1c38..c8325c5b359 100644
--- a/spec/lib/gitlab/exclusive_lease_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::ExclusiveLease, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::ExclusiveLease, :request_store, :clean_gitlab_redis_shared_state,
+ :clean_gitlab_redis_cluster_shared_state, feature_category: :shared do
let(:unique_key) { SecureRandom.hex(10) }
describe '#try_obtain' do
@@ -19,6 +20,67 @@ RSpec.describe Gitlab::ExclusiveLease, :clean_gitlab_redis_shared_state do
sleep(2 * timeout) # lease should have expired now
expect(lease.try_obtain).to be_present
end
+
+ context 'when migrating across stores' do
+ let(:lease) { described_class.new(unique_key, timeout: 3600) }
+
+ before do
+ stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: false)
+ allow(lease).to receive(:same_store).and_return(false)
+ end
+
+ it 'acquires 2 locks' do
+ # stub first SETNX
+ Gitlab::Redis::SharedState.with { |r| expect(r).to receive(:set).and_return(true) }
+ Gitlab::Redis::ClusterSharedState.with { |r| expect(r).to receive(:set).and_call_original }
+
+ expect(lease.try_obtain).to be_truthy
+ end
+
+ it 'rollback first lock if second lock is not acquired' do
+ Gitlab::Redis::ClusterSharedState.with do |r|
+ expect(r).to receive(:set).and_return(false)
+ expect(r).to receive(:eval).and_call_original
+ end
+
+ Gitlab::Redis::SharedState.with do |r|
+ expect(r).to receive(:set).and_call_original
+ expect(r).to receive(:eval).and_call_original
+ end
+
+ expect(lease.try_obtain).to be_falsey
+ end
+ end
+
+ context 'when cutting over to ClusterSharedState' do
+ context 'when lock is not acquired' do
+ it 'waits for existing holder to yield the lock' do
+ Gitlab::Redis::ClusterSharedState.with { |r| expect(r).to receive(:set).and_call_original }
+ Gitlab::Redis::SharedState.with { |r| expect(r).not_to receive(:set) }
+
+ lease = described_class.new(unique_key, timeout: 3600)
+ expect(lease.try_obtain).to be_truthy
+ end
+ end
+
+ context 'when lock is still acquired' do
+ let(:lease) { described_class.new(unique_key, timeout: 3600) }
+
+ before do
+ # simulates cutover where some application's feature-flag has not updated
+ stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: false)
+ lease.try_obtain
+ stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: true)
+ end
+
+ it 'waits for existing holder to yield the lock' do
+ Gitlab::Redis::ClusterSharedState.with { |r| expect(r).not_to receive(:set) }
+ Gitlab::Redis::SharedState.with { |r| expect(r).not_to receive(:set) }
+
+ expect(lease.try_obtain).to be_falsey
+ end
+ end
+ end
end
describe '.redis_shared_state_key' do
@@ -42,131 +104,159 @@ RSpec.describe Gitlab::ExclusiveLease, :clean_gitlab_redis_shared_state do
end
end
- describe '#renew' do
- it 'returns true when we have the existing lease' do
- lease = described_class.new(unique_key, timeout: 3600)
- expect(lease.try_obtain).to be_present
- expect(lease.renew).to be_truthy
- end
+ shared_examples 'write operations' do
+ describe '#renew' do
+ it 'returns true when we have the existing lease' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ expect(lease.try_obtain).to be_present
+ expect(lease.renew).to be_truthy
+ end
- it 'returns false when we dont have a lease' do
- lease = described_class.new(unique_key, timeout: 3600)
- expect(lease.renew).to be_falsey
+ it 'returns false when we dont have a lease' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ expect(lease.renew).to be_falsey
+ end
end
- end
- describe '#exists?' do
- it 'returns true for an existing lease' do
- lease = described_class.new(unique_key, timeout: 3600)
- lease.try_obtain
+ describe 'cancellation' do
+ def new_lease(key)
+ described_class.new(key, timeout: 3600)
+ end
- expect(lease.exists?).to eq(true)
- end
+ shared_examples 'cancelling a lease' do
+ let(:lease) { new_lease(unique_key) }
- it 'returns false for a lease that does not exist' do
- lease = described_class.new(unique_key, timeout: 3600)
+ it 'releases the held lease' do
+ uuid = lease.try_obtain
+ expect(uuid).to be_present
+ expect(new_lease(unique_key).try_obtain).to eq(false)
- expect(lease.exists?).to eq(false)
- end
- end
+ cancel_lease(uuid)
- describe '.get_uuid' do
- it 'gets the uuid if lease with the key associated exists' do
- uuid = described_class.new(unique_key, timeout: 3600).try_obtain
+ expect(new_lease(unique_key).try_obtain).to be_present
+ end
+ end
- expect(described_class.get_uuid(unique_key)).to eq(uuid)
- end
+ describe '.cancel' do
+ def cancel_lease(uuid)
+ described_class.cancel(release_key, uuid)
+ end
- it 'returns false if the lease does not exist' do
- expect(described_class.get_uuid(unique_key)).to be false
- end
- end
+ context 'when called with the unprefixed key' do
+ it_behaves_like 'cancelling a lease' do
+ let(:release_key) { unique_key }
+ end
+ end
- describe 'cancellation' do
- def new_lease(key)
- described_class.new(key, timeout: 3600)
- end
+ context 'when called with the prefixed key' do
+ it_behaves_like 'cancelling a lease' do
+ let(:release_key) { described_class.redis_shared_state_key(unique_key) }
+ end
+ end
- shared_examples 'cancelling a lease' do
- let(:lease) { new_lease(unique_key) }
+ it 'does not raise errors when given a nil key' do
+ expect { described_class.cancel(nil, nil) }.not_to raise_error
+ end
+ end
- it 'releases the held lease' do
- uuid = lease.try_obtain
- expect(uuid).to be_present
- expect(new_lease(unique_key).try_obtain).to eq(false)
+ describe '#cancel' do
+ def cancel_lease(_uuid)
+ lease.cancel
+ end
- cancel_lease(uuid)
+ it_behaves_like 'cancelling a lease'
- expect(new_lease(unique_key).try_obtain).to be_present
- end
- end
+ it 'is safe to call even if the lease was never obtained' do
+ lease = new_lease(unique_key)
- describe '.cancel' do
- def cancel_lease(uuid)
- described_class.cancel(release_key, uuid)
- end
+ lease.cancel
- context 'when called with the unprefixed key' do
- it_behaves_like 'cancelling a lease' do
- let(:release_key) { unique_key }
+ expect(new_lease(unique_key).try_obtain).to be_present
end
end
+ end
- context 'when called with the prefixed key' do
- it_behaves_like 'cancelling a lease' do
- let(:release_key) { described_class.redis_shared_state_key(unique_key) }
- end
- end
+ describe '.reset_all!' do
+ it 'removes all existing lease keys from redis' do
+ uuid = described_class.new(unique_key, timeout: 3600).try_obtain
- it 'does not raise errors when given a nil key' do
- expect { described_class.cancel(nil, nil) }.not_to raise_error
+ expect(described_class.get_uuid(unique_key)).to eq(uuid)
+
+ described_class.reset_all!
+
+ expect(described_class.get_uuid(unique_key)).to be_falsey
end
end
+ end
- describe '#cancel' do
- def cancel_lease(_uuid)
- lease.cancel
+ shared_examples 'read operations' do
+ describe '#exists?' do
+ it 'returns true for an existing lease' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ lease.try_obtain
+
+ expect(lease.exists?).to eq(true)
end
- it_behaves_like 'cancelling a lease'
+ it 'returns false for a lease that does not exist' do
+ lease = described_class.new(unique_key, timeout: 3600)
+
+ expect(lease.exists?).to eq(false)
+ end
+ end
- it 'is safe to call even if the lease was never obtained' do
- lease = new_lease(unique_key)
+ describe '.get_uuid' do
+ it 'gets the uuid if lease with the key associated exists' do
+ uuid = described_class.new(unique_key, timeout: 3600).try_obtain
- lease.cancel
+ expect(described_class.get_uuid(unique_key)).to eq(uuid)
+ end
- expect(new_lease(unique_key).try_obtain).to be_present
+ it 'returns false if the lease does not exist' do
+ expect(described_class.get_uuid(unique_key)).to be false
end
end
- end
- describe '#ttl' do
- it 'returns the TTL of the Redis key' do
- lease = described_class.new('kittens', timeout: 100)
- lease.try_obtain
+ describe '#ttl' do
+ it 'returns the TTL of the Redis key' do
+ lease = described_class.new('kittens', timeout: 100)
+ lease.try_obtain
- expect(lease.ttl <= 100).to eq(true)
- end
+ expect(lease.ttl <= 100).to eq(true)
+ end
- it 'returns nil when the lease does not exist' do
- lease = described_class.new('kittens', timeout: 10)
+ it 'returns nil when the lease does not exist' do
+ lease = described_class.new('kittens', timeout: 10)
- expect(lease.ttl).to be_nil
+ expect(lease.ttl).to be_nil
+ end
end
end
- describe '.reset_all!' do
- it 'removes all existing lease keys from redis' do
- uuid = described_class.new(unique_key, timeout: 3600).try_obtain
-
- expect(described_class.get_uuid(unique_key)).to eq(uuid)
+ context 'when migrating across stores' do
+ before do
+ stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: false)
+ end
- described_class.reset_all!
+ it_behaves_like 'read operations'
+ it_behaves_like 'write operations'
+ end
- expect(described_class.get_uuid(unique_key)).to be_falsey
+ context 'when feature flags are all disabled' do
+ before do
+ stub_feature_flags(
+ use_cluster_shared_state_for_exclusive_lease: false,
+ enable_exclusive_lease_double_lock_rw: false
+ )
end
+
+ it_behaves_like 'read operations'
+ it_behaves_like 'write operations'
end
+ it_behaves_like 'read operations'
+ it_behaves_like 'write operations'
+
describe '.throttle' do
it 'prevents repeated execution of the block' do
number = 0
@@ -244,4 +334,74 @@ RSpec.describe Gitlab::ExclusiveLease, :clean_gitlab_redis_shared_state do
described_class.throttle(1, count: 48, period: 1.day) {}
end
end
+
+ describe 'transitions between feature-flag toggles' do
+ shared_examples 'retains behaviours across transitions' do |flag|
+ it 'retains read behaviour' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ uuid = lease.try_obtain
+
+ expect(lease.ttl).not_to eq(nil)
+ expect(lease.exists?).to be_truthy
+ expect(described_class.get_uuid(unique_key)).to eq(uuid)
+
+ # simulates transition
+ stub_feature_flags({ flag => true })
+ Gitlab::SafeRequestStore.clear!
+
+ expect(lease.ttl).not_to eq(nil)
+ expect(lease.exists?).to be_truthy
+ expect(described_class.get_uuid(unique_key)).to eq(uuid)
+ end
+
+ it 'retains renew behaviour' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ lease.try_obtain
+
+ expect(lease.renew).to be_truthy
+
+ # simulates transition
+ stub_feature_flags({ flag => true })
+ Gitlab::SafeRequestStore.clear!
+
+ expect(lease.renew).to be_truthy
+ end
+
+ it 'retains cancel behaviour' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ uuid = lease.try_obtain
+ lease.cancel
+
+ # proves successful cancellation
+ expect(lease.try_obtain).to eq(uuid)
+
+ # simulates transition
+ stub_feature_flags({ flag => true })
+ Gitlab::SafeRequestStore.clear!
+
+ expect(lease.try_obtain).to be_falsey
+ lease.cancel
+ expect(lease.try_obtain).to eq(uuid)
+ end
+ end
+
+ context 'when enabling enable_exclusive_lease_double_lock_rw' do
+ before do
+ stub_feature_flags(
+ enable_exclusive_lease_double_lock_rw: false,
+ use_cluster_shared_state_for_exclusive_lease: false
+ )
+ end
+
+ it_behaves_like 'retains behaviours across transitions', :enable_exclusive_lease_double_lock_rw
+ end
+
+ context 'when enabling use_cluster_shared_state_for_exclusive_lease' do
+ before do
+ stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: false)
+ end
+
+ it_behaves_like 'retains behaviours across transitions', :use_cluster_shared_state_for_exclusive_lease
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index 676ea2663d2..d21ac36bf34 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -13,13 +13,17 @@ RSpec.describe Gitlab::Git::Blame do
let(:result) do
[].tap do |data|
- blame.each do |commit, line, previous_path|
- data << { commit: commit, line: line, previous_path: previous_path }
+ blame.each do |commit, line, previous_path, span|
+ data << { commit: commit, line: line, previous_path: previous_path, span: span }
end
end
end
describe 'blaming a file' do
+ it 'has the right commit span' do
+ expect(result.first[:span]).to eq(95)
+ end
+
it 'has the right number of lines' do
expect(result.size).to eq(95)
expect(result.first[:commit]).to be_kind_of(Gitlab::Git::Commit)
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index dd9f77f0211..5c4be1003c3 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -3,15 +3,16 @@
require "spec_helper"
RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
- let(:repository) { create(:project, :repository).repository.raw }
+ let_it_be(:repository) { create(:project, :repository).repository.raw }
let(:commit) { described_class.find(repository, SeedRepo::Commit::ID) }
describe "Commit info from gitaly commit" do
let(:subject) { (+"My commit").force_encoding('ASCII-8BIT') }
let(:body) { subject + (+"My body").force_encoding('ASCII-8BIT') }
let(:body_size) { body.length }
- let(:gitaly_commit) { build(:gitaly_commit, subject: subject, body: body, body_size: body_size) }
+ let(:gitaly_commit) { build(:gitaly_commit, subject: subject, body: body, body_size: body_size, tree_id: tree_id) }
let(:id) { gitaly_commit.id }
+ let(:tree_id) { 'd7f32d821c9cc7b1a9166ca7c4ba95b5c2d0d000' }
let(:committer) { gitaly_commit.committer }
let(:author) { gitaly_commit.author }
let(:commit) { described_class.new(repository, gitaly_commit) }
@@ -26,6 +27,7 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
it { expect(commit.committer_name).to eq(committer.name) }
it { expect(commit.committer_email).to eq(committer.email) }
it { expect(commit.parent_ids).to eq(gitaly_commit.parent_ids) }
+ it { expect(commit.tree_id).to eq(tree_id) }
context 'non-UTC dates' do
let(:seconds) { Time.now.to_i }
@@ -577,6 +579,14 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
it { is_expected.to eq(sample_commit_hash[:message]) }
end
+
+ describe '#tree_id' do
+ subject { super().tree_id }
+
+ it "doesn't return tree id for non-Gitaly commits" do
+ is_expected.to be_nil
+ end
+ end
end
describe '#stats' do
@@ -681,6 +691,100 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
end
end
+ describe 'SHA patterns' do
+ shared_examples 'a SHA-matching pattern' do
+ let(:expected_match) { sha }
+
+ shared_examples 'a match' do
+ it 'matches the pattern' do
+ expect(value).to match(pattern)
+ expect(pattern.match(value).to_a).to eq([expected_match])
+ end
+ end
+
+ shared_examples 'no match' do
+ it 'does not match the pattern' do
+ expect(value).not_to match(pattern)
+ end
+ end
+
+ shared_examples 'a SHA pattern' do
+ context "with too short value" do
+ let(:value) { sha[0, described_class::MIN_SHA_LENGTH - 1] }
+
+ it_behaves_like 'no match'
+ end
+
+ context "with full length" do
+ let(:value) { sha }
+
+ it_behaves_like 'a match'
+ end
+
+ context "with exceeeding length" do
+ let(:value) { sha + sha }
+
+ # This case is not exactly pretty for SHA1 as we would still match the full SHA256 length. It's arguable what
+ # the correct behaviour would be, but without starting to distinguish SHA1 and SHA256 hashes this is the best
+ # we can do.
+ let(:expected_match) { (sha + sha)[0, described_class::MAX_SHA_LENGTH] }
+
+ it_behaves_like 'a match'
+ end
+
+ context "with embedded SHA" do
+ let(:value) { "xxx#{sha}xxx" }
+
+ it_behaves_like 'a match'
+ end
+ end
+
+ context 'abbreviated SHA pattern' do
+ let(:pattern) { described_class::SHA_PATTERN }
+
+ context "with minimum length" do
+ let(:value) { sha[0, described_class::MIN_SHA_LENGTH] }
+ let(:expected_match) { value }
+
+ it_behaves_like 'a match'
+ end
+
+ context "with medium length" do
+ let(:value) { sha[0, described_class::MIN_SHA_LENGTH + 20] }
+ let(:expected_match) { value }
+
+ it_behaves_like 'a match'
+ end
+
+ it_behaves_like 'a SHA pattern'
+ end
+
+ context 'full SHA pattern' do
+ let(:pattern) { described_class::FULL_SHA_PATTERN }
+
+ context 'with abbreviated length' do
+ let(:value) { sha[0, described_class::SHA1_LENGTH - 1] }
+
+ it_behaves_like 'no match'
+ end
+
+ it_behaves_like 'a SHA pattern'
+ end
+ end
+
+ context 'SHA1' do
+ let(:sha) { "5716ca5987cbf97d6bb54920bea6adde242d87e6" }
+
+ it_behaves_like 'a SHA-matching pattern'
+ end
+
+ context 'SHA256' do
+ let(:sha) { "a52e146ac2ab2d0efbb768ab8ebd1e98a6055764c81fe424fbae4522f5b4cb92" }
+
+ it_behaves_like 'a SHA-matching pattern'
+ end
+ end
+
def sample_commit_hash
{
author_email: "dmitriy.zaporozhets@gmail.com",
diff --git a/spec/lib/gitlab/git/diff_tree_spec.rb b/spec/lib/gitlab/git/diff_tree_spec.rb
new file mode 100644
index 00000000000..614a8f03dd8
--- /dev/null
+++ b/spec/lib/gitlab/git/diff_tree_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Git::DiffTree, feature_category: :source_code_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository }
+
+ describe '.from_commit' do
+ subject(:diff_tree) { described_class.from_commit(commit) }
+
+ context 'when commit is an initial commit' do
+ let(:commit) { repository.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') }
+
+ it 'returns the expected diff tree object' do
+ expect(diff_tree.left_tree_id).to eq(Gitlab::Git::EMPTY_TREE_ID)
+ expect(diff_tree.right_tree_id).to eq(commit.tree_id)
+ end
+ end
+
+ context 'when commit is a regular commit' do
+ let(:commit) { repository.commit('60ecb67744cb56576c30214ff52294f8ce2def98') }
+
+ it 'returns the expected diff tree object' do
+ expect(diff_tree.left_tree_id).to eq(commit.parent.tree_id)
+ expect(diff_tree.right_tree_id).to eq(commit.tree_id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/object_pool_spec.rb b/spec/lib/gitlab/git/object_pool_spec.rb
index b158c7227d4..f65ed319462 100644
--- a/spec/lib/gitlab/git/object_pool_spec.rb
+++ b/spec/lib/gitlab/git/object_pool_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Git::ObjectPool do
+RSpec.describe Gitlab::Git::ObjectPool, feature_category: :source_code_management do
let(:pool_repository) { create(:pool_repository) }
let(:source_repository) { pool_repository.source_project.repository }
@@ -15,6 +15,29 @@ RSpec.describe Gitlab::Git::ObjectPool do
end
end
+ describe '.init_from_gitaly' do
+ let(:gitaly_object_pool) { Gitaly::ObjectPool.new(repository: repository) }
+ let(:repository) do
+ Gitaly::Repository.new(
+ storage_name: 'default',
+ relative_path: '@pools/ef/2d/ef2d127d',
+ gl_project_path: ''
+ )
+ end
+
+ it 'returns an object pool object' do
+ object_pool = described_class.init_from_gitaly(gitaly_object_pool, source_repository)
+
+ expect(object_pool).to be_kind_of(described_class)
+ expect(object_pool).to have_attributes(
+ storage: repository.storage_name,
+ relative_path: repository.relative_path,
+ source_repository: source_repository,
+ gl_project_path: repository.gl_project_path
+ )
+ end
+ end
+
describe '#create' do
before do
subject.create # rubocop:disable Rails/SaveBang
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 9ce8a674146..e27b97ea0e6 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -309,6 +309,32 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
+ describe '#recent_objects_size' do
+ subject(:recent_objects_size) { repository.recent_objects_size }
+
+ it { is_expected.to be_a(Float) }
+
+ it 'uses repository_info for size' do
+ expect(repository.gitaly_repository_client).to receive(:repository_info).and_call_original
+
+ recent_objects_size
+ end
+
+ it 'returns the recent objects size' do
+ objects_response = Gitaly::RepositoryInfoResponse::ObjectsInfo.new(recent_size: 5.megabytes)
+
+ allow(repository.gitaly_repository_client).to receive(:repository_info).and_return(
+ Gitaly::RepositoryInfoResponse.new(objects: objects_response)
+ )
+
+ expect(recent_objects_size).to eq 5.0
+ end
+
+ it_behaves_like 'wrapping gRPC errors', Gitaly::RepositoryInfoResponse::ObjectsInfo, :recent_size do
+ subject { recent_objects_size }
+ end
+ end
+
describe '#to_s' do
subject { repository.to_s }
@@ -1675,9 +1701,13 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
describe '#find_changed_paths' do
- let(:commit_1) { TestEnv::BRANCH_SHA['with-executables'] }
- let(:commit_2) { TestEnv::BRANCH_SHA['master'] }
- let(:commit_3) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' }
+ let_it_be(:commit_1) { repository.commit(TestEnv::BRANCH_SHA['with-executables']) }
+ let_it_be(:commit_2) { repository.commit(TestEnv::BRANCH_SHA['master']) }
+ let_it_be(:commit_3) { repository.commit('6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
+
+ let_it_be(:initial_commit) { repository.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') }
+ let_it_be(:diff_tree) { Gitlab::Git::DiffTree.from_commit(initial_commit) }
+
let(:commit_1_files) do
[Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/executables/ls")]
end
@@ -1693,18 +1723,26 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
]
end
+ let(:diff_tree_files) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: ".gitignore"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "LICENSE"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "README.md")
+ ]
+ end
+
it 'returns a list of paths' do
- collection = repository.find_changed_paths([commit_1, commit_2, commit_3])
+ collection = repository.find_changed_paths([commit_1, commit_2, commit_3, diff_tree])
expect(collection).to be_a(Enumerable)
- expect(collection.as_json).to eq((commit_1_files + commit_2_files + commit_3_files).as_json)
+ expect(collection.as_json).to eq((commit_1_files + commit_2_files + commit_3_files + diff_tree_files).as_json)
end
- it 'returns no paths when SHAs are invalid' do
+ it 'returns only paths with valid SHAs' do
collection = repository.find_changed_paths(['invalid', commit_1])
expect(collection).to be_a(Enumerable)
- expect(collection.to_a).to be_empty
+ expect(collection.as_json).to eq(commit_1_files.as_json)
end
it 'returns a list of paths even when containing a blank ref' do
@@ -2535,6 +2573,12 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
+ describe '#get_patch_id' do
+ it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::CommitService, :get_patch_id do
+ subject { repository.get_patch_id('HEAD~', 'HEAD') }
+ end
+ end
+
def create_remote_branch(remote_name, branch_name, source_branch_name)
source_branch = repository.find_branch(source_branch_name)
repository.write_ref("refs/remotes/#{remote_name}/#{branch_name}", source_branch.dereferenced_target.sha)
@@ -2723,4 +2767,39 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
expect(repository.check_objects_exist(single_sha)).to eq({ single_sha => true })
end
end
+
+ describe '#list_all_blobs' do
+ subject { repository.list_all_blobs(expected_params) }
+
+ let(:expected_params) { { bytes_limit: 0, dynamic_timeout: nil, ignore_alternate_object_directories: true } }
+
+ it 'delegates to BlobService' do
+ expect(repository.gitaly_blob_client).to receive(:list_all_blobs).with(expected_params)
+ subject
+ end
+ end
+
+ describe '#object_pool' do
+ subject { repository.object_pool }
+
+ context 'without object pool' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when pool repository exists' do
+ let!(:pool) { create(:pool_repository, :ready, source_project: project) }
+
+ it { is_expected.to be_nil }
+
+ context 'when repository is linked to the pool repository' do
+ before do
+ pool.link_repository(pool.source_project.repository)
+ end
+
+ it 'returns an object pool for the repository' do
+ is_expected.to be_kind_of(Gitaly::ObjectPool)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index c5b44b260c6..d320b9c4091 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitaly do
let(:feature_flag_name) { wrapper.rugged_feature_keys.first }
let(:temp_gitaly_metadata_file) { create_temporary_gitaly_metadata_file }
- before(:all) do
+ before_all do
create_gitaly_metadata_file
end
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 4a20e0b1156..84ab8376fe1 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -9,13 +9,14 @@ RSpec.describe Gitlab::Git::Tree do
let(:repository) { project.repository.raw }
shared_examples 'repo' do
- subject(:tree) { Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, pagination_params) }
+ subject(:tree) { Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, pagination_params) }
let(:sha) { SeedRepo::Commit::ID }
let(:path) { nil }
let(:recursive) { false }
let(:pagination_params) { nil }
let(:skip_flat_paths) { false }
+ let(:rescue_not_found) { true }
let(:entries) { tree.first }
let(:cursor) { tree.second }
@@ -30,8 +31,14 @@ RSpec.describe Gitlab::Git::Tree do
context 'with an invalid ref' do
let(:sha) { 'foobar-does-not-exist' }
- it { expect(entries).to eq([]) }
- it { expect(cursor).to be_nil }
+ context 'when handle_structured_gitaly_errors feature is disabled' do
+ before do
+ stub_feature_flags(handle_structured_gitaly_errors: false)
+ end
+
+ it { expect(entries).to eq([]) }
+ it { expect(cursor).to be_nil }
+ end
end
context 'when path is provided' do
@@ -162,11 +169,23 @@ RSpec.describe Gitlab::Git::Tree do
end
context 'and invalid reference is used' do
- it 'returns no entries and nil cursor' do
+ before do
allow(repository.gitaly_commit_client).to receive(:tree_entries).and_raise(Gitlab::Git::Index::IndexError)
+ end
+
+ context 'when rescue_not_found is set to false' do
+ let(:rescue_not_found) { false }
- expect(entries.count).to eq(0)
- expect(cursor).to be_nil
+ it 'raises an IndexError' do
+ expect { entries }.to raise_error(Gitlab::Git::Index::IndexError)
+ end
+ end
+
+ context 'when rescue_not_found is set to true' do
+ it 'returns no entries and nil cursor' do
+ expect(entries.count).to eq(0)
+ expect(cursor).to be_nil
+ end
end
end
end
@@ -196,7 +215,7 @@ RSpec.describe Gitlab::Git::Tree do
let(:entries_count) { entries.count }
it 'returns all entries without a cursor' do
- result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, { limit: entries_count, page_token: nil })
+ result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, { limit: entries_count, page_token: nil })
expect(cursor).to be_nil
expect(result.entries.count).to eq(entries_count)
@@ -225,7 +244,7 @@ RSpec.describe Gitlab::Git::Tree do
let(:entries_count) { entries.count }
it 'returns all entries' do
- result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, { limit: -1, page_token: nil })
+ result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, { limit: -1, page_token: nil })
expect(result.count).to eq(entries_count)
expect(cursor).to be_nil
@@ -236,7 +255,7 @@ RSpec.describe Gitlab::Git::Tree do
let(:token) { entries.second.id }
it 'returns all entries after token' do
- result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, { limit: -1, page_token: token })
+ result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, { limit: -1, page_token: token })
expect(result.count).to eq(entries.count - 2)
expect(cursor).to be_nil
@@ -268,7 +287,7 @@ RSpec.describe Gitlab::Git::Tree do
expected_entries = entries
loop do
- result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, { limit: 5, page_token: token })
+ result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, { limit: 5, page_token: token })
collected_entries += result.entries
token = cursor&.next_cursor
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index becf97bb24e..9ba021e838e 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -346,7 +346,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
expect(snippet.repository_size_checker).to receive(:above_size_limit?).and_return(false)
expect(snippet.repository_size_checker)
.to receive(:changes_will_exceed_size_limit?)
- .with(change_size)
+ .with(change_size, nil)
.and_return(false)
expect { push_access_check }.not_to raise_error
@@ -360,7 +360,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
expect(snippet.repository_size_checker).to receive(:above_size_limit?).and_return(false)
expect(snippet.repository_size_checker)
.to receive(:changes_will_exceed_size_limit?)
- .with(change_size)
+ .with(change_size, nil)
.and_return(true)
expect do
diff --git a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
index d02b4492216..ee76811fea5 100644
--- a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
@@ -209,4 +209,23 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
end
end
end
+
+ describe '#list_all_blobs' do
+ subject { client.list_all_blobs(**expected_params) }
+
+ let(:expected_params) { { limit: 0, bytes_limit: 0 } }
+
+ before do
+ ::Gitlab::GitalyClient.clear_stubs!
+ end
+
+ it 'sends a list all blobs message' do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service).to receive(:list_all_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ end
+
+ subject
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index fd66efe12c8..2ee9d85c723 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -192,7 +192,9 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
Gitaly::FindChangedPathsRequest.new(repository: repository_message, requests: requests, merge_commit_diff_mode: merge_commit_diff_mode)
end
- subject { described_class.new(repository).find_changed_paths(commits, merge_commit_diff_mode: merge_commit_diff_mode).as_json }
+ let(:treeish_objects) { repository.commits_by(oids: commits) }
+
+ subject { described_class.new(repository).find_changed_paths(treeish_objects, merge_commit_diff_mode: merge_commit_diff_mode).as_json }
before do
allow(Gitaly::FindChangedPathsRequest).to receive(:new).and_call_original
@@ -334,6 +336,40 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
include_examples 'uses requests format'
end
end
+
+ context 'when all requested objects are invalid' do
+ it 'does not send RPC request' do
+ expect_any_instance_of(Gitaly::DiffService::Stub).not_to receive(:find_changed_paths)
+
+ returned_value = described_class.new(repository).find_changed_paths(%w[wrong values])
+
+ expect(returned_value).to eq([])
+ end
+ end
+
+ context 'when commit has an empty SHA' do
+ let(:empty_commit) { build(:commit, project: project, sha: '0000000000000000000000000000000000000000') }
+
+ it 'does not send RPC request' do
+ expect_any_instance_of(Gitaly::DiffService::Stub).not_to receive(:find_changed_paths)
+
+ returned_value = described_class.new(repository).find_changed_paths([empty_commit])
+
+ expect(returned_value).to eq([])
+ end
+ end
+
+ context 'when commit sha is not set' do
+ let(:empty_commit) { build(:commit, project: project, sha: nil) }
+
+ it 'does not send RPC request' do
+ expect_any_instance_of(Gitaly::DiffService::Stub).not_to receive(:find_changed_paths)
+
+ returned_value = described_class.new(repository).find_changed_paths([empty_commit])
+
+ expect(returned_value).to eq([])
+ end
+ end
end
describe '#tree_entries' do
@@ -1072,4 +1108,22 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
expect(signatures[large_signed_text][:signed_text].size).to eq(4971878)
end
end
+
+ describe '#get_patch_id' do
+ it 'returns patch_id of given revisions' do
+ expect(client.get_patch_id('HEAD~', 'HEAD')).to eq('45435e5d7b339dd76d939508c7687701d0c17fff')
+ end
+
+ context 'when one of the param is invalid' do
+ it 'raises a GRPC::InvalidArgument error' do
+ expect { client.get_patch_id('HEAD', nil) }.to raise_error(GRPC::InvalidArgument)
+ end
+ end
+
+ context 'when two revisions are the same' do
+ it 'raises a GRPC::FailedPrecondition error' do
+ expect { client.get_patch_id('HEAD', 'HEAD') }.to raise_error(GRPC::FailedPrecondition)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
index 89a41ae71f3..bdc16f16e66 100644
--- a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
@@ -14,6 +14,32 @@ RSpec.describe Gitlab::GitalyClient::ConflictsService do
described_class.new(target_repository, our_commit_oid, their_commit_oid)
end
+ describe '#conflicts' do
+ subject(:conflicts) { client.conflicts? }
+
+ context "with the `skip_conflict_files_in_gitaly` feature flag on" do
+ it 'calls list_conflict_files with `skip_content: true`' do
+ expect_any_instance_of(described_class).to receive(:list_conflict_files)
+ .with(skip_content: true).and_return(["let's pretend i'm a conflicted file"])
+
+ conflicts
+ end
+ end
+
+ context "with the `skip_conflict_files_in_gitaly` feature flag off" do
+ before do
+ stub_feature_flags(skip_conflict_files_in_gitaly: false)
+ end
+
+ it 'calls list_conflict_files with no parameters' do
+ expect_any_instance_of(described_class).to receive(:list_conflict_files)
+ .with(skip_content: false).and_return(["let's pretend i'm a conflicted file"])
+
+ conflicts
+ end
+ end
+ end
+
describe '#list_conflict_files' do
let(:allow_tree_conflicts) { false }
let(:request) do
diff --git a/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb b/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
index baf7076c718..ae2bb5af2b1 100644
--- a/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GitalyClient::ObjectPoolService do
+RSpec.describe Gitlab::GitalyClient::ObjectPoolService, feature_category: :source_code_management do
let(:pool_repository) { create(:pool_repository) }
let(:project) { pool_repository.source_project }
let(:raw_repository) { project.repository.raw }
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 4a3607ed6db..9055b284119 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -183,6 +183,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
expect(request.to_h).to eq(
payload.merge({
allow_conflicts: false,
+ expected_old_oid: "",
repository: repository.gitaly_repository.to_h,
message: message.dup.force_encoding(Encoding::ASCII_8BIT),
user: Gitlab::Git::User.from_gitlab(user).to_gitaly.to_h,
@@ -730,6 +731,39 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
end
end
+ describe '#user_rebase_to_ref' do
+ let(:first_parent_ref) { 'refs/heads/my-branch' }
+ let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
+ let(:target_ref) { 'refs/merge-requests/x/merge' }
+ let(:response) { Gitaly::UserRebaseToRefResponse.new(commit_id: 'new-commit-id') }
+
+ let(:payload) do
+ { source_sha: source_sha, target_ref: target_ref, first_parent_ref: first_parent_ref }
+ end
+
+ it 'sends a user_rebase_to_ref message' do
+ freeze_time do
+ expect_any_instance_of(Gitaly::OperationService::Stub).to receive(:user_rebase_to_ref) do |_, request, options|
+ expect(options).to be_kind_of(Hash)
+ expect(request.to_h).to(
+ eq(
+ payload.merge(
+ {
+ expected_old_oid: "",
+ repository: repository.gitaly_repository.to_h,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly.to_h,
+ timestamp: { nanos: 0, seconds: Time.current.to_i }
+ }
+ )
+ )
+ )
+ end.and_return(response)
+
+ client.user_rebase_to_ref(user, **payload)
+ end
+ end
+ end
+
describe '#user_squash' do
let(:start_sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
let(:end_sha) { '54cec5282aa9f21856362fe321c800c236a61615' }
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 08457e20ec3..d8ae7d70bb2 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -452,4 +452,14 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital
client.find_license
end
end
+
+ describe '#object_pool' do
+ it 'sends a get_object_pool_request message' do
+ expect_any_instance_of(Gitaly::ObjectPoolService::Stub)
+ .to receive(:get_object_pool)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+
+ client.object_pool
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index c4ed4b09f04..898bc40ec1f 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
expect(described_class::ClientPool)
.to receive(:new)
- .with(token_pool: %w[foo bar], host: nil, parallel: true, per_page: 100)
+ .with(token_pool: %w[foo bar 123], host: nil, parallel: true, per_page: 100)
described_class.new_client_for(project)
end
diff --git a/spec/lib/gitlab/graphql/pagination/connections_spec.rb b/spec/lib/gitlab/graphql/pagination/connections_spec.rb
index 97389b6250e..0c4ca5570f8 100644
--- a/spec/lib/gitlab/graphql/pagination/connections_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/connections_spec.rb
@@ -6,7 +6,7 @@ require 'spec_helper'
RSpec.describe ::Gitlab::Graphql::Pagination::Connections do
include GraphqlHelpers
- before(:all) do
+ before_all do
ActiveRecord::Schema.define do
create_table :_test_testing_pagination_nodes, force: true do |t|
t.integer :value, null: false
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 1206a1c9131..071b303d777 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -32,8 +32,12 @@ RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do
end
describe 'merge_requests search' do
+ let_it_be(:unarchived_project) { create(:project, :public, group: group) }
+ let_it_be(:archived_project) { create(:project, :public, :archived, group: group) }
let(:opened_result) { create(:merge_request, :opened, source_project: project, title: 'foo opened') }
let(:closed_result) { create(:merge_request, :closed, source_project: project, title: 'foo closed') }
+ let_it_be(:unarchived_result) { create(:merge_request, source_project: unarchived_project, title: 'foo') }
+ let_it_be(:archived_result) { create(:merge_request, source_project: archived_project, title: 'foo') }
let(:query) { 'foo' }
let(:scope) { 'merge_requests' }
@@ -44,6 +48,7 @@ RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do
end
include_examples 'search results filtered by state'
+ include_examples 'search results filtered by archived', 'search_merge_requests_hide_archived_projects'
end
describe '#projects' do
@@ -52,10 +57,10 @@ RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do
describe 'filtering' do
let_it_be(:group) { create(:group) }
- let_it_be(:unarchived_project) { create(:project, :public, group: group, name: 'Test1') }
- let_it_be(:archived_project) { create(:project, :archived, :public, group: group, name: 'Test2') }
+ let_it_be(:unarchived_result) { create(:project, :public, group: group, name: 'Test1') }
+ let_it_be(:archived_result) { create(:project, :archived, :public, group: group, name: 'Test2') }
- it_behaves_like 'search results filtered by archived'
+ it_behaves_like 'search results filtered by archived', 'search_projects_hide_archived'
end
end
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
index b9490306410..9f7aaa21f5b 100644
--- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -2,9 +2,13 @@
require 'spec_helper'
-RSpec.describe Gitlab::HookData::IssueBuilder do
- let_it_be(:label) { create(:label) }
- let_it_be(:issue) { create(:labeled_issue, labels: [label], project: label.project) }
+RSpec.describe Gitlab::HookData::IssueBuilder, feature_category: :webhooks do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:issue) { create(:labeled_issue, labels: [label], project: project) }
+ let_it_be(:contact) { create(:contact, group: project.group) }
+ let_it_be(:issue_contact) { create(:issue_customer_relations_contact, issue: issue, contact: contact) }
let(:builder) { described_class.new(issue) }
@@ -50,6 +54,7 @@ RSpec.describe Gitlab::HookData::IssueBuilder do
expect(data).to include(:state)
expect(data).to include(:severity)
expect(data).to include('labels' => [label.hook_attrs])
+ expect(data).to include('customer_relations_contacts' => [contact.reload.hook_attrs])
end
context 'when the issue has an image in the description' do
diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
index f9a6c25b786..1818693974e 100644
--- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
@@ -39,6 +39,7 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
title
updated_at
updated_by_id
+ draft
].freeze
expect(safe_attribute_keys).to match_array(expected_safe_attribute_keys)
@@ -66,6 +67,7 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
url
last_commit
work_in_progress
+ draft
total_time_spent
time_change
human_total_time_spent
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 133cd3b2f49..93d48379414 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::HTTP do
end
context 'when reading the response is too slow' do
- before(:all) do
+ before_all do
# Override Net::HTTP to add a delay between sending each response chunk
mocked_http = Class.new(Net::HTTP) do
def request(*)
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 981802ad09d..5bbb95b3ea5 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -162,6 +162,7 @@ releases:
- milestone_releases
- milestones
- evidences
+- catalog_resource_version
links:
- release
project_members:
@@ -325,7 +326,7 @@ ci_pipelines:
- security_findings
- daily_build_group_report_results
- latest_builds
-- latest_successful_builds
+- latest_successful_jobs
- daily_report_results
- latest_builds_report_results
- messages
@@ -432,6 +433,7 @@ builds:
- dast_scanner_profiles_build
- dast_scanner_profile
- job_annotations
+- job_artifacts_annotations
bridges:
- user
- pipeline
@@ -440,6 +442,7 @@ bridges:
- needs
- resource
- sourced_pipeline
+- deployment
- resource_group
- metadata
- trigger_request
@@ -516,6 +519,8 @@ container_repositories:
- name
project:
- catalog_resource
+- catalog_resource_versions
+- ci_components
- external_status_checks
- base_tags
- project_topics
@@ -817,6 +822,7 @@ project:
- scan_result_policy_reads
- project_state
- security_policy_bots
+- target_branch_rules
award_emoji:
- awardable
- user
@@ -1039,6 +1045,13 @@ iterations_cadence:
- iterations
catalog_resource:
- project
+ - catalog_resource_components
+ - catalog_resource_versions
+catalog_resource_versions:
+ - project
+ - release
+ - catalog_resource
+ - catalog_resource_components
approval_rules:
- users
- groups
diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb
index b2e047f5621..8ed3a60d7fc 100644
--- a/spec/lib/gitlab/import_export/command_line_util_spec.rb
+++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe
let(:local) { false }
it 'downloads the file' do
- expect(subject).to receive(:download).with(:url, upload_path, size_limit: nil)
+ expect(subject).to receive(:download).with(:url, upload_path, size_limit: 0)
subject.download_or_copy_upload(uploader, upload_path)
end
diff --git a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
index 8c5823edc51..aceea70be92 100644
--- a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
+++ b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_category: :importers do
let_it_be(:filepath) { File.join(Dir.tmpdir, 'decompressed_archive_size_validator_spec.gz') }
- before(:all) do
+ before_all do
create_compressed_file
end
@@ -47,6 +47,25 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c
end
end
+ context 'when max_decompressed_archive_size is set to 0' do
+ subject { described_class.new(archive_path: filepath) }
+
+ before do
+ stub_application_setting(max_decompressed_archive_size: 0)
+ end
+
+ it 'is valid and does not log error message' do
+ expect(Gitlab::Import::Logger)
+ .not_to receive(:info)
+ .with(
+ import_upload_archive_path: filepath,
+ import_upload_archive_size: File.size(filepath),
+ message: 'Decompressed archive size limit reached'
+ )
+ expect(subject.valid?).to eq(true)
+ end
+ end
+
context 'when exception occurs during decompression' do
shared_examples 'logs raised exception and terminates validator process group' do
let(:std) { double(:std, close: nil, value: nil) }
diff --git a/spec/lib/gitlab/import_export/file_importer_spec.rb b/spec/lib/gitlab/import_export/file_importer_spec.rb
index aff11f7ac30..d449446d7be 100644
--- a/spec/lib/gitlab/import_export/file_importer_spec.rb
+++ b/spec/lib/gitlab/import_export/file_importer_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Gitlab::ImportExport::FileImporter, feature_category: :importers
.with(
import_export_upload.import_file,
kind_of(String),
- size_limit: ::Import::GitlabProjects::RemoteFileValidator::FILE_SIZE_LIMIT
+ size_limit: Gitlab::CurrentSettings.current_application_settings.max_import_remote_file_size.megabytes
)
described_class.import(importable: project, archive_file: nil, shared: shared)
@@ -104,7 +104,7 @@ RSpec.describe Gitlab::ImportExport::FileImporter, feature_category: :importers
.with(
file_url,
kind_of(String),
- size_limit: ::Import::GitlabProjects::RemoteFileValidator::FILE_SIZE_LIMIT
+ size_limit: Gitlab::CurrentSettings.current_application_settings.max_import_remote_file_size.megabytes
)
described_class.import(importable: project, archive_file: nil, shared: shared)
diff --git a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb
index 180a6b6ff0a..0f4f2eb573c 100644
--- a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationTreeRestorer, feature_cate
let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) }
let_it_be(:group) do
- create(:group, :disabled_and_unoverridable).tap { |g| g.add_maintainer(user) }
+ create(:group, :shared_runners_disabled_and_unoverridable).tap { |g| g.add_maintainer(user) }
end
before do
diff --git a/spec/lib/gitlab/internal_events/event_definitions_spec.rb b/spec/lib/gitlab/internal_events/event_definitions_spec.rb
index f6f79d9d906..924845504ca 100644
--- a/spec/lib/gitlab/internal_events/event_definitions_spec.rb
+++ b/spec/lib/gitlab/internal_events/event_definitions_spec.rb
@@ -2,9 +2,9 @@
require "spec_helper"
-RSpec.describe Gitlab::InternalEvents::EventDefinitions, feature_category: :product_analytics do
+RSpec.describe Gitlab::InternalEvents::EventDefinitions, feature_category: :product_analytics_data_management do
after(:all) do
- described_class.clear_events
+ described_class.instance_variable_set(:@events, nil)
end
context 'when using actual metric definitions' do
diff --git a/spec/lib/gitlab/internal_events_spec.rb b/spec/lib/gitlab/internal_events_spec.rb
index 86215434ba4..c2615e0f22c 100644
--- a/spec/lib/gitlab/internal_events_spec.rb
+++ b/spec/lib/gitlab/internal_events_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_analytics do
+RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_analytics_data_management do
include TrackingHelpers
include SnowplowHelpers
diff --git a/spec/lib/gitlab/jwt_authenticatable_spec.rb b/spec/lib/gitlab/jwt_authenticatable_spec.rb
index 98c87ef627a..eea93c4e3fe 100644
--- a/spec/lib/gitlab/jwt_authenticatable_spec.rb
+++ b/spec/lib/gitlab/jwt_authenticatable_spec.rb
@@ -148,9 +148,9 @@ RSpec.describe Gitlab::JwtAuthenticatable, feature_category: :system_access do
it 'returns decoded payload if issuer is correct' do
encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
- payload = test_class.decode_jwt(encoded_message, issuer: 'test_issuer')
+ decoded_payload = test_class.decode_jwt(encoded_message, issuer: 'test_issuer')
- expect(payload[0]).to match a_hash_including('iss' => 'test_issuer')
+ expect(decoded_payload[0]).to match a_hash_including('iss' => 'test_issuer')
end
it 'raises an error when the issuer is incorrect' do
@@ -159,6 +159,38 @@ RSpec.describe Gitlab::JwtAuthenticatable, feature_category: :system_access do
expect { test_class.decode_jwt(encoded_message, issuer: 'test_issuer') }.to raise_error(JWT::DecodeError)
end
+
+ it 'raises an error when the issuer is nil' do
+ payload['iss'] = nil
+ encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+
+ expect { test_class.decode_jwt(encoded_message, issuer: 'test_issuer') }.to raise_error(JWT::DecodeError)
+ end
+ end
+
+ context 'audience option' do
+ let(:payload) { { 'aud' => 'test_audience' } }
+
+ it 'returns decoded payload if audience is correct' do
+ encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+ decoded_payload = test_class.decode_jwt(encoded_message, audience: 'test_audience')
+
+ expect(decoded_payload[0]).to match a_hash_including('aud' => 'test_audience')
+ end
+
+ it 'raises an error when the audience is incorrect' do
+ payload['aud'] = 'somebody else'
+ encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+
+ expect { test_class.decode_jwt(encoded_message, audience: 'test_audience') }.to raise_error(JWT::DecodeError)
+ end
+
+ it 'raises an error when the audience is nil' do
+ payload['aud'] = nil
+ encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+
+ expect { test_class.decode_jwt(encoded_message, audience: 'test_audience') }.to raise_error(JWT::DecodeError)
+ end
end
context 'iat_after option' do
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index 34eb48a3221..69d9ca7d4ed 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -10,20 +10,41 @@ RSpec.describe Gitlab::Kas do
end
describe '.verify_api_request' do
- let(:payload) { { 'iss' => described_class::JWT_ISSUER } }
+ let(:payload) { { 'iss' => described_class::JWT_ISSUER, 'aud' => described_class::JWT_AUDIENCE } }
- it 'returns nil if fails to validate the JWT' do
- encoded_token = JWT.encode(payload, 'wrongsecret', 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ context 'returns nil if fails to validate the JWT' do
+ it 'when secret is wrong' do
+ encoded_token = JWT.encode(payload, 'wrong secret', 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers)).to be_nil
+ end
+
+ it 'when issuer is wrong' do
+ payload['iss'] = 'wrong issuer'
+ encoded_token = JWT.encode(payload, described_class.secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers)).to be_nil
+ end
- expect(described_class.verify_api_request(headers)).to be_nil
+ it 'when audience is wrong' do
+ payload['aud'] = 'wrong audience'
+ encoded_token = JWT.encode(payload, described_class.secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers)).to be_nil
+ end
end
it 'returns the decoded JWT' do
encoded_token = JWT.encode(payload, described_class.secret, 'HS256')
headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
- expect(described_class.verify_api_request(headers)).to eq([{ "iss" => described_class::JWT_ISSUER }, { "alg" => "HS256" }])
+ expect(described_class.verify_api_request(headers)).to eq([
+ { 'iss' => described_class::JWT_ISSUER, 'aud' => described_class::JWT_AUDIENCE },
+ { 'alg' => 'HS256' }
+ ])
end
end
@@ -111,6 +132,52 @@ RSpec.describe Gitlab::Kas do
end
end
+ describe '.tunnel_ws_url' do
+ before do
+ stub_config(gitlab_kas: { external_url: external_url })
+ end
+
+ let(:external_url) { 'xyz' }
+
+ subject { described_class.tunnel_ws_url }
+
+ context 'with a gitlab_kas.external_k8s_proxy_url setting' do
+ let(:external_k8s_proxy_url) { 'http://abc' }
+
+ before do
+ stub_config(gitlab_kas: { external_k8s_proxy_url: external_k8s_proxy_url })
+ end
+
+ it { is_expected.to eq('ws://abc') }
+ end
+
+ context 'without a gitlab_kas.external_k8s_proxy_url setting' do
+ context 'external_url uses wss://' do
+ let(:external_url) { 'wss://kas.gitlab.example.com' }
+
+ it { is_expected.to eq('wss://kas.gitlab.example.com/k8s-proxy') }
+ end
+
+ context 'external_url uses ws://' do
+ let(:external_url) { 'ws://kas.gitlab.example.com' }
+
+ it { is_expected.to eq('ws://kas.gitlab.example.com/k8s-proxy') }
+ end
+
+ context 'external_url uses grpcs://' do
+ let(:external_url) { 'grpcs://kas.gitlab.example.com' }
+
+ it { is_expected.to eq('wss://kas.gitlab.example.com/k8s-proxy') }
+ end
+
+ context 'external_url uses grpc://' do
+ let(:external_url) { 'grpc://kas.gitlab.example.com' }
+
+ it { is_expected.to eq('ws://kas.gitlab.example.com/k8s-proxy') }
+ end
+ end
+ end
+
describe '.internal_url' do
it 'returns gitlab_kas internal_url config' do
expect(described_class.internal_url).to eq(Gitlab.config.gitlab_kas.internal_url)
diff --git a/spec/lib/gitlab/merge_requests/message_generator_spec.rb b/spec/lib/gitlab/merge_requests/message_generator_spec.rb
index df8804d38d4..b1a8ff26a86 100644
--- a/spec/lib/gitlab/merge_requests/message_generator_spec.rb
+++ b/spec/lib/gitlab/merge_requests/message_generator_spec.rb
@@ -96,6 +96,26 @@ RSpec.describe Gitlab::MergeRequests::MessageGenerator, feature_category: :code_
end
end
+ context 'when project has commit template with source project id' do
+ let(:merge_request) do
+ double(
+ :merge_request,
+ title: 'Fixes',
+ target_project: project,
+ source_project: project,
+ to_reference: '!123',
+ metrics: nil,
+ merge_user: nil
+ )
+ end
+
+ let(:message_template_name) { '%{source_project_id}' }
+
+ it 'evaluates only necessary variables' do
+ expect(result_message).to eq project.id.to_s
+ end
+ end
+
context 'when project has commit template with closed issues' do
let(:message_template_name) { <<~MSG.rstrip }
Merge branch '%{source_branch}' into '%{target_branch}'
diff --git a/spec/lib/gitlab/metrics/dashboard/defaults_spec.rb b/spec/lib/gitlab/metrics/dashboard/defaults_spec.rb
deleted file mode 100644
index b8556829a59..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/defaults_spec.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Defaults do
- it { is_expected.to be_const_defined(:DEFAULT_PANEL_TYPE) }
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
deleted file mode 100644
index d3cb9760052..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ /dev/null
@@ -1,178 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_caching do
- include MetricsDashboardHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- before do
- project.add_maintainer(user)
- end
-
- describe '.find' do
- let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let(:service_call) { described_class.find(project, user, environment: environment, dashboard_path: dashboard_path) }
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
-
- context 'when the dashboard exists' do
- let(:project) { project_with_dashboard(dashboard_path) }
-
- it_behaves_like 'valid dashboard service response'
- end
-
- context 'when the dashboard is configured incorrectly' do
- let(:project) { project_with_dashboard(dashboard_path, {}) }
-
- it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
- end
-
- context 'when the dashboard contains a metric without a query' do
- let(:dashboard) { { 'panel_groups' => [{ 'panels' => [{ 'metrics' => [{ 'id' => 'mock' }] }] }] } }
- let(:project) { project_with_dashboard(dashboard_path, dashboard.to_yaml) }
-
- it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
- end
-
- context 'when the system dashboard is specified' do
- let(:dashboard_path) { system_dashboard_path }
-
- it_behaves_like 'valid dashboard service response'
- end
-
- context 'when no dashboard is specified' do
- let(:service_call) { described_class.find(project, user, environment: environment) }
-
- it_behaves_like 'valid dashboard service response'
- end
-
- context 'when the dashboard is expected to be embedded' do
- let(:service_call) { described_class.find(project, user, **params) }
- let(:params) { { environment: environment, embedded: true } }
-
- it_behaves_like 'valid embedded dashboard service response'
-
- context 'when params are incomplete' do
- let(:params) { { environment: environment, embedded: true, dashboard_path: system_dashboard_path } }
-
- it_behaves_like 'valid embedded dashboard service response'
- end
-
- context 'when the panel is specified' do
- context 'as a custom metric' do
- let(:params) do
- {
- environment: environment,
- embedded: true,
- dashboard_path: system_dashboard_path,
- group: business_metric_title,
- title: 'title',
- y_label: 'y_label'
- }
- end
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
-
- context 'when the metric exists' do
- before do
- create(:prometheus_metric, project: project)
- end
-
- it_behaves_like 'valid embedded dashboard service response'
- end
- end
-
- context 'as a project-defined panel' do
- let(:dashboard_path) { '.gitlab/dashboard/test.yml' }
- let(:params) do
- {
- environment: environment,
- embedded: true,
- dashboard_path: dashboard_path,
- group: 'Group A',
- title: 'Super Chart A1',
- y_label: 'y_label'
- }
- end
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
-
- context 'when the metric exists' do
- let(:project) { project_with_dashboard(dashboard_path) }
-
- it_behaves_like 'valid embedded dashboard service response'
- end
- end
- end
- end
- end
-
- describe '.find_raw' do
- let(:dashboard) { load_dashboard_yaml(File.read(Rails.root.join('config', 'prometheus', 'common_metrics.yml'))) }
- let(:params) { {} }
-
- subject { described_class.find_raw(project, **params) }
-
- it { is_expected.to eq dashboard }
-
- context 'when the system dashboard is specified' do
- let(:params) { { dashboard_path: system_dashboard_path } }
-
- it { is_expected.to eq dashboard }
- end
-
- context 'when an existing project dashboard is specified' do
- let(:dashboard) { load_sample_dashboard }
- let(:params) { { dashboard_path: '.gitlab/dashboards/test.yml' } }
- let(:project) { project_with_dashboard(params[:dashboard_path]) }
-
- it { is_expected.to eq dashboard }
- end
- end
-
- describe '.find_all_paths' do
- let(:all_dashboard_paths) { described_class.find_all_paths(project) }
- let(:system_dashboard) { { path: system_dashboard_path, display_name: 'Overview', default: true, system_dashboard: true, out_of_the_box_dashboard: true } }
- let(:k8s_pod_health_dashboard) { { path: pod_dashboard_path, display_name: 'K8s pod health', default: false, system_dashboard: false, out_of_the_box_dashboard: true } }
-
- it 'includes OOTB dashboards by default' do
- expect(all_dashboard_paths).to eq([k8s_pod_health_dashboard, system_dashboard])
- end
-
- context 'when the project contains dashboards' do
- let(:dashboard_content) { fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml') }
- let(:project) { project_with_dashboards(dashboards) }
-
- let(:dashboards) do
- {
- '.gitlab/dashboards/metrics.yml' => dashboard_content,
- '.gitlab/dashboards/better_metrics.yml' => dashboard_content
- }
- end
-
- it 'includes OOTB and project dashboards' do
- project_dashboard1 = {
- path: '.gitlab/dashboards/metrics.yml',
- display_name: 'metrics.yml',
- default: false,
- system_dashboard: false,
- out_of_the_box_dashboard: false
- }
-
- project_dashboard2 = {
- path: '.gitlab/dashboards/better_metrics.yml',
- display_name: 'better_metrics.yml',
- default: false,
- system_dashboard: false,
- out_of_the_box_dashboard: false
- }
-
- expect(all_dashboard_paths).to eq([project_dashboard2, k8s_pod_health_dashboard, project_dashboard1, system_dashboard])
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/importer_spec.rb b/spec/lib/gitlab/metrics/dashboard/importer_spec.rb
deleted file mode 100644
index 8b705395a2c..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/importer_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Importer do
- include MetricsDashboardHelpers
-
- let_it_be(:dashboard_path) { '.gitlab/dashboards/sample_dashboard.yml' }
- let_it_be(:project) { create(:project) }
-
- before do
- allow(subject).to receive(:dashboard_hash).and_return(dashboard_hash)
- end
-
- subject { described_class.new(dashboard_path, project) }
-
- describe '.execute' do
- context 'valid dashboard hash' do
- let(:dashboard_hash) { load_sample_dashboard }
-
- it 'imports metrics to database' do
- expect { subject.execute }
- .to change { PrometheusMetric.count }.from(0).to(3)
- end
- end
-
- context 'invalid dashboard hash' do
- let(:dashboard_hash) { {} }
-
- it 'returns false' do
- expect(subject.execute).to be(false)
- end
- end
- end
-
- describe '.execute!' do
- context 'valid dashboard hash' do
- let(:dashboard_hash) { load_sample_dashboard }
-
- it 'imports metrics to database' do
- expect { subject.execute }
- .to change { PrometheusMetric.count }.from(0).to(3)
- end
- end
-
- context 'invalid dashboard hash' do
- let(:dashboard_hash) { {} }
-
- it 'raises error' do
- expect { subject.execute! }.to raise_error(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError,
- 'root is missing required keys: dashboard, panel_groups')
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb b/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
deleted file mode 100644
index bc6cd383758..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
+++ /dev/null
@@ -1,97 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
- include MetricsDashboardHelpers
-
- describe '#execute' do
- let(:project) { create(:project) }
- let(:dashboard_path) { 'path/to/dashboard.yml' }
- let(:prometheus_adapter) { double('adapter', clear_prometheus_reactive_cache!: nil) }
-
- subject { described_class.new(dashboard_hash, project: project, dashboard_path: dashboard_path) }
-
- context 'valid dashboard' do
- let(:dashboard_hash) { load_sample_dashboard }
-
- context 'with all new metrics' do
- it 'creates PrometheusMetrics' do
- expect { subject.execute }.to change { PrometheusMetric.count }.by(3)
- end
- end
-
- context 'with existing metrics' do
- let(:existing_metric_attributes) do
- {
- project: project,
- identifier: 'metric_b',
- title: 'overwrite',
- y_label: 'overwrite',
- query: 'overwrite',
- unit: 'overwrite',
- legend: 'overwrite',
- dashboard_path: dashboard_path
- }
- end
-
- let!(:existing_metric) do
- create(:prometheus_metric, existing_metric_attributes)
- end
-
- it 'updates existing PrometheusMetrics' do
- subject.execute
-
- expect(existing_metric.reload.attributes.with_indifferent_access).to include({
- title: 'Super Chart B',
- y_label: 'y_label',
- query: 'query',
- unit: 'unit',
- legend: 'Legend Label'
- })
- end
-
- it 'creates new PrometheusMetrics' do
- expect { subject.execute }.to change { PrometheusMetric.count }.by(2)
- end
-
- context 'with stale metrics' do
- let!(:stale_metric) do
- create(:prometheus_metric,
- project: project,
- identifier: 'stale_metric',
- dashboard_path: dashboard_path,
- group: 3
- )
- end
-
- it 'updates existing PrometheusMetrics' do
- subject.execute
-
- expect(existing_metric.reload.attributes.with_indifferent_access).to include({
- title: 'Super Chart B',
- y_label: 'y_label',
- query: 'query',
- unit: 'unit',
- legend: 'Legend Label'
- })
- end
-
- it 'deletes stale metrics' do
- subject.execute
-
- expect { stale_metric.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
- end
- end
- end
-
- context 'invalid dashboard' do
- let(:dashboard_hash) { {} }
-
- it 'returns false' do
- expect(subject.execute).to eq(false)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index 52908a0b339..11b587e4905 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -12,11 +12,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
describe 'process' do
let(:sequence) do
[
- Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
- Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
- Gitlab::Metrics::Dashboard::Stages::CustomMetricsDetailsInserter,
- Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
- Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
Gitlab::Metrics::Dashboard::Stages::UrlValidator
]
end
@@ -24,16 +19,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
let(:process_params) { [project, dashboard_yml, sequence, { environment: environment }] }
let(:dashboard) { described_class.new(*process_params).process }
- it 'includes an id for each dashboard panel' do
- expect(all_panels).to satisfy_all do |panel|
- panel[:id].present?
- end
- end
-
- it 'includes boolean to indicate if panel group has custom metrics' do
- expect(dashboard[:panel_groups]).to all(include( { has_custom_metrics: boolean } ))
- end
-
context 'when the dashboard is not present' do
let(:dashboard_yml) { nil }
@@ -41,168 +26,5 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
expect(dashboard).to be_nil
end
end
-
- context 'when dashboard config corresponds to common metrics' do
- let!(:common_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
-
- it 'inserts metric ids into the config' do
- target_metric = all_metrics.find { |metric| metric[:id] == 'metric_a1' }
-
- expect(target_metric).to include(:metric_id)
- expect(target_metric[:metric_id]).to eq(common_metric.id)
- end
- end
-
- context 'when the project has associated metrics' do
- let!(:project_response_metric) { create(:prometheus_metric, project: project, group: :response) }
- let!(:project_system_metric) { create(:prometheus_metric, project: project, group: :system) }
- let!(:project_business_metric) { create(:prometheus_metric, project: project, group: :business) }
-
- it 'includes project-specific metrics' do
- expect(all_metrics).to include get_metric_details(project_system_metric)
- expect(all_metrics).to include get_metric_details(project_response_metric)
- expect(all_metrics).to include get_metric_details(project_business_metric)
- end
-
- it 'display groups and panels in the order they are defined' do
- expected_metrics_order = [
- 'metric_b',
- 'metric_a2',
- 'metric_a1',
- project_business_metric.id,
- project_response_metric.id,
- project_system_metric.id
- ]
- actual_metrics_order = all_metrics.map { |m| m[:id] || m[:metric_id] }
-
- expect(actual_metrics_order).to eq expected_metrics_order
- end
-
- context 'when the project has multiple metrics in the same group' do
- let!(:project_response_metric) { create(:prometheus_metric, project: project, group: :response) }
- let!(:project_response_metric_2) { create(:prometheus_metric, project: project, group: :response) }
-
- it 'includes multiple metrics' do
- expect(all_metrics).to include get_metric_details(project_response_metric)
- expect(all_metrics).to include get_metric_details(project_response_metric_2)
- end
- end
-
- context 'when the dashboard should not include project metrics' do
- let(:sequence) do
- [
- Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
- Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter
- ]
- end
-
- let(:dashboard) { described_class.new(*process_params).process }
-
- it 'includes only dashboard metrics' do
- metrics = all_metrics.map { |m| m[:id] }
-
- expect(metrics.length).to be(3)
- expect(metrics).to eq %w(metric_b metric_a2 metric_a1)
- end
- end
-
- context 'when sample_metrics are requested' do
- let(:process_params) { [project, dashboard_yml, sequence, { environment: environment, sample_metrics: true }] }
-
- it 'includes a sample metrics path for the prometheus endpoint with each metric' do
- expect(all_metrics).to satisfy_all do |metric|
- metric[:prometheus_endpoint_path] == sample_metrics_path(metric[:id])
- end
- end
- end
- end
-
- context 'when there are no alerts' do
- let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
-
- it 'does not insert an alert_path' do
- target_metric = all_metrics.find { |metric| metric[:metric_id] == persisted_metric.id }
-
- expect(target_metric).to be_a Hash
- expect(target_metric).not_to include(:alert_path)
- end
- end
-
- shared_examples_for 'errors with message' do |expected_message|
- it 'raises a DashboardLayoutError' do
- error_class = Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError
-
- expect { dashboard }.to raise_error(error_class, expected_message)
- end
- end
-
- context 'when the dashboard is missing panel_groups' do
- let(:dashboard_yml) { {} }
-
- it_behaves_like 'errors with message', 'Top-level key :panel_groups must be an array'
- end
-
- context 'when the dashboard contains a panel_group which is missing panels' do
- let(:dashboard_yml) { { panel_groups: [{}] } }
-
- it_behaves_like 'errors with message', 'Each "panel_group" must define an array :panels'
- end
-
- context 'when the dashboard contains a panel which is missing metrics' do
- let(:dashboard_yml) { { panel_groups: [{ panels: [{}] }] } }
-
- it_behaves_like 'errors with message', 'Each "panel" must define an array :metrics'
- end
-
- context 'when the dashboard contains a metric which is missing a query' do
- let(:dashboard_yml) { { panel_groups: [{ panels: [{ metrics: [{}] }] }] } }
-
- it_behaves_like 'errors with message', 'Each "metric" must define one of :query or :query_range'
- end
- end
-
- private
-
- def all_metrics
- all_panels.flat_map { |panel| panel[:metrics] }
- end
-
- def all_panels
- dashboard[:panel_groups].flat_map { |group| group[:panels] }
- end
-
- def get_metric_details(metric)
- {
- query_range: metric.query,
- unit: metric.unit,
- label: metric.legend,
- metric_id: metric.id,
- prometheus_endpoint_path: prometheus_path(metric.query),
- edit_path: edit_metric_path(metric)
- }
- end
-
- def prometheus_path(query)
- Gitlab::Routing.url_helpers.prometheus_api_project_environment_path(
- project,
- environment,
- proxy_path: :query_range,
- query: query
- )
- end
-
- def sample_metrics_path(metric)
- Gitlab::Routing.url_helpers.sample_metrics_project_environment_path(
- project,
- environment,
- identifier: metric
- )
- end
-
- def edit_metric_path(metric)
- Gitlab::Routing.url_helpers.edit_project_prometheus_metric_path(
- project,
- metric.id
- )
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
deleted file mode 100644
index 343596af5cf..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
+++ /dev/null
@@ -1,148 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::ServiceSelector do
- include MetricsDashboardHelpers
-
- describe '#call' do
- let(:arguments) { {} }
-
- subject { described_class.call(arguments) }
-
- it { is_expected.to be Metrics::Dashboard::SystemDashboardService }
-
- context 'when just the dashboard path is provided' do
- let(:arguments) { { dashboard_path: '.gitlab/dashboards/test.yml' } }
-
- it { is_expected.to be Metrics::Dashboard::CustomDashboardService }
-
- context 'when the path is for the system dashboard' do
- let(:arguments) { { dashboard_path: system_dashboard_path } }
-
- it { is_expected.to be Metrics::Dashboard::SystemDashboardService }
- end
-
- context 'when the path is for the pod dashboard' do
- let(:arguments) { { dashboard_path: pod_dashboard_path } }
-
- it { is_expected.to be Metrics::Dashboard::PodDashboardService }
- end
- end
-
- context 'when the embedded flag is provided' do
- let(:arguments) { { embedded: true } }
-
- it { is_expected.to be Metrics::Dashboard::DefaultEmbedService }
-
- context 'when an incomplete set of dashboard identifiers are provided' do
- let(:arguments) { { embedded: true, dashboard_path: '.gitlab/dashboards/test.yml' } }
-
- it { is_expected.to be Metrics::Dashboard::DefaultEmbedService }
- end
-
- context 'when all the chart identifiers are provided' do
- let(:arguments) do
- {
- embedded: true,
- dashboard_path: '.gitlab/dashboards/test.yml',
- group: 'Important Metrics',
- title: 'Total Requests',
- y_label: 'req/sec'
- }
- end
-
- it { is_expected.to be Metrics::Dashboard::DynamicEmbedService }
- end
-
- context 'when all chart params expect dashboard_path are provided' do
- let(:arguments) do
- {
- embedded: true,
- group: 'Important Metrics',
- title: 'Total Requests',
- y_label: 'req/sec'
- }
- end
-
- it { is_expected.to be Metrics::Dashboard::DynamicEmbedService }
- end
-
- context 'with a system dashboard and "custom" group' do
- let(:arguments) do
- {
- embedded: true,
- dashboard_path: system_dashboard_path,
- group: business_metric_title,
- title: 'Total Requests',
- y_label: 'req/sec'
- }
- end
-
- it { is_expected.to be Metrics::Dashboard::CustomMetricEmbedService }
- end
-
- context 'with a grafana link' do
- let(:arguments) do
- {
- embedded: true,
- grafana_url: 'https://grafana.example.com'
- }
- end
-
- it { is_expected.to be Metrics::Dashboard::GrafanaMetricEmbedService }
- end
-
- context 'with the embed defined in the arguments' do
- let(:arguments) do
- {
- embedded: true,
- embed_json: '{}'
- }
- end
-
- it { is_expected.to be Metrics::Dashboard::TransientEmbedService }
- end
-
- context 'when cluster is provided' do
- let(:arguments) { { cluster: "some cluster" } }
-
- it { is_expected.to be Metrics::Dashboard::ClusterDashboardService }
- end
-
- context 'when cluster is provided and embedded is not true' do
- let(:arguments) { { cluster: "some cluster", embedded: 'false' } }
-
- it { is_expected.to be Metrics::Dashboard::ClusterDashboardService }
- end
-
- context 'when cluster dashboard_path is provided' do
- let(:arguments) { { dashboard_path: ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH } }
-
- it { is_expected.to be Metrics::Dashboard::ClusterDashboardService }
- end
-
- context 'when cluster is provided and embed params' do
- let(:arguments) do
- {
- cluster: "some cluster",
- embedded: 'true',
- cluster_type: 'project',
- format: :json,
- group: 'Food metrics',
- title: 'Pizza Consumption',
- y_label: 'Slice Count'
- }
- end
-
- it { is_expected.to be Metrics::Dashboard::ClusterMetricsEmbedService }
- end
-
- context 'when metrics embed is for an alert' do
- let(:arguments) { { embedded: true, prometheus_alert_id: 5 } }
-
- it { is_expected.to be Metrics::Dashboard::GitlabAlertEmbedService }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb
deleted file mode 100644
index 3cfdfafb0c5..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Stages::GrafanaFormatter do
- include GrafanaApiHelpers
-
- let_it_be(:namespace) { create(:namespace, path: 'foo') }
- let_it_be(:project) { create(:project, namespace: namespace, path: 'bar') }
-
- describe '#transform!' do
- let(:grafana_dashboard) { Gitlab::Json.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) }
- let(:datasource) { Gitlab::Json.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) }
- let(:expected_dashboard) { Gitlab::Json.parse(fixture_file('grafana/expected_grafana_embed.json'), symbolize_names: true) }
-
- subject(:dashboard) { described_class.new(project, {}, params).transform! }
-
- let(:params) do
- {
- grafana_dashboard: grafana_dashboard,
- datasource: datasource,
- grafana_url: valid_grafana_dashboard_link('https://grafana.example.com')
- }
- end
-
- context 'when the query and resources are configured correctly' do
- it { is_expected.to eq expected_dashboard }
- end
-
- context 'when a panelId is not included in the grafana_url' do
- before do
- params[:grafana_url].gsub('&panelId=8', '')
- end
-
- it { is_expected.to eq expected_dashboard }
-
- context 'when there is also no valid panel in the dashboard' do
- before do
- params[:grafana_dashboard][:dashboard][:panels] = []
- end
-
- it 'raises a processing error' do
- expect { dashboard }.to raise_error(::Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError)
- end
- end
- end
-
- context 'when an input is invalid' do
- before do
- params[:datasource][:access] = 'not-proxy'
- end
-
- it 'raises a processing error' do
- expect { dashboard }.to raise_error(::Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter_spec.rb
deleted file mode 100644
index bb3c8626d32..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter do
- include MetricsDashboardHelpers
-
- let(:project) { build_stubbed(:project) }
- let(:environment) { build_stubbed(:environment, project: project) }
-
- describe '#transform!' do
- subject(:transform!) { described_class.new(project, dashboard, environment: environment).transform! }
-
- let(:dashboard) { load_sample_dashboard.deep_symbolize_keys }
-
- it 'generates prometheus_endpoint_path without newlines' do
- query = 'avg( sum( container_memory_usage_bytes{ container_name!="POD", '\
- 'pod_name=~"^{{ci_environment_slug}}-(.*)", namespace="{{kube_namespace}}" } ) '\
- 'by (job) ) without (job) /1024/1024/1024'
-
- transform!
-
- expect(all_metrics[2][:prometheus_endpoint_path]).to eq(prometheus_path(query))
- end
-
- it 'includes a path for the prometheus endpoint with each metric' do
- transform!
-
- expect(all_metrics).to satisfy_all do |metric|
- metric[:prometheus_endpoint_path].present? && !metric[:prometheus_endpoint_path].include?("\n")
- end
- end
-
- it 'works when query/query_range is a number' do
- query = 2000
-
- transform!
-
- expect(all_metrics[1][:prometheus_endpoint_path]).to eq(prometheus_path(query))
- end
- end
-
- private
-
- def all_metrics
- dashboard[:panel_groups].flat_map do |group|
- group[:panels].flat_map { |panel| panel[:metrics] }
- end
- end
-
- def prometheus_path(query)
- Gitlab::Routing.url_helpers.prometheus_api_project_environment_path(
- project,
- environment,
- proxy_path: :query_range,
- query: query
- )
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/panel_ids_inserter_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/panel_ids_inserter_spec.rb
deleted file mode 100644
index 7a3a9021f86..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/stages/panel_ids_inserter_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter do
- include MetricsDashboardHelpers
-
- let(:project) { build_stubbed(:project) }
-
- def fetch_panel_ids(dashboard_hash)
- dashboard_hash[:panel_groups].flat_map { |group| group[:panels].flat_map { |panel| panel[:id] } }
- end
-
- describe '#transform!' do
- subject(:transform!) { described_class.new(project, dashboard, nil).transform! }
-
- let(:dashboard) { load_sample_dashboard.deep_symbolize_keys }
-
- context 'when dashboard panels are present' do
- it 'assigns unique ids to each panel using PerformanceMonitoring::PrometheusPanel', :aggregate_failures do
- dashboard.fetch(:panel_groups).each do |group|
- group.fetch(:panels).each do |panel|
- panel_double = instance_double(::PerformanceMonitoring::PrometheusPanel)
-
- expect(::PerformanceMonitoring::PrometheusPanel).to receive(:new).with(panel).and_return(panel_double)
- expect(panel_double).to receive(:id).with(group[:group]).and_return(FFaker::Lorem.unique.characters(125))
- end
- end
-
- transform!
-
- expect(fetch_panel_ids(dashboard)).not_to include nil
- end
- end
-
- context 'when dashboard panels has duplicated ids' do
- it 'no panel has assigned id' do
- panel_double = instance_double(::PerformanceMonitoring::PrometheusPanel)
- allow(::PerformanceMonitoring::PrometheusPanel).to receive(:new).and_return(panel_double)
- allow(panel_double).to receive(:id).and_return('duplicated id')
-
- transform!
-
- expect(fetch_panel_ids(dashboard)).to all be_nil
- expect(fetch_panel_ids(dashboard)).not_to include 'duplicated id'
- end
- end
-
- context 'when there are no panels in the dashboard' do
- it 'raises a processing error' do
- dashboard[:panel_groups][0].delete(:panels)
-
- expect { transform! }.to(
- raise_error(::Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError)
- )
- end
- end
-
- context 'when there are no panel_groups in the dashboard' do
- it 'raises a processing error' do
- dashboard.delete(:panel_groups)
-
- expect { transform! }.to(
- raise_error(::Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError)
- )
- end
- end
-
- context 'when dashboard panels has unknown schema attributes' do
- before do
- error = ActiveModel::UnknownAttributeError.new(double, 'unknown_panel_attribute')
- allow(::PerformanceMonitoring::PrometheusPanel).to receive(:new).and_raise(error)
- end
-
- it 'no panel has assigned id' do
- transform!
-
- expect(fetch_panel_ids(dashboard)).to all be_nil
- end
-
- it 'logs the failure' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception)
-
- transform!
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb
deleted file mode 100644
index 60010b9f257..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Stages::TrackPanelType do
- include MetricsDashboardHelpers
-
- let(:project) { build_stubbed(:project) }
- let(:environment) { build_stubbed(:environment, project: project) }
-
- describe '#transform!', :snowplow do
- subject { described_class.new(project, dashboard, environment: environment) }
-
- let(:dashboard) { load_sample_dashboard.deep_symbolize_keys }
-
- it 'creates tracking event' do
- subject.transform!
-
- expect_snowplow_event(
- category: 'MetricsDashboard::Chart',
- action: 'chart_rendered',
- label: 'area-chart'
- )
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter_spec.rb
deleted file mode 100644
index 9303ff981fb..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Stages::VariableEndpointInserter do
- include MetricsDashboardHelpers
-
- let(:project) { build_stubbed(:project) }
- let(:environment) { build_stubbed(:environment, project: project) }
-
- describe '#transform!' do
- subject(:transform!) { described_class.new(project, dashboard, environment: environment).transform! }
-
- let(:dashboard) { load_sample_dashboard.deep_symbolize_keys }
-
- context 'when dashboard variables are present' do
- it 'assigns prometheus_endpoint_path to metric_label_values variable type' do
- endpoint_path = Gitlab::Routing.url_helpers.prometheus_api_project_environment_path(
- project,
- environment,
- proxy_path: :series,
- match: ['backend:haproxy_backend_availability:ratio{env="{{env}}"}']
- )
-
- transform!
-
- expect(
- dashboard.dig(:templating, :variables, :metric_label_values_variable, :options)
- ).to include(prometheus_endpoint_path: endpoint_path)
- end
-
- it 'does not modify other variable types' do
- original_text_variable = dashboard[:templating][:variables][:text_variable_full_syntax].deep_dup
-
- transform!
-
- expect(dashboard[:templating][:variables][:text_variable_full_syntax]).to eq(original_text_variable)
- end
-
- context 'when variable does not have the required series_selector' do
- it 'adds prometheus_endpoint_path without match parameter' do
- dashboard[:templating][:variables][:metric_label_values_variable][:options].delete(:series_selector)
- endpoint_path = Gitlab::Routing.url_helpers.prometheus_api_project_environment_path(
- project,
- environment,
- proxy_path: :series
- )
-
- transform!
-
- expect(
- dashboard.dig(:templating, :variables, :metric_label_values_variable, :options)
- ).to include(prometheus_endpoint_path: endpoint_path)
- end
- end
- end
-
- context 'when no variables are present' do
- it 'does not fail' do
- dashboard.delete(:templating)
-
- expect { transform! }.not_to raise_error
- end
- end
-
- context 'with no environment' do
- subject(:transform!) { described_class.new(project, dashboard, {}).transform! }
-
- it 'raises error' do
- expect { transform! }.to raise_error(
- Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError,
- 'Environment is required for Stages::VariableEndpointInserter'
- )
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb b/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb
deleted file mode 100644
index 3af8b51c889..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb
+++ /dev/null
@@ -1,99 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Transformers::Yml::V1::PrometheusMetrics do
- include MetricsDashboardHelpers
-
- describe '#execute' do
- subject { described_class.new(dashboard_hash) }
-
- context 'valid dashboard' do
- let_it_be(:dashboard_hash) do
- {
- panel_groups: [{
- panels: [
- {
- title: 'Panel 1 title',
- y_label: 'Panel 1 y_label',
- metrics: [
- {
- query_range: 'Panel 1 metric 1 query_range',
- unit: 'Panel 1 metric 1 unit',
- label: 'Panel 1 metric 1 label',
- id: 'Panel 1 metric 1 id'
- },
- {
- query: 'Panel 1 metric 2 query',
- unit: 'Panel 1 metric 2 unit',
- label: 'Panel 1 metric 2 label',
- id: 'Panel 1 metric 2 id'
- }
- ]
- },
- {
- title: 'Panel 2 title',
- y_label: 'Panel 2 y_label',
- metrics: [{
- query_range: 'Panel 2 metric 1 query_range',
- unit: 'Panel 2 metric 1 unit',
- label: 'Panel 2 metric 1 label',
- id: 'Panel 2 metric 1 id'
- }]
- }
- ]
- }]
- }
- end
-
- let(:expected_metrics) do
- [
- {
- title: 'Panel 1 title',
- y_label: 'Panel 1 y_label',
- query: "Panel 1 metric 1 query_range",
- unit: 'Panel 1 metric 1 unit',
- legend: 'Panel 1 metric 1 label',
- identifier: 'Panel 1 metric 1 id',
- group: 3,
- common: false
- },
- {
- title: 'Panel 1 title',
- y_label: 'Panel 1 y_label',
- query: 'Panel 1 metric 2 query',
- unit: 'Panel 1 metric 2 unit',
- legend: 'Panel 1 metric 2 label',
- identifier: 'Panel 1 metric 2 id',
- group: 3,
- common: false
- },
- {
- title: 'Panel 2 title',
- y_label: 'Panel 2 y_label',
- query: 'Panel 2 metric 1 query_range',
- unit: 'Panel 2 metric 1 unit',
- legend: 'Panel 2 metric 1 label',
- identifier: 'Panel 2 metric 1 id',
- group: 3,
- common: false
- }
- ]
- end
-
- it 'returns collection of metrics with correct attributes' do
- expect(subject.execute).to match_array(expected_metrics)
- end
- end
-
- context 'invalid dashboard' do
- let(:dashboard_hash) { {} }
-
- it 'raises missing attribute error' do
- expect { subject.execute }.to raise_error(
- ::Gitlab::Metrics::Dashboard::Transformers::Errors::MissingAttribute, "Missing attribute: 'panel_groups'"
- )
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb
deleted file mode 100644
index 4b07f9dbbab..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Validator::Client do
- include MetricsDashboardHelpers
-
- let_it_be(:schema_path) { 'lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json' }
-
- subject { described_class.new(dashboard, schema_path) }
-
- describe '#execute' do
- context 'with no validation errors' do
- let(:dashboard) { load_sample_dashboard }
-
- it 'returns empty array' do
- expect(subject.execute).to eq([])
- end
- end
-
- context 'with validation errors' do
- let(:dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) }
-
- it 'returns array of error objects' do
- expect(subject.execute).to include(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb
deleted file mode 100644
index 129fb631f3e..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Validator::CustomFormats do
- describe '#format_handlers' do
- describe 'add_to_metric_id_cache' do
- it 'adds data to metric id cache' do
- subject.format_handlers['add_to_metric_id_cache'].call('metric_id', '_schema')
-
- expect(subject.metric_ids_cache).to eq(["metric_id"])
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb
deleted file mode 100644
index a50c2a506cb..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb
+++ /dev/null
@@ -1,149 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Validator::Errors do
- describe Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError do
- context 'empty error hash' do
- let(:error_hash) { {} }
-
- it 'uses default error message' do
- expect(described_class.new(error_hash).message).to eq('Dashboard failed schema validation')
- end
- end
-
- context 'formatted message' do
- subject { described_class.new(error_hash).message }
-
- let(:error_hash) do
- {
- 'data' => 'property_name',
- 'data_pointer' => pointer,
- 'type' => type,
- 'schema' => 'schema',
- 'details' => details
- }
- end
-
- context 'for root object' do
- let(:pointer) { '' }
-
- context 'when required keys are missing' do
- let(:type) { 'required' }
- let(:details) { { 'missing_keys' => ['one'] } }
-
- it { is_expected.to eq 'root is missing required keys: one' }
- end
-
- context 'when there is type mismatch' do
- %w(null string boolean integer number array object).each do |expected_type|
- context "on type: #{expected_type}" do
- let(:type) { expected_type }
- let(:details) { nil }
-
- it { is_expected.to eq "'property_name' at root is not of type: #{expected_type}" }
- end
- end
- end
- end
-
- context 'for nested object' do
- let(:pointer) { '/nested_objects/0' }
-
- context 'when required keys are missing' do
- let(:type) { 'required' }
- let(:details) { { 'missing_keys' => ['two'] } }
-
- it { is_expected.to eq '/nested_objects/0 is missing required keys: two' }
- end
-
- context 'when there is type mismatch' do
- %w(null string boolean integer number array object).each do |expected_type|
- context "on type: #{expected_type}" do
- let(:type) { expected_type }
- let(:details) { nil }
-
- it { is_expected.to eq "'property_name' at /nested_objects/0 is not of type: #{expected_type}" }
- end
- end
- end
-
- context 'when data does not match pattern' do
- let(:type) { 'pattern' }
- let(:error_hash) do
- {
- 'data' => 'property_name',
- 'data_pointer' => pointer,
- 'type' => type,
- 'schema' => { 'pattern' => 'aa.*' }
- }
- end
-
- it { is_expected.to eq "'property_name' at /nested_objects/0 does not match pattern: aa.*" }
- end
-
- context 'when data does not match format' do
- let(:type) { 'format' }
- let(:error_hash) do
- {
- 'data' => 'property_name',
- 'data_pointer' => pointer,
- 'type' => type,
- 'schema' => { 'format' => 'date-time' }
- }
- end
-
- it { is_expected.to eq "'property_name' at /nested_objects/0 does not match format: date-time" }
- end
-
- context 'when data is not const' do
- let(:type) { 'const' }
- let(:error_hash) do
- {
- 'data' => 'property_name',
- 'data_pointer' => pointer,
- 'type' => type,
- 'schema' => { 'const' => 'one' }
- }
- end
-
- it { is_expected.to eq "'property_name' at /nested_objects/0 is not: \"one\"" }
- end
-
- context 'when data is not included in enum' do
- let(:type) { 'enum' }
- let(:error_hash) do
- {
- 'data' => 'property_name',
- 'data_pointer' => pointer,
- 'type' => type,
- 'schema' => { 'enum' => %w(one two) }
- }
- end
-
- it { is_expected.to eq "'property_name' at /nested_objects/0 is not one of: [\"one\", \"two\"]" }
- end
-
- context 'when data is not included in enum' do
- let(:type) { 'unknown' }
- let(:error_hash) do
- {
- 'data' => 'property_name',
- 'data_pointer' => pointer,
- 'type' => type,
- 'schema' => 'schema'
- }
- end
-
- it { is_expected.to eq "'property_name' at /nested_objects/0 is invalid: error_type=unknown" }
- end
- end
- end
- end
-
- describe Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds do
- it 'has custom error message' do
- expect(described_class.new.message).to eq('metric_id must be unique across a project')
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb
deleted file mode 100644
index e7cb1429ca9..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Validator::PostSchemaValidator do
- describe '#validate' do
- context 'with no project and dashboard_path provided' do
- context 'unique local metric_ids' do
- it 'returns empty array' do
- expect(described_class.new(metric_ids: [1, 2, 3]).validate).to eq([])
- end
- end
-
- context 'duplicate local metrics_ids' do
- it 'returns error' do
- expect(described_class.new(metric_ids: [1, 1]).validate)
- .to eq([Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds])
- end
- end
- end
-
- context 'with project and dashboard_path' do
- let(:project) { create(:project) }
-
- subject do
- described_class.new(
- project: project,
- metric_ids: ['some_identifier'],
- dashboard_path: 'test/path.yml'
- ).validate
- end
-
- context 'with unique metric identifiers' do
- before do
- create(:prometheus_metric,
- project: project,
- identifier: 'some_other_identifier',
- dashboard_path: 'test/path.yml'
- )
- end
-
- it 'returns empty array' do
- expect(subject).to eq([])
- end
- end
-
- context 'duplicate metric identifiers in database' do
- context 'with different dashboard_path' do
- before do
- create(:prometheus_metric,
- project: project,
- identifier: 'some_identifier',
- dashboard_path: 'some/other/path.yml'
- )
- end
-
- it 'returns error' do
- expect(subject).to include(Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds)
- end
- end
-
- context 'with same dashboard_path' do
- before do
- create(:prometheus_metric,
- project: project,
- identifier: 'some_identifier',
- dashboard_path: 'test/path.yml'
- )
- end
-
- it 'returns empty array' do
- expect(subject).to eq([])
- end
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator_spec.rb
deleted file mode 100644
index fb55b736354..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/validator_spec.rb
+++ /dev/null
@@ -1,146 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Validator do
- include MetricsDashboardHelpers
-
- let_it_be(:valid_dashboard) { load_sample_dashboard }
- let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) }
- let_it_be(:duplicate_id_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml')) }
-
- let_it_be(:project) { create(:project) }
-
- describe '#validate' do
- context 'valid dashboard schema' do
- it 'returns true' do
- expect(described_class.validate(valid_dashboard)).to be true
- end
-
- context 'with duplicate metric_ids' do
- it 'returns false' do
- expect(described_class.validate(duplicate_id_dashboard)).to be false
- end
- end
-
- context 'with dashboard_path and project' do
- subject { described_class.validate(valid_dashboard, dashboard_path: 'test/path.yml', project: project) }
-
- context 'with no conflicting metric identifiers in db' do
- it { is_expected.to be true }
- end
-
- context 'with metric identifier present in current dashboard' do
- before do
- create(:prometheus_metric,
- identifier: 'metric_a1',
- dashboard_path: 'test/path.yml',
- project: project
- )
- end
-
- it { is_expected.to be true }
- end
-
- context 'with metric identifier present in another dashboard' do
- before do
- create(:prometheus_metric,
- identifier: 'metric_a1',
- dashboard_path: 'some/other/dashboard/path.yml',
- project: project
- )
- end
-
- it { is_expected.to be false }
- end
- end
- end
-
- context 'invalid dashboard schema' do
- it 'returns false' do
- expect(described_class.validate(invalid_dashboard)).to be false
- end
- end
- end
-
- describe '#validate!' do
- shared_examples 'validation failed' do |errors_message|
- it 'raises error with corresponding messages', :aggregate_failures do
- expect { subject }.to raise_error do |error|
- expect(error).to be_kind_of(Gitlab::Metrics::Dashboard::Validator::Errors::InvalidDashboardError)
- expect(error.message).to eq(errors_message)
- end
- end
- end
-
- context 'valid dashboard schema' do
- it 'returns true' do
- expect(described_class.validate!(valid_dashboard)).to be true
- end
-
- context 'with duplicate metric_ids' do
- subject { described_class.validate!(duplicate_id_dashboard) }
-
- it_behaves_like 'validation failed', 'metric_id must be unique across a project'
- end
-
- context 'with dashboard_path and project' do
- subject { described_class.validate!(valid_dashboard, dashboard_path: 'test/path.yml', project: project) }
-
- context 'with no conflicting metric identifiers in db' do
- it { is_expected.to be true }
- end
-
- context 'with metric identifier present in current dashboard' do
- before do
- create(:prometheus_metric,
- identifier: 'metric_a1',
- dashboard_path: 'test/path.yml',
- project: project
- )
- end
-
- it { is_expected.to be true }
- end
-
- context 'with metric identifier present in another dashboard' do
- before do
- create(:prometheus_metric,
- identifier: 'metric_a1',
- dashboard_path: 'some/other/dashboard/path.yml',
- project: project
- )
- end
-
- it_behaves_like 'validation failed', 'metric_id must be unique across a project'
- end
- end
- end
-
- context 'invalid dashboard schema' do
- subject { described_class.validate!(invalid_dashboard) }
-
- context 'wrong property type' do
- it_behaves_like 'validation failed', "'this_should_be_a_int' at /panel_groups/0/panels/0/weight is not of type: number"
- end
-
- context 'panel groups missing' do
- let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml')) }
-
- it_behaves_like 'validation failed', 'root is missing required keys: panel_groups'
- end
-
- context 'groups are missing panels and group keys' do
- let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_groups_missing_panels_and_group.yml')) }
-
- it_behaves_like 'validation failed', '/panel_groups/0 is missing required keys: group'
- end
-
- context 'panel is missing metrics key' do
- let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_panel_is_missing_metrics.yml')) }
-
- it_behaves_like 'validation failed', '/panel_groups/0/panels/0 is missing required keys: metrics'
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/global_search_slis_spec.rb b/spec/lib/gitlab/metrics/global_search_slis_spec.rb
index 5248cd08770..68793db6e41 100644
--- a/spec/lib/gitlab/metrics/global_search_slis_spec.rb
+++ b/spec/lib/gitlab/metrics/global_search_slis_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
+RSpec.describe Gitlab::Metrics::GlobalSearchSlis, feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
describe '#initialize_slis!' do
@@ -92,6 +92,7 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
'basic' | true | 27.538
'advanced' | false | 2.452
'advanced' | true | 15.52
+ 'zoekt' | true | 15.52
end
with_them do
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index afb029a96cb..2ec31a5cc3e 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -384,7 +384,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
it 'does not store DB roles into into RequestStore' do
- Gitlab::WithRequestStore.with_request_store do
+ Gitlab::SafeRequestStore.ensure_request_store do
subscriber.sql(event)
expect(described_class.db_counter_payload).to include(
diff --git a/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb b/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb
index c8dbc990f8c..5394cea64af 100644
--- a/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb
+++ b/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Middleware::WebhookRecursionDetection do
let(:env) { Rack::MockRequest.env_for("/").merge(headers) }
around do |example|
- Gitlab::WithRequestStore.with_request_store { example.run }
+ Gitlab::SafeRequestStore.ensure_request_store { example.run }
end
describe '#call' do
diff --git a/spec/lib/gitlab/null_request_store_spec.rb b/spec/lib/gitlab/null_request_store_spec.rb
deleted file mode 100644
index f68f478c73e..00000000000
--- a/spec/lib/gitlab/null_request_store_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::NullRequestStore do
- let(:null_store) { described_class.new }
-
- describe '#store' do
- it 'returns an empty hash' do
- expect(null_store.store).to eq({})
- end
- end
-
- describe '#active?' do
- it 'returns falsey' do
- expect(null_store.active?).to be_falsey
- end
- end
-
- describe '#read' do
- it 'returns nil' do
- expect(null_store.read('foo')).to be nil
- end
- end
-
- describe '#[]' do
- it 'returns nil' do
- expect(null_store['foo']).to be nil
- end
- end
-
- describe '#write' do
- it 'returns the same value' do
- expect(null_store.write('key', 'value')).to eq('value')
- end
- end
-
- describe '#[]=' do
- it 'returns the same value' do
- expect(null_store['key'] = 'value').to eq('value')
- end
- end
-
- describe '#exist?' do
- it 'returns falsey' do
- expect(null_store.exist?('foo')).to be_falsey
- end
- end
-
- describe '#fetch' do
- it 'returns the block result' do
- expect(null_store.fetch('key') { 'block result' }).to eq('block result') # rubocop:disable Style/RedundantFetchBlock
- end
- end
-
- describe '#delete' do
- context 'when a block is given' do
- it 'yields the key to the block' do
- expect do |b|
- null_store.delete('foo', &b)
- end.to yield_with_args('foo')
- end
-
- it 'returns the block result' do
- expect(null_store.delete('foo') { |key| 'block result' }).to eq('block result')
- end
- end
-
- context 'when a block is not given' do
- it 'returns nil' do
- expect(null_store.delete('foo')).to be nil
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/pages/url_builder_spec.rb b/spec/lib/gitlab/pages/url_builder_spec.rb
index 8e1581704cb..ae94bbadffe 100644
--- a/spec/lib/gitlab/pages/url_builder_spec.rb
+++ b/spec/lib/gitlab/pages/url_builder_spec.rb
@@ -83,60 +83,32 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
context 'when not using pages_unique_domain' do
subject(:pages_url) { builder.pages_url(with_unique_domain: false) }
- context 'when pages_unique_domain feature flag is disabled' do
- before do
- stub_feature_flags(pages_unique_domain: false)
- end
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
it { is_expected.to eq('http://group.example.com/project') }
end
- context 'when pages_unique_domain feature flag is enabled' do
- before do
- stub_feature_flags(pages_unique_domain: true)
- end
-
- context 'when pages_unique_domain_enabled is false' do
- let(:unique_domain_enabled) { false }
-
- it { is_expected.to eq('http://group.example.com/project') }
- end
-
- context 'when pages_unique_domain_enabled is true' do
- let(:unique_domain_enabled) { true }
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
- it { is_expected.to eq('http://group.example.com/project') }
- end
+ it { is_expected.to eq('http://group.example.com/project') }
end
end
context 'when using pages_unique_domain' do
subject(:pages_url) { builder.pages_url(with_unique_domain: true) }
- context 'when pages_unique_domain feature flag is disabled' do
- before do
- stub_feature_flags(pages_unique_domain: false)
- end
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
it { is_expected.to eq('http://group.example.com/project') }
end
- context 'when pages_unique_domain feature flag is enabled' do
- before do
- stub_feature_flags(pages_unique_domain: true)
- end
-
- context 'when pages_unique_domain_enabled is false' do
- let(:unique_domain_enabled) { false }
-
- it { is_expected.to eq('http://group.example.com/project') }
- end
-
- context 'when pages_unique_domain_enabled is true' do
- let(:unique_domain_enabled) { true }
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
- it { is_expected.to eq('http://unique-domain.example.com') }
- end
+ it { is_expected.to eq('http://unique-domain.example.com') }
end
end
end
@@ -144,30 +116,16 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
describe '#unique_host' do
subject(:unique_host) { builder.unique_host }
- context 'when pages_unique_domain feature flag is disabled' do
- before do
- stub_feature_flags(pages_unique_domain: false)
- end
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
it { is_expected.to be_nil }
end
- context 'when pages_unique_domain feature flag is enabled' do
- before do
- stub_feature_flags(pages_unique_domain: true)
- end
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
- context 'when pages_unique_domain_enabled is false' do
- let(:unique_domain_enabled) { false }
-
- it { is_expected.to be_nil }
- end
-
- context 'when pages_unique_domain_enabled is true' do
- let(:unique_domain_enabled) { true }
-
- it { is_expected.to eq('unique-domain.example.com') }
- end
+ it { is_expected.to eq('unique-domain.example.com') }
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb
index 3fe858f33da..ddaf555dae6 100644
--- a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/record_loader_strategy_spec.rb
@@ -32,6 +32,12 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::Strategies::R
end
end
+ let_it_be(:model_without_ignored_columns) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'projects'
+ end
+ end
+
subject(:strategy) { described_class.new(finder_query, model, order_by_columns) }
describe '#initializer_columns' do
@@ -70,6 +76,8 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::Strategies::R
describe '#final_projections' do
context 'when model does not have ignored columns' do
+ let(:model) { model_without_ignored_columns }
+
it 'does not specify the selected column names' do
expect(strategy.final_projections).to contain_exactly("(#{described_class::RECORDS_COLUMN}).*")
end
diff --git a/spec/lib/gitlab/plantuml_spec.rb b/spec/lib/gitlab/plantuml_spec.rb
index c783dd66c48..c2cce59cf90 100644
--- a/spec/lib/gitlab/plantuml_spec.rb
+++ b/spec/lib/gitlab/plantuml_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Plantuml, feature_category: :shared do
let(:plantuml_url) { "http://plantuml.foo.bar" }
before do
- allow(Gitlab::CurrentSettings).to receive(:plantuml_url).and_return(plantuml_url)
+ stub_application_setting(plantuml_url: plantuml_url)
end
context "when PlantUML is enabled" do
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index a762fdbde6b..8f74963d60b 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ProjectSearchResults do
+RSpec.describe Gitlab::ProjectSearchResults, feature_category: :global_search do
include SearchHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/lib/gitlab/redis/cache_spec.rb b/spec/lib/gitlab/redis/cache_spec.rb
index b7b4ba0eb2f..a48bde5e4ab 100644
--- a/spec/lib/gitlab/redis/cache_spec.rb
+++ b/spec/lib/gitlab/redis/cache_spec.rb
@@ -17,5 +17,9 @@ RSpec.describe Gitlab::Redis::Cache do
expect(described_class.active_support_config[:expires_in]).to eq(1.day)
end
+
+ it 'has a pool set to false' do
+ expect(described_class.active_support_config[:pool]).to eq(false)
+ end
end
end
diff --git a/spec/lib/gitlab/redis/cluster_cache_spec.rb b/spec/lib/gitlab/redis/cluster_shared_state_spec.rb
index e448d608c53..11a574c79c4 100644
--- a/spec/lib/gitlab/redis/cluster_cache_spec.rb
+++ b/spec/lib/gitlab/redis/cluster_shared_state_spec.rb
@@ -2,6 +2,6 @@
require 'spec_helper'
-RSpec.describe Gitlab::Redis::ClusterCache, feature_category: :redis do
- include_examples "redis_new_instance_shared_examples", 'cluster_cache', Gitlab::Redis::Cache
+RSpec.describe Gitlab::Redis::ClusterSharedState, feature_category: :redis do
+ include_examples "redis_new_instance_shared_examples", 'cluster_shared_state', Gitlab::Redis::SharedState
end
diff --git a/spec/lib/gitlab/redis/etag_cache_spec.rb b/spec/lib/gitlab/redis/etag_cache_spec.rb
new file mode 100644
index 00000000000..182a41bac80
--- /dev/null
+++ b/spec/lib/gitlab/redis/etag_cache_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::EtagCache, feature_category: :shared do
+ # Note: this is a pseudo-store in front of `Cache`, meant only as a tool
+ # to move away from `SharedState` for etag cache data. Thus, we use the
+ # same store configuration as the former.
+ let(:instance_specific_config_file) { "config/redis.cache.yml" }
+
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+ let(:rails_root) { mktmpdir }
+
+ subject { described_class.pool }
+
+ before do
+ # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
+ allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(rails_root)
+ allow(Gitlab::Redis::Cache).to receive(:rails_root).and_return(rails_root)
+
+ allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(Gitlab::Redis::Cache).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
+ expect(redis_instance.secondary_store.connection[:id]).to eq("redis://test-host:6379/99")
+
+ expect(redis_instance.instance_name).to eq('EtagCache')
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_etag_cache,
+ :use_primary_store_as_default_for_etag_cache
+ end
+
+ describe '#store_name' do
+ it 'returns the name of the Cache store' do
+ expect(described_class.store_name).to eq('Cache')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/regex_requires_app_spec.rb b/spec/lib/gitlab/regex_requires_app_spec.rb
index 780184cdfd2..bea5d25dbc8 100644
--- a/spec/lib/gitlab/regex_requires_app_spec.rb
+++ b/spec/lib/gitlab/regex_requires_app_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
# Only specs that *cannot* be run with fast_spec_helper only
# See regex_spec for tests that do not require the full spec_helper
-RSpec.describe Gitlab::Regex do
+RSpec.describe Gitlab::Regex, feature_category: :tooling do
describe '.debian_architecture_regex' do
subject { described_class.debian_architecture_regex }
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 5e58282ff92..c91b99caba2 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -65,13 +65,25 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do
describe '.project_name_regex_message' do
subject { described_class.project_name_regex_message }
- it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'.") }
+ it { is_expected.to eq("can contain only letters, digits, emoji, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'.") }
end
describe '.group_name_regex_message' do
subject { described_class.group_name_regex_message }
- it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', dash, space, parenthesis. It must start with letter, digit, emoji or '_'.") }
+ it { is_expected.to eq("can contain only letters, digits, emoji, '_', '.', dash, space, parenthesis. It must start with letter, digit, emoji or '_'.") }
+ end
+
+ describe '.slack_link_regex' do
+ subject { described_class.slack_link_regex }
+
+ it { is_expected.not_to match('http://custom-url.com|click here') }
+ it { is_expected.not_to match('custom-url.com|any-Charact3r$') }
+ it { is_expected.not_to match("&lt;custom-url.com|any-Charact3r$&gt;") }
+
+ it { is_expected.to match('<http://custom-url.com|click here>') }
+ it { is_expected.to match('<custom-url.com|any-Charact3r$>') }
+ it { is_expected.to match('<any-Charact3r$|any-Charact3r$>') }
end
describe '.bulk_import_destination_namespace_path_regex_message' do
@@ -820,6 +832,7 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do
it { is_expected.to match('1.2.3') }
it { is_expected.to match('1.2.3-beta') }
it { is_expected.to match('1.2.3-alpha.3') }
+ it { is_expected.to match('1.2.3-alpha.3+abcd') }
it { is_expected.not_to match('1') }
it { is_expected.not_to match('1.2') }
it { is_expected.not_to match('1./2.3') }
diff --git a/spec/lib/gitlab/repository_size_checker_spec.rb b/spec/lib/gitlab/repository_size_checker_spec.rb
index 559f5fa66c6..15c05a07ebb 100644
--- a/spec/lib/gitlab/repository_size_checker_spec.rb
+++ b/spec/lib/gitlab/repository_size_checker_spec.rb
@@ -36,13 +36,14 @@ RSpec.describe Gitlab::RepositorySizeChecker do
describe '#changes_will_exceed_size_limit?' do
let(:current_size) { 49 }
+ let(:project) { double }
it 'returns true when changes go over' do
- expect(subject.changes_will_exceed_size_limit?(2.megabytes)).to eq(true)
+ expect(subject.changes_will_exceed_size_limit?(2.megabytes, project)).to eq(true)
end
it 'returns false when changes do not go over' do
- expect(subject.changes_will_exceed_size_limit?(1.megabytes)).to eq(false)
+ expect(subject.changes_will_exceed_size_limit?(1.megabytes, project)).to eq(false)
end
end
diff --git a/spec/lib/gitlab/safe_request_store_spec.rb b/spec/lib/gitlab/safe_request_store_spec.rb
deleted file mode 100644
index accc491fbb7..00000000000
--- a/spec/lib/gitlab/safe_request_store_spec.rb
+++ /dev/null
@@ -1,257 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::SafeRequestStore do
- describe '.store' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect(described_class.store).to eq(RequestStore)
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'does not use RequestStore' do
- expect(described_class.store).to be_a(Gitlab::NullRequestStore)
- end
- end
- end
-
- describe '.begin!' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect(RequestStore).to receive(:begin!)
-
- described_class.begin!
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'uses RequestStore' do
- expect(RequestStore).to receive(:begin!)
-
- described_class.begin!
- end
- end
- end
-
- describe '.clear!' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect(RequestStore).to receive(:clear!).once.and_call_original
-
- described_class.clear!
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'uses RequestStore' do
- expect(RequestStore).to receive(:clear!).and_call_original
-
- described_class.clear!
- end
- end
- end
-
- describe '.end!' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect(RequestStore).to receive(:end!).once.and_call_original
-
- described_class.end!
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'uses RequestStore' do
- expect(RequestStore).to receive(:end!).and_call_original
-
- described_class.end!
- end
- end
- end
-
- describe '.write' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect do
- described_class.write('foo', true)
- end.to change { described_class.read('foo') }.from(nil).to(true)
- end
-
- it 'does not pass the options hash to the underlying store implementation' do
- expect(described_class.store).to receive(:write).with('foo', true)
-
- described_class.write('foo', true, expires_in: 15.seconds)
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'does not use RequestStore' do
- expect do
- described_class.write('foo', true)
- end.not_to change { described_class.read('foo') }.from(nil)
- end
-
- it 'does not pass the options hash to the underlying store implementation' do
- expect(described_class.store).to receive(:write).with('foo', true)
-
- described_class.write('foo', true, expires_in: 15.seconds)
- end
- end
- end
-
- describe '.[]=' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect do
- described_class['foo'] = true
- end.to change { described_class.read('foo') }.from(nil).to(true)
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'does not use RequestStore' do
- expect do
- described_class['foo'] = true
- end.not_to change { described_class.read('foo') }.from(nil)
- end
- end
- end
-
- describe '.read' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect do
- RequestStore.write('foo', true)
- end.to change { described_class.read('foo') }.from(nil).to(true)
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'does not use RequestStore' do
- expect do
- RequestStore.write('foo', true)
- end.not_to change { described_class.read('foo') }.from(nil)
-
- RequestStore.clear! # Clean up
- end
- end
- end
-
- describe '.[]' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect do
- RequestStore.write('foo', true)
- end.to change { described_class['foo'] }.from(nil).to(true)
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'does not use RequestStore' do
- expect do
- RequestStore.write('foo', true)
- end.not_to change { described_class['foo'] }.from(nil)
-
- RequestStore.clear! # Clean up
- end
- end
- end
-
- describe '.exist?' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect do
- RequestStore.write('foo', 'not nil')
- end.to change { described_class.exist?('foo') }.from(false).to(true)
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'does not use RequestStore' do
- expect do
- RequestStore.write('foo', 'not nil')
- end.not_to change { described_class.exist?('foo') }.from(false)
-
- RequestStore.clear! # Clean up
- end
- end
- end
-
- describe '.fetch' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- expect do
- described_class.fetch('foo') { 'block result' } # rubocop:disable Style/RedundantFetchBlock
- end.to change { described_class.read('foo') }.from(nil).to('block result')
- end
- end
-
- context 'when RequestStore is NOT active' do
- it 'does not use RequestStore' do
- RequestStore.clear! # Ensure clean
-
- expect do
- described_class.fetch('foo') { 'block result' } # rubocop:disable Style/RedundantFetchBlock
- end.not_to change { described_class.read('foo') }.from(nil)
-
- RequestStore.clear! # Clean up
- end
- end
- end
-
- describe '.delete' do
- context 'when RequestStore is active', :request_store do
- it 'uses RequestStore' do
- described_class.write('foo', true)
-
- expect do
- described_class.delete('foo')
- end.to change { described_class.read('foo') }.from(true).to(nil)
- end
-
- context 'when given a block and the key exists' do
- it 'does not execute the block' do
- described_class.write('foo', true)
-
- expect do |b|
- described_class.delete('foo', &b)
- end.not_to yield_control
- end
- end
-
- context 'when given a block and the key does not exist' do
- it 'yields the key and returns the block result' do
- result = described_class.delete('foo') { |key| "#{key} block result" }
-
- expect(result).to eq('foo block result')
- end
- end
- end
-
- context 'when RequestStore is NOT active' do
- before do
- RequestStore.write('foo', true)
- end
-
- after do
- RequestStore.clear! # Clean up
- end
-
- it 'does not use RequestStore' do
- expect do
- described_class.delete('foo')
- end.not_to change { RequestStore.read('foo') }.from(true)
- end
-
- context 'when given a block' do
- it 'yields the key and returns the block result' do
- result = described_class.delete('foo') { |key| "#{key} block result" }
-
- expect(result).to eq('foo block result')
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 662eab11cc0..725b7901e68 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -187,11 +187,16 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
end
context 'filtering' do
+ let_it_be(:unarchived_project) { create(:project, :public) }
+ let_it_be(:archived_project) { create(:project, :public, :archived) }
let!(:opened_result) { create(:merge_request, :opened, source_project: project, title: 'foo opened') }
let!(:closed_result) { create(:merge_request, :closed, source_project: project, title: 'foo closed') }
+ let(:unarchived_result) { create(:merge_request, source_project: unarchived_project, title: 'foo unarchived') }
+ let(:archived_result) { create(:merge_request, source_project: archived_project, title: 'foo archived') }
let(:query) { 'foo' }
include_examples 'search results filtered by state'
+ include_examples 'search results filtered by archived', 'search_merge_requests_hide_archived_projects'
end
context 'ordering' do
@@ -266,25 +271,10 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
describe 'filtering' do
let_it_be(:group) { create(:group) }
- let_it_be(:unarchived_project) { create(:project, :public, group: group, name: 'Test1') }
- let_it_be(:archived_project) { create(:project, :archived, :public, group: group, name: 'Test2') }
+ let_it_be(:unarchived_result) { create(:project, :public, group: group, name: 'Test1') }
+ let_it_be(:archived_result) { create(:project, :archived, :public, group: group, name: 'Test2') }
- it_behaves_like 'search results filtered by archived'
-
- context 'when the search_projects_hide_archived feature flag is disabled' do
- before do
- stub_feature_flags(search_projects_hide_archived: false)
- end
-
- context 'when filter not provided' do
- let(:filters) { {} }
-
- it 'returns archived and unarchived results', :aggregate_failures do
- expect(results.objects('projects')).to include unarchived_project
- expect(results.objects('projects')).to include archived_project
- end
- end
- end
+ it_behaves_like 'search results filtered by archived', 'search_projects_hide_archived'
end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 4e46a26e89f..4550ccc2fff 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -463,11 +463,12 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
let(:expected_end_payload) do
end_payload.merge(
'urgency' => 'high',
- 'target_duration_s' => 10
+ 'target_duration_s' => 10,
+ 'target_scheduling_latency_s' => 10
)
end
- it 'logs job done with urgency and target_duration_s fields' do
+ it 'logs job done with urgency, target_duration_s and target_scheduling_latency_s fields' do
travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(expected_end_payload).ordered
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control/client_spec.rb
new file mode 100644
index 00000000000..0a837f6f932
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control/client_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::Client, :clean_gitlab_redis_queues, feature_category: :global_search do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestPauseWorker'
+ end
+
+ include ApplicationWorker
+
+ pause_control :zoekt
+
+ def perform(*); end
+ end
+ end
+
+ before do
+ stub_const('TestPauseWorker', worker_class)
+ end
+
+ describe '#call' do
+ context 'when strategy is enabled' do
+ before do
+ stub_feature_flags(zoekt_pause_indexing: true)
+ end
+
+ it 'does not schedule the job' do
+ expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).to receive(:add_to_waiting_queue!).once
+
+ TestPauseWorker.perform_async('args1')
+
+ expect(TestPauseWorker.jobs.count).to eq(0)
+ end
+ end
+
+ context 'when strategy is disabled' do
+ before do
+ stub_feature_flags(zoekt_pause_indexing: false)
+ end
+
+ it 'schedules the job' do
+ expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).not_to receive(:add_to_waiting_queue!)
+
+ TestPauseWorker.perform_async('args1')
+
+ expect(TestPauseWorker.jobs.count).to eq(1)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control/pause_control_service_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control/pause_control_service_spec.rb
new file mode 100644
index 00000000000..1de8bd9f7ad
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control/pause_control_service_spec.rb
@@ -0,0 +1,178 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::PauseControlService, :clean_gitlab_redis_shared_state, feature_category: :global_search do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ let(:worker_class_name) { worker_class.name }
+
+ let(:worker_context) do
+ { 'correlation_id' => 'context_correlation_id',
+ 'meta.project' => 'gitlab-org/gitlab' }
+ end
+
+ let(:stored_context) do
+ { "#{Gitlab::ApplicationContext::LOG_KEY}.project" => 'gitlab-org/gitlab' }
+ end
+
+ let(:worker_args) { [1, 2] }
+
+ subject { described_class.new(worker_class_name) }
+
+ before do
+ stub_const(worker_class_name, worker_class)
+ end
+
+ describe '.add_to_waiting_queue!' do
+ it 'calls an instance method' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:add_to_waiting_queue!).with(worker_args, worker_context)
+ end
+
+ described_class.add_to_waiting_queue!(worker_class_name, worker_args, worker_context)
+ end
+ end
+
+ describe '.has_jobs_in_waiting_queue?' do
+ it 'calls an instance method' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:has_jobs_in_waiting_queue?)
+ end
+
+ described_class.has_jobs_in_waiting_queue?(worker_class_name)
+ end
+ end
+
+ describe '.resume_processing!' do
+ it 'calls an instance method' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:resume_processing!)
+ end
+
+ described_class.resume_processing!(worker_class_name)
+ end
+ end
+
+ describe '.queue_size' do
+ it 'reports the queue size' do
+ expect(described_class.queue_size(worker_class_name)).to eq(0)
+
+ subject.add_to_waiting_queue!(worker_args, worker_context)
+
+ expect(described_class.queue_size(worker_class_name)).to eq(1)
+
+ expect { subject.resume_processing! }.to change { described_class.queue_size(worker_class_name) }.by(-1)
+ end
+ end
+
+ describe '#add_to_waiting_queue!' do
+ it 'adds a job to the set' do
+ expect { subject.add_to_waiting_queue!(worker_args, worker_context) }
+ .to change { subject.queue_size }
+ .from(0).to(1)
+ end
+
+ it 'adds only one unique job to the set' do
+ expect do
+ 2.times { subject.add_to_waiting_queue!(worker_args, worker_context) }
+ end.to change { subject.queue_size }.from(0).to(1)
+ end
+
+ it 'only stores `project` context information' do
+ subject.add_to_waiting_queue!(worker_args, worker_context)
+
+ subject.send(:with_redis) do |r|
+ set_key = subject.send(:redis_set_key)
+ stored_job = subject.send(:deserialize, r.zrange(set_key, 0, -1).first)
+
+ expect(stored_job['context']).to eq(stored_context)
+ end
+ end
+ end
+
+ describe '#has_jobs_in_waiting_queue?' do
+ it 'checks set existence' do
+ expect { subject.add_to_waiting_queue!(worker_args, worker_context) }
+ .to change { subject.has_jobs_in_waiting_queue? }
+ .from(false).to(true)
+ end
+ end
+
+ describe '#resume_processing!' do
+ let(:jobs) { [[1], [2], [3]] }
+
+ it 'puts jobs back into the queue and respects order' do
+ # We stub this const to test at least a couple of loop iterations
+ stub_const("#{described_class}::LIMIT", 2)
+
+ jobs.each do |j|
+ subject.add_to_waiting_queue!(j, worker_context)
+ end
+
+ expect(worker_class).to receive(:perform_async).with(1).ordered
+ expect(worker_class).to receive(:perform_async).with(2).ordered
+ expect(worker_class).not_to receive(:perform_async).with(3).ordered
+
+ expect(Gitlab::SidekiqLogging::PauseControlLogger.instance).to receive(:resumed_log).with(worker_class_name, [1])
+ expect(Gitlab::SidekiqLogging::PauseControlLogger.instance).to receive(:resumed_log).with(worker_class_name, [2])
+
+ subject.resume_processing!
+ end
+
+ it 'drops a set after execution' do
+ jobs.each do |j|
+ subject.add_to_waiting_queue!(j, worker_context)
+ end
+
+ expect(Gitlab::ApplicationContext).to receive(:with_raw_context)
+ .with(stored_context)
+ .exactly(jobs.count).times.and_call_original
+ expect(worker_class).to receive(:perform_async).exactly(jobs.count).times
+
+ expect { subject.resume_processing! }.to change { subject.has_jobs_in_waiting_queue? }.from(true).to(false)
+ end
+ end
+
+ context 'with concurrent changes to different queues' do
+ let(:second_worker_class) do
+ Class.new do
+ def self.name
+ 'SecondDummyIndexingWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ let(:other_subject) { described_class.new(second_worker_class.name) }
+
+ before do
+ stub_const(second_worker_class.name, second_worker_class)
+ end
+
+ it 'allows to use queues independently of each other' do
+ expect { subject.add_to_waiting_queue!(worker_args, worker_context) }
+ .to change { subject.queue_size }
+ .from(0).to(1)
+
+ expect { other_subject.add_to_waiting_queue!(worker_args, worker_context) }
+ .to change { other_subject.queue_size }
+ .from(0).to(1)
+
+ expect { subject.resume_processing! }.to change { subject.has_jobs_in_waiting_queue? }
+ .from(true).to(false)
+
+ expect { other_subject.resume_processing! }.to change { other_subject.has_jobs_in_waiting_queue? }
+ .from(true).to(false)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control/server_spec.rb
new file mode 100644
index 00000000000..c577f9697b2
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control/server_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::Server, :clean_gitlab_redis_queues, feature_category: :global_search do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestPauseWorker'
+ end
+
+ include ApplicationWorker
+
+ pause_control :zoekt
+
+ def perform(*)
+ self.class.work
+ end
+
+ def self.work; end
+ end
+ end
+
+ before do
+ stub_const('TestPauseWorker', worker_class)
+ end
+
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add described_class
+ Sidekiq::Testing.inline! { example.run }
+ end
+ end
+
+ describe '#call' do
+ context 'when strategy is enabled' do
+ before do
+ stub_feature_flags(zoekt_pause_indexing: true)
+ end
+
+ it 'puts the job to another queue without execution' do
+ bare_job = { 'class' => 'TestPauseWorker', 'args' => ['hello'] }
+ job_definition = Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler.new(TestPauseWorker, bare_job.dup)
+
+ expect(Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler)
+ .to receive(:new).with(TestPauseWorker, a_hash_including(bare_job))
+ .and_return(job_definition).once
+
+ expect(TestPauseWorker).not_to receive(:work)
+ expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).to receive(:add_to_waiting_queue!).once
+
+ TestPauseWorker.perform_async('hello')
+ end
+ end
+
+ context 'when strategy is disabled' do
+ before do
+ stub_feature_flags(zoekt_pause_indexing: false)
+ end
+
+ it 'executes the job' do
+ bare_job = { 'class' => 'TestPauseWorker', 'args' => ['hello'] }
+ job_definition = Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler.new(TestPauseWorker, bare_job.dup)
+
+ expect(Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler)
+ .to receive(:new).with(TestPauseWorker, hash_including(bare_job))
+ .and_return(job_definition).twice
+
+ expect(TestPauseWorker).to receive(:work)
+ expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).not_to receive(:add_to_waiting_queue!)
+
+ TestPauseWorker.perform_async('hello')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control/strategy_handler_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control/strategy_handler_spec.rb
new file mode 100644
index 00000000000..da53abec479
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control/strategy_handler_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler, :clean_gitlab_redis_queues, feature_category: :global_search do
+ subject(:pause_control) do
+ described_class.new(TestPauseWorker, job)
+ end
+
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestPauseWorker'
+ end
+
+ include ApplicationWorker
+
+ pause_control :zoekt
+
+ def perform(*); end
+ end
+ end
+
+ let(:job) { { 'class' => 'TestPauseWorker', 'args' => [1], 'jid' => '123' } }
+
+ before do
+ stub_const('TestPauseWorker', worker_class)
+ end
+
+ describe '#schedule' do
+ shared_examples 'scheduling with pause control class' do |strategy_class|
+ it 'calls schedule on the strategy' do
+ expect do |block|
+ klass = "Gitlab::SidekiqMiddleware::PauseControl::Strategies::#{strategy_class}".constantize
+ expect_next_instance_of(klass) do |strategy|
+ expect(strategy).to receive(:schedule).with(job, &block)
+ end
+
+ pause_control.schedule(&block)
+ end.to yield_control
+ end
+ end
+
+ it_behaves_like 'scheduling with pause control class', 'Zoekt'
+ end
+
+ describe '#perform' do
+ it 'calls perform on the strategy' do
+ expect do |block|
+ expect_next_instance_of(Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt) do |strategy|
+ expect(strategy).to receive(:perform).with(job, &block)
+ end
+
+ pause_control.perform(&block)
+ end.to yield_control
+ end
+
+ it 'pauses job' do
+ expect_next_instance_of(Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt) do |strategy|
+ expect(strategy).to receive(:should_pause?).and_return(true)
+ end
+
+ expect { pause_control.perform }.to change {
+ Gitlab::SidekiqMiddleware::PauseControl::PauseControlService.queue_size('TestPauseWorker')
+ }.by(1)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb
new file mode 100644
index 00000000000..a0cce0f61a0
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl, feature_category: :global_search do
+ describe '.for' do
+ it 'returns the right class for `zoekt`' do
+ expect(described_class.for(:zoekt)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt)
+ end
+
+ it 'returns the right class for `none`' do
+ expect(described_class.for(:none)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None)
+ end
+
+ it 'returns nil when passing an unknown key' do
+ expect(described_class.for(:unknown)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index bc69f232d9e..0cbf9eab3d8 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -59,30 +59,14 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
described_class.initialize_process_metrics
end
- shared_examples "initializes sidekiq SLIs for the workers in the current process" do
+ context 'when emit_sidekiq_histogram FF is disabled' do
before do
- allow(Gitlab::SidekiqConfig)
- .to receive(:current_worker_queue_mappings)
- .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')
- allow(completion_seconds_metric).to receive(:get)
+ stub_feature_flags(emit_sidekiq_histogram_metrics: false)
+ allow(Gitlab::SidekiqConfig).to receive(:current_worker_queue_mappings).and_return('MergeWorker' => 'merge')
end
- it "initializes the SLIs with labels" do
- expect(Gitlab::Metrics::SidekiqSlis)
- .to receive(initialize_sli_method).with([
- {
- worker: 'MergeWorker',
- urgency: 'high',
- feature_category: 'source_code_management',
- external_dependencies: 'no'
- },
- {
- worker: 'Ci::BuildFinishedWorker',
- urgency: 'high',
- feature_category: 'continuous_integration',
- external_dependencies: 'no'
- }
- ])
+ it 'does not initialize sidekiq_jobs_completion_seconds' do
+ expect(completion_seconds_metric).not_to receive(:get)
described_class.initialize_process_metrics
end
@@ -97,35 +81,38 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
end
- context 'initializing execution SLIs' do
- let(:initialize_sli_method) { :initialize_execution_slis! }
-
- context 'when sidekiq_execution_application_slis FF is turned on' do
- it_behaves_like "initializes sidekiq SLIs for the workers in the current process"
+ context 'initializing execution and queueing SLIs' do
+ before do
+ allow(Gitlab::SidekiqConfig)
+ .to receive(:current_worker_queue_mappings)
+ .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')
+ allow(completion_seconds_metric).to receive(:get)
end
- context 'when sidekiq_execution_application_slis FF is turned off' do
- before do
- stub_feature_flags(sidekiq_execution_application_slis: false)
- end
-
- it_behaves_like "not initializing sidekiq SLIs"
- end
- end
+ it "initializes the execution and queueing SLIs with labels" do
+ expected_labels = [
+ {
+ worker: 'MergeWorker',
+ urgency: 'high',
+ feature_category: 'source_code_management',
+ external_dependencies: 'no',
+ queue: 'merge'
+ },
+ {
+ worker: 'Ci::BuildFinishedWorker',
+ urgency: 'high',
+ feature_category: 'continuous_integration',
+ external_dependencies: 'no',
+ queue: 'default'
+ }
+ ]
- context 'initializing queueing SLIs' do
- let(:initialize_sli_method) { :initialize_queueing_slis! }
-
- context 'when sidekiq_queueing_application_slis FF is turned on' do
- it_behaves_like "initializes sidekiq SLIs for the workers in the current process"
- end
-
- context 'when sidekiq_queueing_application_slis FF is turned off' do
- before do
- stub_feature_flags(sidekiq_queueing_application_slis: false)
- end
+ expect(Gitlab::Metrics::SidekiqSlis)
+ .to receive(:initialize_execution_slis!).with(expected_labels)
+ expect(Gitlab::Metrics::SidekiqSlis)
+ .to receive(:initialize_queueing_slis!).with(expected_labels)
- it_behaves_like "not initializing sidekiq SLIs"
+ described_class.initialize_process_metrics
end
end
@@ -192,20 +179,26 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
expect(redis_requests_total).to receive(:increment).with(labels_with_job_status, redis_calls)
expect(elasticsearch_requests_total).to receive(:increment).with(labels_with_job_status, elasticsearch_calls)
expect(sidekiq_mem_total_bytes).to receive(:set).with(labels_with_job_status, mem_total_bytes)
- expect(Gitlab::Metrics::SidekiqSlis).to receive(:record_execution_apdex).with(labels.slice(:worker,
- :feature_category,
- :urgency,
- :external_dependencies), monotonic_time_duration)
- expect(Gitlab::Metrics::SidekiqSlis).to receive(:record_execution_error).with(labels.slice(:worker,
- :feature_category,
- :urgency,
- :external_dependencies), false)
+ expect(Gitlab::Metrics::SidekiqSlis).to receive(:record_execution_apdex)
+ .with(labels.slice(:worker,
+ :feature_category,
+ :urgency,
+ :external_dependencies,
+ :queue), monotonic_time_duration)
+ expect(Gitlab::Metrics::SidekiqSlis).to receive(:record_execution_error)
+ .with(labels.slice(:worker,
+ :feature_category,
+ :urgency,
+ :external_dependencies,
+ :queue), false)
if queue_duration_for_job
- expect(Gitlab::Metrics::SidekiqSlis).to receive(:record_queueing_apdex).with(labels.slice(:worker,
- :feature_category,
- :urgency,
- :external_dependencies), queue_duration_for_job)
+ expect(Gitlab::Metrics::SidekiqSlis).to receive(:record_queueing_apdex)
+ .with(labels.slice(:worker,
+ :feature_category,
+ :urgency,
+ :external_dependencies,
+ :queue), queue_duration_for_job)
end
subject.call(worker, job, :test) { nil }
@@ -260,10 +253,12 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
it 'records sidekiq SLI error but does not record sidekiq SLI apdex' do
expect(failed_total_metric).to receive(:increment)
expect(Gitlab::Metrics::SidekiqSlis).not_to receive(:record_execution_apdex)
- expect(Gitlab::Metrics::SidekiqSlis).to receive(:record_execution_error).with(labels.slice(:worker,
- :feature_category,
- :urgency,
- :external_dependencies), true)
+ expect(Gitlab::Metrics::SidekiqSlis).to receive(:record_execution_error)
+ .with(labels.slice(:worker,
+ :feature_category,
+ :urgency,
+ :external_dependencies,
+ :queue), true)
expect { subject.call(worker, job, :test) { raise StandardError, "Failed" } }.to raise_error(StandardError, "Failed")
end
@@ -288,31 +283,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
subject.call(worker, job, :test) { nil }
end
end
-
- context 'when sidekiq_execution_application_slis FF is turned off' do
- before do
- stub_feature_flags(sidekiq_execution_application_slis: false)
- end
-
- it 'does not call record_execution_apdex nor record_execution_error' do
- expect(Gitlab::Metrics::SidekiqSlis).not_to receive(:record_execution_apdex)
- expect(Gitlab::Metrics::SidekiqSlis).not_to receive(:record_execution_error)
-
- subject.call(worker, job, :test) { nil }
- end
- end
-
- context 'when sidekiq_queueing_application_slis FF is turned off' do
- before do
- stub_feature_flags(sidekiq_queueing_application_slis: false)
- end
-
- it 'does not call record_queueing_apdex' do
- expect(Gitlab::Metrics::SidekiqSlis).not_to receive(:record_queueing_apdex)
-
- subject.call(worker, job, :test) { nil }
- end
- end
end
end
@@ -484,5 +454,53 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
end
end
+
+ context 'when emit_sidekiq_histogram_metrics FF is disabled' do
+ include_context 'server metrics with mocked prometheus'
+ include_context 'server metrics call' do
+ let(:stub_subject) { false }
+ end
+
+ subject(:middleware) { described_class.new }
+
+ let(:job) { {} }
+ let(:queue) { :test }
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ "TestWorker"
+ end
+ include ApplicationWorker
+ end
+ end
+
+ let(:worker) { worker_class.new }
+ let(:labels) do
+ { queue: queue.to_s,
+ worker: worker.class.name,
+ boundary: "",
+ external_dependencies: "no",
+ feature_category: "",
+ urgency: "low" }
+ end
+
+ before do
+ stub_feature_flags(emit_sidekiq_histogram_metrics: false)
+ end
+
+ it 'does not emit histogram metrics' do
+ expect(completion_seconds_metric).not_to receive(:observe)
+ expect(queue_duration_seconds).not_to receive(:observe)
+ expect(failed_total_metric).not_to receive(:increment)
+
+ middleware.call(worker, job, queue) { nil }
+ end
+
+ it 'emits sidekiq_jobs_completion_seconds_sum metric' do
+ expect(completion_seconds_sum_metric).to receive(:increment).with(labels, monotonic_time_duration)
+
+ middleware.call(worker, job, queue) { nil }
+ end
+ end
end
# rubocop: enable RSpec/MultipleMemoizedHelpers
diff --git a/spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb b/spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb
index 4be21591a40..620de7e7671 100644
--- a/spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SkipJobs, feature_category: :scalabili
end
context 'with worker opted for database health check' do
- let(:health_signal_attrs) { { gitlab_schema: :gitlab_main, delay: 1.minute, tables: [:users] } }
+ let(:health_signal_attrs) { { gitlab_schema: :gitlab_main, tables: [:users], delay: 1.minute } }
around do |example|
with_sidekiq_server_middleware do |chain|
diff --git a/spec/lib/gitlab/time_tracking_formatter_spec.rb b/spec/lib/gitlab/time_tracking_formatter_spec.rb
index 4203a76cbfb..aa755d64a7a 100644
--- a/spec/lib/gitlab/time_tracking_formatter_spec.rb
+++ b/spec/lib/gitlab/time_tracking_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::TimeTrackingFormatter do
+RSpec.describe Gitlab::TimeTrackingFormatter, feature_category: :team_planning do
describe '#parse' do
let(:keep_zero) { false }
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index e1ae362e797..c44cfdea1cd 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -3,10 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::Tracking::StandardContext do
- let_it_be(:project) { create(:project) }
- let_it_be(:namespace) { create(:namespace) }
- let_it_be(:user) { create(:user) }
-
let(:snowplow_context) { subject.to_context }
describe '#to_context' do
@@ -62,21 +58,27 @@ RSpec.describe Gitlab::Tracking::StandardContext do
expect(snowplow_context.to_json.dig(:data, :context_generated_at)).to eq(Time.current)
end
- context 'plan' do
- context 'when namespace is not available' do
- it 'is nil' do
- expect(snowplow_context.to_json.dig(:data, :plan)).to be_nil
- end
- end
+ it 'contains standard properties' do
+ standard_properties = [:user_id, :project_id, :namespace_id, :plan]
+ expect(snowplow_context.to_json[:data].keys).to include(*standard_properties)
+ end
- context 'when namespace is available' do
- let(:namespace) { create(:namespace) }
+ context 'with standard properties' do
+ let(:user_id) { 1 }
+ let(:project_id) { 2 }
+ let(:namespace_id) { 3 }
+ let(:plan_name) { "plan name" }
- subject { described_class.new(namespace_id: namespace.id, plan_name: namespace.actual_plan_name) }
+ subject do
+ described_class.new(user_id: user_id, project_id: project_id, namespace_id: namespace_id, plan_name: plan_name)
+ end
- it 'contains plan name' do
- expect(snowplow_context.to_json.dig(:data, :plan)).to eq(Plan::DEFAULT)
- end
+ it 'holds the correct values', :aggregate_failures do
+ json_data = snowplow_context.to_json.fetch(:data)
+ expect(json_data[:user_id]).to eq(user_id)
+ expect(json_data[:project_id]).to eq(project_id)
+ expect(json_data[:namespace_id]).to eq(namespace_id)
+ expect(json_data[:plan]).to eq(plan_name)
end
end
@@ -95,24 +97,12 @@ RSpec.describe Gitlab::Tracking::StandardContext do
end
context 'with incorrect argument type' do
- subject { described_class.new(project_id: create(:group)) }
+ subject { described_class.new(project_id: "a string") }
it 'does call `track_and_raise_for_dev_exception`' do
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
snowplow_context
end
end
-
- it 'contains user id' do
- expect(snowplow_context.to_json[:data].keys).to include(:user_id)
- end
-
- it 'contains namespace and project ids' do
- expect(snowplow_context.to_json[:data].keys).to include(:project_id, :namespace_id)
- end
-
- it 'accepts just project id as integer' do
- expect { described_class.new(project: 1).to_context }.not_to raise_error
- end
end
end
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index d67bb477350..859f3f7a8d7 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -18,7 +18,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
data_source: 'database',
distribution: %w(ee ce),
tier: %w(free starter premium ultimate bronze silver gold),
- name: 'uuid',
data_category: 'standard',
removed_by_url: 'http://gdk.test'
}
@@ -129,7 +128,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:distribution | nil
:distribution | 'test'
:tier | %w(test ee)
- :name | 'count_<adjective_describing>_boards'
:repair_issue_url | nil
:removed_by_url | 1
diff --git a/spec/lib/gitlab/usage/metric_spec.rb b/spec/lib/gitlab/usage/metric_spec.rb
index d0ea4e7aa16..a4135b143dd 100644
--- a/spec/lib/gitlab/usage/metric_spec.rb
+++ b/spec/lib/gitlab/usage/metric_spec.rb
@@ -45,12 +45,6 @@ RSpec.describe Gitlab::Usage::Metric do
end
end
- describe '#with_suggested_name' do
- it 'returns key_path metric with the corresponding generated query' do
- expect(described_class.new(issue_count_metric_definiton).with_suggested_name).to eq({ counts: { issues: 'count_issues' } })
- end
- end
-
context 'unavailable metric' do
let(:instrumentation_class) { "UnavailableMetric" }
let(:issue_count_metric_definiton) do
@@ -69,7 +63,7 @@ RSpec.describe Gitlab::Usage::Metric do
stub_const("Gitlab::Usage::Metrics::Instrumentations::#{instrumentation_class}", unavailable_metric_class)
end
- [:with_value, :with_instrumentation, :with_suggested_name].each do |method_name|
+ [:with_value, :with_instrumentation].each do |method_name|
describe "##{method_name}" do
it 'returns an empty hash' do
expect(described_class.new(issue_count_metric_definiton).public_send(method_name)).to eq({})
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb
new file mode 100644
index 00000000000..e66dd04b69b
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BatchedBackgroundMigrationFailedJobsMetric, feature_category: :database do
+ let(:expected_value) do
+ [
+ {
+ job_class_name: 'job',
+ number_of_failed_jobs: 1,
+ table_name: 'jobs'
+ },
+ {
+ job_class_name: 'test',
+ number_of_failed_jobs: 2,
+ table_name: 'users'
+ }
+ ]
+ end
+
+ let_it_be(:active_migration) do
+ create(:batched_background_migration, :active, table_name: 'users', job_class_name: 'test', created_at: 5.days.ago)
+ end
+
+ let_it_be(:failed_migration) do
+ create(:batched_background_migration, :failed, table_name: 'jobs', job_class_name: 'job', created_at: 4.days.ago)
+ end
+
+ let_it_be(:batched_job) { create(:batched_background_migration_job, :failed, batched_migration: active_migration) }
+
+ let_it_be(:batched_job_2) { create(:batched_background_migration_job, :failed, batched_migration: active_migration) }
+
+ let_it_be(:batched_job_3) { create(:batched_background_migration_job, :failed, batched_migration: failed_migration) }
+
+ let_it_be(:old_migration) { create(:batched_background_migration, :failed, created_at: 99.days.ago) }
+
+ let_it_be(:old_batched_job) { create(:batched_background_migration_job, :failed, batched_migration: old_migration) }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_bulk_imports_entities_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_bulk_imports_entities_metric_spec.rb
index eee5396bdbf..0deb586d488 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/count_bulk_imports_entities_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_bulk_imports_entities_metric_spec.rb
@@ -165,7 +165,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountBulkImportsEntitie
end
context 'with has_failures: true' do
- before(:all) do
+ before_all do
create_list(:bulk_import_entity, 3, :project_entity, :finished, created_at: 3.weeks.ago, has_failures: true)
create_list(:bulk_import_entity, 2, :project_entity, :finished, created_at: 2.months.ago, has_failures: true)
create_list(:bulk_import_entity, 3, :group_entity, :finished, created_at: 3.weeks.ago, has_failures: true)
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb
index 538be7bbdc4..7fd5b135a4a 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountDeploymentsMetric, feature_category: :service_ping do
using RSpec::Parameterized::TableSyntax
- before(:all) do
+ before_all do
env = create(:environment)
[3, 60].each do |n|
deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb
deleted file mode 100644
index 35e5d7f2796..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::WorkItemsActivityAggregatedMetric do
- let(:metric_definition) do
- {
- data_source: 'redis_hll',
- time_frame: time_frame,
- options: {
- aggregate: {
- operator: 'OR'
- },
- events: %w[
- users_creating_work_items
- users_updating_work_item_title
- users_updating_work_item_dates
- users_updating_work_item_labels
- users_updating_work_item_milestone
- users_updating_work_item_iteration
- ]
- }
- }
- end
-
- around do |example|
- freeze_time { example.run }
- end
-
- where(:time_frame) { [['28d'], ['7d']] }
-
- with_them do
- describe '#available?' do
- it 'returns false without track_work_items_activity feature' do
- stub_feature_flags(track_work_items_activity: false)
-
- expect(described_class.new(metric_definition).available?).to eq(false)
- end
-
- it 'returns true with track_work_items_activity feature' do
- stub_feature_flags(track_work_items_activity: true)
-
- expect(described_class.new(metric_definition).available?).to eq(true)
- end
- end
-
- describe '#value', :clean_gitlab_redis_shared_state do
- let(:counter) { Gitlab::UsageDataCounters::HLLRedisCounter }
- let(:author1_id) { 1 }
- let(:author2_id) { 2 }
- let(:event_time) { 1.week.ago }
-
- before do
- counter.track_event(:users_creating_work_items, values: author1_id, time: event_time)
- end
-
- it 'has correct value after events are tracked', :aggregate_failures do
- expect do
- counter.track_event(:users_updating_work_item_title, values: author1_id, time: event_time)
- counter.track_event(:users_updating_work_item_dates, values: author1_id, time: event_time)
- counter.track_event(:users_updating_work_item_labels, values: author1_id, time: event_time)
- counter.track_event(:users_updating_work_item_milestone, values: author1_id, time: event_time)
- end.to not_change { described_class.new(metric_definition).value }
-
- expect do
- counter.track_event(:users_updating_work_item_iteration, values: author2_id, time: event_time)
- counter.track_event(:users_updating_weight_estimate, values: author1_id, time: event_time)
- end.to change { described_class.new(metric_definition).value }.from(1).to(2)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
deleted file mode 100644
index 9dba64ff59f..00000000000
--- a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
+++ /dev/null
@@ -1,113 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do
- describe '#for' do
- shared_examples 'name suggestion' do
- it 'return correct name' do
- expect(described_class.for(operation, relation: relation, column: column)).to match name_suggestion
- end
- end
-
- context 'for count with nil column' do
- it_behaves_like 'name suggestion' do
- let(:operation) { :count }
- let(:relation) { Board }
- let(:column) { nil }
- let(:name_suggestion) { /count_boards/ }
- end
- end
-
- context 'for count with column :id' do
- it_behaves_like 'name suggestion' do
- let(:operation) { :count }
- let(:relation) { Board }
- let(:column) { :id }
- let(:name_suggestion) { /count_boards/ }
- end
- end
-
- context 'for count distinct with column defined metrics' do
- it_behaves_like 'name suggestion' do
- let(:operation) { :distinct_count }
- let(:relation) { ZoomMeeting }
- let(:column) { :issue_id }
- let(:name_suggestion) { /count_distinct_issue_id_from_zoom_meetings/ }
- end
- end
-
- context 'joined relations' do
- context 'counted attribute comes from source relation' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id)
- let(:operation) { :count }
- let(:relation) { Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot) }
- let(:column) { nil }
- let(:name_suggestion) { /count_<adjective describing: '\(issues\.author_id != \d+\)'>_issues_<with>_alert_management_alerts/ }
- end
- end
- end
-
- context 'strips off time period constraint' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with distinct_count(::Clusters::Cluster.aws_installed.enabled.where(time_period), :user_id)
- let(:operation) { :distinct_count }
- let(:relation) { ::Clusters::Cluster.aws_installed.enabled.where(created_at: 30.days.ago..2.days.ago ) }
- let(:column) { :user_id }
- let(:constraints) { /<adjective describing: '\(clusters.provider_type = \d+ AND \(cluster_providers_aws\.status IN \(\d+\)\) AND clusters\.enabled = TRUE\)'>/ }
- let(:name_suggestion) { /count_distinct_user_id_from_#{constraints}_clusters_<with>_#{constraints}_cluster_providers_aws/ }
- end
- end
-
- context 'for sum metrics' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with sum(JiraImportState.finished, :imported_issues_count)
- let(:operation) { :sum }
- let(:relation) { JiraImportState.finished }
- let(:column) { :imported_issues_count }
- let(:name_suggestion) { /sum_imported_issues_count_from_<adjective describing: '\(jira_imports\.status = \d+\)'>_jira_imports/ }
- end
- end
-
- context 'for average metrics' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with average(Ci::Pipeline, :duration)
- let(:operation) { :average }
- let(:relation) { Ci::Pipeline }
- let(:column) { :duration }
- let(:name_suggestion) { /average_duration_from_ci_pipelines/ }
- end
- end
-
- context 'for redis metrics' do
- it_behaves_like 'name suggestion' do
- let(:operation) { :redis }
- let(:column) { nil }
- let(:relation) { nil }
- let(:name_suggestion) { /<please fill metric name, suggested format is: {subject}_{verb}{ing|ed}_{object} eg: users_creating_epics or merge_requests_viewed_in_single_file_mode>/ }
- end
- end
-
- context 'for alt_usage_data metrics' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with alt_usage_data(fallback: nil) { operating_system }
- let(:operation) { :alt }
- let(:column) { nil }
- let(:relation) { nil }
- let(:name_suggestion) { /<please fill metric name>/ }
- end
- end
-
- context 'for metrics with `having` keyword' do
- it_behaves_like 'name suggestion' do
- let(:operation) { :count }
- let(:relation) { Issue.with_alert_management_alerts.having('COUNT(alert_management_alerts) > 1').group(:id) }
-
- let(:column) { nil }
- let(:constraints) { /<adjective describing: '\(\(COUNT\(alert_management_alerts\) > 1\)\)'>/ }
- let(:name_suggestion) { /count_#{constraints}_issues_<with>_alert_management_alerts/ }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
deleted file mode 100644
index 884d73a70f3..00000000000
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ /dev/null
@@ -1,97 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator, feature_category: :service_ping do
- include UsageDataHelpers
-
- before do
- stub_usage_data_connections
- end
-
- describe '#generate' do
- shared_examples 'name suggestion' do
- it 'return correct name' do
- expect(described_class.generate(key_path)).to match name_suggestion
- end
- end
-
- describe '#add_metric' do
- let(:metric) { 'CountIssuesMetric' }
-
- it 'computes the suggested name for given metric' do
- expect(described_class.add_metric(metric)).to eq('count_issues')
- end
- end
-
- context 'for count with default column metrics' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with count(Board)
- let(:key_path) { 'counts.issues' }
- let(:name_suggestion) { /count_issues/ }
- end
- end
-
- context 'for count distinct with column defined metrics' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with distinct_count(ZoomMeeting, :issue_id)
- let(:key_path) { 'counts.issues_using_zoom_quick_actions' }
- let(:name_suggestion) { /count_distinct_issue_id_from_zoom_meetings/ }
- end
- end
-
- context 'joined relations' do
- context 'counted attribute comes from source relation' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with distinct_count(Release.with_milestones, :author_id)
- let(:key_path) { 'usage_activity_by_stage.release.releases_with_milestones' }
- let(:name_suggestion) { /count_distinct_author_id_from_releases_<with>_milestone_releases/ }
- end
- end
- end
-
- context 'strips off time period constraint' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with distinct_count(::Clusters::Cluster.aws_installed.enabled.where(time_period), :user_id)
- let(:key_path) { 'usage_activity_by_stage_monthly.configure.clusters_platforms_eks' }
- let(:constraints) { /<adjective describing: '\(clusters.provider_type = \d+ AND \(cluster_providers_aws\.status IN \(\d+\)\) AND clusters\.enabled = TRUE\)'>/ }
- let(:name_suggestion) { /count_distinct_user_id_from_#{constraints}_clusters_<with>_#{constraints}_cluster_providers_aws/ }
- end
- end
-
- context 'for sum metrics' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with sum(JiraImportState.finished, :imported_issues_count)
- let(:key_path) { 'counts.jira_imports_total_imported_issues_count' }
- let(:name_suggestion) { /sum_imported_issues_count_from_<adjective describing: '\(jira_imports\.status = \d+\)'>_jira_imports/ }
- end
- end
-
- context 'for add metrics' do
- before do
- pending 'https://gitlab.com/gitlab-org/gitlab/-/issues/414887'
- end
-
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with add(data[:personal_snippets], data[:project_snippets])
- let(:key_path) { 'counts.snippets' }
- let(:name_suggestion) { /add_count_<adjective describing: '\(snippets\.type = 'PersonalSnippet'\)'>_snippets_and_count_<adjective describing: '\(snippets\.type = 'ProjectSnippet'\)'>_snippets/ }
- end
- end
-
- context 'for redis metrics', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/399421' do
- it_behaves_like 'name suggestion' do
- let(:key_path) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
- let(:name_suggestion) { /<please fill metric name, suggested format is: {subject}_{verb}{ing|ed}_{object} eg: users_creating_epics or merge_requests_viewed_in_single_file_mode>/ }
- end
- end
-
- context 'for alt_usage_data metrics' do
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with alt_usage_data { ApplicationRecord.database.version }
- let(:key_path) { 'database.version' }
- let(:name_suggestion) { /<please fill metric name>/ }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/having_constraints_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/having_constraints_spec.rb
deleted file mode 100644
index 492acf2a902..00000000000
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/having_constraints_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::HavingConstraints do
- describe '#accept' do
- let(:connection) { ApplicationRecord.connection }
- let(:collector) { Arel::Collectors::SubstituteBinds.new(connection, Arel::Collectors::SQLString.new) }
-
- it 'builds correct constraints description' do
- table = Arel::Table.new('records')
- havings = table[:attribute].sum.eq(6).and(table[:attribute].count.gt(5))
- arel = table.from.project(table['id'].count).having(havings).group(table[:attribute2])
- described_class.new(connection).accept(arel, collector)
-
- expect(collector.value).to eql '(SUM(records.attribute) = 6 AND COUNT(records.attribute) > 5)'
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb
deleted file mode 100644
index 3e72d118ac6..00000000000
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::Joins do
- describe '#accept' do
- let(:collector) do
- Arel::Collectors::SubstituteBinds.new(ApplicationRecord.connection, Arel::Collectors::SQLString.new)
- end
-
- context 'with join added via string' do
- it 'collects join parts' do
- arel = Issue.joins('LEFT JOIN projects ON projects.id = issue.project_id')
-
- arel = arel.arel
- result = described_class.new(ApplicationRecord.connection).accept(arel)
-
- expect(result).to match_array [{ source: "projects", constraints: "projects.id = issue.project_id" }]
- end
- end
-
- context 'with join added via arel node' do
- it 'collects join parts' do
- source_table = Arel::Table.new('records')
- joined_table = Arel::Table.new('joins')
- second_level_joined_table = Arel::Table.new('second_level_joins')
-
- arel = source_table
- .from
- .project(source_table['id'].count)
- .join(joined_table, Arel::Nodes::OuterJoin)
- .on(source_table[:id].eq(joined_table[:records_id]))
- .join(second_level_joined_table, Arel::Nodes::OuterJoin)
- .on(joined_table[:id].eq(second_level_joined_table[:joins_id]))
-
- result = described_class.new(ApplicationRecord.connection).accept(arel)
-
- expect(result).to match_array [
- { source: "joins", constraints: "records.id = joins.records_id" },
- { source: "second_level_joins", constraints: "joins.id = second_level_joins.joins_id" }
- ]
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/where_constraints_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/where_constraints_spec.rb
deleted file mode 100644
index 42a776478a4..00000000000
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/where_constraints_spec.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::WhereConstraints do
- describe '#accept' do
- let(:connection) { ApplicationRecord.connection }
- let(:collector) { Arel::Collectors::SubstituteBinds.new(connection, Arel::Collectors::SQLString.new) }
-
- it 'builds correct constraints description' do
- table = Arel::Table.new('records')
- arel = table.from.project(table['id'].count).where(table[:attribute].eq(true).and(table[:some_value].gt(5)))
- described_class.new(connection).accept(arel, collector)
-
- expect(collector.value).to eql '(records.attribute = true AND records.some_value > 5)'
- end
- end
-end
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 19236cdbba0..ab92b59c845 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -3,41 +3,31 @@
require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_redis_shared_state do
- let(:user1) { build(:user, id: 1) }
+ let(:user) { build(:user, id: 1) }
let(:user2) { build(:user, id: 2) }
let(:user3) { build(:user, id: 3) }
let(:project) { build(:project) }
+ let(:namespace) { project.namespace }
let(:time) { Time.zone.now }
shared_examples 'tracks and counts action' do
+ subject { track_action(author: user, project: project) }
+
before do
stub_application_setting(usage_ping_enabled: true)
end
specify do
aggregate_failures do
- expect(track_action(author: user1, project: project)).to be_truthy
+ expect(track_action(author: user, project: project)).to be_truthy
expect(track_action(author: user2, project: project)).to be_truthy
- expect(track_action(author: user3, time: time.end_of_week - 3.days, project: project)).to be_truthy
+ expect(track_action(author: user3, project: project)).to be_truthy
expect(count_unique(date_from: time.beginning_of_week, date_to: 1.week.from_now)).to eq(3)
end
end
- it 'track snowplow event' do
- track_action(author: user1, project: project)
-
- expect_snowplow_event(
- category: described_class.name,
- action: 'ide_edit',
- label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit',
- namespace: project.namespace,
- property: event_name,
- project: project,
- user: user1,
- context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_h]
- )
- end
+ it_behaves_like 'internal event tracking'
it 'does not track edit actions if author is not present' do
expect(track_action(author: nil, project: project)).to be_nil
@@ -45,7 +35,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
context 'for web IDE edit actions' do
- let(:event_name) { described_class::EDIT_BY_WEB_IDE }
+ let(:action) { described_class::EDIT_BY_WEB_IDE }
it_behaves_like 'tracks and counts action' do
def track_action(params)
@@ -59,7 +49,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
context 'for SFE edit actions' do
- let(:event_name) { described_class::EDIT_BY_SFE }
+ let(:action) { described_class::EDIT_BY_SFE }
it_behaves_like 'tracks and counts action' do
def track_action(params)
@@ -73,7 +63,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
context 'for snippet editor edit actions' do
- let(:event_name) { described_class::EDIT_BY_SNIPPET_EDITOR }
+ let(:action) { described_class::EDIT_BY_SNIPPET_EDITOR }
it_behaves_like 'tracks and counts action' do
def track_action(params)
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index fc1d66d1d62..7bef14d5f7a 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -8,9 +8,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
- let(:default_context) { 'default' }
- let(:invalid_context) { 'invalid' }
-
around do |example|
# We need to freeze to a reference time
# because visits are grouped by the week number in the year
@@ -27,62 +24,39 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
describe '.known_events' do
let(:ce_event) { { "name" => "ce_event" } }
-
- context 'with use_metric_definitions_for_events_list disabled' do
- let(:ce_temp_dir) { Dir.mktmpdir }
- let(:ce_temp_file) { Tempfile.new(%w[common .yml], ce_temp_dir) }
-
- before do
- stub_feature_flags(use_metric_definitions_for_events_list: false)
- stub_const("#{described_class}::KNOWN_EVENTS_PATH", File.expand_path('*.yml', ce_temp_dir))
- File.open(ce_temp_file.path, "w+b") { |f| f.write [ce_event].to_yaml }
- end
-
- after do
- ce_temp_file.unlink
- FileUtils.remove_entry(ce_temp_dir) if Dir.exist?(ce_temp_dir)
- end
-
- it 'returns ce events' do
- expect(described_class.known_events).to include(ce_event)
- end
+ let(:removed_ce_event) { { "name" => "removed_ce_event" } }
+ let(:metric_definition) do
+ Gitlab::Usage::MetricDefinition.new('ce_metric',
+ {
+ key_path: 'ce_metric_weekly',
+ status: 'active',
+ options: {
+ events: [ce_event['name']]
+ }
+ })
end
- context 'with use_metric_definitions_for_events_list enabled' do
- let(:removed_ce_event) { { "name" => "removed_ce_event" } }
- let(:metric_definition) do
- Gitlab::Usage::MetricDefinition.new('ce_metric',
- {
- key_path: 'ce_metric_weekly',
- status: 'active',
- options: {
- events: [ce_event['name']]
- }
- })
- end
-
- let(:removed_metric_definition) do
- Gitlab::Usage::MetricDefinition.new('removed_ce_metric',
- {
- key_path: 'removed_ce_metric_weekly',
- status: 'removed',
- options: {
- events: [removed_ce_event['name']]
- }
- })
- end
+ let(:removed_metric_definition) do
+ Gitlab::Usage::MetricDefinition.new('removed_ce_metric',
+ {
+ key_path: 'removed_ce_metric_weekly',
+ status: 'removed',
+ options: {
+ events: [removed_ce_event['name']]
+ }
+ })
+ end
- before do
- allow(Gitlab::Usage::MetricDefinition).to receive(:all).and_return([metric_definition, removed_metric_definition])
- end
+ before do
+ allow(Gitlab::Usage::MetricDefinition).to receive(:all).and_return([metric_definition, removed_metric_definition])
+ end
- it 'returns ce events' do
- expect(described_class.known_events).to include(ce_event)
- end
+ it 'returns ce events' do
+ expect(described_class.known_events).to include(ce_event)
+ end
- it 'does not return removed events' do
- expect(described_class.known_events).not_to include(removed_ce_event)
- end
+ it 'does not return removed events' do
+ expect(described_class.known_events).not_to include(removed_ce_event)
end
end
@@ -96,7 +70,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:no_slot) { 'no_slot' }
let(:different_aggregation) { 'different_aggregation' }
let(:custom_daily_event) { 'g_analytics_custom' }
- let(:context_event) { 'context_event' }
let(:global_category) { 'global' }
let(:compliance_category) { 'compliance' }
@@ -111,8 +84,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
{ name: category_productivity_event },
{ name: compliance_slot_event },
{ name: no_slot },
- { name: different_aggregation },
- { name: context_event }
+ { name: different_aggregation }
].map(&:with_indifferent_access)
end
@@ -214,43 +186,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
- describe '.track_event_in_context' do
- context 'with valid contex' do
- it 'increments context event counter' do
- expect(Gitlab::Redis::HLL).to receive(:add) do |kwargs|
- expect(kwargs[:key]).to match(/^#{default_context}_.*/)
- end
-
- described_class.track_event_in_context(context_event, values: entity1, context: default_context)
- end
-
- it 'tracks events with multiple values' do
- values = [entity1, entity2]
- expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_analytics_contribution/,
- value: values,
- expiry: described_class::KEY_EXPIRY_LENGTH)
-
- described_class.track_event_in_context(:g_analytics_contribution, values: values, context: default_context)
- end
- end
-
- context 'with empty context' do
- it 'does not increment a counter' do
- expect(Gitlab::Redis::HLL).not_to receive(:add)
-
- described_class.track_event_in_context(context_event, values: entity1, context: '')
- end
- end
-
- context 'when sending invalid context' do
- it 'does not increment a counter' do
- expect(Gitlab::Redis::HLL).not_to receive(:add)
-
- described_class.track_event_in_context(context_event, values: entity1, context: invalid_context)
- end
- end
- end
-
describe '.unique_events' do
before do
# events in current week, should not be counted as week is not complete
@@ -360,48 +295,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
- describe 'context level tracking' do
- using RSpec::Parameterized::TableSyntax
-
- let(:known_events) do
- [
- { name: 'event_name_1' },
- { name: 'event_name_2' },
- { name: 'event_name_3' }
- ].map(&:with_indifferent_access)
- end
-
- before do
- allow(described_class).to receive(:known_events).and_return(known_events)
- allow(described_class).to receive(:categories).and_return(%w(category1 category2))
-
- described_class.track_event_in_context('event_name_1', values: [entity1, entity3], context: default_context, time: 2.days.ago)
- described_class.track_event_in_context('event_name_1', values: entity3, context: default_context, time: 2.days.ago)
- described_class.track_event_in_context('event_name_1', values: entity3, context: invalid_context, time: 2.days.ago)
- described_class.track_event_in_context('event_name_2', values: [entity1, entity2], context: '', time: 2.weeks.ago)
- end
-
- subject(:unique_events) { described_class.unique_events(event_names: event_names, start_date: 4.weeks.ago, end_date: Date.current, context: context) }
-
- context 'with correct arguments' do
- where(:event_names, :context, :value) do
- ['event_name_1'] | 'default' | 2
- ['event_name_1'] | '' | 0
- ['event_name_2'] | '' | 0
- end
-
- with_them do
- it { is_expected.to eq value }
- end
- end
-
- context 'with invalid context' do
- it 'raise error' do
- expect { described_class.unique_events(event_names: 'event_name_1', start_date: 4.weeks.ago, end_date: Date.current, context: invalid_context) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::InvalidContext)
- end
- end
- end
-
describe '.calculate_events_union' do
let(:time_range) { { start_date: 7.days.ago, end_date: DateTime.current } }
let(:known_events) do
diff --git a/spec/lib/gitlab/usage_data_counters/neovim_plugin_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/neovim_plugin_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..274a3ffc843
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/neovim_plugin_activity_unique_counter_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::NeovimPluginActivityUniqueCounter, :clean_gitlab_redis_shared_state, feature_category: :editor_extensions do
+ let(:user1) { build(:user, id: 1) }
+ let(:user2) { build(:user, id: 2) }
+ let(:time) { Time.current }
+ let(:action) { described_class::NEOVIM_PLUGIN_API_REQUEST_ACTION }
+ let(:user_agent_string) do
+ 'code-completions-language-server-experiment (Neovim:0.9.0; gitlab.vim (v0.1.0); arch:amd64; os:darwin)'
+ end
+
+ let(:user_agent) { { user_agent: user_agent_string } }
+
+ context 'when tracking a neovim plugin api request' do
+ it_behaves_like 'a request from an extension'
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/visual_studio_extension_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/visual_studio_extension_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..57cf173f793
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/visual_studio_extension_activity_unique_counter_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::VisualStudioExtensionActivityUniqueCounter, :clean_gitlab_redis_shared_state, feature_category: :editor_extensions do
+ let(:user1) { build(:user, id: 1) }
+ let(:user2) { build(:user, id: 2) }
+ let(:time) { Time.current }
+ let(:action) { described_class::VISUAL_STUDIO_EXTENSION_API_REQUEST_ACTION }
+ let(:user_agent_string) do
+ 'code-completions-language-server-experiment (gl-visual-studio-extension:1.0.0.0; arch:X64;)'
+ end
+
+ let(:user_agent) { { user_agent: user_agent_string } }
+
+ context 'when tracking a visual studio api request' do
+ it_behaves_like 'a request from an extension'
+ end
+end
diff --git a/spec/lib/gitlab/with_request_store_spec.rb b/spec/lib/gitlab/with_request_store_spec.rb
deleted file mode 100644
index 353ad02fbd8..00000000000
--- a/spec/lib/gitlab/with_request_store_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-require 'request_store'
-
-RSpec.describe Gitlab::WithRequestStore do
- let(:fake_class) { Class.new { include Gitlab::WithRequestStore } }
-
- subject(:object) { fake_class.new }
-
- describe "#with_request_store" do
- it 'starts a request store and yields control' do
- expect(RequestStore).to receive(:begin!).ordered
- expect(RequestStore).to receive(:end!).ordered
- expect(RequestStore).to receive(:clear!).ordered
-
- expect { |b| object.with_request_store(&b) }.to yield_control
- end
-
- it 'only starts a request store once when nested' do
- expect(RequestStore).to receive(:begin!).ordered.once.and_call_original
- expect(RequestStore).to receive(:end!).ordered.once.and_call_original
- expect(RequestStore).to receive(:clear!).ordered.once.and_call_original
-
- object.with_request_store do
- expect { |b| object.with_request_store(&b) }.to yield_control
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/x509/signature_spec.rb b/spec/lib/gitlab/x509/signature_spec.rb
index d119a4e2b9d..e0823aa8153 100644
--- a/spec/lib/gitlab/x509/signature_spec.rb
+++ b/spec/lib/gitlab/x509/signature_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Gitlab::X509::Signature do
it 'returns a verified signature if email does match' do
expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+
expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
expect(signature.verified_signature).to be_truthy
expect(signature.verification_status).to eq(:verified)
@@ -55,6 +56,27 @@ RSpec.describe Gitlab::X509::Signature do
expect(signature.verification_status).to eq(:verified)
end
+ context 'when the certificate contains multiple emails' do
+ before do
+ allow_any_instance_of(described_class).to receive(:get_certificate_extension).and_call_original
+
+ allow_any_instance_of(described_class).to receive(:get_certificate_extension)
+ .with('subjectAltName')
+ .and_return("email:gitlab2@example.com, othername:<unsupported>, email:#{X509Helpers::User1.certificate_email}")
+ end
+
+ context 'and the email matches one of them' do
+ it 'returns a verified signature' do
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes.except(:email, :emails))
+ expect(signature.x509_certificate.email).to eq('gitlab2@example.com')
+ expect(signature.x509_certificate.emails).to contain_exactly('gitlab2@example.com', X509Helpers::User1.certificate_email)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_truthy
+ expect(signature.verification_status).to eq(:verified)
+ end
+ end
+ end
+
context "if the email matches but isn't confirmed" do
let!(:user) { create(:user, :unconfirmed, email: X509Helpers::User1.certificate_email) }
@@ -106,6 +128,7 @@ RSpec.describe Gitlab::X509::Signature do
subject_key_identifier: X509Helpers::User1.certificate_subject_key_identifier,
subject: X509Helpers::User1.certificate_subject,
email: X509Helpers::User1.certificate_email,
+ emails: [X509Helpers::User1.certificate_email],
serial_number: X509Helpers::User1.certificate_serial
}
end
@@ -248,15 +271,31 @@ RSpec.describe Gitlab::X509::Signature do
.and_return("email:gitlab@example.com, othername:<unsupported>")
end
- it 'extracts email' do
- signature = described_class.new(
+ let(:signature) do
+ described_class.new(
X509Helpers::User1.signed_commit_signature,
X509Helpers::User1.signed_commit_base_data,
'gitlab@example.com',
X509Helpers::User1.signed_commit_time
)
+ end
+ it 'extracts email' do
expect(signature.x509_certificate.email).to eq("gitlab@example.com")
+ expect(signature.x509_certificate.emails).to contain_exactly("gitlab@example.com")
+ end
+
+ context 'when there are multiple emails' do
+ before do
+ allow_any_instance_of(described_class).to receive(:get_certificate_extension)
+ .with('subjectAltName')
+ .and_return("email:gitlab@example.com, othername:<unsupported>, email:gitlab2@example.com")
+ end
+
+ it 'extracts all the emails' do
+ expect(signature.x509_certificate.email).to eq("gitlab@example.com")
+ expect(signature.x509_certificate.emails).to contain_exactly("gitlab@example.com", "gitlab2@example.com")
+ end
end
end
@@ -311,6 +350,7 @@ RSpec.describe Gitlab::X509::Signature do
subject_key_identifier: X509Helpers::User1.tag_certificate_subject_key_identifier,
subject: X509Helpers::User1.certificate_subject,
email: X509Helpers::User1.certificate_email,
+ emails: [X509Helpers::User1.certificate_email],
serial_number: X509Helpers::User1.tag_certificate_serial
}
end
diff --git a/spec/lib/peek/views/click_house_spec.rb b/spec/lib/peek/views/click_house_spec.rb
new file mode 100644
index 00000000000..9d7d06204fc
--- /dev/null
+++ b/spec/lib/peek/views/click_house_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Peek::Views::ClickHouse, :click_house, :request_store, feature_category: :database do
+ before do
+ allow(::Gitlab::PerformanceBar).to receive(:enabled_for_request?).and_return(true)
+ end
+
+ describe '#results' do
+ let(:results) { described_class.new.results }
+
+ it 'includes performance details' do
+ ::Gitlab::SafeRequestStore.clear!
+
+ data = ClickHouse::Client.select('SELECT 1 AS value', :main)
+ ClickHouse::Client.execute('INSERT INTO events (id) VALUES (1)', :main)
+
+ expect(data).to eq([{ 'value' => 1 }])
+
+ expect(results[:calls]).to eq(2)
+ expect(results[:duration]).to be_kind_of(String)
+
+ expect(results[:details]).to match_array([
+ a_hash_including({
+ sql: 'SELECT 1 AS value',
+ database: 'database: main'
+ }),
+ a_hash_including({
+ sql: 'INSERT INTO events (id) VALUES (1)',
+ database: 'database: main',
+ statistics: include('written_rows=>"1"')
+ })
+ ])
+ end
+ end
+end
diff --git a/spec/lib/product_analytics/settings_spec.rb b/spec/lib/product_analytics/settings_spec.rb
index 9c33b8068d1..9ba5dbfc8fc 100644
--- a/spec/lib/product_analytics/settings_spec.rb
+++ b/spec/lib/product_analytics/settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics do
+RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics_data_management do
let_it_be(:project) { create(:project) }
subject { described_class.for_project(project) }
diff --git a/spec/lib/sbom/package_url/encoder_spec.rb b/spec/lib/sbom/package_url/encoder_spec.rb
index a0b51007008..195b9969a10 100644
--- a/spec/lib/sbom/package_url/encoder_spec.rb
+++ b/spec/lib/sbom/package_url/encoder_spec.rb
@@ -25,5 +25,20 @@ RSpec.describe Sbom::PackageUrl::Encoder, feature_category: :dependency_manageme
with_them do
it { is_expected.to eq(canonical_purl) }
end
+
+ context 'when purl requires normalization' do
+ let(:package) do
+ ::Sbom::PackageUrl.new(
+ type: 'github',
+ namespace: 'GitLab-Org',
+ name: 'GitLab',
+ version: '1.0.0'
+ )
+ end
+
+ it 'outputs normalized form' do
+ expect(encode).to eq('pkg:github/gitlab-org/gitlab@1.0.0')
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb
index ec3f911d8dc..edb3f5f610a 100644
--- a/spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb
@@ -15,7 +15,8 @@ RSpec.describe Sidebars::Groups::SuperSidebarMenus::DeployMenu, feature_category
it 'defines list of NilMenuItem placeholders' do
expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
expect(items.map(&:item_id)).to eq([
- :packages_registry
+ :packages_registry,
+ :container_registry
])
end
end
diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb
index df37d5f1b0d..c909e7efd90 100644
--- a/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Sidebars::Groups::SuperSidebarMenus::OperationsMenu, feature_cate
expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
expect(items.map(&:item_id)).to eq([
:dependency_proxy,
- :container_registry,
:group_kubernetes_clusters
])
end
diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb
index 4f77cb3aed4..00202ac7d2b 100644
--- a/spec/lib/sidebars/menu_spec.rb
+++ b/spec/lib/sidebars/menu_spec.rb
@@ -302,6 +302,19 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do
end
end
+ describe "#remove_item" do
+ let(:item) { Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}, item_id: :foo1) }
+
+ before do
+ menu.add_item(item)
+ end
+
+ it 'removes the item from the menu' do
+ menu.remove_item(item)
+ expect(menu.has_items?).to be false
+ end
+ end
+
describe '#container_html_options' do
before do
allow(menu).to receive(:title).and_return('Foo Menu')
diff --git a/spec/lib/slack_markdown_sanitizer_spec.rb b/spec/lib/slack_markdown_sanitizer_spec.rb
index f4042439213..d9552542465 100644
--- a/spec/lib/slack_markdown_sanitizer_spec.rb
+++ b/spec/lib/slack_markdown_sanitizer_spec.rb
@@ -20,4 +20,21 @@ RSpec.describe SlackMarkdownSanitizer, feature_category: :integrations do
end
end
end
+
+ describe '.sanitize_slack_link' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:input, :output) do
+ '' | ''
+ '[label](url)' | '[label](url)'
+ '<url|label>' | '&lt;url|label&gt;'
+ '<a href="url">label</a>' | '<a href="url">label</a>'
+ end
+
+ with_them do
+ it 'returns the expected output' do
+ expect(described_class.sanitize_slack_link(input)).to eq(output)
+ end
+ end
+ end
end
diff --git a/spec/lib/unnested_in_filters/rewriter_spec.rb b/spec/lib/unnested_in_filters/rewriter_spec.rb
index fe34fba579b..e094563e8fb 100644
--- a/spec/lib/unnested_in_filters/rewriter_spec.rb
+++ b/spec/lib/unnested_in_filters/rewriter_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe UnnestedInFilters::Rewriter do
let(:rewriter) { described_class.new(relation) }
- before(:all) do
+ before_all do
User.include(UnnestedInFilters::Dsl)
end
diff --git a/spec/mailers/devise_mailer_spec.rb b/spec/mailers/devise_mailer_spec.rb
index 171251f51ef..0a6d38996b7 100644
--- a/spec/mailers/devise_mailer_spec.rb
+++ b/spec/mailers/devise_mailer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'email_spec'
-RSpec.describe DeviseMailer do
+RSpec.describe DeviseMailer, feature_category: :user_management do
include EmailSpec::Matchers
include_context 'gitlab email notification'
@@ -150,10 +150,12 @@ RSpec.describe DeviseMailer do
end
describe '#email_changed' do
- subject { described_class.email_changed(user, {}) }
-
+ let(:content_saas) { 'If you did not initiate this change, please contact your group owner immediately. If you have a Premium or Ultimate tier subscription, you can also contact GitLab support.' }
+ let(:content_self_managed) { 'If you did not initiate this change, please contact your administrator immediately.' }
let_it_be(:user) { create(:user) }
+ subject { described_class.email_changed(user, {}) }
+
it_behaves_like 'an email sent from GitLab'
it 'is sent to the user' do
@@ -168,6 +170,18 @@ RSpec.describe DeviseMailer do
is_expected.to have_body_text /Hello, #{user.name}!/
end
+ context 'when self-managed' do
+ it 'has the expected content of self managed instance' do
+ is_expected.to have_body_text content_self_managed
+ end
+ end
+
+ context 'when saas', :saas do
+ it 'has the expected content of saas instance' do
+ is_expected.to have_body_text content_saas
+ end
+ end
+
context "email contains updated id" do
before do
user.update!(email: "new_email@test.com")
diff --git a/spec/mailers/emails/projects_spec.rb b/spec/mailers/emails/projects_spec.rb
index 1f0f09f7ca2..9518672939b 100644
--- a/spec/mailers/emails/projects_spec.rb
+++ b/spec/mailers/emails/projects_spec.rb
@@ -124,41 +124,6 @@ RSpec.describe Emails::Projects do
end
end
- context 'with gitlab alerting rule' do
- let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
- let_it_be(:environment) { prometheus_alert.environment }
-
- let(:alert) { create(:alert_management_alert, :prometheus, :from_payload, payload: payload, project: project) }
- let(:title) { "#{prometheus_alert.title} #{prometheus_alert.computed_operator} #{prometheus_alert.threshold}" }
-
- before do
- payload['labels'] = {
- 'gitlab_alert_id' => prometheus_alert.prometheus_metric_id,
- 'alertname' => prometheus_alert.title
- }
- end
-
- it_behaves_like 'an email sent from GitLab'
- it_behaves_like 'it should not have Gmail Actions links'
- it_behaves_like 'a user cannot unsubscribe through footer link'
- it_behaves_like 'shows the incident issues url'
-
- it 'has expected subject' do
- is_expected.to have_subject("#{project.name} | Alert: #{environment.name}: #{title} for 5 minutes")
- end
-
- it 'has expected content' do
- is_expected.to have_body_text('An alert has been triggered')
- is_expected.to have_body_text(project.full_path)
- is_expected.to have_body_text(alert.details_url)
- is_expected.to have_body_text('Environment:')
- is_expected.to have_body_text(environment.name)
- is_expected.to have_body_text('Metric:')
- is_expected.to have_body_text(prometheus_alert.full_query)
- is_expected.not_to have_body_text('Description:')
- end
- end
-
context 'resolved' do
let_it_be(:alert) { create(:alert_management_alert, :resolved, project: project) }
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 629dfdaf55e..976fe214c95 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -2056,6 +2056,68 @@ RSpec.describe Notify do
end
end
+ describe 'membership about to expire' do
+ context "with group membership" do
+ let_it_be(:group_member) { create(:group_member, source: group, expires_at: 7.days.from_now) }
+
+ subject { described_class.member_about_to_expire_email("Namespace", group_member.id) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
+ it 'contains all the useful information' do
+ is_expected.to deliver_to group_member.user.email
+ is_expected.to have_subject "Your membership will expire in 7 days"
+ is_expected.to have_body_text "group will expire in 7 days."
+ is_expected.to have_body_text group_url(group)
+ is_expected.to have_body_text group_group_members_url(group)
+ end
+ end
+
+ context "with project membership" do
+ let_it_be(:project_member) { create(:project_member, source: project, expires_at: 7.days.from_now) }
+
+ subject { described_class.member_about_to_expire_email('Project', project_member.id) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
+ it 'contains all the useful information' do
+ is_expected.to deliver_to project_member.user.email
+ is_expected.to have_subject "Your membership will expire in 7 days"
+ is_expected.to have_body_text "project will expire in 7 days."
+ is_expected.to have_body_text project_url(project)
+ is_expected.to have_body_text project_project_members_url(project)
+ end
+ end
+
+ context "with expired membership" do
+ let_it_be(:project_member) { create(:project_member, source: project, expires_at: Date.today) }
+
+ subject { described_class.member_about_to_expire_email('Project', project_member.id) }
+
+ it 'not deliver expiry email' do
+ should_not_email_anyone
+ end
+ end
+
+ context "with expiry notified membership" do
+ let_it_be(:project_member) { create(:project_member, source: project, expires_at: 7.days.from_now, expiry_notified_at: Date.today) }
+
+ subject { described_class.member_about_to_expire_email('Project', project_member.id) }
+
+ it 'not deliver expiry email' do
+ should_not_email_anyone
+ end
+ end
+ end
+
describe 'admin notification' do
let(:example_site_path) { root_path }
let(:user) { create(:user) }
diff --git a/spec/mailers/previews_spec.rb b/spec/mailers/previews_spec.rb
index 14bd56e5d40..e1af0d7ef77 100644
--- a/spec/mailers/previews_spec.rb
+++ b/spec/mailers/previews_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'Mailer previews' do
let_it_be(:issue) { create(:issue, project: project, milestone: milestone) }
let_it_be(:remote_mirror) { create(:remote_mirror, project: project) }
let_it_be(:member) { create(:project_member, :maintainer, project: project, created_by: user) }
+ let_it_be(:review) { create(:review, project: project, merge_request: merge_request, author: user) }
Gitlab.ee do
let_it_be(:epic) { create(:epic, group: group) }
diff --git a/spec/migrations/20221219122320_copy_clickhouse_connection_string_to_encrypted_var_spec.rb b/spec/migrations/20221219122320_copy_clickhouse_connection_string_to_encrypted_var_spec.rb
index 7ff033ab0c2..48702e866e0 100644
--- a/spec/migrations/20221219122320_copy_clickhouse_connection_string_to_encrypted_var_spec.rb
+++ b/spec/migrations/20221219122320_copy_clickhouse_connection_string_to_encrypted_var_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe CopyClickhouseConnectionStringToEncryptedVar, feature_category: :product_analytics do
+RSpec.describe CopyClickhouseConnectionStringToEncryptedVar, feature_category: :product_analytics_data_management do
let!(:migration) { described_class.new }
let(:setting) { table(:application_settings).create!(clickhouse_connection_string: 'https://example.com/test') }
diff --git a/spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb b/spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb
index 253512c9194..05cc065e6c3 100644
--- a/spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb
+++ b/spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb
@@ -3,7 +3,7 @@
require "spec_helper"
require_migration!
-RSpec.describe BackfillProductAnalyticsDataCollectorHost, feature_category: :product_analytics do
+RSpec.describe BackfillProductAnalyticsDataCollectorHost, feature_category: :product_analytics_data_management do
let!(:application_settings) { table(:application_settings) }
describe '#up' do
diff --git a/spec/migrations/20230612232000_queue_backfill_dismissal_reason_in_vulnerability_reads_spec.rb b/spec/migrations/20230612232000_queue_backfill_dismissal_reason_in_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..56bfa50abd0
--- /dev/null
+++ b/spec/migrations/20230612232000_queue_backfill_dismissal_reason_in_vulnerability_reads_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillDismissalReasonInVulnerabilityReads, feature_category: :vulnerability_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerability_reads,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230712145557_queue_backfill_missing_vulnerability_dismissal_details_spec.rb b/spec/migrations/20230712145557_queue_backfill_missing_vulnerability_dismissal_details_spec.rb
new file mode 100644
index 00000000000..6595164da41
--- /dev/null
+++ b/spec/migrations/20230712145557_queue_backfill_missing_vulnerability_dismissal_details_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillMissingVulnerabilityDismissalDetails, feature_category: :vulnerability_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerabilities,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230714015909_add_index_for_member_expiring_query_spec.rb b/spec/migrations/20230714015909_add_index_for_member_expiring_query_spec.rb
new file mode 100644
index 00000000000..524354ecc9a
--- /dev/null
+++ b/spec/migrations/20230714015909_add_index_for_member_expiring_query_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddIndexForMemberExpiringQuery, :migration, feature_category: :groups_and_projects do
+ let(:index_name) { 'index_members_on_expiring_at_access_level_id' }
+
+ it 'correctly migrates up and down' do
+ expect(subject).not_to be_index_exists_by_name(:members, index_name)
+
+ migrate!
+
+ expect(subject).to be_index_exists_by_name(:members, index_name)
+ end
+end
diff --git a/spec/migrations/20230719083202_backfill_project_statistics_storage_size_without_pipeline_artifacts_size_spec.rb b/spec/migrations/20230719083202_backfill_project_statistics_storage_size_without_pipeline_artifacts_size_spec.rb
new file mode 100644
index 00000000000..c3183a5da1b
--- /dev/null
+++ b/spec/migrations/20230719083202_backfill_project_statistics_storage_size_without_pipeline_artifacts_size_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillProjectStatisticsStorageSizeWithoutPipelineArtifactsSize, feature_category: :consumables_cost_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'does not schedule background jobs when Gitlab.org_or_com? is false' do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ allow(Gitlab).to receive(:org_or_com?).and_return(false)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+ end
+ end
+
+ it 'schedules a new batched migration' do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ allow(Gitlab).to receive(:org_or_com?).and_return(true)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :project_statistics,
+ column_name: :project_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230723203612_backfill_default_branch_protection_application_setting_spec.rb b/spec/migrations/20230723203612_backfill_default_branch_protection_application_setting_spec.rb
new file mode 100644
index 00000000000..dcb65e22196
--- /dev/null
+++ b/spec/migrations/20230723203612_backfill_default_branch_protection_application_setting_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillDefaultBranchProtectionApplicationSetting, :migration, feature_category: :database do
+ let(:application_settings_table) { table(:application_settings) }
+
+ before do
+ 5.times do |branch_protection|
+ application_settings_table.create!(default_branch_protection: branch_protection,
+ default_branch_protection_defaults: {})
+ end
+ end
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ 5.times do |branch_protection|
+ expect(migrated_attribute(branch_protection)).to eq({})
+ end
+ }
+
+ migration.after -> {
+ expect(migrated_attribute(0)).to eq({ "allow_force_push" => true,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 30 }] })
+ expect(migrated_attribute(1)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 30 }] })
+ expect(migrated_attribute(2)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 40 }],
+ "allowed_to_push" => [{ "access_level" => 40 }] })
+ expect(migrated_attribute(3)).to eq({ "allow_force_push" => true,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 40 }] })
+ expect(migrated_attribute(4)).to eq({ "allow_force_push" => true,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 40 }],
+ "developer_can_initial_push" => true })
+ }
+ end
+ end
+
+ def migrated_attribute(branch_protection)
+ application_settings_table
+ .where(default_branch_protection: branch_protection)
+ .last.default_branch_protection_defaults
+ end
+end
diff --git a/spec/migrations/20230724071541_queue_backfill_default_branch_protection_namespace_setting_spec.rb b/spec/migrations/20230724071541_queue_backfill_default_branch_protection_namespace_setting_spec.rb
new file mode 100644
index 00000000000..5ba8c6b853c
--- /dev/null
+++ b/spec/migrations/20230724071541_queue_backfill_default_branch_protection_namespace_setting_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillDefaultBranchProtectionNamespaceSetting, feature_category: :database do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :namespace_settings,
+ column_name: :namespace_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230724164745_queue_delete_orphaned_transferred_project_approval_rules_spec.rb b/spec/migrations/20230724164745_queue_delete_orphaned_transferred_project_approval_rules_spec.rb
new file mode 100644
index 00000000000..e4fa7216ae2
--- /dev/null
+++ b/spec/migrations/20230724164745_queue_delete_orphaned_transferred_project_approval_rules_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueDeleteOrphanedTransferredProjectApprovalRules, feature_category: :security_policy_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :approval_project_rules,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230728174927_add_epic_work_item_type_spec.rb b/spec/migrations/20230728174927_add_epic_work_item_type_spec.rb
new file mode 100644
index 00000000000..8f0227950e1
--- /dev/null
+++ b/spec/migrations/20230728174927_add_epic_work_item_type_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddEpicWorkItemType, :migration, feature_category: :team_planning do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:work_item_types) { table(:work_item_types) }
+ let(:work_item_widget_definitions) { table(:work_item_widget_definitions) }
+ let(:work_item_hierarchy_restrictions) { table(:work_item_hierarchy_restrictions) }
+ let(:base_types) do
+ {
+ issue: 0,
+ incident: 1,
+ test_case: 2,
+ requirement: 3,
+ task: 4,
+ objective: 5,
+ key_result: 6,
+ epic: 7
+ }
+ end
+
+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ reset_work_item_types
+ end
+
+ before do
+ reset_db_state_prior_to_migration
+ end
+
+ it 'adds the epic type, widget definitions and hierarchy restrictions', :aggregate_failures do
+ expect do
+ migrate!
+ end.to change { work_item_types.count }.by(1)
+ .and(change { work_item_widget_definitions.count }.by(10))
+ .and(change { work_item_hierarchy_restrictions.count }.by(2))
+
+ epic_type = work_item_types.last
+ issue_type = work_item_types.find_by!(namespace_id: nil, base_type: base_types[:issue])
+
+ expect(work_item_types.pluck(:base_type)).to include(base_types[:epic])
+ expect(
+ work_item_widget_definitions.where(work_item_type_id: epic_type.id).pluck(:widget_type)
+ ).to match_array(described_class::EPIC_WIDGETS.values)
+ expect(
+ work_item_hierarchy_restrictions.where(parent_type_id: epic_type.id).pluck(:child_type_id, :maximum_depth)
+ ).to contain_exactly([epic_type.id, 9], [issue_type.id, 1])
+ end
+
+ it 'skips creating the new type an it\'s definitions' do
+ work_item_types.find_or_create_by!(
+ name: 'Epic', namespace_id: nil, base_type: base_types[:epic], icon_name: 'issue-type-epic'
+ )
+
+ expect do
+ migrate!
+ end.to not_change(work_item_types, :count)
+ .and(not_change(work_item_widget_definitions, :count))
+ .and(not_change(work_item_hierarchy_restrictions, :count))
+ end
+
+ def reset_db_state_prior_to_migration
+ # Database needs to be in a similar state as when this migration was created
+ work_item_types.delete_all
+ work_item_types.find_or_create_by!(
+ name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue'
+ )
+ work_item_types.find_or_create_by!(
+ name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident'
+ )
+ work_item_types.find_or_create_by!(
+ name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case'
+ )
+ work_item_types.find_or_create_by!(
+ name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements'
+ )
+ work_item_types.find_or_create_by!(
+ name: 'Task', namespace_id: nil, base_type: base_types[:task], icon_name: 'issue-type-task'
+ )
+ work_item_types.find_or_create_by!(
+ name: 'Objective', namespace_id: nil, base_type: base_types[:objective], icon_name: 'issue-type-objective'
+ )
+ work_item_types.find_or_create_by!(
+ name: 'Key Result', namespace_id: nil, base_type: base_types[:key_result], icon_name: 'issue-type-keyresult'
+ )
+ end
+end
diff --git a/spec/migrations/20230801150214_retry_cleanup_bigint_conversion_for_events_for_gitlab_com_spec.rb b/spec/migrations/20230801150214_retry_cleanup_bigint_conversion_for_events_for_gitlab_com_spec.rb
new file mode 100644
index 00000000000..0eac9f28fcd
--- /dev/null
+++ b/spec/migrations/20230801150214_retry_cleanup_bigint_conversion_for_events_for_gitlab_com_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RetryCleanupBigintConversionForEventsForGitlabCom, :migration, feature_category: :database do
+ let(:migration) { described_class.new }
+ let(:connection) { migration.connection }
+ let(:column_name) { 'target_id_convert_to_bigint' }
+ let(:events_table) { table(:events) }
+
+ before do
+ allow(migration).to receive(:should_run?).and_return(should_run?)
+ end
+
+ shared_examples 'skips the up migration' do
+ it "doesn't calls cleanup_conversion_of_integer_to_bigint method" do
+ disable_migrations_output do
+ expect(migration).not_to receive(:cleanup_conversion_of_integer_to_bigint)
+
+ migration.up
+ end
+ end
+ end
+
+ shared_examples 'skips the down migration' do
+ it "doesn't calls restore_conversion_of_integer_to_bigint method" do
+ disable_migrations_output do
+ expect(migration).not_to receive(:restore_conversion_of_integer_to_bigint)
+
+ migration.down
+ end
+ end
+ end
+
+ describe '#up' do
+ context 'when column still exists' do
+ before do
+ # Ensures the correct state of db before the test
+ connection.execute('ALTER TABLE events ADD COLUMN IF NOT EXISTS target_id_convert_to_bigint integer')
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_cd1aeb22b34a() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."target_id_convert_to_bigint" := NEW."target_id"; RETURN NEW; END; $$;')
+ connection.execute('DROP TRIGGER IF EXISTS trigger_cd1aeb22b34a ON events')
+ connection.execute('CREATE TRIGGER trigger_cd1aeb22b34a BEFORE INSERT OR UPDATE ON events FOR EACH ROW EXECUTE
+ FUNCTION trigger_cd1aeb22b34a()')
+ end
+
+ context 'when is GitLab.com, dev, or test' do
+ let(:should_run?) { true }
+
+ it 'drop the temporary columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ events_table.reset_column_information
+ expect(events_table.columns.find { |c| c.name == 'target_id_convert_to_bigint' }).not_to be_nil
+ }
+
+ migration.after -> {
+ events_table.reset_column_information
+ expect(events_table.columns.find { |c| c.name == 'target_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when is a self-managed instance' do
+ let(:should_run?) { false }
+
+ it_behaves_like 'skips the up migration'
+ end
+ end
+
+ context 'when column not exists' do
+ before do
+ connection.execute('ALTER TABLE events DROP COLUMN IF EXISTS target_id_convert_to_bigint')
+ end
+
+ context 'when is GitLab.com, dev, or test' do
+ let(:should_run?) { true }
+
+ it_behaves_like 'skips the up migration'
+ end
+
+ context 'when is a self-managed instance' do
+ let(:should_run?) { false }
+
+ it_behaves_like 'skips the up migration'
+ end
+ end
+ end
+
+ describe '#down' do
+ context 'when column still exists' do
+ before do
+ # Ensures the correct state of db before the test
+ connection.execute('ALTER TABLE events ADD COLUMN IF NOT EXISTS target_id_convert_to_bigint integer')
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_cd1aeb22b34a() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."target_id_convert_to_bigint" := NEW."target_id"; RETURN NEW; END; $$;')
+ connection.execute('DROP TRIGGER IF EXISTS trigger_cd1aeb22b34a ON events')
+ connection.execute('CREATE TRIGGER trigger_cd1aeb22b34a BEFORE INSERT OR UPDATE ON events FOR EACH ROW EXECUTE
+ FUNCTION trigger_cd1aeb22b34a()')
+ end
+
+ context 'when is GitLab.com, dev, or test' do
+ let(:should_run?) { true }
+
+ it_behaves_like 'skips the down migration'
+ end
+
+ context 'when is a self-managed instance' do
+ let(:should_run?) { false }
+
+ it_behaves_like 'skips the down migration'
+ end
+ end
+
+ context 'when column not exists' do
+ before do
+ connection.execute('ALTER TABLE events DROP COLUMN IF EXISTS target_id_convert_to_bigint')
+ end
+
+ context 'when is GitLab.com, dev, or test' do
+ let(:should_run?) { true }
+
+ it 'restore the temporary columns' do
+ disable_migrations_output do
+ migration.down
+
+ column = events_table.columns.find { |c| c.name == 'target_id_convert_to_bigint' }
+
+ expect(column).not_to be_nil
+ expect(column.sql_type).to eq('integer')
+ end
+ end
+ end
+
+ context 'when is a self-managed instance' do
+ let(:should_run?) { false }
+
+ it_behaves_like 'skips the down migration'
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb b/spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb
new file mode 100644
index 00000000000..c296ba24d9d
--- /dev/null
+++ b/spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueFixAllowDescendantsOverrideDisabledSharedRunners, feature_category: :runner_fleet do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :namespaces,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230803125434_add_has_merge_request_on_vulnerability_reads_trigger_spec.rb b/spec/migrations/20230803125434_add_has_merge_request_on_vulnerability_reads_trigger_spec.rb
new file mode 100644
index 00000000000..374a494fbec
--- /dev/null
+++ b/spec/migrations/20230803125434_add_has_merge_request_on_vulnerability_reads_trigger_spec.rb
@@ -0,0 +1,178 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe AddHasMergeRequestOnVulnerabilityReadsTrigger, feature_category: :vulnerability_management do
+ let(:migration) { described_class.new }
+ let(:vulnerability_reads) { table(:vulnerability_reads) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:merge_request_links) { table(:vulnerability_merge_request_links) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) }
+ let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id, project_namespace_id: namespace.id) }
+
+ let(:scanner) do
+ table(:vulnerability_scanners).create!(
+ project_id: project.id, external_id: 'test 1', name: 'test scanner 1')
+ end
+
+ let(:merge_request) do
+ create_merge_request!(
+ target_project_id: project.id,
+ source_branch: "other",
+ target_branch: "main",
+ author_id: user.id,
+ title: 'Feedback Merge Request 1'
+ )
+ end
+
+ let(:vulnerability) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let(:identifier) do
+ table(:vulnerability_identifiers).create!(
+ project_id: project.id,
+ external_type: 'uuid-v5',
+ external_id: 'uuid-v5',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'Identifier for UUIDv5')
+ end
+
+ let(:vulnerability_read) { vulnerability_reads.first }
+
+ before do
+ create_finding!(
+ vulnerability_id: vulnerability.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: identifier.id
+ )
+
+ vulnerability_read.reload
+ end
+
+ describe '#up' do
+ before do
+ migrate!
+ end
+
+ describe 'INSERT trigger' do
+ it 'updates has_merge_request in vulnerability_reads' do
+ expect do
+ merge_request_links.create!(
+ vulnerability_id: vulnerability.id, merge_request_id: merge_request.id)
+ end.to change { vulnerability_read.reload.has_merge_request }.from(false).to(true)
+ end
+ end
+
+ describe 'DELETE trigger' do
+ let(:merge_request2) do
+ create_merge_request!(
+ target_project_id: project.id,
+ source_branch: "other_2",
+ target_branch: "main",
+ author_id: user.id,
+ title: 'Feedback Merge Request 2'
+ )
+ end
+
+ it 'does not change has_merge_request when there exists another merge_request' do
+ merge_request_link1 = merge_request_links.create!(
+ vulnerability_id: vulnerability.id, merge_request_id: merge_request.id)
+
+ merge_request_links.create!(
+ vulnerability_id: vulnerability.id, merge_request_id: merge_request2.id)
+
+ expect do
+ merge_request_link1.delete
+ end.not_to change { vulnerability_read.reload.has_merge_request }
+ end
+
+ it 'unsets has_merge_request when all merge_requests are deleted' do
+ merge_request_link1 = merge_request_links.create!(
+ vulnerability_id: vulnerability.id, merge_request_id: merge_request.id)
+
+ merge_request_link2 = merge_request_links.create!(
+ vulnerability_id: vulnerability.id, merge_request_id: merge_request2.id)
+
+ expect do
+ merge_request_link1.delete
+ merge_request_link2.delete
+ end.to change { vulnerability_read.reload.has_merge_request }.from(true).to(false)
+ end
+ end
+ end
+
+ describe '#down' do
+ before do
+ migration.up
+ migration.down
+ end
+
+ it 'drops the trigger' do
+ expect do
+ merge_request_links.create!(
+ vulnerability_id: vulnerability.id, merge_request_id: merge_request.id)
+ end.not_to change { vulnerability_read.reload.has_merge_request }
+ end
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ def create_merge_request!(overrides = {})
+ attrs = {
+ target_project_id: project.id,
+ source_branch: "other",
+ target_branch: "main",
+ author_id: user.id,
+ title: 'Feedback Merge Request'
+ }.merge(overrides)
+
+ merge_requests.create!(attrs)
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
+ vulnerabilities_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location: location,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+end
diff --git a/spec/migrations/20230804053643_add_ticket_work_item_type_spec.rb b/spec/migrations/20230804053643_add_ticket_work_item_type_spec.rb
new file mode 100644
index 00000000000..9a6eeb44254
--- /dev/null
+++ b/spec/migrations/20230804053643_add_ticket_work_item_type_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddTicketWorkItemType, :migration, feature_category: :service_desk do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:work_item_types) { table(:work_item_types) }
+ let(:work_item_widget_definitions) { table(:work_item_widget_definitions) }
+ let(:work_item_hierarchy_restrictions) { table(:work_item_hierarchy_restrictions) }
+ let(:base_types) do
+ {
+ issue: 0,
+ incident: 1,
+ test_case: 2,
+ requirement: 3,
+ task: 4,
+ objective: 5,
+ key_result: 6,
+ epic: 7,
+ ticket: 8
+ }
+ end
+
+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ reset_work_item_types
+ end
+
+ before do
+ reset_db_state_prior_to_migration
+ end
+
+ it 'adds the ticket type, widget definitions and hierarchy restrictions', :aggregate_failures do
+ expect do
+ migrate!
+ end.to change { work_item_types.count }.by(1)
+ .and(change { work_item_widget_definitions.count }.by(13))
+ .and(change { work_item_hierarchy_restrictions.count }.by(2))
+
+ ticket_type = work_item_types.last
+ issue_type = work_item_types.find_by!(namespace_id: nil, base_type: base_types[:issue])
+
+ expect(work_item_types.pluck(:base_type)).to include(base_types[:ticket])
+ expect(
+ work_item_widget_definitions.where(work_item_type_id: ticket_type.id).pluck(:widget_type)
+ ).to match_array(described_class::TICKET_WIDGETS.values)
+ expect(
+ work_item_hierarchy_restrictions.where(parent_type_id: ticket_type.id).pluck(:child_type_id, :maximum_depth)
+ ).to contain_exactly([ticket_type.id, 1], [issue_type.id, 1])
+ end
+
+ it "skips creating the new type and it's definitions when it already exists" do
+ work_item_types.find_or_create_by!(
+ name: 'Ticket', namespace_id: nil, base_type: base_types[:ticket], icon_name: 'issue-type-issue'
+ )
+
+ expect do
+ migrate!
+ end.to not_change(work_item_types, :count)
+ .and(not_change(work_item_widget_definitions, :count))
+ .and(not_change(work_item_hierarchy_restrictions, :count))
+ end
+
+ it "skips creating the new type and it's definitions when type creation fails" do
+ allow(described_class::MigrationWorkItemType).to receive(:create)
+ .and_return(described_class::MigrationWorkItemType.new)
+
+ expect do
+ migrate!
+ end.to not_change(work_item_types, :count)
+ .and(not_change(work_item_widget_definitions, :count))
+ .and(not_change(work_item_hierarchy_restrictions, :count))
+ end
+
+ def reset_db_state_prior_to_migration
+ # Database needs to be in a similar state as when this migration was created
+ work_item_types.delete_all
+
+ {
+ issue: { name: 'Issue', icon_name: 'issue-type-issue' },
+ incident: { name: 'Incident', icon_name: 'issue-type-incident' },
+ test_case: { name: 'Test Case', icon_name: 'issue-type-test-case' },
+ requirement: { name: 'Requirement', icon_name: 'issue-type-requirements' },
+ task: { name: 'Task', icon_name: 'issue-type-task' },
+ objective: { name: 'Objective', icon_name: 'issue-type-objective' },
+ key_result: { name: 'Key Result', icon_name: 'issue-type-keyresult' },
+ epic: { name: 'Epic', icon_name: 'issue-type-epic' }
+ }.each do |type, opts|
+ work_item_types.find_or_create_by!(
+ name: opts[:name], namespace_id: nil, base_type: base_types[type], icon_name: opts[:icon_name]
+ )
+ end
+ end
+end
diff --git a/spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb b/spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb
new file mode 100644
index 00000000000..cd6da15403f
--- /dev/null
+++ b/spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddLinkedItemsWorkItemWidget, :migration, feature_category: :portfolio_management do
+ it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Linked items' do
+ let(:work_item_type_count) { 8 }
+ end
+end
diff --git a/spec/migrations/20230809104753_swap_epic_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb b/spec/migrations/20230809104753_swap_epic_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb
new file mode 100644
index 00000000000..7dcd40db88a
--- /dev/null
+++ b/spec/migrations/20230809104753_swap_epic_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapEpicUserMentionsNoteIdToBigintForSelfHosts, feature_category: :database do
+ describe '#up' do
+ context 'when GitLab.com, dev, or test' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE epic_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE epic_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ epic_user_mentions = table(:epic_user_mentions)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ epic_user_mentions.reset_column_information
+
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+
+ migration.after -> {
+ epic_user_mentions.reset_column_information
+
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when self-managed instance with the columns already swapped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE epic_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE epic_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ epic_user_mentions = table(:epic_user_mentions)
+
+ migrate!
+
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(epic_user_mentions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ end
+ end
+
+  context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE epic_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE epic_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ epic_user_mentions = table(:epic_user_mentions)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ epic_user_mentions.reset_column_information
+
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+
+ migration.after -> {
+ epic_user_mentions.reset_column_information
+
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when self-managed instance' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE epic_user_mentions ALTER COLUMN note_id TYPE integer')
+ connection.execute('ALTER TABLE epic_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint')
+ connection.execute('ALTER TABLE epic_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_epic_user_mentions_on_note_id_convert_to_bigint')
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_c5a5f48f12b0() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ it 'swaps the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ epic_user_mentions = table(:epic_user_mentions)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ epic_user_mentions.reset_column_information
+
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(epic_user_mentions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ epic_user_mentions.reset_column_information
+
+ expect(epic_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(epic_user_mentions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230810103534_swap_suggestions_note_id_to_bigint_for_self_hosts_spec.rb b/spec/migrations/20230810103534_swap_suggestions_note_id_to_bigint_for_self_hosts_spec.rb
new file mode 100644
index 00000000000..79583fb0a21
--- /dev/null
+++ b/spec/migrations/20230810103534_swap_suggestions_note_id_to_bigint_for_self_hosts_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapSuggestionsNoteIdToBigintForSelfHosts, feature_category: :database do
+ describe '#up' do
+ context 'when GitLab.com, dev, or test' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE suggestions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE suggestions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ suggestions = table(:suggestions)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ suggestions.reset_column_information
+
+ expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(suggestions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+
+ migration.after -> {
+ suggestions.reset_column_information
+
+ expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(suggestions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when self-managed instance with the columns already swapped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE suggestions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE suggestions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ suggestions = table(:suggestions)
+
+ migrate!
+
+ expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(suggestions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ end
+ end
+
+  context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE suggestions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE suggestions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ suggestions = table(:suggestions)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ suggestions.reset_column_information
+
+ expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(suggestions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+
+ migration.after -> {
+ suggestions.reset_column_information
+
+ expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(suggestions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when self-managed instance' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE suggestions ALTER COLUMN note_id TYPE integer')
+ connection.execute('ALTER TABLE suggestions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint')
+ connection.execute('ALTER TABLE suggestions ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_suggestions_on_note_id_convert_to_bigint')
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_ee7956d805e6() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ it 'swaps the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ suggestions = table(:suggestions)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ suggestions.reset_column_information
+
+ expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(suggestions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ suggestions.reset_column_information
+
+ expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(suggestions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230810123044_swap_snippet_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb b/spec/migrations/20230810123044_swap_snippet_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb
new file mode 100644
index 00000000000..92b73901fec
--- /dev/null
+++ b/spec/migrations/20230810123044_swap_snippet_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapSnippetUserMentionsNoteIdToBigintForSelfHosts, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:snippet_user_mentions) { table(:snippet_user_mentions) }
+
+ shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE snippet_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE snippet_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+    it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ snippet_user_mentions.reset_column_information
+
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ snippet_user_mentions.reset_column_information
+
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE snippet_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE snippet_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE snippet_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+      it 'does not swap the columns' do
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE snippet_user_mentions ALTER COLUMN note_id TYPE integer')
+ connection.execute(
+ 'ALTER TABLE snippet_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint'
+ )
+ connection.execute('ALTER TABLE snippet_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_snippet_user_mentions_on_note_id_convert_to_bigint CASCADE')
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_bfc6e47be8cc() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE snippet_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ snippet_user_mentions.reset_column_information
+
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ snippet_user_mentions.reset_column_information
+
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(snippet_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230811103941_swap_vulnerability_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb b/spec/migrations/20230811103941_swap_vulnerability_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb
new file mode 100644
index 00000000000..c1aa2d4daec
--- /dev/null
+++ b/spec/migrations/20230811103941_swap_vulnerability_user_mentions_note_id_to_bigint_for_self_hosts_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapVulnerabilityUserMentionsNoteIdToBigintForSelfHosts, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:vulnerability_user_mentions) { table(:vulnerability_user_mentions) }
+
+ shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE vulnerability_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE vulnerability_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+    it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ vulnerability_user_mentions.reset_column_information
+
+ expect(vulnerability_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(vulnerability_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ vulnerability_user_mentions.reset_column_information
+
+ expect(vulnerability_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(vulnerability_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE vulnerability_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE vulnerability_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE vulnerability_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+      it 'does not swap the columns' do
+ expect(vulnerability_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(vulnerability_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE vulnerability_user_mentions ALTER COLUMN note_id TYPE integer')
+ connection.execute(
+ 'ALTER TABLE vulnerability_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint'
+ )
+ connection.execute('ALTER TABLE vulnerability_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute(
+ 'DROP INDEX IF EXISTS index_vulnerability_user_mentions_on_note_id_convert_to_bigint CASCADE'
+ )
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_0e214b8a14f2() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE vulnerability_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ vulnerability_user_mentions.reset_column_information
+
+ expect(vulnerability_user_mentions.columns.find do |c|
+ c.name == 'note_id'
+ end.sql_type).to eq('integer')
+ expect(vulnerability_user_mentions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ vulnerability_user_mentions.reset_column_information
+
+ expect(vulnerability_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(vulnerability_user_mentions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230814144045_swap_timelogs_note_id_to_bigint_for_self_hosts_spec.rb b/spec/migrations/20230814144045_swap_timelogs_note_id_to_bigint_for_self_hosts_spec.rb
new file mode 100644
index 00000000000..41b9f0bf4e9
--- /dev/null
+++ b/spec/migrations/20230814144045_swap_timelogs_note_id_to_bigint_for_self_hosts_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapTimelogsNoteIdToBigintForSelfHosts, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:timelogs) { table(:timelogs) }
+
+ shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE timelogs ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE timelogs DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+    it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ timelogs.reset_column_information
+
+ expect(timelogs.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(timelogs.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ timelogs.reset_column_information
+
+ expect(timelogs.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(timelogs.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE timelogs ALTER COLUMN note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE timelogs ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE timelogs DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+    it 'does not swap the columns' do
+ expect(timelogs.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(timelogs.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE timelogs ALTER COLUMN note_id TYPE integer')
+ connection.execute(
+ 'ALTER TABLE timelogs ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint'
+ )
+ connection.execute('ALTER TABLE timelogs ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_timelogs_on_note_id_convert_to_bigint CASCADE')
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_428d92773fe7() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE timelogs DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ timelogs.reset_column_information
+
+ expect(timelogs.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(timelogs.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ timelogs.reset_column_information
+
+ expect(timelogs.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(timelogs.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/add_expiry_notified_at_to_member_spec.rb b/spec/migrations/add_expiry_notified_at_to_member_spec.rb
new file mode 100644
index 00000000000..30eaf06529e
--- /dev/null
+++ b/spec/migrations/add_expiry_notified_at_to_member_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddExpiryNotifiedAtToMember, feature_category: :system_access do
+ let(:members) { table(:members) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(members.column_names).not_to include('expiry_notified_at')
+ }
+
+ migration.after -> {
+ members.reset_column_information
+ expect(members.column_names).to include('expiry_notified_at')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_conversion_big_int_ci_build_needs_self_managed_spec.rb b/spec/migrations/cleanup_conversion_big_int_ci_build_needs_self_managed_spec.rb
new file mode 100644
index 00000000000..03a8356c721
--- /dev/null
+++ b/spec/migrations/cleanup_conversion_big_int_ci_build_needs_self_managed_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CleanupConversionBigIntCiBuildNeedsSelfManaged, feature_category: :database do
+ after do
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE ci_build_needs DROP COLUMN IF EXISTS id_convert_to_bigint')
+ end
+
+ describe '#up' do
+ context 'when it is GitLab.com, dev, or test but not JiHu' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE ci_build_needs DROP COLUMN IF EXISTS id_convert_to_bigint')
+ end
+
+ it 'does nothing' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ ci_build_needs = table(:ci_build_needs)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ ci_build_needs.reset_column_information
+
+ expect(ci_build_needs.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be nil
+ }
+
+ migration.after -> {
+ ci_build_needs.reset_column_information
+
+ expect(ci_build_needs.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when there is a self-managed instance with the temporary column already dropped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE ci_build_needs ALTER COLUMN id TYPE bigint')
+ connection.execute('ALTER TABLE ci_build_needs DROP COLUMN IF EXISTS id_convert_to_bigint')
+ end
+
+ it 'does nothing' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ ci_build_needs = table(:ci_build_needs)
+
+ migrate!
+
+ expect(ci_build_needs.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(ci_build_needs.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be nil
+ end
+ end
+
+ context 'when there is a self-managed instance with the temporary columns' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE ci_build_needs ALTER COLUMN id TYPE bigint')
+ connection.execute('ALTER TABLE ci_build_needs ADD COLUMN IF NOT EXISTS id_convert_to_bigint integer')
+ end
+
+ it 'drops the temporary column' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ ci_build_needs = table(:ci_build_needs)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ ci_build_needs.reset_column_information
+
+ expect(ci_build_needs.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(ci_build_needs.columns.find do |c|
+ c.name == 'id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ }
+
+ migration.after -> {
+ ci_build_needs.reset_column_information
+
+ expect(ci_build_needs.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(ci_build_needs.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be nil
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb b/spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb
new file mode 100644
index 00000000000..b281403204b
--- /dev/null
+++ b/spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe EnsureCommitUserMentionsNoteIdBigintBackfillIsFinishedForSelfManaged, feature_category: :database do
+ describe '#up' do
+ let(:migration_arguments) do
+ {
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: 'commit_user_mentions',
+ column_name: 'id',
+ job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
+ }
+ end
+
+ it 'ensures the migration is completed for self-managed instances' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
+ end
+
+ migrate!
+ end
+
+ it 'skips the check for GitLab.com, dev, or test' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
+ end
+
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/ensure_todos_bigint_backfill_completed_for_self_managed_spec.rb b/spec/migrations/ensure_todos_bigint_backfill_completed_for_self_managed_spec.rb
new file mode 100644
index 00000000000..5187441377e
--- /dev/null
+++ b/spec/migrations/ensure_todos_bigint_backfill_completed_for_self_managed_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe EnsureTodosBigintBackfillCompletedForSelfManaged, feature_category: :database do
+ describe '#up' do
+ let(:migration_arguments) do
+ {
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: 'todos',
+ column_name: 'id',
+ job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
+ }
+ end
+
+ it 'ensures the migration is completed for self-managed instances' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
+ end
+
+ migrate!
+ end
+
+ it 'skips the check for GitLab.com, dev, or test' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
+ end
+
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/swap_commit_user_mentions_note_id_to_bigint_for_self_managed_spec.rb b/spec/migrations/swap_commit_user_mentions_note_id_to_bigint_for_self_managed_spec.rb
new file mode 100644
index 00000000000..b35da2f78db
--- /dev/null
+++ b/spec/migrations/swap_commit_user_mentions_note_id_to_bigint_for_self_managed_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapCommitUserMentionsNoteIdToBigintForSelfManaged, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:commit_user_mentions) { table(:commit_user_mentions) }
+
+ shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE commit_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE commit_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ commit_user_mentions.reset_column_information
+
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ commit_user_mentions.reset_column_information
+
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE commit_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE commit_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE commit_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE commit_user_mentions ALTER COLUMN note_id TYPE integer')
+ connection.execute('ALTER TABLE commit_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint')
+ connection.execute('ALTER TABLE commit_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_commit_user_mentions_on_note_id_convert_to_bigint CASCADE')
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_17c3a95ee58a() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE commit_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ commit_user_mentions.reset_column_information
+
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ commit_user_mentions.reset_column_information
+
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(commit_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/swap_events_target_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_events_target_id_to_bigint_for_gitlab_dot_com_spec.rb
new file mode 100644
index 00000000000..a3dc73ecc38
--- /dev/null
+++ b/spec/migrations/swap_events_target_id_to_bigint_for_gitlab_dot_com_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapEventsTargetIdToBigintForGitlabDotCom, feature_category: :database do
+ describe '#up' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE events ALTER COLUMN target_id TYPE integer')
+ connection.execute('ALTER TABLE events ALTER COLUMN target_id_convert_to_bigint TYPE bigint')
+ end
+
+ # rubocop: disable RSpec/AnyInstanceOf
+ it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+
+ events = table(:events)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ events.reset_column_information
+
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('integer')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ events.reset_column_information
+
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('bigint')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }.sql_type)
+ .to eq('integer')
+ }
+ end
+ end
+ end
+
+ it 'is a no-op for other instances' do
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+
+ events = table(:events)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ events.reset_column_information
+
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('integer')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ events.reset_column_information
+
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('integer')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }.sql_type).to eq('bigint')
+ }
+ end
+ end
+ end
+ # rubocop: enable RSpec/AnyInstanceOf
+ end
+end
diff --git a/spec/migrations/swap_todos_note_id_to_bigint_for_self_managed_spec.rb b/spec/migrations/swap_todos_note_id_to_bigint_for_self_managed_spec.rb
new file mode 100644
index 00000000000..525e4fbcd8d
--- /dev/null
+++ b/spec/migrations/swap_todos_note_id_to_bigint_for_self_managed_spec.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapTodosNoteIdToBigintForSelfManaged, feature_category: :database do
+ describe '#up' do
+ context 'when GitLab.com, dev, or test' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE todos ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE todos DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ todos = table(:todos)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ todos.reset_column_information
+
+ expect(todos.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(todos.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+
+ migration.after -> {
+ todos.reset_column_information
+
+ expect(todos.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(todos.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when self-managed instance with the columns already swapped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE todos ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE todos ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer')
+ end
+
+ after do
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE todos DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ todos = table(:todos)
+
+ migrate!
+
+ expect(todos.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(todos.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ end
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE todos ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE todos DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ todos = table(:todos)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ todos.reset_column_information
+
+ expect(todos.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(todos.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+
+ migration.after -> {
+ todos.reset_column_information
+
+ expect(todos.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(todos.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when self-managed instance' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE todos ALTER COLUMN note_id TYPE integer')
+ connection.execute('ALTER TABLE todos ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint')
+ connection.execute('ALTER TABLE todos ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_todos_on_note_id_convert_to_bigint')
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_dca935e3a712() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE todos DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ todos = table(:todos)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ todos.reset_column_information
+
+ expect(todos.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(todos.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ todos.reset_column_information
+
+ expect(todos.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(todos.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index 6192a271028..584f9b010ad 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -164,6 +164,34 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
expect(described_class.by_category('phishing')).to match_array([report2])
end
end
+
+ describe '.aggregated_by_user_and_category' do
+ let_it_be(:report3) { create(:abuse_report, category: report1.category, user: report1.user) }
+ let_it_be(:report4) { create(:abuse_report, category: 'phishing', user: report1.user) }
+ let_it_be(:report5) { create(:abuse_report, category: report1.category, user: build(:user)) }
+
+ let_it_be(:sort_by_count) { true }
+
+ subject(:aggregated) { described_class.aggregated_by_user_and_category(sort_by_count) }
+
+ context 'when sort_by_count = true' do
+ it 'sorts by aggregated_count in descending order and created_at in descending order' do
+ expect(aggregated).to eq([report1, report5, report4, report])
+ end
+
+ it 'returns count with aggregated reports' do
+ expect(aggregated[0].count).to eq(2)
+ end
+ end
+
+ context 'when sort_by_count = false' do
+ let_it_be(:sort_by_count) { false }
+
+ it 'does not sort using a specific order' do
+ expect(aggregated).to match_array([report, report1, report4, report5])
+ end
+ end
+ end
end
describe 'before_validation' do
diff --git a/spec/models/ai/service_access_token_spec.rb b/spec/models/ai/service_access_token_spec.rb
index 12ed24f3bd6..d979db4b3d6 100644
--- a/spec/models/ai/service_access_token_spec.rb
+++ b/spec/models/ai/service_access_token_spec.rb
@@ -12,6 +12,15 @@ RSpec.describe Ai::ServiceAccessToken, type: :model, feature_category: :applicat
end
end
+ describe '.active', :freeze_time do
+ let_it_be(:expired_token) { create(:service_access_token, :code_suggestions, :expired) }
+ let_it_be(:active_token) { create(:service_access_token, :code_suggestions, :active) }
+
+ it 'selects all active tokens' do
+ expect(described_class.active).to match_array([active_token])
+ end
+ end
+
# There is currently only one category, please expand this test when a new category is added.
describe '.for_category' do
let(:code_suggestions_token) { create(:service_access_token, :code_suggestions) }
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 8dcafaa90a0..5a70bec8b33 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { expect(setting.repository_storages_weighted).to eq({}) }
it { expect(setting.kroki_formats).to eq({}) }
it { expect(setting.default_branch_protection_defaults).to eq({}) }
+ it { expect(setting.max_decompressed_archive_size).to eq(25600) }
end
describe 'validations' do
@@ -32,7 +33,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
let(:ftp) { 'ftp://example.com' }
let(:javascript) { 'javascript:alert(window.opener.document.location)' }
- let_it_be(:valid_database_apdex_settings) do
+ let_it_be(:valid_prometheus_alert_db_indicators_settings) do
{
prometheus_api_url: 'Prometheus URL',
apdex_sli_query: {
@@ -42,6 +43,14 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
apdex_slo: {
main: 0.99,
ci: 0.98
+ },
+ wal_rate_sli_query: {
+ main: 'WAL rate query main',
+ ci: 'WAL rate query ci'
+ },
+ wal_rate_slo: {
+ main: 7000,
+ ci: 7000
}
}
end
@@ -86,6 +95,10 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value(['/example'] * 101).for(:protected_paths) }
it { is_expected.not_to allow_value(nil).for(:protected_paths) }
it { is_expected.to allow_value([]).for(:protected_paths) }
+ it { is_expected.to allow_value(['/example'] * 100).for(:protected_paths_for_get_request) }
+ it { is_expected.not_to allow_value(['/example'] * 101).for(:protected_paths_for_get_request) }
+ it { is_expected.not_to allow_value(nil).for(:protected_paths_for_get_request) }
+ it { is_expected.to allow_value([]).for(:protected_paths_for_get_request) }
it { is_expected.to allow_value(3).for(:push_event_hooks_limit) }
it { is_expected.not_to allow_value('three').for(:push_event_hooks_limit) }
@@ -255,9 +268,9 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_value(true, false).for(:gitlab_dedicated_instance) }
it { is_expected.not_to allow_value(nil).for(:gitlab_dedicated_instance) }
- it { is_expected.not_to allow_value(random: :value).for(:database_apdex_settings) }
- it { is_expected.to allow_value(nil).for(:database_apdex_settings) }
- it { is_expected.to allow_value(valid_database_apdex_settings).for(:database_apdex_settings) }
+ it { is_expected.not_to allow_value(apdex_slo: '10').for(:prometheus_alert_db_indicators_settings) }
+ it { is_expected.to allow_value(nil).for(:prometheus_alert_db_indicators_settings) }
+ it { is_expected.to allow_value(valid_prometheus_alert_db_indicators_settings).for(:prometheus_alert_db_indicators_settings) }
it { is_expected.to allow_value([true, false]).for(:silent_mode_enabled) }
it { is_expected.not_to allow_value(nil).for(:silent_mode_enabled) }
@@ -269,6 +282,13 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value(10.5).for(:ci_max_includes) }
it { is_expected.not_to allow_value(-1).for(:ci_max_includes) }
+ it { is_expected.to allow_value(0).for(:ci_max_total_yaml_size_bytes) }
+ it { is_expected.to allow_value(200).for(:ci_max_total_yaml_size_bytes) }
+ it { is_expected.not_to allow_value('abc').for(:ci_max_total_yaml_size_bytes) }
+ it { is_expected.not_to allow_value(nil).for(:ci_max_total_yaml_size_bytes) }
+ it { is_expected.not_to allow_value(10.5).for(:ci_max_total_yaml_size_bytes) }
+ it { is_expected.not_to allow_value(-1).for(:ci_max_total_yaml_size_bytes) }
+
it { is_expected.to allow_value([true, false]).for(:remember_me_enabled) }
it { is_expected.not_to allow_value(nil).for(:remember_me_enabled) }
@@ -277,6 +297,9 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_values([true, false]).for(:instance_level_code_suggestions_enabled) }
it { is_expected.not_to allow_value(nil).for(:instance_level_code_suggestions_enabled) }
+ it { is_expected.to allow_values([true, false]).for(:package_registry_allow_anyone_to_pull_option) }
+ it { is_expected.not_to allow_value(nil).for(:package_registry_allow_anyone_to_pull_option) }
+
context 'when deactivate_dormant_users is enabled' do
before do
stub_application_setting(deactivate_dormant_users: true)
@@ -579,6 +602,30 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
.is_greater_than_or_equal_to(0)
end
+ it { is_expected.to validate_presence_of(:max_import_remote_file_size) }
+
+ specify do
+ is_expected.to validate_numericality_of(:max_import_remote_file_size)
+ .only_integer
+ .is_greater_than_or_equal_to(0)
+ end
+
+ it { is_expected.to validate_presence_of(:bulk_import_max_download_file_size) }
+
+ specify do
+ is_expected.to validate_numericality_of(:bulk_import_max_download_file_size)
+ .only_integer
+ .is_greater_than_or_equal_to(0)
+ end
+
+ it { is_expected.to validate_presence_of(:max_decompressed_archive_size) }
+
+ specify do
+ is_expected.to validate_numericality_of(:max_decompressed_archive_size)
+ .only_integer
+ .is_greater_than_or_equal_to(0)
+ end
+
specify do
is_expected.to validate_numericality_of(:local_markdown_version)
.only_integer
@@ -1272,6 +1319,46 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_value({ name: value }).for(:default_branch_protection_defaults) }
end
end
+
+ context 'default_project_visibility, default_group_visibility and restricted_visibility_levels validations' do
+ before do
+ subject.restricted_visibility_levels = [10]
+ end
+
+ it { is_expected.not_to allow_value(10).for(:default_group_visibility) }
+ it { is_expected.not_to allow_value(10).for(:default_project_visibility) }
+ it { is_expected.to allow_value(20).for(:default_group_visibility) }
+ it { is_expected.to allow_value(20).for(:default_project_visibility) }
+
+ it 'sets error messages when default visibility settings are not valid' do
+ subject.default_group_visibility = 10
+ subject.default_project_visibility = 10
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.messages[:default_group_visibility].first).to eq("cannot be set to a restricted visibility level")
+ expect(subject.errors.messages[:default_project_visibility].first).to eq("cannot be set to a restricted visibility level")
+ end
+
+ context 'when prevent_visibility_restriction FF is disabled' do
+ before do
+ stub_feature_flags(prevent_visibility_restriction: false)
+ end
+
+ it { is_expected.to allow_value(10).for(:default_group_visibility) }
+ it { is_expected.to allow_value(10).for(:default_project_visibility) }
+ it { is_expected.to allow_value(20).for(:default_group_visibility) }
+ it { is_expected.to allow_value(20).for(:default_project_visibility) }
+ end
+ end
+
+ describe 'sentry_clientside_traces_sample_rate' do
+ it do
+ is_expected.to validate_numericality_of(:sentry_clientside_traces_sample_rate)
+ .is_greater_than_or_equal_to(0)
+ .is_less_than_or_equal_to(1)
+ .with_message("must be a value between 0 and 1")
+ end
+ end
end
context 'restrict creating duplicates' do
@@ -1653,29 +1740,4 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
expect(setting.personal_access_tokens_disabled?).to eq(false)
end
end
-
- describe '#ai_access_token' do
- context 'when `instance_level_code_suggestions_enabled` is true' do
- before do
- setting.instance_level_code_suggestions_enabled = true
- end
-
- it { is_expected.not_to allow_value(nil).for(:ai_access_token) }
- end
-
- context 'when `instance_level_code_suggestions_enabled` is false' do
- before do
- setting.instance_level_code_suggestions_enabled = false
- end
-
- it { is_expected.to allow_value(nil).for(:ai_access_token) }
- end
-
- it 'does not modify the token if it is unchanged in the form' do
- setting.ai_access_token = 'foo'
- setting.ai_access_token = ApplicationSettingMaskedAttrs::MASK
-
- expect(setting.ai_access_token).to eq('foo')
- end
- end
end
diff --git a/spec/models/batched_git_ref_updates/deletion_spec.rb b/spec/models/batched_git_ref_updates/deletion_spec.rb
new file mode 100644
index 00000000000..1679e8977b3
--- /dev/null
+++ b/spec/models/batched_git_ref_updates/deletion_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BatchedGitRefUpdates::Deletion, feature_category: :gitaly do
+ describe '.mark_records_processed' do
+ let_it_be(:deletion_1) { described_class.create!(project_id: 5, ref: 'refs/test/1') }
+ let_it_be(:deletion_2) { described_class.create!(project_id: 1, ref: 'refs/test/2') }
+ let_it_be(:deletion_3) { described_class.create!(project_id: 3, ref: 'refs/test/3') }
+ let_it_be(:deletion_4) { described_class.create!(project_id: 1, ref: 'refs/test/4') }
+ let_it_be(:deletion_5) { described_class.create!(project_id: 4, ref: 'refs/test/5', status: :processed) }
+
+ it 'updates all records' do
+ expect(described_class.status_pending.count).to eq(4)
+ expect(described_class.status_processed.count).to eq(1)
+
+ deletions = described_class.for_project(1).select_ref_and_identity
+ described_class.mark_records_processed(deletions)
+
+ deletions.each do |deletion|
+ expect(deletion.reload.status).to eq("processed")
+ end
+
+ expect(described_class.status_pending.count).to eq(2)
+ expect(described_class.status_processed.count).to eq(3)
+ end
+ end
+
+ describe 'sliding_list partitioning' do
+ let(:partition_manager) { Gitlab::Database::Partitioning::PartitionManager.new(described_class) }
+
+ describe 'next_partition_if callback' do
+ let(:active_partition) { described_class.partitioning_strategy.active_partition }
+
+ subject(:value) { described_class.partitioning_strategy.next_partition_if.call(active_partition) }
+
+ context 'when the partition is empty' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the partition has records' do
+ before do
+ described_class.create!(project_id: 1, ref: 'refs/test/1', status: :processed)
+ described_class.create!(project_id: 2, ref: 'refs/test/2', status: :pending)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the first record of the partition is older than PARTITION_DURATION' do
+ before do
+ described_class.create!(
+ project_id: 1,
+ ref: 'refs/test/1',
+ created_at: (described_class::PARTITION_DURATION + 1.day).ago)
+
+ described_class.create!(project_id: 2, ref: 'refs/test/2')
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ describe 'detach_partition_if callback' do
+ let(:active_partition) { described_class.partitioning_strategy.active_partition }
+
+ subject(:value) { described_class.partitioning_strategy.detach_partition_if.call(active_partition) }
+
+ context 'when the partition contains unprocessed records' do
+ before do
+ described_class.create!(project_id: 1, ref: 'refs/test/1')
+ described_class.create!(project_id: 2, ref: 'refs/test/2', status: :processed)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the partition contains only processed records' do
+ before do
+ described_class.create!(project_id: 1, ref: 'refs/test/1', status: :processed)
+ described_class.create!(project_id: 2, ref: 'refs/test/2', status: :processed)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ describe 'the behavior of the strategy' do
+ it 'moves records to new partitions as time passes', :freeze_time do
+ # We start with partition 1
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1])
+
+ # it's not a day old yet so no new partitions are created
+ partition_manager.sync_partitions
+
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1])
+
+ # add one record so the next partition will be created
+ described_class.create!(project_id: 1, ref: 'refs/test/1')
+
+ # after traveling forward a day
+ travel(described_class::PARTITION_DURATION + 1.second)
+
+ # a new partition is created
+ partition_manager.sync_partitions
+
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to contain_exactly(1, 2)
+
+ # and we can insert to the new partition
+ described_class.create!(project_id: 2, ref: 'refs/test/2')
+
+ # after processing old records
+ described_class.mark_records_processed(described_class.for_partition(1).select_ref_and_identity)
+
+ partition_manager.sync_partitions
+
+ # the old one is removed
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([2])
+
+ # and we only have the newly created partition left.
+ expect(described_class.count).to eq(1)
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index d93250af177..1d0c3bb5dee 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, refind: true) { create(:project, :repository, :in_group) }
let_it_be(:target_project) { create(:project, name: 'project', namespace: create(:namespace, name: 'my')) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
@@ -27,6 +27,10 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
it_behaves_like 'a retryable job'
+ it_behaves_like 'a deployable job' do
+ let(:job) { bridge }
+ end
+
it 'has one downstream pipeline' do
expect(bridge).to have_one(:sourced_pipeline)
expect(bridge).to have_one(:downstream_pipeline)
@@ -196,11 +200,18 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
end
describe '#downstream_variables' do
+ # A new pipeline needs to be created in each test.
+ # The pipeline #variables_builder is memoized. The builder internally also memoizes variables.
+ # Having pipeline in a let_it_be might lead to flaky tests
+ # because a test might expect new variables but the variables builder does not
+ # return the new variables due to memoized results from previous tests.
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
subject(:downstream_variables) { bridge.downstream_variables }
it 'returns variables that are going to be passed downstream' do
expect(bridge.downstream_variables)
- .to include(key: 'BRIDGE', value: 'cross')
+ .to contain_exactly(key: 'BRIDGE', value: 'cross')
end
context 'when using variables interpolation' do
@@ -241,14 +252,49 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
end
end
+ context 'when using variables interpolation on file variables' do
+ let(:yaml_variables) do
+ [
+ {
+ key: 'EXPANDED_FILE',
+ value: '$TEST_FILE_VAR'
+ }
+ ]
+ end
+
+ before do
+ bridge.yaml_variables = yaml_variables
+ create(:ci_variable, :file, project: bridge.pipeline.project, key: 'TEST_FILE_VAR', value: 'test-file-value')
+ end
+
+ it 'does not expand file variable and forwards the file variable' do
+ expected_vars = [
+ { key: 'EXPANDED_FILE', value: '$TEST_FILE_VAR' },
+ { key: 'TEST_FILE_VAR', value: 'test-file-value', variable_type: :file }
+ ]
+
+ expect(bridge.downstream_variables).to contain_exactly(*expected_vars)
+ end
+
+ context 'and feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_prevent_file_var_expansion_downstream_pipeline: false)
+ end
+
+ it 'expands the file variable' do
+ expect(bridge.downstream_variables).to contain_exactly({ key: 'EXPANDED_FILE', value: 'test-file-value' })
+ end
+ end
+ end
+
context 'when recursive interpolation has been used' do
before do
- bridge.yaml_variables << { key: 'EXPANDED', value: '$EXPANDED', public: true }
+ bridge.yaml_variables = [{ key: 'EXPANDED', value: '$EXPANDED', public: true }]
end
it 'does not expand variable recursively' do
expect(bridge.downstream_variables)
- .to include(key: 'EXPANDED', value: '$EXPANDED')
+ .to contain_exactly(key: 'EXPANDED', value: '$EXPANDED')
end
end
@@ -279,26 +325,82 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
}
end
+ before do
+ create(:ci_pipeline_variable, pipeline: pipeline, key: 'PVAR1', value: 'PVAL1')
+ end
+
it 'returns variables according to the forward value' do
expect(bridge.downstream_variables.map { |v| v[:key] }).to contain_exactly(*variables)
end
end
context 'when sending a variable via both yaml and pipeline' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
let(:options) do
{ trigger: { project: 'my/project', forward: { pipeline_variables: true } } }
end
before do
- create(:ci_pipeline_variable, pipeline: pipeline, key: 'BRIDGE', value: 'new value')
+ bridge.yaml_variables = [{ key: 'SHARED_KEY', value: 'old_value' }]
+ create(:ci_pipeline_variable, pipeline: pipeline, key: 'SHARED_KEY', value: 'new value')
end
it 'uses the pipeline variable' do
- expect(bridge.downstream_variables).to contain_exactly(
- { key: 'BRIDGE', value: 'new value' }
- )
+ expect(bridge.downstream_variables).to contain_exactly({ key: 'SHARED_KEY', value: 'new value' })
+ end
+ end
+
+ context 'when sending a file variable from pipeline variable' do
+ let(:options) do
+ { trigger: { project: 'my/project', forward: { pipeline_variables: true } } }
+ end
+
+ before do
+ bridge.yaml_variables = [{ key: 'FILE_VAR', value: 'old_value' }]
+ create(:ci_pipeline_variable, :file, pipeline: pipeline, key: 'FILE_VAR', value: 'new value')
+ end
+
+ # The current behaviour forwards the file variable as an environment variable.
+ # TODO: decide whether to forward as a file var in https://gitlab.com/gitlab-org/gitlab/-/issues/416334
+ it 'forwards the pipeline file variable' do
+ expect(bridge.downstream_variables).to contain_exactly({ key: 'FILE_VAR', value: 'new value' })
+ end
+ end
+
+ context 'when a pipeline variable interpolates a scoped file variable' do
+ let(:options) do
+ { trigger: { project: 'my/project', forward: { pipeline_variables: true } } }
+ end
+
+ before do
+ bridge.yaml_variables = [{ key: 'YAML_VAR', value: '$PROJECT_FILE_VAR' }]
+
+ create(:ci_variable, :file, project: pipeline.project, key: 'PROJECT_FILE_VAR', value: 'project file')
+ create(:ci_pipeline_variable, pipeline: pipeline, key: 'FILE_VAR', value: '$PROJECT_FILE_VAR')
+ end
+
+ it 'does not expand the scoped file variable and forwards the file variable' do
+ expected_vars = [
+ { key: 'FILE_VAR', value: '$PROJECT_FILE_VAR' },
+ { key: 'YAML_VAR', value: '$PROJECT_FILE_VAR' },
+ { key: 'PROJECT_FILE_VAR', value: 'project file', variable_type: :file }
+ ]
+
+ expect(bridge.downstream_variables).to contain_exactly(*expected_vars)
+ end
+
+ context 'and feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_prevent_file_var_expansion_downstream_pipeline: false)
+ end
+
+ it 'expands the file variable' do
+ expected_vars = [
+ { key: 'FILE_VAR', value: 'project file' },
+ { key: 'YAML_VAR', value: 'project file' }
+ ]
+
+ expect(bridge.downstream_variables).to contain_exactly(*expected_vars)
+ end
end
end
@@ -315,10 +417,66 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
end
it 'adds the schedule variable' do
- expect(bridge.downstream_variables).to contain_exactly(
+ expected_vars = [
{ key: 'BRIDGE', value: 'cross' },
{ key: 'schedule_var_key', value: 'schedule var value' }
- )
+ ]
+
+ expect(bridge.downstream_variables).to contain_exactly(*expected_vars)
+ end
+ end
+ end
+
+ context 'when sending a file variable from pipeline schedule' do
+ let(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }
+ let(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule) }
+
+ let(:options) do
+ { trigger: { project: 'my/project', forward: { pipeline_variables: true } } }
+ end
+
+ before do
+ bridge.yaml_variables = []
+ pipeline_schedule.variables.create!(key: 'schedule_var_key', value: 'schedule var value', variable_type: :file)
+ end
+
+ # The current behaviour forwards the file variable as an environment variable.
+ # TODO: decide whether to forward as a file var in https://gitlab.com/gitlab-org/gitlab/-/issues/416334
+ it 'forwards the schedule file variable' do
+ expect(bridge.downstream_variables).to contain_exactly({ key: 'schedule_var_key', value: 'schedule var value' })
+ end
+ end
+
+ context 'when a pipeline schedule variable interpolates a scoped file variable' do
+ let(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }
+ let(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule) }
+
+ let(:options) do
+ { trigger: { project: 'my/project', forward: { pipeline_variables: true } } }
+ end
+
+ before do
+ bridge.yaml_variables = []
+ create(:ci_variable, :file, project: pipeline.project, key: 'PROJECT_FILE_VAR', value: 'project file')
+ pipeline_schedule.variables.create!(key: 'schedule_var_key', value: '$PROJECT_FILE_VAR')
+ end
+
+ it 'does not expand the scoped file variable and forwards the file variable' do
+ expected_vars = [
+ { key: 'schedule_var_key', value: '$PROJECT_FILE_VAR' },
+ { key: 'PROJECT_FILE_VAR', value: 'project file', variable_type: :file }
+ ]
+
+ expect(bridge.downstream_variables).to contain_exactly(*expected_vars)
+ end
+
+ context 'and feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_prevent_file_var_expansion_downstream_pipeline: false)
+ end
+
+ it 'expands the file variable' do
+ expect(bridge.downstream_variables).to contain_exactly({ key: 'schedule_var_key', value: 'project file' })
end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index b7f457962a0..a556244ae00 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -38,7 +38,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it { is_expected.to have_many(:report_results).with_foreign_key(:build_id) }
it { is_expected.to have_many(:pages_deployments).with_foreign_key(:ci_build_id) }
- it { is_expected.to have_one(:deployment) }
it { is_expected.to have_one(:runner_manager).through(:runner_manager_build) }
it { is_expected.to have_one(:runner_session).with_foreign_key(:build_id) }
it { is_expected.to have_one(:trace_metadata).with_foreign_key(:build_id) }
@@ -67,14 +66,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
- shared_examples 'calling proper BuildFinishedWorker' do
- it 'calls Ci::BuildFinishedWorker' do
- expect(Ci::BuildFinishedWorker).to receive(:perform_async)
-
- subject
- end
- end
-
describe 'associations' do
it 'has a bidirectional relationship with projects' do
expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:builds)
@@ -110,16 +101,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it_behaves_like 'a retryable job'
- describe '.manual_actions' do
- let!(:manual_but_created) { create(:ci_build, :manual, status: :created, pipeline: pipeline) }
- let!(:manual_but_succeeded) { create(:ci_build, :manual, status: :success, pipeline: pipeline) }
- let!(:manual_action) { create(:ci_build, :manual, pipeline: pipeline) }
-
- subject { described_class.manual_actions }
-
- it { is_expected.to include(manual_action) }
- it { is_expected.to include(manual_but_succeeded) }
- it { is_expected.not_to include(manual_but_created) }
+ it_behaves_like 'a deployable job' do
+ let(:job) { build }
end
describe '.ref_protected' do
@@ -657,54 +640,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
- describe '#outdated_deployment?' do
- subject { build.outdated_deployment? }
-
- let(:build) { create(:ci_build, :created, :with_deployment, pipeline: pipeline, environment: 'production') }
-
- context 'when build has no environment' do
- let(:build) { create(:ci_build, :created, pipeline: pipeline, environment: nil) }
-
- it { expect(subject).to be_falsey }
- end
-
- context 'when project has forward deployment disabled' do
- before do
- project.ci_cd_settings.update!(forward_deployment_enabled: false)
- end
-
- it { expect(subject).to be_falsey }
- end
-
- context 'when build is not an outdated deployment' do
- before do
- allow(build.deployment).to receive(:older_than_last_successful_deployment?).and_return(false)
- end
-
- it { expect(subject).to be_falsey }
- end
-
- context 'when build is older than the latest deployment and still pending status' do
- before do
- allow(build.deployment).to receive(:older_than_last_successful_deployment?).and_return(true)
- end
-
- it { expect(subject).to be_truthy }
- end
-
- context 'when build is older than the latest deployment but succeeded once' do
- let(:build) { create(:ci_build, :success, :with_deployment, pipeline: pipeline, environment: 'production') }
-
- before do
- allow(build.deployment).to receive(:older_than_last_successful_deployment?).and_return(true)
- end
-
- it 'returns false for allowing rollback' do
- expect(subject).to be_falsey
- end
- end
- end
-
describe '#schedulable?' do
subject { build.schedulable? }
@@ -1588,430 +1523,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
- describe 'state transition as a deployable' do
- subject { build.send(event) }
-
- let!(:build) { create(:ci_build, :with_deployment, :start_review_app, pipeline: pipeline) }
- let(:deployment) { build.deployment }
- let(:environment) { deployment.environment }
-
- before do
- allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- allow(Deployments::HooksWorker).to receive(:perform_async)
- end
-
- it 'has deployments record with created status' do
- expect(deployment).to be_created
- expect(environment.name).to eq('review/master')
- end
-
- shared_examples_for 'avoid deadlock' do
- it 'executes UPDATE in the right order' do
- recorded = with_cross_database_modification_prevented do
- ActiveRecord::QueryRecorder.new { subject }
- end
-
- index_for_build = recorded.log.index { |l| l.include?("UPDATE #{described_class.quoted_table_name}") }
- index_for_deployment = recorded.log.index { |l| l.include?("UPDATE \"deployments\"") }
-
- expect(index_for_build).to be < index_for_deployment
- end
- end
-
- context 'when transits to running' do
- let(:event) { :run! }
-
- it_behaves_like 'avoid deadlock'
-
- it 'transits deployment status to running' do
- with_cross_database_modification_prevented do
- subject
- end
-
- expect(deployment).to be_running
- end
-
- context 'when deployment is already running state' do
- before do
- build.deployment.success!
- end
-
- it 'does not change deployment status and tracks an error' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception).with(
- instance_of(Deployment::StatusSyncError), deployment_id: deployment.id, build_id: build.id)
-
- with_cross_database_modification_prevented do
- expect { subject }.not_to change { deployment.reload.status }
- end
- end
- end
- end
-
- context 'when transits to success' do
- let(:event) { :success! }
-
- before do
- allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
- allow(Deployments::HooksWorker).to receive(:perform_async)
- end
-
- it_behaves_like 'avoid deadlock'
- it_behaves_like 'calling proper BuildFinishedWorker'
-
- it 'transits deployment status to success' do
- with_cross_database_modification_prevented do
- subject
- end
-
- expect(deployment).to be_success
- end
- end
-
- context 'when transits to failed' do
- let(:event) { :drop! }
-
- it_behaves_like 'avoid deadlock'
- it_behaves_like 'calling proper BuildFinishedWorker'
-
- it 'transits deployment status to failed' do
- with_cross_database_modification_prevented do
- subject
- end
-
- expect(deployment).to be_failed
- end
- end
-
- context 'when transits to skipped' do
- let(:event) { :skip! }
-
- it_behaves_like 'avoid deadlock'
-
- it 'transits deployment status to skipped' do
- with_cross_database_modification_prevented do
- subject
- end
-
- expect(deployment).to be_skipped
- end
- end
-
- context 'when transits to canceled' do
- let(:event) { :cancel! }
-
- it_behaves_like 'avoid deadlock'
- it_behaves_like 'calling proper BuildFinishedWorker'
-
- it 'transits deployment status to canceled' do
- with_cross_database_modification_prevented do
- subject
- end
-
- expect(deployment).to be_canceled
- end
- end
-
- # Mimic playing a manual job that needs another job.
- # `needs + when:manual` scenario, see: https://gitlab.com/gitlab-org/gitlab/-/issues/347502
- context 'when transits from skipped to created to running' do
- before do
- build.skip!
- end
-
- context 'during skipped to created' do
- let(:event) { :process! }
-
- it 'transitions to created' do
- subject
-
- expect(deployment).to be_created
- end
- end
-
- context 'during created to running' do
- let(:event) { :run! }
-
- before do
- build.process!
- build.enqueue!
- end
-
- it 'transitions to running and calls webhook' do
- freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(hash_including({ 'deployment_id' => deployment.id, 'status' => 'running', 'status_changed_at' => Time.current.to_s }))
-
- subject
- end
-
- expect(deployment).to be_running
- end
- end
- end
- end
-
- describe '#on_stop' do
- subject { build.on_stop }
-
- context 'when a job has a specification that it can be stopped from the other job' do
- let(:build) { create(:ci_build, :start_review_app, pipeline: pipeline) }
-
- it 'returns the other job name' do
- is_expected.to eq('stop_review_app')
- end
- end
-
- context 'when a job does not have environment information' do
- let(:build) { create(:ci_build, pipeline: pipeline) }
-
- it 'returns nil' do
- is_expected.to be_nil
- end
- end
- end
-
- describe '#environment_tier_from_options' do
- subject { build.environment_tier_from_options }
-
- let(:build) { described_class.new(options: options) }
- let(:options) { { environment: { deployment_tier: 'production' } } }
-
- it { is_expected.to eq('production') }
-
- context 'when options does not include deployment_tier' do
- let(:options) { { environment: { name: 'production' } } }
-
- it { is_expected.to be_nil }
- end
- end
-
- describe '#environment_tier' do
- subject { build.environment_tier }
-
- let(:options) { { environment: { deployment_tier: 'production' } } }
- let!(:environment) { create(:environment, name: 'production', tier: 'development', project: project) }
- let(:build) { described_class.new(options: options, environment: 'production', project: project) }
-
- it { is_expected.to eq('production') }
-
- context 'when options does not include deployment_tier' do
- let(:options) { { environment: { name: 'production' } } }
-
- it 'uses tier from environment' do
- is_expected.to eq('development')
- end
-
- context 'when persisted environment is absent' do
- let(:environment) { nil }
-
- it { is_expected.to be_nil }
- end
- end
- end
-
- describe 'environment' do
- describe '#has_environment_keyword?' do
- subject { build.has_environment_keyword? }
-
- context 'when environment is defined' do
- before do
- build.update!(environment: 'review')
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'when environment is not defined' do
- before do
- build.update!(environment: nil)
- end
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#expanded_environment_name' do
- subject { build.expanded_environment_name }
-
- context 'when environment uses $CI_COMMIT_REF_NAME' do
- let(:build) do
- create(
- :ci_build,
- ref: 'master',
- environment: 'review/$CI_COMMIT_REF_NAME',
- pipeline: pipeline
- )
- end
-
- it { is_expected.to eq('review/master') }
- end
-
- context 'when environment uses yaml_variables containing symbol keys' do
- let(:build) do
- create(
- :ci_build,
- yaml_variables: [{ key: :APP_HOST, value: 'host' }],
- environment: 'review/$APP_HOST',
- pipeline: pipeline
- )
- end
-
- it 'returns an expanded environment name with a list of variables' do
- is_expected.to eq('review/host')
- end
-
- context 'when build metadata has already persisted the expanded environment name' do
- before do
- build.metadata.expanded_environment_name = 'review/foo'
- end
-
- it 'returns a persisted expanded environment name without a list of variables' do
- expect(build).not_to receive(:simple_variables)
-
- is_expected.to eq('review/foo')
- end
- end
- end
-
- context 'when using persisted variables' do
- let(:build) do
- create(:ci_build, environment: 'review/x$CI_JOB_ID', pipeline: pipeline)
- end
-
- it { is_expected.to eq('review/x') }
- end
-
- context 'when environment name uses a nested variable' do
- let(:yaml_variables) do
- [
- { key: 'ENVIRONMENT_NAME', value: '${CI_COMMIT_REF_NAME}' }
- ]
- end
-
- let(:build) do
- create(
- :ci_build,
- ref: 'master',
- yaml_variables: yaml_variables,
- environment: 'review/$ENVIRONMENT_NAME',
- pipeline: pipeline
- )
- end
-
- it { is_expected.to eq('review/master') }
- end
- end
-
- describe '#expanded_kubernetes_namespace' do
- let(:build) { create(:ci_build, environment: environment, options: options, pipeline: pipeline) }
-
- subject { build.expanded_kubernetes_namespace }
-
- context 'environment and namespace are not set' do
- let(:environment) { nil }
- let(:options) { nil }
-
- it { is_expected.to be_nil }
- end
-
- context 'environment is specified' do
- let(:environment) { 'production' }
-
- context 'namespace is not set' do
- let(:options) { nil }
-
- it { is_expected.to be_nil }
- end
-
- context 'namespace is provided' do
- let(:options) do
- {
- environment: {
- name: environment,
- kubernetes: {
- namespace: namespace
- }
- }
- }
- end
-
- context 'with a static value' do
- let(:namespace) { 'production' }
-
- it { is_expected.to eq namespace }
- end
-
- context 'with a dynamic value' do
- let(:namespace) { 'deploy-$CI_COMMIT_REF_NAME' }
-
- it { is_expected.to eq 'deploy-master' }
- end
- end
- end
- end
-
- describe '#deployment_job?' do
- subject { build.deployment_job? }
-
- context 'when environment is defined' do
- before do
- build.update!(environment: 'review')
- end
-
- context 'no action is defined' do
- it { is_expected.to be_truthy }
- end
-
- context 'and start action is defined' do
- before do
- build.update!(options: { environment: { action: 'start' } })
- end
-
- it { is_expected.to be_truthy }
- end
- end
-
- context 'when environment is not defined' do
- before do
- build.update!(environment: nil)
- end
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#stops_environment?' do
- subject { build.stops_environment? }
-
- context 'when environment is defined' do
- before do
- build.update!(environment: 'review')
- end
-
- context 'no action is defined' do
- it { is_expected.to be_falsey }
- end
-
- context 'and stop action is defined' do
- before do
- build.update!(options: { environment: { action: 'stop' } })
- end
-
- it { is_expected.to be_truthy }
- end
- end
-
- context 'when environment is not defined' do
- before do
- build.update!(environment: nil)
- end
-
- it { is_expected.to be_falsey }
- end
- end
- end
-
describe 'erasable build' do
shared_examples 'erasable' do
it 'removes artifact file' do
@@ -2485,29 +1996,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
- describe '#other_manual_actions' do
- let(:build) { create(:ci_build, :manual, pipeline: pipeline) }
- let!(:other_build) { create(:ci_build, :manual, pipeline: pipeline, name: 'other action') }
-
- subject { build.other_manual_actions }
-
- before do
- project.add_developer(user)
- end
-
- it 'returns other actions' do
- is_expected.to contain_exactly(other_build)
- end
-
- context 'when build is retried' do
- let!(:new_build) { Ci::RetryJobService.new(project, user).execute(build)[:job] }
-
- it 'does not return any of them' do
- is_expected.not_to include(build, new_build)
- end
- end
- end
-
describe '#other_scheduled_actions' do
let(:build) { create(:ci_build, :scheduled, pipeline: pipeline) }
@@ -2550,44 +2038,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
- describe '#persisted_environment' do
- let!(:environment) do
- create(:environment, project: project, name: "foo-#{project.default_branch}")
- end
-
- subject { build.persisted_environment }
-
- context 'when referenced literally' do
- let(:build) do
- create(:ci_build, pipeline: pipeline, environment: "foo-#{project.default_branch}")
- end
-
- it { is_expected.to eq(environment) }
- end
-
- context 'when referenced with a variable' do
- let(:build) do
- create(:ci_build, pipeline: pipeline, environment: "foo-$CI_COMMIT_REF_NAME")
- end
-
- it { is_expected.to eq(environment) }
- end
-
- context 'when there is no environment' do
- it { is_expected.to be_nil }
- end
-
- context 'when build has a stop environment' do
- let(:build) { create(:ci_build, :stop_review_app, pipeline: pipeline, environment: "foo-#{project.default_branch}") }
-
- it 'expands environment name' do
- expect(build).to receive(:expanded_environment_name).and_call_original
-
- is_expected.to eq(environment)
- end
- end
- end
-
describe '#play' do
let(:build) { create(:ci_build, :manual, pipeline: pipeline) }
@@ -2915,6 +2365,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
{ key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false },
{ key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false },
{ key: 'CI_PIPELINE_CREATED_AT', value: pipeline.created_at.iso8601, public: true, masked: false },
+ { key: 'CI_PIPELINE_NAME', value: pipeline.name, public: true, masked: false },
{ key: 'CI_COMMIT_SHA', value: build.sha, public: true, masked: false },
{ key: 'CI_COMMIT_SHORT_SHA', value: build.short_sha, public: true, masked: false },
{ key: 'CI_COMMIT_BEFORE_SHA', value: build.before_sha, public: true, masked: false },
@@ -3765,41 +3216,79 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
context 'for the google_play integration' do
- let_it_be(:google_play_integration) { create(:google_play_integration) }
+ before do
+ allow(build.pipeline).to receive(:protected_ref?).and_return(pipeline_protected_ref)
+ end
let(:google_play_variables) do
[
- { key: 'SUPPLY_JSON_KEY_DATA', value: google_play_integration.service_account_key, masked: true, public: false }
+ { key: "SUPPLY_JSON_KEY_DATA", value: google_play_integration.service_account_key, masked: true, public: false },
+ { key: "SUPPLY_PACKAGE_NAME", value: google_play_integration.package_name, masked: false, public: false }
]
end
+ shared_examples 'does not include the google_play_variables' do
+ specify do
+ expect(subject.find { |v| v[:key] == "SUPPLY_JSON_KEY_DATA" }).to be_nil
+ expect(subject.find { |v| v[:key] == "SUPPLY_PACKAGE_NAME" }).to be_nil
+ end
+ end
+
+ shared_examples 'includes google_play_variables' do
+ specify do
+ expect(subject).to include(*google_play_variables)
+ end
+ end
+
context 'when the google_play integration exists' do
- context 'when a build is protected' do
- before do
- allow(build.pipeline).to receive(:protected_ref?).and_return(true)
- build.project.update!(google_play_integration: google_play_integration)
+ let_it_be(:google_play_integration) do
+ create(:google_play_integration, project: project)
+ end
+
+ context 'when google_play_protected_refs is true' do
+ context 'when a build is protected' do
+ let(:pipeline_protected_ref) { true }
+
+ include_examples 'includes google_play_variables'
end
- it 'includes google_play variables' do
- is_expected.to include(*google_play_variables)
+ context 'when a build is not protected' do
+ let(:pipeline_protected_ref) { false }
+
+ include_examples 'does not include the google_play_variables'
end
end
- context 'when a build is not protected' do
+ context 'when google_play_protected_refs is false' do
before do
- allow(build.pipeline).to receive(:protected_ref?).and_return(false)
- build.project.update!(google_play_integration: google_play_integration)
+ google_play_integration.update!(google_play_protected_refs: false)
+ end
+
+ context 'when a build is protected' do
+ let(:pipeline_protected_ref) { true }
+
+ include_examples 'includes google_play_variables'
end
- it 'does not include the google_play variable' do
- expect(subject[:key] == 'SUPPLY_JSON_KEY_DATA').to eq(false)
+ context 'when a build is not protected' do
+ let(:pipeline_protected_ref) { false }
+
+ include_examples 'includes google_play_variables'
end
end
end
- context 'when the googel_play integration does not exist' do
- it 'does not include google_play variable' do
- expect(subject[:key] == 'SUPPLY_JSON_KEY_DATA').to eq(false)
+ context 'when the google_play integration does not exist' do
+ context 'when a build is protected' do
+ let(:pipeline_protected_ref) { true }
+
+ include_examples 'does not include the google_play_variables'
+ end
+
+ context 'when a build is not protected' do
+ let(:pipeline_protected_ref) { false }
+
+ include_examples 'does not include the google_play_variables'
end
end
end
@@ -5051,45 +4540,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
- describe '#deployment_status' do
- before do
- allow_any_instance_of(described_class).to receive(:create_deployment)
- end
-
- context 'when build is a last deployment' do
- let(:build) { create(:ci_build, :success, environment: 'production', pipeline: pipeline) }
- let(:environment) { create(:environment, name: 'production', project: build.project) }
- let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
-
- it { expect(build.deployment_status).to eq(:last) }
- end
-
- context 'when there is a newer build with deployment' do
- let(:build) { create(:ci_build, :success, environment: 'production', pipeline: pipeline) }
- let(:environment) { create(:environment, name: 'production', project: build.project) }
- let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
- let!(:last_deployment) { create(:deployment, :success, environment: environment, project: environment.project) }
-
- it { expect(build.deployment_status).to eq(:out_of_date) }
- end
-
- context 'when build with deployment has failed' do
- let(:build) { create(:ci_build, :failed, environment: 'production', pipeline: pipeline) }
- let(:environment) { create(:environment, name: 'production', project: build.project) }
- let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
-
- it { expect(build.deployment_status).to eq(:failed) }
- end
-
- context 'when build with deployment is running' do
- let(:build) { create(:ci_build, environment: 'production', pipeline: pipeline) }
- let(:environment) { create(:environment, name: 'production', project: build.project) }
- let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
-
- it { expect(build.deployment_status).to eq(:creating) }
- end
- end
-
describe '#degenerated?' do
context 'when build is degenerated' do
subject { create(:ci_build, :degenerated, pipeline: pipeline) }
@@ -6154,4 +5604,26 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
end
+
+ describe 'routing table switch' do
+ context 'with ff disabled' do
+ before do
+ stub_feature_flags(ci_partitioning_use_ci_builds_routing_table: false)
+ end
+
+ it 'uses the legacy table' do
+ expect(described_class.table_name).to eq('ci_builds')
+ end
+ end
+
+ context 'with ff enabled' do
+ before do
+ stub_feature_flags(ci_partitioning_use_ci_builds_routing_table: true)
+ end
+
+ it 'uses the routing table' do
+ expect(described_class.table_name).to eq('p_ci_builds')
+ end
+ end
+ end
end
diff --git a/spec/models/ci/catalog/resource_spec.rb b/spec/models/ci/catalog/resource_spec.rb
index 45d49d65b02..4608e611ea1 100644
--- a/spec/models/ci/catalog/resource_spec.rb
+++ b/spec/models/ci/catalog/resource_spec.rb
@@ -15,6 +15,8 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
let_it_be(:release3) { create(:release, project: project, released_at: Time.zone.now) }
it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:components).class_name('Ci::Catalog::Resources::Component') }
+ it { is_expected.to have_many(:versions).class_name('Ci::Catalog::Resources::Version') }
it { is_expected.to delegate_method(:avatar_path).to(:project) }
it { is_expected.to delegate_method(:description).to(:project) }
diff --git a/spec/models/ci/catalog/resources/component_spec.rb b/spec/models/ci/catalog/resources/component_spec.rb
new file mode 100644
index 00000000000..caaf76e610d
--- /dev/null
+++ b/spec/models/ci/catalog/resources/component_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Catalog::Resources::Component, type: :model, feature_category: :pipeline_composition do
+ let(:component) { build(:catalog_resource_component) }
+
+ it { is_expected.to belong_to(:catalog_resource).class_name('Ci::Catalog::Resource') }
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:version).class_name('Ci::Catalog::Resources::Version') }
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:catalog_resource) }
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:version) }
+ it { is_expected.to validate_presence_of(:name) }
+
+ context 'when attributes are valid' do
+ it 'returns no errors' do
+ component.inputs = {
+ website: nil,
+ environment: {
+ default: 'test'
+ },
+ tags: {
+ type: 'array'
+ }
+ }
+ expect(component).to be_valid
+ end
+ end
+
+ context 'when data is invalid' do
+ it 'returns errors' do
+ component.inputs = { boo: [] }
+
+ aggregate_failures do
+ expect(component).to be_invalid
+ expect(component.errors.full_messages).to contain_exactly(
+ 'Inputs must be a valid json schema'
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/catalog/resources/version_spec.rb b/spec/models/ci/catalog/resources/version_spec.rb
new file mode 100644
index 00000000000..e93176e466a
--- /dev/null
+++ b/spec/models/ci/catalog/resources/version_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category: :pipeline_composition do
+ it { is_expected.to belong_to(:release) }
+ it { is_expected.to belong_to(:catalog_resource).class_name('Ci::Catalog::Resource') }
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:components).class_name('Ci::Catalog::Resources::Component') }
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:release) }
+ it { is_expected.to validate_presence_of(:catalog_resource) }
+ it { is_expected.to validate_presence_of(:project) }
+ end
+end
diff --git a/spec/models/ci/commit_with_pipeline_spec.rb b/spec/models/ci/commit_with_pipeline_spec.rb
index 766e99288c0..063cff0e63f 100644
--- a/spec/models/ci/commit_with_pipeline_spec.rb
+++ b/spec/models/ci/commit_with_pipeline_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Ci::CommitWithPipeline, feature_category: :continuous_integration
]
end
- before(:all) do
+ before_all do
commits_with_pipelines.each do |commit|
create(:ci_empty_pipeline, project: commit.project, sha: commit.sha)
end
diff --git a/spec/models/ci/job_annotation_spec.rb b/spec/models/ci/job_annotation_spec.rb
index f94494bc91d..465c168b714 100644
--- a/spec/models/ci/job_annotation_spec.rb
+++ b/spec/models/ci/job_annotation_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe Ci::JobAnnotation, feature_category: :build_artifacts do
it { is_expected.to belong_to(:job).class_name('Ci::Build').inverse_of(:job_annotations) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
- it { is_expected.to validate_uniqueness_of(:name).scoped_to([:job_id, :partition_id]) }
end
describe '.create' do
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 83c233fa942..498af80dbb6 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -824,4 +824,82 @@ RSpec.describe Ci::JobArtifact, feature_category: :build_artifacts do
it { is_expected.to eq(artifact.file.filename) }
end
+
+ describe '#to_deleted_object_attrs' do
+ let(:pick_up_at) { nil }
+ let(:expire_at) { nil }
+ let(:file_final_path) { nil }
+
+ let(:artifact) do
+ create(
+ :ci_job_artifact,
+ :archive,
+ :remote_store,
+ file_final_path: file_final_path,
+ expire_at: expire_at
+ )
+ end
+
+ subject(:attributes) { artifact.to_deleted_object_attrs(pick_up_at) }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ shared_examples_for 'returning attributes for object deletion' do
+ it 'returns the file store' do
+ expect(attributes[:file_store]).to eq(artifact.file_store)
+ end
+
+ context 'when pick_up_at is present' do
+ let(:pick_up_at) { 2.hours.ago }
+
+ it 'returns the pick_up_at value' do
+ expect(attributes[:pick_up_at]).to eq(pick_up_at)
+ end
+ end
+
+ context 'when pick_up_at is not present' do
+ context 'and expire_at is present' do
+ let(:expire_at) { 4.hours.ago }
+
+ it 'sets expire_at as pick_up_at' do
+ expect(attributes[:pick_up_at]).to eq(expire_at)
+ end
+ end
+
+ context 'and expire_at is not present' do
+ it 'sets current time as pick_up_at' do
+ freeze_time do
+ expect(attributes[:pick_up_at]).to eq(Time.current)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when file_final_path is present' do
+ let(:file_final_path) { 'some/hash/path/to/randomfile' }
+
+ it 'returns the store_dir and file based on the file_final_path' do
+ expect(attributes).to include(
+ store_dir: 'some/hash/path/to',
+ file: 'randomfile'
+ )
+ end
+
+ it_behaves_like 'returning attributes for object deletion'
+ end
+
+ context 'when file_final_path is not present' do
+ it 'returns the uploader default store_dir and file_identifier' do
+ expect(attributes).to include(
+ store_dir: artifact.file.store_dir.to_s,
+ file: artifact.file_identifier
+ )
+ end
+
+ it_behaves_like 'returning attributes for object deletion'
+ end
+ end
end
diff --git a/spec/models/ci/persistent_ref_spec.rb b/spec/models/ci/persistent_ref_spec.rb
index ecaa8f59ecf..ed4ea02d8ba 100644
--- a/spec/models/ci/persistent_ref_spec.rb
+++ b/spec/models/ci/persistent_ref_spec.rb
@@ -3,26 +3,40 @@
require 'spec_helper'
RSpec.describe Ci::PersistentRef do
- it 'cleans up persistent refs after pipeline finished', :sidekiq_inline do
+ it 'cleans up persistent refs async after pipeline finished' do
pipeline = create(:ci_pipeline, :running)
- expect(Ci::PipelineCleanupRefWorker).to receive(:perform_async).with(pipeline.id)
-
- pipeline.succeed!
+ expect { pipeline.succeed! }
+ .to change { ::BatchedGitRefUpdates::Deletion.count }
+ .by(1)
end
- context 'when pipeline_cleanup_ref_worker_async is disabled' do
+ context 'when pipeline_delete_gitaly_refs_in_batches is disabled' do
before do
- stub_feature_flags(pipeline_cleanup_ref_worker_async: false)
+ stub_feature_flags(pipeline_delete_gitaly_refs_in_batches: false)
end
it 'cleans up persistent refs after pipeline finished' do
pipeline = create(:ci_pipeline, :running)
- expect(pipeline.persistent_ref).to receive(:delete).once
+ expect(Ci::PipelineCleanupRefWorker).to receive(:perform_async).with(pipeline.id)
pipeline.succeed!
end
+
+ context 'when pipeline_cleanup_ref_worker_async is disabled' do
+ before do
+ stub_feature_flags(pipeline_cleanup_ref_worker_async: false)
+ end
+
+ it 'cleans up persistent refs after pipeline finished' do
+ pipeline = create(:ci_pipeline, :running)
+
+ expect(pipeline.persistent_ref).to receive(:delete).once
+
+ pipeline.succeed!
+ end
+ end
end
describe '#exist?' do
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index ae3725a0b08..7e572e2fdc6 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let_it_be(:user) { create(:user, :public_email) }
let_it_be(:namespace) { create_default(:namespace).freeze }
- let_it_be(:project) { create_default(:project, :repository).freeze }
+ let_it_be_with_refind(:project) { create_default(:project, :repository).freeze }
it 'paginates 15 pipelines per page' do
expect(described_class.default_per_page).to eq(15)
@@ -110,14 +110,16 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
- describe '#latest_successful_builds' do
- it 'has a one to many relationship with its latest successful builds' do
+ describe '#latest_successful_jobs' do
+ it 'has a one to many relationship with its latest successful jobs' do
_old_build = create(:ci_build, :retried, pipeline: pipeline)
_expired_build = create(:ci_build, :expired, pipeline: pipeline)
- _failed_builds = create_list(:ci_build, 2, :failed, pipeline: pipeline)
- successful_builds = create_list(:ci_build, 2, :success, pipeline: pipeline)
+ _failed_jobs = [create(:ci_build, :failed, pipeline: pipeline),
+ create(:ci_bridge, :failed, pipeline: pipeline)]
+ successful_jobs = [create(:ci_build, :success, pipeline: pipeline),
+ create(:ci_bridge, :success, pipeline: pipeline)]
- expect(pipeline.latest_successful_builds).to contain_exactly(successful_builds.first, successful_builds.second)
+ expect(pipeline.latest_successful_jobs).to contain_exactly(successful_jobs.first, successful_jobs.second)
end
end
@@ -277,6 +279,29 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
+ describe '.for_status' do
+ subject { described_class.for_status(status) }
+
+ let_it_be(:pipeline1) { create(:ci_pipeline, name: 'Build pipeline', status: :created) }
+ let_it_be(:pipeline2) { create(:ci_pipeline, name: 'Chatops pipeline', status: :failed) }
+
+ context 'when status exists' do
+ let(:status) { :created }
+
+ it 'performs exact compare' do
+ is_expected.to contain_exactly(pipeline1)
+ end
+ end
+
+ context 'when status does not exist' do
+ let(:status) { :pending }
+
+ it 'returns empty' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '.created_after' do
let_it_be(:old_pipeline) { create(:ci_pipeline, created_at: 1.week.ago) }
let_it_be(:pipeline) { create(:ci_pipeline) }
@@ -1328,33 +1353,47 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
%w[succeed! drop! cancel! skip! block! delay!].each do |action|
context "when the pipeline received #{action} event" do
- it 'deletes a persistent ref asynchronously', :sidekiq_inline do
- expect(pipeline.persistent_ref).not_to receive(:delete_refs)
-
- expect(Ci::PipelineCleanupRefWorker).to receive(:perform_async)
- .with(pipeline.id).and_call_original
-
- expect_next_instance_of(Ci::PersistentRef) do |persistent_ref|
- expect(persistent_ref).to receive(:delete_refs)
- .with("refs/#{Repository::REF_PIPELINES}/#{pipeline.id}").once
- end
+ it 'deletes a persistent ref asynchronously' do
+ expect(pipeline.persistent_ref).to receive(:async_delete)
+ expect(pipeline.persistent_ref).not_to receive(:delete)
pipeline.public_send(action)
end
- context 'when pipeline_cleanup_ref_worker_async is disabled' do
+ context 'when pipeline_delete_gitaly_refs_in_batches is disabled' do
before do
- stub_feature_flags(pipeline_cleanup_ref_worker_async: false)
+ stub_feature_flags(pipeline_delete_gitaly_refs_in_batches: false)
end
- it 'deletes a persistent ref synchronously' do
- expect(Ci::PipelineCleanupRefWorker).not_to receive(:perform_async).with(pipeline.id)
+ it 'deletes a persistent ref asynchronously via ::Ci::PipelineCleanupRefWorker', :sidekiq_inline do
+ expect(pipeline.persistent_ref).not_to receive(:delete_refs)
+
+ expect(Ci::PipelineCleanupRefWorker).to receive(:perform_async)
+ .with(pipeline.id).and_call_original
- expect(pipeline.persistent_ref).to receive(:delete_refs).once
- .with("refs/#{Repository::REF_PIPELINES}/#{pipeline.id}")
+ expect_next_instance_of(Ci::PersistentRef) do |persistent_ref|
+ expect(persistent_ref).to receive(:delete_refs)
+ .with("refs/#{Repository::REF_PIPELINES}/#{pipeline.id}").once
+ end
pipeline.public_send(action)
end
+
+ context 'when pipeline_cleanup_ref_worker_async is disabled' do
+ before do
+ stub_feature_flags(pipeline_delete_gitaly_refs_in_batches: false)
+ stub_feature_flags(pipeline_cleanup_ref_worker_async: false)
+ end
+
+ it 'deletes a persistent ref synchronously' do
+ expect(Ci::PipelineCleanupRefWorker).not_to receive(:perform_async).with(pipeline.id)
+
+ expect(pipeline.persistent_ref).to receive(:delete_refs).once
+ .with("refs/#{Repository::REF_PIPELINES}/#{pipeline.id}")
+
+ pipeline.public_send(action)
+ end
+ end
end
end
end
@@ -2241,7 +2280,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
describe '#modified_paths' do
- let(:pipeline) { create(:ci_empty_pipeline, :created) }
+ let(:pipeline) { create(:ci_empty_pipeline, :created, project: project) }
context 'when old and new revisions are set' do
before do
@@ -3447,99 +3486,109 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
describe '#environments_in_self_and_project_descendants' do
subject { pipeline.environments_in_self_and_project_descendants }
- context 'when pipeline is not child nor parent' do
- let_it_be(:pipeline) { create(:ci_pipeline, :created) }
- let_it_be(:build, refind: true) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) }
+ shared_examples_for 'fetches environments in self and project descendant pipelines' do |factory_type|
+ context 'when pipeline is not child nor parent' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+ let_it_be(:job, refind: true) { create(factory_type, :with_deployment, :deploy_to_production, pipeline: pipeline) }
- it 'returns just the pipeline environment' do
- expect(subject).to contain_exactly(build.deployment.environment)
+ it 'returns just the pipeline environment' do
+ expect(subject).to contain_exactly(job.deployment.environment)
+ end
+
+ context 'when deployment SHA is not matched' do
+ before do
+ job.deployment.update!(sha: 'old-sha')
+ end
+
+ it 'does not return environments' do
+ expect(subject).to be_empty
+ end
+ end
end
- context 'when deployment SHA is not matched' do
- before do
- build.deployment.update!(sha: 'old-sha')
+ context 'when an associated environment does not have deployments' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+ let_it_be(:job) { create(factory_type, :stop_review_app, pipeline: pipeline) }
+ let_it_be(:environment) { create(:environment, project: pipeline.project) }
+
+ before_all do
+ job.metadata.update!(expanded_environment_name: environment.name)
end
it 'does not return environments' do
expect(subject).to be_empty
end
end
- end
- context 'when an associated environment does not have deployments' do
- let_it_be(:pipeline) { create(:ci_pipeline, :created) }
- let_it_be(:build) { create(:ci_build, :stop_review_app, pipeline: pipeline) }
- let_it_be(:environment) { create(:environment, project: pipeline.project) }
+ context 'when pipeline is in extended family' do
+ let_it_be(:parent) { create(:ci_pipeline) }
+ let_it_be(:parent_job) { create(factory_type, :with_deployment, environment: 'staging', pipeline: parent) }
- before_all do
- build.metadata.update!(expanded_environment_name: environment.name)
- end
+ let_it_be(:pipeline) { create(:ci_pipeline, child_of: parent) }
+ let_it_be(:job) { create(factory_type, :with_deployment, :deploy_to_production, pipeline: pipeline) }
- it 'does not return environments' do
- expect(subject).to be_empty
- end
- end
+ let_it_be(:child) { create(:ci_pipeline, child_of: pipeline) }
+ let_it_be(:child_job) { create(factory_type, :with_deployment, environment: 'canary', pipeline: child) }
- context 'when pipeline is in extended family' do
- let_it_be(:parent) { create(:ci_pipeline) }
- let_it_be(:parent_build) { create(:ci_build, :with_deployment, environment: 'staging', pipeline: parent) }
+ let_it_be(:grandchild) { create(:ci_pipeline, child_of: child) }
+ let_it_be(:grandchild_job) { create(factory_type, :with_deployment, environment: 'test', pipeline: grandchild) }
- let_it_be(:pipeline) { create(:ci_pipeline, child_of: parent) }
- let_it_be(:build) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) }
+ let_it_be(:sibling) { create(:ci_pipeline, child_of: parent) }
+ let_it_be(:sibling_job) { create(factory_type, :with_deployment, environment: 'review', pipeline: sibling) }
- let_it_be(:child) { create(:ci_pipeline, child_of: pipeline) }
- let_it_be(:child_build) { create(:ci_build, :with_deployment, environment: 'canary', pipeline: child) }
-
- let_it_be(:grandchild) { create(:ci_pipeline, child_of: child) }
- let_it_be(:grandchild_build) { create(:ci_build, :with_deployment, environment: 'test', pipeline: grandchild) }
+ it 'returns its own environment and from all descendants' do
+ expected_environments = [
+ job.deployment.environment,
+ child_job.deployment.environment,
+ grandchild_job.deployment.environment
+ ]
+ expect(subject).to match_array(expected_environments)
+ end
- let_it_be(:sibling) { create(:ci_pipeline, child_of: parent) }
- let_it_be(:sibling_build) { create(:ci_build, :with_deployment, environment: 'review', pipeline: sibling) }
+ it 'does not return parent environment' do
+ expect(subject).not_to include(parent_job.deployment.environment)
+ end
- it 'returns its own environment and from all descendants' do
- expected_environments = [
- build.deployment.environment,
- child_build.deployment.environment,
- grandchild_build.deployment.environment
- ]
- expect(subject).to match_array(expected_environments)
+ it 'does not return sibling environment' do
+ expect(subject).not_to include(sibling_job.deployment.environment)
+ end
end
- it 'does not return parent environment' do
- expect(subject).not_to include(parent_build.deployment.environment)
- end
+ context 'when each pipeline has multiple environments' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+ let_it_be(:job1) { create(factory_type, :with_deployment, :deploy_to_production, pipeline: pipeline) }
+ let_it_be(:job2) { create(factory_type, :with_deployment, environment: 'staging', pipeline: pipeline) }
- it 'does not return sibling environment' do
- expect(subject).not_to include(sibling_build.deployment.environment)
- end
- end
+ let_it_be(:child) { create(:ci_pipeline, child_of: pipeline) }
+ let_it_be(:child_job1) { create(factory_type, :with_deployment, environment: 'canary', pipeline: child) }
+ let_it_be(:child_job2) { create(factory_type, :with_deployment, environment: 'test', pipeline: child) }
- context 'when each pipeline has multiple environments' do
- let_it_be(:pipeline) { create(:ci_pipeline, :created) }
- let_it_be(:build1) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) }
- let_it_be(:build2) { create(:ci_build, :with_deployment, environment: 'staging', pipeline: pipeline) }
+ it 'returns all related environments' do
+ expected_environments = [
+ job1.deployment.environment,
+ job2.deployment.environment,
+ child_job1.deployment.environment,
+ child_job2.deployment.environment
+ ]
+ expect(subject).to match_array(expected_environments)
+ end
+ end
- let_it_be(:child) { create(:ci_pipeline, child_of: pipeline) }
- let_it_be(:child_build1) { create(:ci_build, :with_deployment, environment: 'canary', pipeline: child) }
- let_it_be(:child_build2) { create(:ci_build, :with_deployment, environment: 'test', pipeline: child) }
+ context 'when pipeline has no environment' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :created) }
- it 'returns all related environments' do
- expected_environments = [
- build1.deployment.environment,
- build2.deployment.environment,
- child_build1.deployment.environment,
- child_build2.deployment.environment
- ]
- expect(subject).to match_array(expected_environments)
+ it 'returns empty' do
+ expect(subject).to be_empty
+ end
end
end
- context 'when pipeline has no environment' do
- let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+ context 'when job is build' do
+ it_behaves_like 'fetches environments in self and project descendant pipelines', :ci_build
+ end
- it 'returns empty' do
- expect(subject).to be_empty
- end
+ context 'when job is bridge' do
+ it_behaves_like 'fetches environments in self and project descendant pipelines', :ci_bridge
end
end
@@ -3924,6 +3973,53 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
+ describe '#jobs_in_self_and_project_descendants' do
+ subject(:jobs) { pipeline.jobs_in_self_and_project_descendants }
+
+ let(:pipeline) { create(:ci_pipeline) }
+
+ shared_examples_for 'fetches jobs in self and project descendant pipelines' do |factory_type|
+ let!(:job) { create(factory_type, pipeline: pipeline) }
+
+ context 'when pipeline is standalone' do
+ it 'returns the list of jobs' do
+ expect(jobs).to contain_exactly(job)
+ end
+ end
+
+ context 'when pipeline is parent of another pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let(:child_source_bridge) { child_pipeline.source_pipeline.source_job }
+ let!(:child_job) { create(factory_type, pipeline: child_pipeline) }
+
+ it 'returns the list of jobs' do
+ expect(jobs).to contain_exactly(job, child_job, child_source_bridge)
+ end
+ end
+
+ context 'when pipeline is parent of another parent pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let(:child_source_bridge) { child_pipeline.source_pipeline.source_job }
+ let!(:child_job) { create(factory_type, pipeline: child_pipeline) }
+ let(:child_of_child_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
+ let(:child_of_child_source_bridge) { child_of_child_pipeline.source_pipeline.source_job }
+ let!(:child_of_child_job) { create(factory_type, pipeline: child_of_child_pipeline) }
+
+ it 'returns the list of jobs' do
+ expect(jobs).to contain_exactly(job, child_job, child_of_child_job, child_source_bridge, child_of_child_source_bridge)
+ end
+ end
+ end
+
+ context 'when job is build' do
+ it_behaves_like 'fetches jobs in self and project descendant pipelines', :ci_build
+ end
+
+ context 'when job is bridge' do
+ it_behaves_like 'fetches jobs in self and project descendant pipelines', :ci_bridge
+ end
+ end
+
describe '#find_job_with_archive_artifacts' do
let(:pipeline) { create(:ci_pipeline) }
let!(:old_job) { create(:ci_build, name: 'rspec', retried: true, pipeline: pipeline) }
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index e1c449e18ac..c6af7609778 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::Processable, feature_category: :continuous_integration do
let_it_be(:project) { create(:project) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be_with_refind(:pipeline) { create(:ci_pipeline, project: project) }
describe 'delegations' do
subject { described_class.new }
@@ -31,7 +31,8 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
let_it_be_with_refind(:processable) do
create(:ci_bridge, :success,
- pipeline: pipeline, downstream: downstream_project, description: 'a trigger job', stage_id: stage.id)
+ pipeline: pipeline, downstream: downstream_project, description: 'a trigger job', stage_id: stage.id,
+ environment: 'production')
end
let(:clone_accessors) { ::Ci::Bridge.clone_accessors }
@@ -77,7 +78,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
job_artifacts_requirements job_artifacts_coverage_fuzzing
job_artifacts_requirements_v2
job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx
- job_annotations].freeze
+ job_annotations job_artifacts_annotations].freeze
end
let(:ignore_accessors) do
@@ -503,4 +504,61 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
end
end
end
+
+ describe '.manual_actions' do
+ shared_examples_for 'manual actions for a job' do
+ let!(:manual_but_created) { create(factory_type, :manual, status: :created, pipeline: pipeline) }
+ let!(:manual_but_succeeded) { create(factory_type, :manual, status: :success, pipeline: pipeline) }
+ let!(:manual_action) { create(factory_type, :manual, pipeline: pipeline) }
+
+ subject { described_class.manual_actions }
+
+ it { is_expected.to include(manual_action) }
+ it { is_expected.to include(manual_but_succeeded) }
+ it { is_expected.not_to include(manual_but_created) }
+ end
+
+ it_behaves_like 'manual actions for a job' do
+ let(:factory_type) { :ci_build }
+ end
+
+ it_behaves_like 'manual actions for a job' do
+ let(:factory_type) { :ci_bridge }
+ end
+ end
+
+ describe '#other_manual_actions' do
+ let_it_be(:user) { create(:user) }
+
+ before_all do
+ project.add_developer(user)
+ end
+
+ shared_examples_for 'other manual actions for a job' do
+ let(:job) { create(factory_type, :manual, pipeline: pipeline, project: project) }
+ let!(:other_job) { create(factory_type, :manual, pipeline: pipeline, project: project, name: 'other action') }
+
+ subject { job.other_manual_actions }
+
+ it 'returns other actions' do
+ is_expected.to contain_exactly(other_job)
+ end
+
+ context 'when job is retried' do
+ let!(:new_job) { Ci::RetryJobService.new(project, user).execute(job)[:job] }
+
+ it 'does not return any of them' do
+ is_expected.not_to include(job, new_job)
+ end
+ end
+ end
+
+ it_behaves_like 'other manual actions for a job' do
+ let(:factory_type) { :ci_build }
+ end
+
+ it_behaves_like 'other manual actions for a job' do
+ let(:factory_type) { :ci_bridge }
+ end
+ end
end
diff --git a/spec/models/ci/runner_manager_spec.rb b/spec/models/ci/runner_manager_spec.rb
index 80cffb98dff..bc1d1a0cc49 100644
--- a/spec/models/ci/runner_manager_spec.rb
+++ b/spec/models/ci/runner_manager_spec.rb
@@ -112,6 +112,33 @@ RSpec.describe Ci::RunnerManager, feature_category: :runner_fleet, type: :model
end
end
+ describe '.with_running_builds' do
+ subject(:scope) { described_class.with_running_builds }
+
+ let_it_be(:runner) { create(:ci_runner) }
+ let_it_be(:runner_manager1) { create(:ci_runner_machine, runner: runner) }
+ let_it_be(:runner_manager2) { create(:ci_runner_machine, runner: runner) }
+
+ before_all do
+ create(:ci_runner_machine_build, runner_manager: runner_manager1,
+ build: create(:ci_build, :success, runner: runner))
+ create(:ci_runner_machine_build, runner_manager: runner_manager2,
+ build: create(:ci_build, :running, runner: runner))
+ end
+
+ it { is_expected.to contain_exactly runner_manager2 }
+ end
+
+ describe '.order_id_desc' do
+ subject(:scope) { described_class.order_id_desc }
+
+ let_it_be(:runner_manager1) { create(:ci_runner_machine) }
+ let_it_be(:runner_manager2) { create(:ci_runner_machine) }
+
+ specify { expect(described_class.all).to eq([runner_manager1, runner_manager2]) }
+ it { is_expected.to eq([runner_manager2, runner_manager1]) }
+ end
+
describe '#status', :freeze_time do
let(:runner_manager) { build(:ci_runner_machine, created_at: 8.days.ago) }
@@ -331,4 +358,59 @@ RSpec.describe Ci::RunnerManager, feature_category: :runner_fleet, type: :model
.and change { runner_manager.reload.read_attribute(:executor_type) }
end
end
+
+ describe '#builds' do
+ let_it_be(:runner_manager) { create(:ci_runner_machine) }
+
+ subject(:builds) { runner_manager.builds }
+
+ it { is_expected.to be_empty }
+
+ context 'with an existing build' do
+ let!(:build) { create(:ci_build) }
+ let!(:runner_machine_build) do
+ create(:ci_runner_machine_build, runner_manager: runner_manager, build: build)
+ end
+
+ it { is_expected.to contain_exactly build }
+ end
+ end
+
+ describe '.with_upgrade_status' do
+ subject(:scope) { described_class.with_upgrade_status(upgrade_status) }
+
+ let_it_be(:runner_manager_14_0_0) { create(:ci_runner_machine, version: '14.0.0') }
+ let_it_be(:runner_manager_14_1_0) { create(:ci_runner_machine, version: '14.1.0') }
+ let_it_be(:runner_manager_14_1_1) { create(:ci_runner_machine, version: '14.1.1') }
+
+ before_all do
+ create(:ci_runner_version, version: '14.0.0', status: :available)
+ create(:ci_runner_version, version: '14.1.0', status: :recommended)
+ create(:ci_runner_version, version: '14.1.1', status: :unavailable)
+ end
+
+ context 'as :unavailable' do
+ let(:upgrade_status) { :unavailable }
+
+ it 'returns runners with runner managers whose version is assigned :unavailable' do
+ is_expected.to contain_exactly(runner_manager_14_1_1)
+ end
+ end
+
+ context 'as :available' do
+ let(:upgrade_status) { :available }
+
+ it 'returns runners with runner managers whose version is assigned :available' do
+ is_expected.to contain_exactly(runner_manager_14_0_0)
+ end
+ end
+
+ context 'as :recommended' do
+ let(:upgrade_status) { :recommended }
+
+ it 'returns runners with runner managers whose version is assigned :recommended' do
+ is_expected.to contain_exactly(runner_manager_14_1_0)
+ end
+ end
+ end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 50e2ded695c..56e69cc2b9c 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1968,19 +1968,29 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
describe '.with_upgrade_status' do
- subject { described_class.with_upgrade_status(upgrade_status) }
+ subject(:scope) { described_class.with_upgrade_status(upgrade_status) }
- let_it_be(:runner_14_0_0) { create(:ci_runner, version: '14.0.0') }
- let_it_be(:runner_14_1_0) { create(:ci_runner, version: '14.1.0') }
- let_it_be(:runner_14_1_1) { create(:ci_runner, version: '14.1.1') }
- let_it_be(:runner_version_14_0_0) { create(:ci_runner_version, version: '14.0.0', status: :available) }
- let_it_be(:runner_version_14_1_0) { create(:ci_runner_version, version: '14.1.0', status: :recommended) }
- let_it_be(:runner_version_14_1_1) { create(:ci_runner_version, version: '14.1.1', status: :unavailable) }
+ let_it_be(:runner_14_0_0) { create(:ci_runner) }
+ let_it_be(:runner_14_1_0_and_14_0_0) { create(:ci_runner) }
+ let_it_be(:runner_14_1_0) { create(:ci_runner) }
+ let_it_be(:runner_14_1_1) { create(:ci_runner) }
+
+ before_all do
+ create(:ci_runner_machine, runner: runner_14_1_0_and_14_0_0, version: '14.0.0')
+ create(:ci_runner_machine, runner: runner_14_1_0_and_14_0_0, version: '14.1.0')
+ create(:ci_runner_machine, runner: runner_14_0_0, version: '14.0.0')
+ create(:ci_runner_machine, runner: runner_14_1_0, version: '14.1.0')
+ create(:ci_runner_machine, runner: runner_14_1_1, version: '14.1.1')
+
+ create(:ci_runner_version, version: '14.0.0', status: :available)
+ create(:ci_runner_version, version: '14.1.0', status: :recommended)
+ create(:ci_runner_version, version: '14.1.1', status: :unavailable)
+ end
context ':unavailable' do
let(:upgrade_status) { :unavailable }
- it 'returns runners whose version is assigned :unavailable' do
+ it 'returns runners with runner managers whose version is assigned :unavailable' do
is_expected.to contain_exactly(runner_14_1_1)
end
end
@@ -1988,23 +1998,27 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
context ':available' do
let(:upgrade_status) { :available }
- it 'returns runners whose version is assigned :available' do
- is_expected.to contain_exactly(runner_14_0_0)
+ it 'returns runners with runner managers whose version is assigned :available' do
+ is_expected.to contain_exactly(runner_14_0_0, runner_14_1_0_and_14_0_0)
end
end
context ':recommended' do
let(:upgrade_status) { :recommended }
- it 'returns runners whose version is assigned :recommended' do
- is_expected.to contain_exactly(runner_14_1_0)
+ it 'returns runners with runner managers whose version is assigned :recommended' do
+ is_expected.to contain_exactly(runner_14_1_0_and_14_0_0, runner_14_1_0)
end
end
describe 'composed with other scopes' do
subject { described_class.active(false).with_upgrade_status(:available) }
- let(:inactive_runner_14_0_0) { create(:ci_runner, version: '14.0.0', active: false) }
+ before do
+ create(:ci_runner_machine, runner: inactive_runner_14_0_0, version: '14.0.0')
+ end
+
+ let(:inactive_runner_14_0_0) { create(:ci_runner, active: false) }
it 'returns runner matching the composed scope' do
is_expected.to contain_exactly(inactive_runner_14_0_0)
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 73df283d996..7dafec2536f 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -26,7 +26,6 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching,
it { is_expected.to have_many(:kubernetes_namespaces) }
it { is_expected.to have_one(:cluster_project) }
it { is_expected.to have_many(:deployment_clusters) }
- it { is_expected.to have_many(:metrics_dashboard_annotations) }
it { is_expected.to have_many(:successful_deployments) }
it { is_expected.to have_many(:environments).through(:deployments) }
diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb
index 1d2d89573bb..be80aced3fd 100644
--- a/spec/models/commit_collection_spec.rb
+++ b/spec/models/commit_collection_spec.rb
@@ -27,11 +27,23 @@ RSpec.describe CommitCollection, feature_category: :source_code_management do
expect(collection.committers).to be_empty
end
- it 'excludes authors of merge commits' do
- commit = project.commit("60ecb67744cb56576c30214ff52294f8ce2def98")
- create(:user, email: commit.committer_email.upcase)
+ context 'when is with_merge_commits false' do
+ it 'excludes authors of merge commits' do
+ commit = project.commit("60ecb67744cb56576c30214ff52294f8ce2def98")
+ create(:user, email: commit.committer_email.upcase)
- expect(collection.committers).to be_empty
+ expect(collection.committers).to be_empty
+ end
+ end
+
+ context 'when is with_merge_commits true' do
+ let(:commit) { project.commit("60ecb67744cb56576c30214ff52294f8ce2def98") }
+
+ it 'does not exclude authors of merge commits' do
+ user = create(:user, email: commit.committer_email.upcase)
+
+ expect(collection.committers(with_merge_commits: true)).to contain_exactly(user)
+ end
end
context 'when committer email is nil' do
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index dd3d4f1865c..7ab43611108 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -831,7 +831,8 @@ eos
expect(described_class.valid_hash?('a' * 6)).to be false
expect(described_class.valid_hash?('a' * 7)).to be true
expect(described_class.valid_hash?('a' * 40)).to be true
- expect(described_class.valid_hash?('a' * 41)).to be false
+ expect(described_class.valid_hash?('a' * 64)).to be true
+ expect(described_class.valid_hash?('a' * 65)).to be false
end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index ac356bcd65a..9ce9f0e13b5 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -1067,4 +1067,26 @@ RSpec.describe CommitStatus, feature_category: :continuous_integration do
it_behaves_like 'having enum with nil value'
end
+
+ describe 'routing table switch' do
+ context 'with ff disabled' do
+ before do
+ stub_feature_flags(ci_partitioning_use_ci_builds_routing_table: false)
+ end
+
+ it 'uses the legacy table' do
+ expect(described_class.table_name).to eq('ci_builds')
+ end
+ end
+
+ context 'with ff enabled' do
+ before do
+ stub_feature_flags(ci_partitioning_use_ci_builds_routing_table: true)
+ end
+
+ it 'uses the routing table' do
+ expect(described_class.table_name).to eq('p_ci_builds')
+ end
+ end
+ end
end
diff --git a/spec/models/concerns/approvable_spec.rb b/spec/models/concerns/approvable_spec.rb
index 25a4f51cd82..49b31d7fd89 100644
--- a/spec/models/concerns/approvable_spec.rb
+++ b/spec/models/concerns/approvable_spec.rb
@@ -32,6 +32,24 @@ RSpec.describe Approvable do
end
end
+ describe '#approved?' do
+ context 'when a merge request is approved' do
+ before do
+ create(:approval, merge_request: merge_request, user: user)
+ end
+
+ it 'returns true' do
+ expect(merge_request.approved?).to eq(true)
+ end
+ end
+
+ context 'when a merge request is not approved' do
+ it 'returns false' do
+ expect(merge_request.approved?).to eq(false)
+ end
+ end
+ end
+
describe '#eligible_for_approval_by?' do
subject { merge_request.eligible_for_approval_by?(user) }
@@ -40,14 +58,14 @@ RSpec.describe Approvable do
end
it 'returns true' do
- is_expected.to be_truthy
+ is_expected.to eq(true)
end
context 'when a user has approved' do
let!(:approval) { create(:approval, merge_request: merge_request, user: user) }
it 'returns false' do
- is_expected.to be_falsy
+ is_expected.to eq(false)
end
end
@@ -55,7 +73,7 @@ RSpec.describe Approvable do
let(:user) { nil }
it 'returns false' do
- is_expected.to be_falsy
+ is_expected.to eq(false)
end
end
end
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index 65b7da20bbc..3c50003ba2f 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe BulkInsertSafe, feature_category: :database do
- before(:all) do
+ before_all do
ActiveRecord::Schema.define do
create_table :_test_bulk_insert_parent_items, force: true do |t|
t.string :name, null: false
diff --git a/spec/models/concerns/bulk_insertable_associations_spec.rb b/spec/models/concerns/bulk_insertable_associations_spec.rb
index 3187dcd8f93..3796f60c705 100644
--- a/spec/models/concerns/bulk_insertable_associations_spec.rb
+++ b/spec/models/concerns/bulk_insertable_associations_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe BulkInsertableAssociations do
end
end
- before(:all) do
+ before_all do
ActiveRecord::Schema.define do
create_table :_test_bulk_parents, force: true do |t|
t.string :name, null: true
diff --git a/spec/models/concerns/ci/partitionable/switch_spec.rb b/spec/models/concerns/ci/partitionable/switch_spec.rb
index 551ae111fa4..0041a33e50e 100644
--- a/spec/models/concerns/ci/partitionable/switch_spec.rb
+++ b/spec/models/concerns/ci/partitionable/switch_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe Ci::Partitionable::Switch, :aggregate_failures do
id serial NOT NULL PRIMARY KEY,
job_id int,
partition_id int NOT NULL DEFAULT 1,
+ type text,
expanded_environment_name text);
CREATE TABLE _test_p_ci_jobs_metadata (
@@ -89,6 +90,25 @@ RSpec.describe Ci::Partitionable::Switch, :aggregate_failures do
it { expect(partitioned_model.sequence_name).to eq('_test_ci_jobs_metadata_id_seq') }
+ context 'with singe table inheritance' do
+ let(:child_model) do
+ Class.new(model) do
+ def self.name
+ 'TestSwitchJobMetadataChild'
+ end
+ end
+ end
+
+ it 'adds a Partitioned model for each descendant' do
+ expect(model::Partitioned).not_to eq(child_model::Partitioned)
+ end
+
+ it 'uses the parent name in STI queries' do
+ recorder = ActiveRecord::QueryRecorder.new { child_model.all.load }
+ expect(recorder.log).to include(/"type" = 'TestSwitchJobMetadataChild'/)
+ end
+ end
+
context 'when switching the tables' do
before do
stub_feature_flags(table_rollout_flag => false)
@@ -172,11 +192,11 @@ RSpec.describe Ci::Partitionable::Switch, :aggregate_failures do
it 'writes' do
rollout_and_rollback_flag(
-> {
- expect(sql(filter: /INSERT .* jobs_metadata/) { jobs_model.find(job.id).create_metadata! })
+ expect(sql(filter: [/INSERT/, /jobs_metadata/]) { jobs_model.find(job.id).create_metadata! })
.to all match(/INSERT INTO "_test_ci_jobs_metadata"/)
},
-> {
- expect(sql(filter: /INSERT .* jobs_metadata/) { jobs_model.find(job.id).create_metadata! })
+ expect(sql(filter: [/INSERT/, /jobs_metadata/]) { jobs_model.find(job.id).create_metadata! })
.to all match(/INSERT INTO "_test_p_ci_jobs_metadata"/)
}
)
@@ -190,11 +210,11 @@ RSpec.describe Ci::Partitionable::Switch, :aggregate_failures do
rollout_and_rollback_flag(
-> {
- expect(sql(filter: /DELETE .* jobs_metadata/) { jobs_model.last.destroy! })
+ expect(sql(filter: [/DELETE/, /jobs_metadata/]) { jobs_model.last.destroy! })
.to all match(/DELETE FROM "_test_ci_jobs_metadata"/)
},
-> {
- expect(sql(filter: /DELETE .* jobs_metadata/) { jobs_model.last.destroy! })
+ expect(sql(filter: [/DELETE/, /jobs_metadata/]) { jobs_model.last.destroy! })
.to all match(/DELETE FROM "_test_p_ci_jobs_metadata"/)
}
)
@@ -252,11 +272,11 @@ RSpec.describe Ci::Partitionable::Switch, :aggregate_failures do
rollout_and_rollback_flag(
-> {
- expect(sql(filter: /INSERT .* jobs_metadata/) { jobs_model.create!(attrs) })
+ expect(sql(filter: [/INSERT/, /jobs_metadata/]) { jobs_model.create!(attrs) })
.to all match(/INSERT INTO "_test_ci_jobs_metadata" .* 'test_env_name'/)
},
-> {
- expect(sql(filter: /INSERT .* jobs_metadata/) { jobs_model.create!(attrs) })
+ expect(sql(filter: [/INSERT/, /jobs_metadata/]) { jobs_model.create!(attrs) })
.to all match(/INSERT INTO "_test_p_ci_jobs_metadata" .* 'test_env_name'/)
}
)
@@ -307,11 +327,9 @@ RSpec.describe Ci::Partitionable::Switch, :aggregate_failures do
end
def sql(filter: nil, &block)
- result = ActiveRecord::QueryRecorder.new(&block)
- result = result.log
-
- return result unless filter
-
- result.select { |statement| statement.match?(filter) }
+ ActiveRecord::QueryRecorder.new(&block)
+ .log
+ .select { |statement| Array.wrap(filter).all? { |regex| statement.match?(regex) } }
+ .tap { |result| expect(result).not_to be_empty }
end
end
diff --git a/spec/models/concerns/ci/partitionable_spec.rb b/spec/models/concerns/ci/partitionable_spec.rb
index d41654e547e..6daafc78cff 100644
--- a/spec/models/concerns/ci/partitionable_spec.rb
+++ b/spec/models/concerns/ci/partitionable_spec.rb
@@ -25,7 +25,11 @@ RSpec.describe Ci::Partitionable do
end
context 'with through options' do
+ let(:disable_partitionable_switch) { nil }
+
before do
+ stub_env('DISABLE_PARTITIONABLE_SWITCH', disable_partitionable_switch)
+
allow(ActiveSupport::DescendantsTracker).to receive(:store_inherited)
stub_const("#{described_class}::Testing::PARTITIONABLE_MODELS", [ci_model.name])
@@ -39,6 +43,12 @@ RSpec.describe Ci::Partitionable do
it { expect(ci_model.routing_table_name_flag).to eq(:some_flag) }
it { expect(ci_model.ancestors).to include(described_class::Switch) }
+
+ context 'when DISABLE_PARTITIONABLE_SWITCH is set' do
+ let(:disable_partitionable_switch) { true }
+
+ it { expect(ci_model.ancestors).not_to include(described_class::Switch) }
+ end
end
context 'with partitioned options' do
diff --git a/spec/models/concerns/cross_database_ignored_tables_spec.rb b/spec/models/concerns/cross_database_ignored_tables_spec.rb
new file mode 100644
index 00000000000..901a6f39eaf
--- /dev/null
+++ b/spec/models/concerns/cross_database_ignored_tables_spec.rb
@@ -0,0 +1,222 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CrossDatabaseIgnoredTables, feature_category: :cell, query_analyzers: false do
+ # We enable only the PreventCrossDatabaseModification query analyzer in these tests
+ before do
+ stub_const("CiModel", ci_model)
+ allow(Gitlab::Database::QueryAnalyzer.instance).to receive(:all_analyzers).and_return(
+ [Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification]
+ )
+ end
+
+ around do |example|
+ Gitlab::Database::QueryAnalyzer.instance.within { example.run }
+ end
+
+ let(:cross_database_exception) do
+ Gitlab::Database::QueryAnalyzers::
+ PreventCrossDatabaseModification::CrossDatabaseModificationAcrossUnsupportedTablesError
+ end
+
+ let(:ci_model) do
+ Class.new(Ci::ApplicationRecord) do
+ self.table_name = '_test_gitlab_ci_items'
+
+ belongs_to :main_model_object, class_name: 'MainModel',
+ inverse_of: 'ci_model_object', foreign_key: 'main_model_id'
+ end
+ end
+
+ before_all do
+ Ci::ApplicationRecord.connection.execute(
+ 'CREATE TABLE _test_gitlab_ci_items(
+ id BIGSERIAL PRIMARY KEY, main_model_id INTEGER, updated_at timestamp without time zone
+ )'
+ )
+ ApplicationRecord.connection.execute(
+ 'CREATE TABLE _test_gitlab_main_items(
+ id BIGSERIAL PRIMARY KEY, updated_at timestamp without time zone
+ )'
+ )
+ end
+
+ after(:all) do
+ ApplicationRecord.connection.execute('DROP TABLE _test_gitlab_main_items')
+ Ci::ApplicationRecord.connection.execute('DROP TABLE _test_gitlab_ci_items')
+ end
+
+ describe '.cross_database_ignore_tables' do
+ context 'when the tables are not ignored' do
+ before do
+ stub_const("MainModel", create_main_model([], []))
+ end
+
+ it 'raises an error when we doing cross-database modification using create' do
+ expect { MainModel.create! }.to raise_error(cross_database_exception)
+ end
+
+ it 'raises an error when we doing cross-database modification using update' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.update!(updated_at: Time.zone.now) }.to raise_error(cross_database_exception)
+ end
+
+ it 'raises an error when we doing cross-database modification using destroy' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.destroy! }.to raise_error(cross_database_exception)
+ end
+ end
+
+ context 'when the tables are ignored on save' do
+ before do
+ stub_const("MainModel", create_main_model(%w[_test_gitlab_ci_items], %I[save]))
+ end
+
+ it 'does not raise an error when creating a new object' do
+ expect { MainModel.create! }.not_to raise_error
+ end
+
+ it 'does not raise an error when updating an existing object' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.update!(updated_at: Time.zone.now) }.not_to raise_error
+ end
+
+ it 'still raises an error when deleting an object' do # save doesn't include destroy
+ main_model_object = create_main_model_object
+ expect { main_model_object.destroy! }.to raise_error(cross_database_exception)
+ end
+ end
+
+ context 'when the tables are ignored on save with if statement' do
+ before do
+ stub_const(
+ "MainModel",
+ create_main_model(
+ %w[_test_gitlab_ci_items],
+ %I[save],
+ & proc { condition }
+ )
+ )
+
+ expect_next_instance_of(MainModel) do |instance|
+ allow(instance).to receive(:condition).and_return(condition_value)
+ end
+ end
+
+ context 'when condition returns true' do
+ let(:condition_value) { true }
+
+ it 'does not raise an error on creating a new object' do
+ expect { MainModel.create! }.not_to raise_error
+ end
+ end
+
+ context 'when condition returns false' do
+ let(:condition_value) { false }
+
+ it 'raises an error on creating a new object' do
+ expect { MainModel.create! }.to raise_error(cross_database_exception)
+ end
+ end
+ end
+
+ context 'when the tables are ignored on create' do
+ before do
+ stub_const("MainModel", create_main_model(%w[_test_gitlab_ci_items], %I[create]))
+ end
+
+ it 'does not raise an error when creating a new object' do
+ expect { MainModel.create! }.not_to raise_error
+ end
+
+ it 'raises an error when updating an existing object' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.update!(updated_at: Time.zone.now) }.to raise_error(cross_database_exception)
+ end
+
+ it 'still raises an error when deleting an object' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.destroy! }.to raise_error(cross_database_exception)
+ end
+ end
+
+ context 'when the tables are ignored on update' do
+ before do
+ stub_const("MainModel", create_main_model(%w[_test_gitlab_ci_items], %I[update]))
+ end
+
+ it 'raises an error when creating a new object' do
+ expect { MainModel.create! }.to raise_error(cross_database_exception)
+ end
+
+ it 'does not raise an error when updating an existing object' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.update!(updated_at: Time.zone.now) }.not_to raise_error
+ end
+
+ it 'still raises an error when deleting an object' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.destroy! }.to raise_error(cross_database_exception)
+ end
+ end
+
+ context 'when the tables are ignored on create and destroy' do
+ before do
+ stub_const("MainModel", create_main_model(%w[_test_gitlab_ci_items], %I[create destroy]))
+ end
+
+ it 'does not raise an error when creating a new object' do
+ expect { MainModel.create! }.not_to raise_error
+ end
+
+ it 'raises an error when updating an existing object' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.update!(updated_at: Time.zone.now) }.to raise_error(cross_database_exception)
+ end
+
+ it 'does not raise an error when deleting an object' do
+ main_model_object = create_main_model_object
+ expect { main_model_object.destroy! }.not_to raise_error
+ end
+ end
+ end
+
+ def create_main_model(ignored_tables, events, &condition_block)
+ klass = Class.new(ApplicationRecord) do
+ include CrossDatabaseIgnoredTables
+
+ self.table_name = '_test_gitlab_main_items'
+
+ has_one :ci_model_object, autosave: true, class_name: 'CiModel',
+ inverse_of: 'main_model_object', foreign_key: 'main_model_id',
+ dependent: :nullify, touch: true
+ before_create :prepare_ci_model_object
+
+ def condition
+ true
+ end
+
+ def prepare_ci_model_object
+ build_ci_model_object
+ end
+ end
+
+ if ignored_tables.any? && events.any?
+ klass.class_eval do
+ cross_database_ignore_tables ignored_tables, on: events, url: "TODO", if: condition_block
+ end
+ end
+
+ klass
+ end
+
+ # This helper allows creating a test model object without raising a cross database exception
+ def create_main_model_object
+ Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.temporary_ignore_tables_in_transaction(
+ [CiModel.table_name], url: "TODO"
+ ) do
+ MainModel.create!
+ end
+ end
+end
diff --git a/spec/models/concerns/cross_database_modification_spec.rb b/spec/models/concerns/cross_database_modification_spec.rb
index eaebf613cb5..bca37ffa9d9 100644
--- a/spec/models/concerns/cross_database_modification_spec.rb
+++ b/spec/models/concerns/cross_database_modification_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe CrossDatabaseModification do
expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
Project.transaction do
- expect(ApplicationRecord.gitlab_transactions_stack).to contain_exactly(:gitlab_main)
+ expect(ApplicationRecord.gitlab_transactions_stack).to contain_exactly(:gitlab_main_cell)
Project.first
end
diff --git a/spec/models/concerns/enum_inheritance_spec.rb b/spec/models/concerns/enum_inheritance_spec.rb
new file mode 100644
index 00000000000..492503dad36
--- /dev/null
+++ b/spec/models/concerns/enum_inheritance_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module EnumInheritableTestCase
+ class Animal < ActiveRecord::Base
+ include EnumInheritance
+
+ def self.table_name = '_test_animals'
+ def self.inheritance_column = 'species'
+
+ enum species: {
+ dog: 1,
+ cat: 2,
+ bird: 3
+ }
+
+ def self.inheritance_column_to_class_map = {
+ dog: 'EnumInheritableTestCase::Dog',
+ cat: 'EnumInheritableTestCase::Cat'
+ }.freeze
+ end
+
+ class Dog < Animal; end
+ class Cat < Animal; end
+end
+
+RSpec.describe EnumInheritance, feature_category: :shared do
+ describe '.sti_class_to_enum_map' do
+ it 'is the inverse of sti_class_to_enum_map' do
+ expect(EnumInheritableTestCase::Animal.sti_class_to_enum_map).to include({
+ 'EnumInheritableTestCase::Dog' => :dog,
+ 'EnumInheritableTestCase::Cat' => :cat
+ })
+ end
+ end
+
+ describe '.sti_class_for' do
+ it 'is the base class if no mapping for type is provided' do
+ expect(EnumInheritableTestCase::Animal.sti_class_for('bird')).to be(EnumInheritableTestCase::Animal)
+ end
+
+ it 'is class if mapping for type is provided' do
+ expect(EnumInheritableTestCase::Animal.sti_class_for('dog')).to be(EnumInheritableTestCase::Dog)
+ end
+ end
+
+ describe '.sti_name' do
+ it 'is nil if map does not exist' do
+ expect(EnumInheritableTestCase::Animal.sti_name).to eq("")
+ end
+
+ it 'is nil if map exists' do
+ expect(EnumInheritableTestCase::Dog.sti_name).to eq("dog")
+ end
+ end
+
+ describe 'querying' do
+ before_all do
+ EnumInheritableTestCase::Animal.connection.execute(<<~SQL)
+ CREATE TABLE _test_animals (
+ id bigserial primary key not null,
+ species bigint not null
+ );
+ SQL
+ end
+
+ let_it_be(:dog) { EnumInheritableTestCase::Dog.create! }
+ let_it_be(:cat) { EnumInheritableTestCase::Cat.create! }
+ let_it_be(:bird) { EnumInheritableTestCase::Animal.create!(species: :bird) }
+
+ describe 'object class when querying' do
+ context 'when mapping for type exists' do
+ it 'is the super class', :aggregate_failures do
+ queried_dog = EnumInheritableTestCase::Animal.find_by(id: dog.id)
+ expect(queried_dog).to eq(dog)
+ # Test below is already part of the test above, but it makes the desired behavior explicit
+ expect(queried_dog.class).to eq(EnumInheritableTestCase::Dog)
+
+ queried_cat = EnumInheritableTestCase::Animal.find_by(id: cat.id)
+ expect(queried_cat).to eq(cat)
+ expect(queried_cat.class).to eq(EnumInheritableTestCase::Cat)
+ end
+ end
+
+ context 'when mapping does not exist' do
+ it 'is the base class' do
+ expect(EnumInheritableTestCase::Animal.find_by(id: bird.id).class).to eq(EnumInheritableTestCase::Animal)
+ end
+ end
+ end
+
+ it 'finds by type' do
+ expect(EnumInheritableTestCase::Animal.where(species: :dog).first!).to eq(dog)
+ end
+ end
+end
diff --git a/spec/models/concerns/integrations/reset_secret_fields_spec.rb b/spec/models/concerns/integrations/reset_secret_fields_spec.rb
index 3b15b95fea9..411c79624de 100644
--- a/spec/models/concerns/integrations/reset_secret_fields_spec.rb
+++ b/spec/models/concerns/integrations/reset_secret_fields_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
-RSpec.describe Integrations::ResetSecretFields do
+RSpec.describe Integrations::ResetSecretFields, feature_category: :integrations do
let(:described_class) do
Class.new(Integration) do
- field :username, type: 'text'
- field :url, type: 'text', exposes_secrets: true
- field :api_url, type: 'text', exposes_secrets: true
- field :password, type: 'password'
- field :token, type: 'password'
+ field :username, type: :text
+ field :url, type: :text, exposes_secrets: true
+ field :api_url, type: :text, exposes_secrets: true
+ field :password, type: :password
+ field :token, type: :password
end
end
diff --git a/spec/models/concerns/milestoneable_spec.rb b/spec/models/concerns/milestoneable_spec.rb
index 961eac4710d..cbb0cbf063f 100644
--- a/spec/models/concerns/milestoneable_spec.rb
+++ b/spec/models/concerns/milestoneable_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
RSpec.describe Milestoneable do
- let(:user) { create(:user) }
- let(:milestone) { create(:milestone, project: project) }
+ let_it_be(:group, reload: true) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
shared_examples_for 'an object that can be assigned a milestone' do
describe 'Validation' do
describe 'milestone' do
- let(:project) { create(:project, :repository) }
let(:milestone_id) { milestone.id }
subject { milestoneable_class.new(params) }
@@ -39,8 +40,6 @@ RSpec.describe Milestoneable do
end
describe '#milestone_available?' do
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
let(:issue) { create(:issue, project: project) }
def build_milestoneable(milestone_id)
@@ -62,9 +61,9 @@ RSpec.describe Milestoneable do
it 'returns true with a milestone from the the parent of the issue project group' do
parent = create(:group)
group.update!(parent: parent)
- milestone = create(:milestone, group: parent)
+ parent_milestone = create(:milestone, group: parent)
- expect(build_milestoneable(milestone.id).milestone_available?).to be(true)
+ expect(build_milestoneable(parent_milestone.id).milestone_available?).to be(true)
end
it 'returns true with a blank milestone' do
@@ -86,9 +85,6 @@ RSpec.describe Milestoneable do
end
describe '#supports_milestone?' do
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
-
context "for issues" do
let(:issue) { build(:issue, project: project) }
@@ -215,6 +211,15 @@ RSpec.describe Milestoneable do
end
it_behaves_like 'an object that can be assigned a milestone'
+
+ describe '#milestone_available?' do
+ it 'returns true with a milestone from the issue group' do
+ milestone = create(:milestone, group: group)
+ milestoneable = milestoneable_class.new(namespace: group, milestone_id: milestone.id)
+
+ expect(milestoneable.milestone_available?).to be_truthy
+ end
+ end
end
context 'MergeRequests' do
diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb
index c1323d20d83..dd180749e94 100644
--- a/spec/models/concerns/noteable_spec.rb
+++ b/spec/models/concerns/noteable_spec.rb
@@ -391,8 +391,8 @@ RSpec.describe Noteable, feature_category: :code_review_workflow do
end
describe '.resolvable_types' do
- it 'exposes the replyable types' do
- expect(described_class.resolvable_types).to include('MergeRequest', 'DesignManagement::Design')
+ it 'exposes the resolvable types' do
+ expect(described_class.resolvable_types).to include('Issue', 'MergeRequest', 'DesignManagement::Design')
end
end
diff --git a/spec/initializers/active_record_relation_union_reset_spec.rb b/spec/models/concerns/reset_on_union_error_spec.rb
index 013dfa1b49b..70993b92c90 100644
--- a/spec/initializers/active_record_relation_union_reset_spec.rb
+++ b/spec/models/concerns/reset_on_union_error_spec.rb
@@ -2,10 +2,9 @@
require 'spec_helper'
-# rubocop:disable Database/MultipleDatabases
-RSpec.describe ActiveRecordRelationUnionReset, :delete, feature_category: :shared do
+RSpec.describe ResetOnUnionError, :delete, feature_category: :shared do
let(:test_unioned_model) do
- Class.new(ActiveRecord::Base) do
+ Class.new(ApplicationRecord) do
include FromUnion
self.table_name = '_test_unioned_model'
@@ -17,7 +16,7 @@ RSpec.describe ActiveRecordRelationUnionReset, :delete, feature_category: :share
end
before(:context) do
- ActiveRecord::Base.connection.execute(<<~SQL)
+ ApplicationRecord.connection.execute(<<~SQL)
CREATE TABLE _test_unioned_model (
id serial NOT NULL PRIMARY KEY,
created_at timestamptz NOT NULL
@@ -26,7 +25,7 @@ RSpec.describe ActiveRecordRelationUnionReset, :delete, feature_category: :share
end
after(:context) do
- ActiveRecord::Base.connection.execute(<<~SQL)
+ ApplicationRecord.connection.execute(<<~SQL)
DROP TABLE _test_unioned_model
SQL
end
@@ -44,13 +43,13 @@ RSpec.describe ActiveRecordRelationUnionReset, :delete, feature_category: :share
before do
load_query
- ActiveRecord::Base.connection.execute(<<~SQL)
+ ApplicationRecord.connection.execute(<<~SQL)
ALTER TABLE _test_unioned_model ADD COLUMN _test_new_column int;
SQL
end
after do
- ActiveRecord::Base.connection.execute(<<~SQL)
+ ApplicationRecord.connection.execute(<<~SQL)
ALTER TABLE _test_unioned_model DROP COLUMN _test_new_column;
SQL
@@ -131,4 +130,3 @@ RSpec.describe ActiveRecordRelationUnionReset, :delete, feature_category: :share
end
end
end
-# rubocop:enable Database/MultipleDatabases
diff --git a/spec/models/concerns/resolvable_discussion_spec.rb b/spec/models/concerns/resolvable_discussion_spec.rb
index 7e08f47fb5a..1423b56fa5d 100644
--- a/spec/models/concerns/resolvable_discussion_spec.rb
+++ b/spec/models/concerns/resolvable_discussion_spec.rb
@@ -2,14 +2,16 @@
require 'spec_helper'
-RSpec.describe Discussion, ResolvableDiscussion do
+RSpec.describe Discussion, ResolvableDiscussion, feature_category: :code_review_workflow do
subject { described_class.new([first_note, second_note, third_note]) }
- let(:first_note) { create(:discussion_note_on_merge_request) }
- let(:noteable) { first_note.noteable }
- let(:project) { first_note.project }
- let(:second_note) { create(:discussion_note_on_merge_request, noteable: noteable, project: project, in_reply_to: first_note) }
- let(:third_note) { create(:discussion_note_on_merge_request, noteable: noteable, project: project) }
+ let_it_be(:first_note, reload: true) { create(:discussion_note_on_merge_request) }
+ let_it_be(:noteable) { first_note.noteable }
+ let_it_be(:project) { first_note.project }
+ let_it_be(:second_note, reload: true) { create(:discussion_note_on_merge_request, noteable: noteable, project: project, in_reply_to: first_note) }
+ let_it_be(:third_note, reload: true) { create(:discussion_note_on_merge_request, noteable: noteable, project: project) }
+
+ let_it_be(:current_user) { create(:user) }
describe "#resolvable?" do
context "when potentially resolvable" do
@@ -154,8 +156,6 @@ RSpec.describe Discussion, ResolvableDiscussion do
end
describe "#can_resolve?" do
- let(:current_user) { create(:user) }
-
context "when not resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(false)
@@ -201,8 +201,8 @@ RSpec.describe Discussion, ResolvableDiscussion do
end
context "when the signed in user can push to the project" do
- before do
- subject.project.add_maintainer(current_user)
+ before_all do
+ project.add_maintainer(current_user)
end
it "returns true" do
@@ -211,7 +211,7 @@ RSpec.describe Discussion, ResolvableDiscussion do
context "when the noteable has no author" do
before do
- subject.noteable.author = nil
+ noteable.author = nil
end
it "returns true" do
@@ -240,8 +240,6 @@ RSpec.describe Discussion, ResolvableDiscussion do
end
describe "#resolve!" do
- let(:current_user) { create(:user) }
-
context "when not resolvable" do
before do
allow(subject).to receive(:resolvable?).and_return(false)
@@ -271,8 +269,8 @@ RSpec.describe Discussion, ResolvableDiscussion do
end
context "when resolvable" do
- let(:user) { create(:user) }
- let(:second_note) { create(:diff_note_on_commit) } # unresolvable
+ let_it_be(:user) { create(:user) }
+ let_it_be(:second_note) { create(:diff_note_on_commit) } # unresolvable
before do
allow(subject).to receive(:resolvable?).and_return(true)
@@ -447,6 +445,12 @@ RSpec.describe Discussion, ResolvableDiscussion do
expect(subject.resolved?).to be true
end
+
+ it "expires the etag cache of the noteable" do
+ expect(subject.noteable).to receive(:expire_note_etag_cache)
+
+ subject.resolve!(current_user)
+ end
end
end
end
@@ -463,7 +467,7 @@ RSpec.describe Discussion, ResolvableDiscussion do
end
context "when resolvable" do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
allow(subject).to receive(:resolvable?).and_return(true)
@@ -527,6 +531,12 @@ RSpec.describe Discussion, ResolvableDiscussion do
expect(subject.resolved?).to be false
end
+
+ it "expires the etag cache of the noteable" do
+ expect(subject.noteable).to receive(:expire_note_etag_cache)
+
+ subject.unresolve!
+ end
end
context "when some resolvable notes are resolved" do
@@ -565,7 +575,6 @@ RSpec.describe Discussion, ResolvableDiscussion do
end
describe "#last_resolved_note" do
- let(:current_user) { create(:user) }
let(:time) { Time.current.utc }
before do
diff --git a/spec/models/concerns/resolvable_note_spec.rb b/spec/models/concerns/resolvable_note_spec.rb
index 09646f6c4eb..c57f3ba6d84 100644
--- a/spec/models/concerns/resolvable_note_spec.rb
+++ b/spec/models/concerns/resolvable_note_spec.rb
@@ -2,41 +2,42 @@
require 'spec_helper'
-RSpec.describe Note, ResolvableNote do
- let(:project) { create(:project, :repository) }
- let(:merge_request) { create(:merge_request, source_project: project) }
+RSpec.describe Note, ResolvableNote, feature_category: :code_review_workflow do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
subject { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
context 'resolvability scopes' do
- let!(:note1) { create(:note, project: project) }
- let!(:note2) { create(:diff_note_on_commit, project: project) }
- let!(:note3) { create(:diff_note_on_merge_request, :resolved, noteable: merge_request, project: project) }
- let!(:note4) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
- let!(:note5) { create(:discussion_note_on_issue, project: project) }
- let!(:note6) { create(:discussion_note_on_merge_request, :system, noteable: merge_request, project: project) }
+ let_it_be(:note1) { create(:note, project: project) }
+ let_it_be(:note2) { create(:diff_note_on_commit, project: project) }
+ let_it_be(:note3) { create(:diff_note_on_merge_request, :resolved, noteable: merge_request, project: project) }
+ let_it_be(:note4) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
+ let_it_be(:note5) { create(:discussion_note_on_issue, project: project) }
+ let_it_be(:note6) { create(:discussion_note_on_merge_request, :system, noteable: merge_request, project: project) }
+ let_it_be(:note7) { create(:discussion_note_on_issue, :resolved, project: project) }
describe '.potentially_resolvable' do
- it 'includes diff and discussion notes on merge requests' do
- expect(described_class.potentially_resolvable).to match_array([note3, note4, note6])
+ it 'includes diff and discussion notes on issues and merge requests' do
+ expect(described_class.potentially_resolvable).to match_array([note3, note4, note5, note6, note7])
end
end
describe '.resolvable' do
- it 'includes non-system diff and discussion notes on merge requests' do
- expect(described_class.resolvable).to match_array([note3, note4])
+ it 'includes non-system diff and discussion notes on issues and merge requests' do
+ expect(described_class.resolvable).to match_array([note3, note4, note5, note7])
end
end
describe '.resolved' do
- it 'includes resolved non-system diff and discussion notes on merge requests' do
- expect(described_class.resolved).to match_array([note3])
+ it 'includes resolved non-system diff and discussion notes on issues and merge requests' do
+ expect(described_class.resolved).to match_array([note3, note7])
end
end
describe '.unresolved' do
- it 'includes non-resolved non-system diff and discussion notes on merge requests' do
- expect(described_class.unresolved).to match_array([note4])
+ it 'includes non-resolved non-system diff and discussion notes on issues and merge requests' do
+ expect(described_class.unresolved).to match_array([note4, note5])
end
end
end
@@ -55,11 +56,13 @@ RSpec.describe Note, ResolvableNote do
unresolved_note.reload
end
- it 'resolves only the resolvable, not yet resolved notes' do
+ it 'resolves only the resolvable, not yet resolved notes', :freeze_time do
expect(commit_note.resolved_at).to be_nil
expect(resolved_note.resolved_by).not_to eq(current_user)
+
expect(unresolved_note.resolved_at).not_to be_nil
expect(unresolved_note.resolved_by).to eq(current_user)
+ expect(unresolved_note.updated_at).to be_like_time(Time.current)
end
end
@@ -72,9 +75,10 @@ RSpec.describe Note, ResolvableNote do
resolved_note.reload
end
- it 'unresolves the resolved notes' do
+ it 'unresolves the resolved notes', :freeze_time do
expect(resolved_note.resolved_by).to be_nil
expect(resolved_note.resolved_at).to be_nil
+ expect(resolved_note.updated_at).to be_like_time(Time.current)
end
end
@@ -272,6 +276,12 @@ RSpec.describe Note, ResolvableNote do
expect(subject.resolved?).to be true
end
+
+ it "updates the updated_at timestamp", :freeze_time do
+ subject.resolve!(current_user)
+
+ expect(subject.updated_at).to be_like_time(Time.current)
+ end
end
end
end
@@ -320,6 +330,12 @@ RSpec.describe Note, ResolvableNote do
expect(subject.resolved?).to be false
end
+
+ it "updates the updated_at timestamp", :freeze_time do
+ subject.unresolve!
+
+ expect(subject.updated_at).to be_like_time(Time.current)
+ end
end
context "when not resolved" do
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index cbfc1df64f1..822e2817d84 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe ApplicationSetting, 'TokenAuthenticatable' do
end
describe 'multiple token fields' do
- before(:all) do
+ before_all do
described_class.send(:add_authentication_token_field, :yet_another_token)
end
diff --git a/spec/models/concerns/where_composite_spec.rb b/spec/models/concerns/where_composite_spec.rb
index 6abdd12aac5..b2b9602f5ee 100644
--- a/spec/models/concerns/where_composite_spec.rb
+++ b/spec/models/concerns/where_composite_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe WhereComposite do
describe '.where_composite' do
- let_it_be(:test_table_name) { "_test_table_#{SecureRandom.hex(10)}" }
+ let_it_be(:test_table_name) { "_test_table_where_composite" }
let(:model) do
tbl_name = test_table_name
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index 3d78a9089ca..6f124662b8e 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe CustomerRelations::Contact, type: :model do
+RSpec.describe CustomerRelations::Contact, type: :model, feature_category: :team_planning do
let_it_be(:group) { create(:group) }
describe 'associations' do
@@ -280,4 +280,25 @@ RSpec.describe CustomerRelations::Contact, type: :model do
end
end
end
+
+ describe '#hook_attrs' do
+ let_it_be(:contact) { create(:contact, group: group) }
+
+ it 'includes the expected attributes' do
+ expect(contact.hook_attrs).to match a_hash_including(
+ {
+ 'created_at' => contact.created_at,
+ 'description' => contact.description,
+ 'first_name' => contact.first_name,
+ 'group_id' => group.id,
+ 'id' => contact.id,
+ 'last_name' => contact.last_name,
+ 'organization_id' => contact.organization_id,
+ 'state' => contact.state,
+ 'updated_at' => contact.updated_at
+ }
+ )
+ expect(contact.hook_attrs.keys).to match_array(described_class::SAFE_ATTRIBUTES)
+ end
+ end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 227ac69133b..639b149e2ae 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -648,163 +648,177 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
let!(:project) { create(:project, :repository) }
let!(:environment) { create(:environment, project: project) }
- context 'when there are no deployments and builds' do
- it do
- expect(subject_method(environment)).to eq(described_class.none)
+ shared_examples_for 'find last deployment group for environment' do
+ context 'when there are no deployments and jobs' do
+ it do
+ expect(subject_method(environment)).to eq(described_class.none)
+ end
end
- end
- context 'when there are no successful builds' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:ci_build) { create(:ci_build, :running, project: project, pipeline: pipeline) }
+ context 'when there are no successful jobs' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:job) { create(factory_type, :created, project: project, pipeline: pipeline) }
- before do
- create(:deployment, :success, environment: environment, project: project, deployable: ci_build)
- end
+ before do
+ create(:deployment, :created, environment: environment, project: project, deployable: job)
+ end
- it do
- expect(subject_method(environment)).to eq(described_class.none)
+ it do
+ expect(subject_method(environment)).to eq(described_class.none)
+ end
end
- end
- context 'when there are deployments for multiple pipelines' do
- let(:pipeline_a) { create(:ci_pipeline, project: project) }
- let(:pipeline_b) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
- let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
- let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
- let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
+ context 'when there are deployments for multiple pipelines' do
+ let(:pipeline_a) { create(:ci_pipeline, project: project) }
+ let(:pipeline_b) { create(:ci_pipeline, project: project) }
+ let(:job_a) { create(factory_type, :success, project: project, pipeline: pipeline_a) }
+ let(:job_b) { create(factory_type, :failed, project: project, pipeline: pipeline_b) }
+ let(:job_c) { create(factory_type, :success, project: project, pipeline: pipeline_a) }
+ let(:job_d) { create(factory_type, :failed, project: project, pipeline: pipeline_a) }
+
+ # Successful deployments for pipeline_a
+ let!(:deployment_a) do
+ create(:deployment, :success, project: project, environment: environment, deployable: job_a)
+ end
- # Successful deployments for pipeline_a
- let!(:deployment_a) do
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
- end
+ let!(:deployment_b) do
+ create(:deployment, :success, project: project, environment: environment, deployable: job_c)
+ end
- let!(:deployment_b) do
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_c)
- end
+ before do
+ # Failed deployment for pipeline_a
+ create(:deployment, :failed, project: project, environment: environment, deployable: job_d)
- before do
- # Failed deployment for pipeline_a
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_d)
+ # Failed deployment for pipeline_b
+ create(:deployment, :failed, project: project, environment: environment, deployable: job_b)
+ end
- # Failed deployment for pipeline_b
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
+ it 'returns the successful deployment jobs for the last deployment pipeline' do
+ expect(subject_method(environment).pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
+ end
end
- it 'returns the successful deployment jobs for the last deployment pipeline' do
- expect(subject_method(environment).pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
- end
- end
+ context 'when there are many environments' do
+ let(:environment_b) { create(:environment, project: project) }
+
+ let(:pipeline_a) { create(:ci_pipeline, project: project) }
+ let(:pipeline_b) { create(:ci_pipeline, project: project) }
+ let(:pipeline_c) { create(:ci_pipeline, project: project) }
+ let(:pipeline_d) { create(:ci_pipeline, project: project) }
+
+ # Builds for first environment: 'environment' with pipeline_a and pipeline_b
+ let(:job_a) { create(factory_type, :success, project: project, pipeline: pipeline_a) }
+ let(:job_b) { create(factory_type, :failed, project: project, pipeline: pipeline_b) }
+ let(:job_c) { create(factory_type, :success, project: project, pipeline: pipeline_a) }
+ let(:job_d) { create(factory_type, :failed, project: project, pipeline: pipeline_a) }
+ let!(:stop_env_a) do
+ create(factory_type, :manual, project: project, pipeline: pipeline_a, name: 'stop_env_a')
+ end
- context 'when there are many environments' do
- let(:environment_b) { create(:environment, project: project) }
+ # Builds for second environment: 'environment_b' with pipeline_c and pipeline_d
+ let(:job_e) { create(factory_type, :success, project: project, pipeline: pipeline_c) }
+ let(:job_f) { create(factory_type, :failed, project: project, pipeline: pipeline_d) }
+ let(:job_g) { create(factory_type, :success, project: project, pipeline: pipeline_c) }
+ let(:job_h) { create(factory_type, :failed, project: project, pipeline: pipeline_c) }
+ let!(:stop_env_b) do
+ create(factory_type, :manual, project: project, pipeline: pipeline_c, name: 'stop_env_b')
+ end
- let(:pipeline_a) { create(:ci_pipeline, project: project) }
- let(:pipeline_b) { create(:ci_pipeline, project: project) }
- let(:pipeline_c) { create(:ci_pipeline, project: project) }
- let(:pipeline_d) { create(:ci_pipeline, project: project) }
+ # Successful deployments for 'environment' from pipeline_a
+ let!(:deployment_a) do
+ create(:deployment, :success, project: project, environment: environment, deployable: job_a)
+ end
- # Builds for first environment: 'environment' with pipeline_a and pipeline_b
- let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
- let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
- let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
- let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
- let!(:stop_env_a) { create(:ci_build, :manual, project: project, pipeline: pipeline_a, name: 'stop_env_a') }
+ let!(:deployment_b) do
+ create(:deployment, :success,
+ project: project, environment: environment, deployable: job_c, on_stop: 'stop_env_a')
+ end
- # Builds for second environment: 'environment_b' with pipeline_c and pipeline_d
- let(:ci_build_e) { create(:ci_build, :success, project: project, pipeline: pipeline_c) }
- let(:ci_build_f) { create(:ci_build, :failed, project: project, pipeline: pipeline_d) }
- let(:ci_build_g) { create(:ci_build, :success, project: project, pipeline: pipeline_c) }
- let(:ci_build_h) { create(:ci_build, :failed, project: project, pipeline: pipeline_c) }
- let!(:stop_env_b) { create(:ci_build, :manual, project: project, pipeline: pipeline_c, name: 'stop_env_b') }
+ # Successful deployments for 'environment_b' from pipeline_c
+ let!(:deployment_c) do
+ create(:deployment, :success, project: project, environment: environment_b, deployable: job_e)
+ end
- # Successful deployments for 'environment' from pipeline_a
- let!(:deployment_a) do
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
- end
+ let!(:deployment_d) do
+ create(:deployment, :success,
+ project: project, environment: environment_b, deployable: job_g, on_stop: 'stop_env_b')
+ end
- let!(:deployment_b) do
- create(:deployment, :success,
- project: project, environment: environment, deployable: ci_build_c, on_stop: 'stop_env_a')
- end
+ before do
+ # Failed deployment for 'environment' from pipeline_a and pipeline_b
+ create(:deployment, :failed, project: project, environment: environment, deployable: job_d)
+ create(:deployment, :failed, project: project, environment: environment, deployable: job_b)
- # Successful deployments for 'environment_b' from pipeline_c
- let!(:deployment_c) do
- create(:deployment, :success, project: project, environment: environment_b, deployable: ci_build_e)
- end
+ # Failed deployment for 'environment_b' from pipeline_c and pipeline_d
+ create(:deployment, :failed, project: project, environment: environment_b, deployable: job_h)
+ create(:deployment, :failed, project: project, environment: environment_b, deployable: job_f)
+ end
- let!(:deployment_d) do
- create(:deployment, :success,
- project: project, environment: environment_b, deployable: ci_build_g, on_stop: 'stop_env_b')
- end
+ it 'batch loads for environments' do
+ environments = [environment, environment_b]
- before do
- # Failed deployment for 'environment' from pipeline_a and pipeline_b
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_d)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
+ # Loads Batch loader
+ environments.each do |env|
+ subject_method(env)
+ end
- # Failed deployment for 'environment_b' from pipeline_c and pipeline_d
- create(:deployment, :failed, project: project, environment: environment_b, deployable: ci_build_h)
- create(:deployment, :failed, project: project, environment: environment_b, deployable: ci_build_f)
- end
+ expect(subject_method(environments.first).pluck(:id))
+ .to contain_exactly(deployment_a.id, deployment_b.id)
- it 'batch loads for environments' do
- environments = [environment, environment_b]
+ expect { subject_method(environments.second).pluck(:id) }.not_to exceed_query_limit(0)
- # Loads Batch loader
- environments.each do |env|
- subject_method(env)
- end
+ expect(subject_method(environments.second).pluck(:id))
+ .to contain_exactly(deployment_c.id, deployment_d.id)
- expect(subject_method(environments.first).pluck(:id))
- .to contain_exactly(deployment_a.id, deployment_b.id)
+ expect(subject_method(environments.first).map(&:stop_action).compact)
+ .to contain_exactly(stop_env_a)
- expect { subject_method(environments.second).pluck(:id) }.not_to exceed_query_limit(0)
+ expect { subject_method(environments.second).map(&:stop_action) }
+ .not_to exceed_query_limit(0)
- expect(subject_method(environments.second).pluck(:id))
- .to contain_exactly(deployment_c.id, deployment_d.id)
+ expect(subject_method(environments.second).map(&:stop_action).compact)
+ .to contain_exactly(stop_env_b)
+ end
+ end
- expect(subject_method(environments.first).map(&:stop_action).compact)
- .to contain_exactly(stop_env_a)
+ context 'When last deployment for environment is a retried job' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:environment_b) { create(:environment, project: project) }
- expect { subject_method(environments.second).map(&:stop_action) }
- .not_to exceed_query_limit(0)
+ let(:job_a) do
+ create(factory_type, :success, project: project, pipeline: pipeline, environment: environment.name)
+ end
- expect(subject_method(environments.second).map(&:stop_action).compact)
- .to contain_exactly(stop_env_b)
- end
- end
+ let(:job_b) do
+ create(factory_type, :success, project: project, pipeline: pipeline, environment: environment_b.name)
+ end
- context 'When last deployment for environment is a retried build' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:environment_b) { create(:environment, project: project) }
+ let!(:deployment_a) do
+ create(:deployment, :success, project: project, environment: environment, deployable: job_a)
+ end
- let(:build_a) do
- create(:ci_build, :success, project: project, pipeline: pipeline, environment: environment.name)
- end
+ let!(:deployment_b) do
+ create(:deployment, :success, project: project, environment: environment_b, deployable: job_b)
+ end
- let(:build_b) do
- create(:ci_build, :success, project: project, pipeline: pipeline, environment: environment_b.name)
- end
+ before do
+ # Retry job_b
+ job_b.update!(retried: true)
- let!(:deployment_a) do
- create(:deployment, :success, project: project, environment: environment, deployable: build_a)
- end
+ # New successful job after retry.
+ create(factory_type, :success, project: project, pipeline: pipeline, environment: environment_b.name)
+ end
- let!(:deployment_b) do
- create(:deployment, :success, project: project, environment: environment_b, deployable: build_b)
+ it { expect(subject_method(environment_b)).not_to be_nil }
end
+ end
- before do
- # Retry build_b
- build_b.update!(retried: true)
-
- # New successful build after retry.
- create(:ci_build, :success, project: project, pipeline: pipeline, environment: environment_b.name)
- end
+ it_behaves_like 'find last deployment group for environment' do
+ let(:factory_type) { :ci_build }
+ end
- it { expect(subject_method(environment_b)).not_to be_nil }
+ it_behaves_like 'find last deployment group for environment' do
+ let(:factory_type) { :ci_bridge }
end
end
end
@@ -873,31 +887,41 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
end
describe '#stop_action' do
- let(:build) { create(:ci_build) }
-
subject { deployment.stop_action }
- context 'when no other actions' do
- let(:deployment) { FactoryBot.build(:deployment, deployable: build) }
-
- it { is_expected.to be_nil }
- end
-
- context 'with other actions' do
- let!(:close_action) { create(:ci_build, :manual, pipeline: build.pipeline, name: 'close_app') }
+ shared_examples_for 'stop action for a job' do
+ let(:job) { create(factory_type) } # rubocop:disable Rails/SaveBang
- context 'when matching action is defined' do
- let(:deployment) { FactoryBot.build(:deployment, deployable: build, on_stop: 'close_other_app') }
+ context 'when no other actions' do
+ let(:deployment) { FactoryBot.build(:deployment, deployable: job) }
it { is_expected.to be_nil }
end
- context 'when no matching action is defined' do
- let(:deployment) { FactoryBot.build(:deployment, deployable: build, on_stop: 'close_app') }
+ context 'with other actions' do
+ let!(:close_action) { create(factory_type, :manual, pipeline: job.pipeline, name: 'close_app') }
- it { is_expected.to eq(close_action) }
+ context 'when matching action is defined' do
+ let(:deployment) { FactoryBot.build(:deployment, deployable: job, on_stop: 'close_other_app') }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when no matching action is defined' do
+ let(:deployment) { FactoryBot.build(:deployment, deployable: job, on_stop: 'close_app') }
+
+ it { is_expected.to eq(close_action) }
+ end
end
end
+
+ it_behaves_like 'stop action for a job' do
+ let(:factory_type) { :ci_build }
+ end
+
+ it_behaves_like 'stop action for a job' do
+ let(:factory_type) { :ci_bridge }
+ end
end
describe '#deployed_by' do
@@ -908,10 +932,18 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
expect(deployment.deployed_by).to eq(deployment_user)
end
- it 'returns the deployment user if the deployable have no user' do
+ it 'returns the deployment user if the deployable is build and have no user' do
deployment_user = create(:user)
- build = create(:ci_build, user: nil)
- deployment = create(:deployment, deployable: build, user: deployment_user)
+ job = create(:ci_build, user: nil)
+ deployment = create(:deployment, deployable: job, user: deployment_user)
+
+ expect(deployment.deployed_by).to eq(deployment_user)
+ end
+
+ it 'returns the deployment user if the deployable is bridge and have no user' do
+ deployment_user = create(:user)
+ job = create(:ci_bridge, user: nil)
+ deployment = create(:deployment, deployable: job, user: deployment_user)
expect(deployment.deployed_by).to eq(deployment_user)
end
@@ -919,8 +951,8 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
it 'returns the deployable user if there is one' do
build_user = create(:user)
deployment_user = create(:user)
- build = create(:ci_build, user: build_user)
- deployment = create(:deployment, deployable: build, user: deployment_user)
+ job = create(:ci_build, user: build_user)
+ deployment = create(:deployment, deployable: job, user: deployment_user)
expect(deployment.deployed_by).to eq(build_user)
end
@@ -954,14 +986,14 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
end
end
- describe '.builds' do
+ describe '.jobs' do
let!(:deployment1) { create(:deployment) }
let!(:deployment2) { create(:deployment) }
let!(:deployment3) { create(:deployment) }
- subject { described_class.builds }
+ subject { described_class.jobs }
- it 'retrieves builds for the deployments' do
+ it 'retrieves jobs for the deployments' do
is_expected.to match_array(
[deployment1.deployable, deployment2.deployable, deployment3.deployable])
end
@@ -974,16 +1006,16 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
end
end
- describe '#build' do
+ describe '#job' do
let!(:deployment) { create(:deployment) }
- subject { deployment.build }
+ subject { deployment.job }
- it 'retrieves build for the deployment' do
+ it 'retrieves job for the deployment' do
is_expected.to eq(deployment.deployable)
end
- it 'returns nil when the associated build is not found' do
+ it 'returns nil when the associated job is not found' do
deployment.update!(deployable_id: nil, deployable_type: nil)
is_expected.to be_nil
@@ -1088,22 +1120,30 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
end
end
- describe '#playable_build' do
- subject { deployment.playable_build }
+ describe '#playable_job' do
+ subject { deployment.playable_job }
+
+ context 'when there is a deployable job' do
+ let(:deployment) { create(:deployment, deployable: job) }
- context 'when there is a deployable build' do
- let(:deployment) { create(:deployment, deployable: build) }
+ context 'when the deployable job is build and playable' do
+ let(:job) { create(:ci_build, :playable) }
+
+ it 'returns that job' do
+ is_expected.to eq(job)
+ end
+ end
- context 'when the deployable build is playable' do
- let(:build) { create(:ci_build, :playable) }
+ context 'when the deployable job is bridge and playable' do
+ let(:job) { create(:ci_bridge, :playable) }
- it 'returns that build' do
- is_expected.to eq(build)
+ it 'returns that job' do
+ is_expected.to eq(job)
end
end
- context 'when the deployable build is not playable' do
- let(:build) { create(:ci_build) }
+ context 'when the deployable job is not playable' do
+ let(:job) { create(:ci_build) }
it 'returns nil' do
is_expected.to be_nil
@@ -1111,7 +1151,7 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
end
end
- context 'when there is no deployable build' do
+ context 'when there is no deployable job' do
let(:deployment) { create(:deployment) }
it 'returns nil' do
@@ -1207,144 +1247,180 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
end
describe '#sync_status_with' do
- subject { deployment.sync_status_with(ci_build) }
+ subject { deployment.sync_status_with(job) }
let_it_be(:project) { create(:project, :repository) }
- let(:deployment) { create(:deployment, project: project, status: deployment_status) }
- let(:ci_build) { create(:ci_build, project: project, status: build_status) }
+ shared_examples_for 'sync status with a job' do
+ let(:deployment) { create(:deployment, project: project, status: deployment_status) }
+ let(:job) { create(factory_type, project: project, status: job_status) }
+
+ shared_examples_for 'synchronizing deployment' do
+ let(:expected_deployment_status) { job_status.to_s }
- shared_examples_for 'synchronizing deployment' do
- it 'changes deployment status' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+ it 'changes deployment status' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
- is_expected.to eq(true)
+ is_expected.to eq(true)
- expect(deployment.status).to eq(build_status.to_s)
- expect(deployment.errors).to be_empty
+ expect(deployment.status).to eq(expected_deployment_status)
+ expect(deployment.errors).to be_empty
+ end
end
- end
- shared_examples_for 'gracefully handling error' do
- it 'tracks an exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- instance_of(described_class::StatusSyncError),
- deployment_id: deployment.id,
- build_id: ci_build.id)
+ shared_examples_for 'gracefully handling error' do
+ it 'tracks an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(described_class::StatusSyncError),
+ deployment_id: deployment.id,
+ job_id: job.id)
- is_expected.to eq(false)
+ is_expected.to eq(false)
- expect(deployment.status).to eq(deployment_status.to_s)
- expect(deployment.errors.full_messages).to include(error_message)
+ expect(deployment.status).to eq(deployment_status.to_s)
+ expect(deployment.errors.full_messages).to include(error_message)
+ end
end
- end
- shared_examples_for 'ignoring build' do
- it 'does not change deployment status' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+ shared_examples_for 'ignoring job' do
+ it 'does not change deployment status' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
- is_expected.to eq(false)
+ is_expected.to eq(false)
- expect(deployment.status).to eq(deployment_status.to_s)
- expect(deployment.errors).to be_empty
+ expect(deployment.status).to eq(deployment_status.to_s)
+ expect(deployment.errors).to be_empty
+ end
end
- end
- context 'with created deployment' do
- let(:deployment_status) { :created }
+ context 'with created deployment' do
+ let(:deployment_status) { :created }
- context 'with created build' do
- let(:build_status) { :created }
+ context 'with created job' do
+ let(:job_status) { :created }
- it_behaves_like 'ignoring build'
- end
+ it_behaves_like 'ignoring job'
+ end
- context 'with running build' do
- let(:build_status) { :running }
+ context 'with manual job' do
+ let(:job_status) { :manual }
- it_behaves_like 'synchronizing deployment'
- end
+ it_behaves_like 'synchronizing deployment' do
+ let(:expected_deployment_status) { 'blocked' }
+ end
+ end
- context 'with finished build' do
- let(:build_status) { :success }
+ context 'with running job' do
+ let(:job_status) { :running }
- it_behaves_like 'synchronizing deployment'
- end
+ it_behaves_like 'synchronizing deployment'
+ end
- context 'with unrelated build' do
- let(:build_status) { :waiting_for_resource }
+ context 'with finished job' do
+ let(:job_status) { :success }
- it_behaves_like 'ignoring build'
+ it_behaves_like 'synchronizing deployment'
+ end
+
+ context 'with unrelated job' do
+ let(:job_status) { :waiting_for_resource }
+
+ it_behaves_like 'ignoring job'
+ end
end
- end
- context 'with running deployment' do
- let(:deployment_status) { :running }
+ context 'with running deployment' do
+ let(:deployment_status) { :running }
- context 'with created build' do
- let(:build_status) { :created }
+ context 'with created job' do
+ let(:job_status) { :created }
- it_behaves_like 'gracefully handling error' do
- let(:error_message) { %{Status cannot transition via \"create\"} }
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %{Status cannot transition via \"create\"} }
+ end
end
- end
- context 'with running build' do
- let(:build_status) { :running }
+ context 'with manual job' do
+ let(:job_status) { :manual }
- it_behaves_like 'ignoring build'
- end
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %{Status cannot transition via \"block\"} }
+ end
+ end
- context 'with finished build' do
- let(:build_status) { :success }
+ context 'with running job' do
+ let(:job_status) { :running }
- it_behaves_like 'synchronizing deployment'
- end
+ it_behaves_like 'ignoring job'
+ end
- context 'with unrelated build' do
- let(:build_status) { :waiting_for_resource }
+ context 'with finished job' do
+ let(:job_status) { :success }
- it_behaves_like 'ignoring build'
+ it_behaves_like 'synchronizing deployment'
+ end
+
+ context 'with unrelated job' do
+ let(:job_status) { :waiting_for_resource }
+
+ it_behaves_like 'ignoring job'
+ end
end
- end
- context 'with finished deployment' do
- let(:deployment_status) { :success }
+ context 'with finished deployment' do
+ let(:deployment_status) { :success }
- context 'with created build' do
- let(:build_status) { :created }
+ context 'with created job' do
+ let(:job_status) { :created }
- it_behaves_like 'gracefully handling error' do
- let(:error_message) { %{Status cannot transition via \"create\"} }
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %{Status cannot transition via \"create\"} }
+ end
end
- end
- context 'with running build' do
- let(:build_status) { :running }
+ context 'with manual job' do
+ let(:job_status) { :manual }
- it_behaves_like 'gracefully handling error' do
- let(:error_message) { %{Status cannot transition via \"run\"} }
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %{Status cannot transition via \"block\"} }
+ end
end
- end
- context 'with finished build' do
- let(:build_status) { :success }
+ context 'with running job' do
+ let(:job_status) { :running }
- it_behaves_like 'ignoring build'
- end
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %{Status cannot transition via \"run\"} }
+ end
+ end
- context 'with failed build' do
- let(:build_status) { :failed }
+ context 'with finished job' do
+ let(:job_status) { :success }
- it_behaves_like 'synchronizing deployment'
- end
+ it_behaves_like 'ignoring job'
+ end
+
+ context 'with failed job' do
+ let(:job_status) { :failed }
- context 'with unrelated build' do
- let(:build_status) { :waiting_for_resource }
+ it_behaves_like 'synchronizing deployment'
+ end
+
+ context 'with unrelated job' do
+ let(:job_status) { :waiting_for_resource }
- it_behaves_like 'ignoring build'
+ it_behaves_like 'ignoring job'
+ end
end
end
+
+ it_behaves_like 'sync status with a job' do
+ let(:factory_type) { :ci_build }
+ end
+
+ it_behaves_like 'sync status with a job' do
+ let(:factory_type) { :ci_bridge }
+ end
end
describe '#tags' do
@@ -1417,6 +1493,14 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
expect(subject.tier_in_yaml).to eq('testing')
end
+ context 'when deployable is a bridge job' do
+ let(:deployable) { create(:ci_bridge, :success, :environment_with_deployment_tier) }
+
+ it 'returns the tier' do
+ expect(subject.tier_in_yaml).to eq('testing')
+ end
+ end
+
context 'when tier is not specified' do
let(:deployable) { create(:ci_build, :success) }
diff --git a/spec/models/discussion_spec.rb b/spec/models/discussion_spec.rb
index 1c9798c6d99..6f1d05a2567 100644
--- a/spec/models/discussion_spec.rb
+++ b/spec/models/discussion_spec.rb
@@ -5,10 +5,10 @@ require 'spec_helper'
RSpec.describe Discussion, feature_category: :team_planning do
subject { described_class.new([first_note, second_note, third_note]) }
- let(:first_note) { create(:diff_note_on_merge_request) }
- let(:merge_request) { first_note.noteable }
- let(:second_note) { create(:diff_note_on_merge_request, in_reply_to: first_note) }
- let(:third_note) { create(:diff_note_on_merge_request) }
+ let_it_be(:first_note) { create(:diff_note_on_merge_request) }
+ let_it_be(:merge_request) { first_note.noteable }
+ let_it_be(:second_note) { create(:diff_note_on_merge_request, in_reply_to: first_note) }
+ let_it_be(:third_note) { create(:diff_note_on_merge_request) }
describe '.lazy_find' do
let!(:note1) { create(:discussion_note_on_merge_request).to_discussion }
@@ -54,7 +54,7 @@ RSpec.describe Discussion, feature_category: :team_planning do
end
describe '#cache_key' do
- let(:notes_sha) { Digest::SHA1.hexdigest("#{first_note.post_processed_cache_key}:#{second_note.post_processed_cache_key}:#{third_note.post_processed_cache_key}") }
+ let(:notes_sha) { Digest::SHA1.hexdigest("#{subject.notes[0].post_processed_cache_key}:#{subject.notes[1].post_processed_cache_key}:#{subject.notes[2].post_processed_cache_key}") }
it 'returns the cache key' do
expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{subject.id}:#{notes_sha}:")
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 066763645ab..9d4699cb91e 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -16,13 +16,13 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
it { is_expected.to be_kind_of(ReactiveCaching) }
it { is_expected.to nullify_if_blank(:external_url) }
it { is_expected.to nullify_if_blank(:kubernetes_namespace) }
+ it { is_expected.to nullify_if_blank(:flux_resource_path) }
it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:merge_request).optional }
it { is_expected.to belong_to(:cluster_agent).optional }
it { is_expected.to have_many(:deployments) }
- it { is_expected.to have_many(:metrics_dashboard_annotations) }
it { is_expected.to have_many(:alert_management_alerts) }
it { is_expected.to have_one(:upcoming_deployment) }
it { is_expected.to have_one(:latest_opened_most_severe_alert) }
@@ -38,6 +38,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
it { is_expected.to validate_length_of(:external_url).is_at_most(255) }
it { is_expected.to validate_length_of(:kubernetes_namespace).is_at_most(63) }
+ it { is_expected.to validate_length_of(:flux_resource_path).is_at_most(255) }
describe 'validation' do
it 'does not become invalid record when external_url is empty' do
@@ -690,178 +691,214 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
subject { environment.stop_with_actions!(user) }
- before do
- expect(environment).to receive(:available?).and_call_original
- end
-
- context 'when no other actions' do
- context 'environment is available' do
- before do
- environment.update!(state: :available)
- end
-
- it do
- actions = subject
-
- expect(environment).to be_stopped
- expect(actions).to match_array([])
- end
+ shared_examples_for 'stop with playing a teardown job' do
+ before do
+ expect(environment).to receive(:available?).and_call_original
end
- context 'environment is already stopped' do
- before do
- environment.update!(state: :stopped)
- end
+ context 'when no other actions' do
+ context 'environment is available' do
+ before do
+ environment.update!(state: :available)
+ end
- it do
- subject
+ it do
+ actions = subject
- expect(environment).to be_stopped
+ expect(environment).to be_stopped
+ expect(actions).to match_array([])
+ end
end
- end
- end
-
- context 'when matching action is defined' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build_a) { create(:ci_build, :success, pipeline: pipeline) }
- before do
- create(:deployment, :success,
- environment: environment,
- deployable: build_a,
- on_stop: 'close_app_a')
- end
+ context 'environment is already stopped' do
+ before do
+ environment.update!(state: :stopped)
+ end
- context 'when user is not allowed to stop environment' do
- let!(:close_action) do
- create(:ci_build, :manual, pipeline: pipeline, name: 'close_app_a')
- end
+ it do
+ subject
- it 'raises an exception' do
- expect { subject }.to raise_error(Gitlab::Access::AccessDeniedError)
+ expect(environment).to be_stopped
+ end
end
end
- context 'when user is allowed to stop environment' do
- before do
- project.add_developer(user)
+ context 'when matching action is defined' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:job_a) { create(factory_type, :success, pipeline: pipeline, **factory_options) }
- create(:protected_branch, :developers_can_merge, name: 'master', project: project)
+ before do
+ create(:deployment, :success,
+ environment: environment,
+ deployable: job_a,
+ on_stop: 'close_app_a')
end
- context 'when action did not yet finish' do
+ context 'when user is not allowed to stop environment' do
let!(:close_action) do
- create(:ci_build, :manual, pipeline: pipeline, name: 'close_app_a')
+ create(factory_type, :manual, pipeline: pipeline, name: 'close_app_a', **factory_options)
end
- it 'returns the same action' do
- action = subject.first
- expect(action).to eq(close_action)
- expect(action.user).to eq(user)
+ it 'raises an exception' do
+ expect { subject }.to raise_error(Gitlab::Access::AccessDeniedError)
end
+ end
- it 'environment is not stopped' do
- subject
+ context 'when user is allowed to stop environment' do
+ before do
+ project.add_developer(user)
- expect(environment).not_to be_stopped
+ create(:protected_branch, :developers_can_merge, name: 'master', project: project)
end
- end
- context 'if action did finish' do
- let!(:close_action) do
- create(:ci_build, :manual, :success, pipeline: pipeline, name: 'close_app_a')
- end
+ context 'when action did not yet finish' do
+ let!(:close_action) do
+ create(factory_type, :manual, pipeline: pipeline, name: 'close_app_a', **factory_options)
+ end
- it 'returns a new action of the same type' do
- action = subject.first
+ it 'returns the same action' do
+ action = subject.first
+ expect(action).to eq(close_action)
+ expect(action.user).to eq(user)
+ end
- expect(action).to be_persisted
- expect(action.name).to eq(close_action.name)
- expect(action.user).to eq(user)
- end
- end
+ it 'environment is not stopped' do
+ subject
- context 'close action does not raise ActiveRecord::StaleObjectError' do
- let!(:close_action) do
- create(:ci_build, :manual, pipeline: pipeline, name: 'close_app_a')
+ expect(environment).not_to be_stopped
+ end
end
- before do
- # preload the build
- environment.stop_actions
+ context 'if action did finish' do
+ let!(:close_action) do
+ create(factory_type, :manual, :success, pipeline: pipeline, name: 'close_app_a', **factory_options)
+ end
- # Update record as the other process. This makes `environment.stop_action` stale.
- close_action.drop!
- end
+ it 'returns a new action of the same type when build job' do
+ skip unless factory_type == :ci_build
- it 'successfully plays the build even if the build was a stale object' do
- # Since build is droped.
- expect(close_action.processed).to be_falsey
+ action = subject.first
+
+ expect(action).to be_persisted
+ expect(action.name).to eq(close_action.name)
+ expect(action.user).to eq(user)
+ end
- # it encounters the StaleObjectError at first, but reloads the object and runs `build.play`
- expect { subject }.not_to raise_error
+ it 'does nothing when bridge job' do
+ skip unless factory_type == :ci_bridge
- # Now the build should be processed.
- expect(close_action.reload.processed).to be_truthy
+ action = subject.first
+
+ expect(action).to be_nil
+ end
end
- end
- end
- end
- context 'when there are more then one stop action for the environment' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build_a) { create(:ci_build, :success, pipeline: pipeline) }
- let(:build_b) { create(:ci_build, :success, pipeline: pipeline) }
+ context 'close action does not raise ActiveRecord::StaleObjectError' do
+ let!(:close_action) do
+ create(factory_type, :manual, pipeline: pipeline, name: 'close_app_a', **factory_options)
+ end
- let!(:close_actions) do
- [
- create(:ci_build, :manual, pipeline: pipeline, name: 'close_app_a'),
- create(:ci_build, :manual, pipeline: pipeline, name: 'close_app_b')
- ]
- end
+ before do
+ # preload the job
+ environment.stop_actions
- before do
- project.add_developer(user)
+ # Update record as the other process. This makes `environment.stop_action` stale.
+ close_action.drop!
+ end
- create(:deployment, :success,
- environment: environment,
- deployable: build_a,
- finished_at: 5.minutes.ago,
- on_stop: 'close_app_a')
+ it 'successfully plays the job even if the job was a stale object when build job' do
+ skip unless factory_type == :ci_build
- create(:deployment, :success,
- environment: environment,
- deployable: build_b,
- finished_at: 1.second.ago,
- on_stop: 'close_app_b')
- end
+ # Since job is droped.
+ expect(close_action.processed).to be_falsey
+
+ # it encounters the StaleObjectError at first, but reloads the object and runs `job.play`
+ expect { subject }.not_to raise_error
+
+ # Now the job should be processed.
+ expect(close_action.reload.processed).to be_truthy
+ end
+
+ it 'does nothing when bridge job' do
+ skip unless factory_type == :ci_bridge
+
+ expect(close_action.processed).to be_falsey
- it 'returns the same actions' do
- actions = subject
+ # it encounters the StaleObjectError at first, but reloads the object and runs `job.play`
+ expect { subject }.not_to raise_error
- expect(actions.count).to eq(close_actions.count)
- expect(actions.pluck(:id)).to match_array(close_actions.pluck(:id))
- expect(actions.pluck(:user)).to match_array(close_actions.pluck(:user))
+ # Bridge is not retried currently.
+ expect(close_action.processed).to be_falsey
+ end
+ end
+ end
end
- context 'when there are failed builds' do
+ context 'when there are more then one stop action for the environment' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:job_a) { create(factory_type, :success, pipeline: pipeline, **factory_options) }
+ let(:job_b) { create(factory_type, :success, pipeline: pipeline, **factory_options) }
+
+ let!(:close_actions) do
+ [
+ create(factory_type, :manual, pipeline: pipeline, name: 'close_app_a', **factory_options),
+ create(factory_type, :manual, pipeline: pipeline, name: 'close_app_b', **factory_options)
+ ]
+ end
+
before do
- create(:ci_build, :failed, pipeline: pipeline, name: 'close_app_c')
+ project.add_developer(user)
+
+ create(:deployment, :success,
+ environment: environment,
+ deployable: job_a,
+ finished_at: 5.minutes.ago,
+ on_stop: 'close_app_a')
- create(:deployment, :failed,
+ create(:deployment, :success,
environment: environment,
- deployable: create(:ci_build, pipeline: pipeline),
- on_stop: 'close_app_c')
+ deployable: job_b,
+ finished_at: 1.second.ago,
+ on_stop: 'close_app_b')
end
- it 'returns only stop actions from successful builds' do
+ it 'returns the same actions' do
actions = subject
- expect(actions).to match_array(close_actions)
- expect(actions.count).to eq(pipeline.latest_successful_builds.count)
+ expect(actions.count).to eq(close_actions.count)
+ expect(actions.pluck(:id)).to match_array(close_actions.pluck(:id))
+ expect(actions.pluck(:user)).to match_array(close_actions.pluck(:user))
+ end
+
+ context 'when there are failed builds' do
+ before do
+ create(factory_type, :failed, pipeline: pipeline, name: 'close_app_c', **factory_options)
+
+ create(:deployment, :failed,
+ environment: environment,
+ deployable: create(factory_type, pipeline: pipeline, **factory_options),
+ on_stop: 'close_app_c')
+ end
+
+ it 'returns only stop actions from successful builds' do
+ actions = subject
+
+ expect(actions).to match_array(close_actions)
+ expect(actions.count).to eq(pipeline.latest_successful_jobs.count)
+ end
end
end
end
+
+ it_behaves_like 'stop with playing a teardown job' do
+ let(:factory_type) { :ci_build }
+ let(:factory_options) { {} }
+ end
+
+ it_behaves_like 'stop with playing a teardown job' do
+ let(:factory_type) { :ci_bridge }
+ let(:factory_options) { { downstream: project } }
+ end
end
describe '#stop_actions' do
@@ -1814,13 +1851,23 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
let_it_be(:project) { create(:project, :repository) }
let_it_be(:environment, reload: true) { create(:environment, project: project) }
- let!(:deployment) { create(:deployment, project: project, environment: environment, deployable: build) }
- let!(:build) { create(:ci_build, :running, project: project, environment: environment) }
+ let!(:deployment) { create(:deployment, project: project, environment: environment, deployable: job) }
+ let!(:job) { create(:ci_build, :running, project: project, environment: environment) }
it 'cancels an active deployment job' do
subject
- expect(build.reset).to be_canceled
+ expect(job.reset).to be_canceled
+ end
+
+ context 'when deployment job is bridge' do
+ let!(:job) { create(:ci_bridge, :running, project: project, environment: environment) }
+
+ it 'does not cancel an active deployment job' do
+ subject
+
+ expect(job.reset).to be_running
+ end
end
context 'when deployable does not exist' do
@@ -1831,7 +1878,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
it 'does not raise an error' do
expect { subject }.not_to raise_error
- expect(build.reset).to be_running
+ expect(job.reset).to be_running
end
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 01fd17bfe10..23e72f6663a 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -390,6 +390,13 @@ RSpec.describe Group, feature_category: :groups_and_projects do
expect(internal_group).to be_invalid
expect(internal_group.errors[:visibility_level]).to include('private is not allowed since this group contains projects with higher visibility.')
end
+
+ it 'is valid if higher visibility project is deleted' do
+ internal_project.update_attribute(:pending_delete, true)
+ internal_group.visibility_level = Gitlab::VisibilityLevel::PRIVATE
+
+ expect(internal_group).to be_valid
+ end
end
context 'when group has a higher visibility' do
@@ -1806,34 +1813,48 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
context 'members-related methods' do
- let!(:group) { create(:group, :nested) }
- let!(:sub_group) { create(:group, parent: group) }
- let!(:maintainer) { group.parent.add_member(create(:user), GroupMember::MAINTAINER) }
- let!(:developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
- let!(:other_developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
+ let_it_be(:group) { create(:group, :nested) }
+ let_it_be(:sub_group) { create(:group, parent: group) }
+
+ let_it_be(:maintainer) { group.parent.add_member(create(:user), GroupMember::MAINTAINER) }
+ let_it_be(:developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
+ let_it_be(:other_developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
- describe '#direct_and_indirect_members' do
+ describe '#hierarchy_members' do
it 'returns parents members' do
- expect(group.direct_and_indirect_members).to include(developer)
- expect(group.direct_and_indirect_members).to include(maintainer)
+ expect(group.hierarchy_members).to include(developer)
+ expect(group.hierarchy_members).to include(maintainer)
end
it 'returns descendant members' do
- expect(group.direct_and_indirect_members).to include(other_developer)
+ expect(group.hierarchy_members).to include(other_developer)
end
end
- describe '#direct_and_indirect_members_with_inactive' do
- let!(:maintainer_blocked) { group.parent.add_member(create(:user, :blocked), GroupMember::MAINTAINER) }
+ describe '#hierarchy_members_with_inactive' do
+ let_it_be(:maintainer_blocked) { group.parent.add_member(create(:user, :blocked), GroupMember::MAINTAINER) }
it 'returns parents members' do
- expect(group.direct_and_indirect_members_with_inactive).to include(developer)
- expect(group.direct_and_indirect_members_with_inactive).to include(maintainer)
- expect(group.direct_and_indirect_members_with_inactive).to include(maintainer_blocked)
+ expect(group.hierarchy_members_with_inactive).to include(developer)
+ expect(group.hierarchy_members_with_inactive).to include(maintainer)
+ expect(group.hierarchy_members_with_inactive).to include(maintainer_blocked)
end
it 'returns descendant members' do
- expect(group.direct_and_indirect_members_with_inactive).to include(other_developer)
+ expect(group.hierarchy_members_with_inactive).to include(other_developer)
+ end
+ end
+
+ describe '#descendant_project_members_with_inactive' do
+ let_it_be(:ancestor_group_project) { create(:project, group: group) }
+ let_it_be(:ancestor_group_project_member) { ancestor_group_project.add_maintainer(create(:user)) }
+
+ let_it_be(:project) { create(:project, group: sub_group) }
+ let_it_be(:project_member) { project.add_maintainer(create(:user)) }
+ let_it_be(:blocked_project_member) { project.add_maintainer(create(:user, :blocked)) }
+
+ it 'returns members of descendant projects' do
+ expect(sub_group.descendant_project_members_with_inactive).to contain_exactly(project_member, blocked_project_member)
end
end
end
@@ -1857,76 +1878,6 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
end
- context 'user-related methods' do
- let_it_be(:user_a) { create(:user) }
- let_it_be(:user_b) { create(:user) }
- let_it_be(:user_c) { create(:user) }
- let_it_be(:user_d) { create(:user) }
-
- let_it_be(:group) { create(:group) }
- let_it_be(:nested_group) { create(:group, parent: group) }
- let_it_be(:deep_nested_group) { create(:group, parent: nested_group) }
- let_it_be(:project) { create(:project, namespace: group) }
-
- let_it_be(:another_group) { create(:group) }
- let_it_be(:another_user) { create(:user) }
-
- before_all do
- group.add_developer(user_a)
- group.add_developer(user_c)
- nested_group.add_developer(user_b)
- deep_nested_group.add_developer(user_a)
- project.add_developer(user_d)
-
- another_group.add_developer(another_user)
-
- create(:group_group_link, shared_group: group, shared_with_group: another_group)
- end
-
- describe '#direct_and_indirect_users' do
- it 'returns member users on every nest level without duplication' do
- expect(group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c, user_d)
- expect(nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
- expect(deep_nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
- end
-
- it 'does not return members of projects belonging to ancestor groups' do
- expect(nested_group.direct_and_indirect_users).not_to include(user_d)
- end
-
- context 'when share_with_groups is true' do
- it 'also returns members of groups invited to this group' do
- expect(group.direct_and_indirect_users(share_with_groups: true))
- .to contain_exactly(user_a, user_b, user_c, user_d, another_user)
- end
- end
- end
-
- describe '#direct_and_indirect_users_with_inactive' do
- let(:user_blocked_1) { create(:user, :blocked) }
- let(:user_blocked_2) { create(:user, :blocked) }
- let(:user_blocked_3) { create(:user, :blocked) }
- let(:project_in_group) { create(:project, namespace: nested_group) }
-
- before do
- group.add_developer(user_blocked_1)
- nested_group.add_developer(user_blocked_1)
- deep_nested_group.add_developer(user_blocked_2)
- project_in_group.add_developer(user_blocked_3)
- end
-
- it 'returns member users on every nest level without duplication' do
- expect(group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_d, user_blocked_1, user_blocked_2, user_blocked_3)
- expect(nested_group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_blocked_1, user_blocked_2, user_blocked_3)
- expect(deep_nested_group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_blocked_1, user_blocked_2)
- end
-
- it 'returns members of projects belonging to group' do
- expect(nested_group.direct_and_indirect_users_with_inactive).to include(user_blocked_3)
- end
- end
- end
-
describe '#project_users_with_descendants' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index 7710a05820c..346f743e8ef 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -103,7 +103,9 @@ RSpec.describe InstanceConfiguration do
diff_max_patch_bytes: 409600,
max_artifacts_size: 50,
max_pages_size: 60,
- snippet_size_limit: 70
+ snippet_size_limit: 70,
+ max_import_remote_file_size: 80,
+ bulk_import_max_download_file_size: 90
)
end
@@ -118,6 +120,8 @@ RSpec.describe InstanceConfiguration do
expect(size_limits[:max_artifacts_size]).to eq(50.megabytes)
expect(size_limits[:max_pages_size]).to eq(60.megabytes)
expect(size_limits[:snippet_size_limit]).to eq(70.bytes)
+ expect(size_limits[:max_import_remote_file_size]).to eq(80.megabytes)
+ expect(size_limits[:bulk_import_max_download_file_size]).to eq(90.megabytes)
end
it 'returns nil if receive_max_input_size not set' do
@@ -132,7 +136,9 @@ RSpec.describe InstanceConfiguration do
Gitlab::CurrentSettings.current_application_settings.update!(
max_import_size: 0,
max_export_size: 0,
- max_pages_size: 0
+ max_pages_size: 0,
+ max_import_remote_file_size: 0,
+ bulk_import_max_download_file_size: 0
)
size_limits = subject.settings[:size_limits]
@@ -140,6 +146,8 @@ RSpec.describe InstanceConfiguration do
expect(size_limits[:max_import_size]).to be_nil
expect(size_limits[:max_export_size]).to be_nil
expect(size_limits[:max_pages_size]).to be_nil
+ expect(size_limits[:max_import_remote_file_size]).to eq(0)
+ expect(size_limits[:bulk_import_max_download_file_size]).to eq(0)
end
end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 7fcd74cd37f..0b41b46ae3d 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -836,8 +836,8 @@ RSpec.describe Integration, feature_category: :integrations do
end
shared_examples '#api_field_names' do
- it 'filters out secret fields' do
- safe_fields = %w[some_safe_field safe_field url trojan_gift api_only_field]
+ it 'filters out secret fields and conditional fields' do
+ safe_fields = %w[some_safe_field safe_field url trojan_gift api_only_field enabled_field]
expect(fake_integration.new).to have_attributes(
api_field_names: match_array(safe_fields)
@@ -849,6 +849,11 @@ RSpec.describe Integration, feature_category: :integrations do
it 'filters out API only fields' do
expect(fake_integration.new.form_fields.pluck(:name)).not_to include('api_only_field')
end
+
+ it 'filters conditionals fields' do
+ expect(fake_integration.new.form_fields.pluck(:name)).to include('enabled_field')
+ expect(fake_integration.new.form_fields.pluck(:name)).not_to include('disabled_field', 'disabled_field_2')
+ end
end
context 'when the class overrides #fields' do
@@ -856,21 +861,24 @@ RSpec.describe Integration, feature_category: :integrations do
Class.new(Integration) do
def fields
[
- { name: 'token', type: 'password' },
- { name: 'api_token', type: 'password' },
- { name: 'token_api', type: 'password' },
- { name: 'safe_token', type: 'password' },
- { name: 'key', type: 'password' },
- { name: 'api_key', type: 'password' },
- { name: 'password', type: 'password' },
- { name: 'password_field', type: 'password' },
+ { name: 'token', type: :password },
+ { name: 'api_token', type: :password },
+ { name: 'token_api', type: :password },
+ { name: 'safe_token', type: :password },
+ { name: 'key', type: :password },
+ { name: 'api_key', type: :password },
+ { name: 'password', type: :password },
+ { name: 'password_field', type: :password },
{ name: 'webhook' },
{ name: 'some_safe_field' },
{ name: 'safe_field' },
{ name: 'url' },
- { name: 'trojan_horse', type: 'password' },
- { name: 'trojan_gift', type: 'text' },
- { name: 'api_only_field', api_only: true }
+ { name: 'trojan_horse', type: :password },
+ { name: 'trojan_gift', type: :text },
+ { name: 'api_only_field', api_only: true },
+ { name: 'enabled_field', if: true },
+ { name: 'disabled_field', if: false },
+ { name: 'disabled_field_2', if: nil }
].shuffle
end
end
@@ -884,21 +892,24 @@ RSpec.describe Integration, feature_category: :integrations do
context 'when the class uses the field DSL' do
let(:fake_integration) do
Class.new(described_class) do
- field :token, type: 'password'
- field :api_token, type: 'password'
- field :token_api, type: 'password'
- field :safe_token, type: 'password'
- field :key, type: 'password'
- field :api_key, type: 'password'
- field :password, type: 'password'
- field :password_field, type: 'password'
+ field :token, type: :password
+ field :api_token, type: :password
+ field :token_api, type: :password
+ field :safe_token, type: :password
+ field :key, type: :password
+ field :api_key, type: :password
+ field :password, type: :password
+ field :password_field, type: :password
field :webhook
field :some_safe_field
field :safe_field
field :url
- field :trojan_horse, type: 'password'
- field :trojan_gift, type: 'text'
+ field :trojan_horse, type: :password
+ field :trojan_gift, type: :text
field :api_only_field, api_only: true
+ field :enabled_field, if: -> { true }
+ field :disabled_field, if: -> { false }
+ field :disabled_field_2, if: -> { nil }
end
end
@@ -1030,9 +1041,9 @@ RSpec.describe Integration, feature_category: :integrations do
it 'returns all fields with type `password`' do
allow(subject).to receive(:fields).and_return(
[
- { name: 'password', type: 'password' },
- { name: 'secret', type: 'password' },
- { name: 'public', type: 'text' }
+ { name: 'password', type: :password },
+ { name: 'secret', type: :password },
+ { name: 'public', type: :text }
])
expect(subject.secret_fields).to match_array(%w[password secret])
@@ -1117,14 +1128,14 @@ RSpec.describe Integration, feature_category: :integrations do
field :foo_p, storage: :properties
field :foo_dt, storage: :data_fields
- field :bar, type: 'password'
+ field :bar, type: :password
field :password, is_secret: true
field :webhook
field :with_help, help: -> { 'help' }
- field :select, type: 'select'
- field :boolean, type: 'checkbox'
+ field :select, type: :select
+ field :boolean, type: :checkbox
end
end
@@ -1182,15 +1193,15 @@ RSpec.describe Integration, feature_category: :integrations do
specify 'fields have expected attributes' do
expect(integration.fields).to include(
- have_attributes(name: 'foo', type: 'text'),
- have_attributes(name: 'foo_p', type: 'text'),
- have_attributes(name: 'foo_dt', type: 'text'),
- have_attributes(name: 'bar', type: 'password'),
- have_attributes(name: 'password', type: 'password'),
- have_attributes(name: 'webhook', type: 'text'),
+ have_attributes(name: 'foo', type: :text),
+ have_attributes(name: 'foo_p', type: :text),
+ have_attributes(name: 'foo_dt', type: :text),
+ have_attributes(name: 'bar', type: :password),
+ have_attributes(name: 'password', type: :password),
+ have_attributes(name: 'webhook', type: :text),
have_attributes(name: 'with_help', help: 'help'),
- have_attributes(name: 'select', type: 'select'),
- have_attributes(name: 'boolean', type: 'checkbox')
+ have_attributes(name: 'select', type: :select),
+ have_attributes(name: 'boolean', type: :checkbox)
)
end
end
@@ -1242,7 +1253,7 @@ RSpec.describe Integration, feature_category: :integrations do
context 'when using data fields' do
let(:klass) do
Class.new(Integration) do
- field :project_url, storage: :data_fields, type: 'checkbox'
+ field :project_url, storage: :data_fields, type: :checkbox
def data_fields
issue_tracker_data || self.build_issue_tracker_data
diff --git a/spec/models/integrations/chat_message/issue_message_spec.rb b/spec/models/integrations/chat_message/issue_message_spec.rb
index cd40e4c361e..14451427a5a 100644
--- a/spec/models/integrations/chat_message/issue_message_spec.rb
+++ b/spec/models/integrations/chat_message/issue_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::ChatMessage::IssueMessage do
+RSpec.describe Integrations::ChatMessage::IssueMessage, feature_category: :integrations do
subject { described_class.new(args) }
let(:args) do
@@ -24,7 +24,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
url: 'http://url.com',
action: 'open',
state: 'opened',
- description: 'issue description'
+ description: 'issue description <http://custom-url.com|CLICK HERE>'
}
}
end
@@ -45,7 +45,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
end
context 'open' do
- it 'returns a message regarding opening of issues' do
+ it 'returns a slack-link sanitized message regarding opening of issues' do
expect(subject.pretext).to eq(
'[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> opened by Test User (test.user)')
expect(subject.attachments).to eq(
@@ -53,7 +53,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
{
title: "#100 Issue title",
title_link: "http://url.com",
- text: "issue description",
+ text: "issue description &lt;http://custom-url.com|CLICK HERE&gt;",
color: color
}
])
@@ -96,7 +96,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
it 'returns a message regarding opening of issues' do
expect(subject.pretext).to eq(
'[[project_name](http://somewhere.com)] Issue [#100 Issue title](http://url.com) opened by Test User (test.user)')
- expect(subject.attachments).to eq('issue description')
+ expect(subject.attachments).to eq('issue description &lt;http://custom-url.com|CLICK HERE&gt;')
expect(subject.activity).to eq({
title: 'Issue opened by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
diff --git a/spec/models/integrations/discord_spec.rb b/spec/models/integrations/discord_spec.rb
index 42ea4a287fe..7ab7308ac1c 100644
--- a/spec/models/integrations/discord_spec.rb
+++ b/spec/models/integrations/discord_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Integrations::Discord do
+RSpec.describe Integrations::Discord, feature_category: :integrations do
it_behaves_like "chat integration", "Discord notifications" do
let(:client) { Discordrb::Webhooks::Client }
let(:client_arguments) { { url: webhook_url } }
@@ -20,6 +20,26 @@ RSpec.describe Integrations::Discord do
end
end
+ describe 'validations' do
+ let_it_be(:project) { create(:project) }
+
+ subject { integration }
+
+ describe 'only allows one channel on events' do
+ context 'when given more than one channel' do
+ let(:integration) { build(:discord_integration, project: project, note_channel: 'webhook1,webhook2') }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when given one channel' do
+ let(:integration) { build(:discord_integration, project: project, note_channel: 'webhook1') }
+
+ it { is_expected.to be_valid }
+ end
+ end
+ end
+
describe '#execute' do
include StubRequests
diff --git a/spec/models/integrations/every_integration_spec.rb b/spec/models/integrations/every_integration_spec.rb
index c39a3486eb4..9639961c741 100644
--- a/spec/models/integrations/every_integration_spec.rb
+++ b/spec/models/integrations/every_integration_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Every integration' do
+RSpec.describe 'Every integration', feature_category: :integrations do
all_integration_names = Integration.available_integration_names
all_integration_names.each do |integration_name|
@@ -15,14 +15,14 @@ RSpec.describe 'Every integration' do
integration.fields.each do |field|
next unless field[:is_secret]
- expect(field[:type]).to eq('password'),
+ expect(field[:type]).to eq(:password),
"Field '#{field[:name]}' should use type 'password'"
end
end
it 'defines non-empty titles and help texts for all secret fields' do
integration.fields.each do |field|
- next unless field[:type] == 'password'
+ next unless field[:type] == :password
expect(field[:non_empty_password_title]).to be_present,
"Field '#{field[:name]}' should define :non_empty_password_title"
diff --git a/spec/models/integrations/field_spec.rb b/spec/models/integrations/field_spec.rb
index ca71dd0e6d3..49eaecd1b2e 100644
--- a/spec/models/integrations/field_spec.rb
+++ b/spec/models/integrations/field_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Integrations::Field do
+RSpec.describe ::Integrations::Field, feature_category: :integrations do
subject(:field) { described_class.new(**attrs) }
let(:attrs) { { name: nil, integration_class: test_integration } }
@@ -17,31 +17,31 @@ RSpec.describe ::Integrations::Field do
describe '#initialize' do
it 'sets type password for secret fields' do
attrs[:is_secret] = true
- attrs[:type] = 'text'
+ attrs[:type] = :text
- expect(field[:type]).to eq('password')
+ expect(field[:type]).to eq(:password)
end
it 'uses the given type for other names' do
attrs[:name] = 'field'
- attrs[:type] = 'select'
+ attrs[:type] = :select
- expect(field[:type]).to eq('select')
+ expect(field[:type]).to eq(:select)
end
it 'raises an error if an invalid attribute is given' do
attrs[:foo] = 'foo'
attrs[:bar] = 'bar'
attrs[:name] = 'name'
- attrs[:type] = 'text'
+ attrs[:type] = :text
expect { field }.to raise_error(ArgumentError, "Invalid attributes [:foo, :bar]")
end
it 'raises an error if an invalid type is given' do
- attrs[:type] = 'other'
+ attrs[:type] = :other
- expect { field }.to raise_error(ArgumentError, 'Invalid type "other"')
+ expect { field }.to raise_error(ArgumentError, 'Invalid type :other')
end
end
@@ -82,9 +82,11 @@ RSpec.describe ::Integrations::Field do
when :api_only
be false
when :type
- eq 'text'
+ eq :text
when :is_secret
eq false
+ when :if
+ be true
else
be_nil
end
@@ -169,7 +171,7 @@ RSpec.describe ::Integrations::Field do
context 'when a secret field' do
before do
- attrs[:type] = 'password'
+ attrs[:type] = :password
end
it { is_expected.to be_secret }
@@ -183,4 +185,9 @@ RSpec.describe ::Integrations::Field do
it { is_expected.not_to be_secret }
end
end
+
+ describe '#key?' do
+ it { is_expected.to be_key(:type) }
+ it { is_expected.not_to be_key(:foo) }
+ end
end
diff --git a/spec/models/integrations/google_play_spec.rb b/spec/models/integrations/google_play_spec.rb
index 8349ac71bc9..a0bc73378d3 100644
--- a/spec/models/integrations/google_play_spec.rb
+++ b/spec/models/integrations/google_play_spec.rb
@@ -20,6 +20,8 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do
it { is_expected.to allow_value('a.a.a').for(:package_name) }
it { is_expected.to allow_value('com.example').for(:package_name) }
it { is_expected.not_to allow_value('com').for(:package_name) }
+ it { is_expected.to allow_value(true, false).for(:google_play_protected_refs) }
+ it { is_expected.not_to allow_value(nil).for(:google_play_protected_refs) }
it { is_expected.not_to allow_value('com.example.my app').for(:package_name) }
it { is_expected.not_to allow_value('1com.example.myapp').for(:package_name) }
it { is_expected.not_to allow_value('com.1example.myapp').for(:package_name) }
@@ -33,7 +35,7 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do
describe '#fields' do
it 'returns custom fields' do
expect(google_play_integration.fields.pluck(:name)).to match_array(%w[package_name service_account_key
- service_account_key_file_name])
+ service_account_key_file_name google_play_protected_refs])
end
end
@@ -67,9 +69,8 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do
describe '#ci_variables' do
let(:google_play_integration) { build_stubbed(:google_play_integration) }
-
- it 'returns vars when the integration is activated' do
- ci_vars = [
+ let(:ci_vars) do
+ [
{
key: 'SUPPLY_PACKAGE_NAME',
value: google_play_integration.package_name,
@@ -83,8 +84,36 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do
public: false
}
]
+ end
+
+ it 'returns the vars for protected branch' do
+ expect(google_play_integration.ci_variables(protected_ref: true)).to match_array(ci_vars)
+ end
+
+ it "doesn't return vars for unproteced branch" do
+ expect(google_play_integration.ci_variables(protected_ref: false)).to be_empty
+ end
+ end
+
+ describe '#initialize_properties' do
+ context 'when google_play_protected_refs is nil' do
+ let(:google_play_integration) { described_class.new(google_play_protected_refs: nil) }
- expect(google_play_integration.ci_variables).to match_array(ci_vars)
+ it 'sets google_play_protected_refs to true' do
+ expect(google_play_integration.google_play_protected_refs).to be(true)
+ end
+ end
+
+ context 'when google_play_protected_refs is false' do
+ let(:google_play_integration) { build(:google_play_integration, google_play_protected_refs: false) }
+
+ it 'sets google_play_protected_refs to false' do
+ expect(google_play_integration.google_play_protected_refs).to be(false)
+ end
+
+ it "returns vars for unprotected ref when google_play_protected_refs is false" do
+ expect(google_play_integration.ci_variables(protected_ref: false)).not_to be_empty
+ end
end
end
end
@@ -94,7 +123,7 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do
describe '#ci_variables' do
it 'returns an empty array' do
- expect(google_play_integration.ci_variables).to match_array([])
+ expect(google_play_integration.ci_variables(protected_ref: true)).to be_empty
end
end
end
diff --git a/spec/models/integrations/pumble_spec.rb b/spec/models/integrations/pumble_spec.rb
index 8b9b5d214c6..8d966ff5da5 100644
--- a/spec/models/integrations/pumble_spec.rb
+++ b/spec/models/integrations/pumble_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Integrations::Pumble do
+RSpec.describe Integrations::Pumble, feature_category: :integrations do
it_behaves_like "chat integration", "Pumble" do
let(:client_arguments) { webhook_url }
let(:payload) do
diff --git a/spec/models/issue_link_spec.rb b/spec/models/issue_link_spec.rb
index d69a3f2954c..9af667c2960 100644
--- a/spec/models/issue_link_spec.rb
+++ b/spec/models/issue_link_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe IssueLink do
+RSpec.describe IssueLink, feature_category: :portfolio_management do
+ let_it_be(:project) { create(:project) }
+
it_behaves_like 'issuable link' do
let_it_be_with_reload(:issuable_link) { create(:issue_link) }
let_it_be(:issuable) { create(:issue) }
@@ -14,46 +16,15 @@ RSpec.describe IssueLink do
it { expect(described_class.issuable_type).to eq(:issue) }
end
- describe 'Scopes' do
- let_it_be(:issue1) { create(:issue) }
- let_it_be(:issue2) { create(:issue) }
-
- describe '.for_source_issue' do
- it 'includes linked issues for source issue' do
- source_issue = create(:issue)
- issue_link_1 = create(:issue_link, source: source_issue, target: issue1)
- issue_link_2 = create(:issue_link, source: source_issue, target: issue2)
-
- result = described_class.for_source_issue(source_issue)
-
- expect(result).to contain_exactly(issue_link_1, issue_link_2)
- end
- end
-
- describe '.for_target_issue' do
- it 'includes linked issues for target issue' do
- target_issue = create(:issue)
- issue_link_1 = create(:issue_link, source: issue1, target: target_issue)
- issue_link_2 = create(:issue_link, source: issue2, target: target_issue)
-
- result = described_class.for_target_issue(target_issue)
-
- expect(result).to contain_exactly(issue_link_1, issue_link_2)
- end
- end
-
- describe '.for_issues' do
- let_it_be(:issue) { create(:issue) }
- let_it_be(:source_link) { create(:issue_link, source: issue, target: issue1) }
- let_it_be(:target_link) { create(:issue_link, source: issue2, target: issue) }
-
- it 'includes links when issue is source' do
- expect(described_class.for_issues(issue, issue1)).to contain_exactly(source_link)
- end
+ describe '.issuable_name' do
+ it { expect(described_class.issuable_name).to eq('issue') }
+ end
- it 'includes links when issue is target' do
- expect(described_class.for_issues(issue, issue2)).to contain_exactly(target_link)
- end
- end
+ it_behaves_like 'includes LinkableItem concern' do
+ let_it_be(:item) { create(:issue, project: project) }
+ let_it_be(:item1) { create(:issue, project: project) }
+ let_it_be(:item2) { create(:issue, project: project) }
+ let_it_be(:link_factory) { :issue_link }
+ let_it_be(:item_type) { 'issue' }
end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 8d25ac93263..9db710cb3cc 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -188,7 +188,7 @@ RSpec.describe Issue, feature_category: :team_planning do
expect(issue).not_to be_valid
expect(issue.errors[:base])
- .to include(_('A confidential issue cannot have a parent that already has non-confidential children.'))
+ .to include(_('A confidential issue must have only confidential children. Make any child items confidential and try again.'))
end
it 'allows to make child confidential' do
@@ -1058,7 +1058,7 @@ RSpec.describe Issue, feature_category: :team_planning do
end
describe '#to_branch_name' do
- let_it_be(:issue) { create(:issue, project: reusable_project, iid: 123, title: 'Testing Issue') }
+ let_it_be(:issue, reload: true) { create(:issue, project: reusable_project, iid: 123, title: 'Testing Issue') }
it 'returns a branch name with the issue title if not confidential' do
expect(issue.to_branch_name).to eq('123-testing-issue')
@@ -2031,4 +2031,12 @@ RSpec.describe Issue, feature_category: :team_planning do
expect { issue1.unsubscribe_email_participant(email) }.not_to change { issue2.issue_email_participants.count }
end
end
+
+ describe '#update_search_data!' do
+ it 'copies namespace_id to search data' do
+ issue = create(:issue)
+
+ expect(issue.search_data.namespace_id).to eq(issue.namespace_id)
+ end
+ end
end
diff --git a/spec/models/label_spec.rb b/spec/models/label_spec.rb
index 65e02da2b5d..4862b0b0453 100644
--- a/spec/models/label_spec.rb
+++ b/spec/models/label_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe Label do
+RSpec.describe Label, feature_category: :team_planning do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:project) { create(:project) }
describe 'modules' do
@@ -162,11 +164,54 @@ RSpec.describe Label do
end
end
+ describe 'ensure_lock_on_merge_allowed' do
+ let(:validation_error) { 'can not be set for template labels' }
+
+ # rubocop:disable Rails/SaveBang
+ context 'when creating a label' do
+ let(:label) { described_class.create(title: 'test', template: template, lock_on_merge: lock_on_merge) }
+
+ where(:template, :lock_on_merge, :valid, :errors) do
+ false | false | true | []
+ false | true | true | []
+ true | false | true | []
+ true | true | false | [validation_error]
+ false | true | true | []
+ end
+
+ with_them do
+ it 'validates lock_on_merge on label creation' do
+ expect(label.valid?).to be(valid)
+ expect(label.errors[:lock_on_merge]).to eq(errors)
+ end
+ end
+ end
+ # rubocop:enable Rails/SaveBang
+
+ context 'when updating a label' do
+ let_it_be(:template_label) { create(:label, template: true) }
+
+ where(:lock_on_merge, :valid, :errors) do
+ true | false | [validation_error]
+ false | true | []
+ end
+
+ with_them do
+ it 'validates lock_on_merge value if label is a template' do
+ template_label.update_column(:lock_on_merge, lock_on_merge)
+
+ expect(template_label.valid?).to be(valid)
+ expect(template_label.errors[:lock_on_merge]).to eq(errors)
+ end
+ end
+ end
+ end
+
describe 'scopes' do
describe '.on_board' do
let(:board) { create(:board, project: project) }
- let!(:list1) { create(:list, board: board, label: development) }
- let!(:list2) { create(:list, board: board, label: testing) }
+ let!(:list1) { create(:list, board: board, label: development) }
+ let!(:list2) { create(:list, board: board, label: testing) }
let!(:development) { create(:label, project: project, name: 'Development') }
let!(:testing) { create(:label, project: project, name: 'Testing') }
@@ -176,6 +221,35 @@ RSpec.describe Label do
expect(described_class.on_board(board.id)).to match_array([development, testing])
end
end
+
+ describe '.with_lock_on_merge' do
+ let(:label) { create(:label, project: project, name: 'Label') }
+ let(:label_locked) { create(:label, project: project, name: 'Label locked', lock_on_merge: true) }
+
+ it 'return only locked labels' do
+ expect(described_class.with_lock_on_merge).to match_array([label_locked])
+ end
+ end
+ end
+
+ describe 'destroying labels' do
+ context 'when lock_on_merge is true' do
+ it 'prevents label from being destroyed' do
+ label = create(:label, lock_on_merge: true)
+
+ expect(label.destroy).to be false
+ expect(label.errors.full_messages).to include("#{label.name} is locked and was not removed")
+ end
+ end
+
+ context 'when lock_on_merge is false' do
+ it 'allows label to be destroyed' do
+ label = create(:label, lock_on_merge: false)
+
+ expect(label.destroy).to eq label
+ expect(label.destroyed?).to be_truthy
+ end
+ end
end
describe '#color' do
diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb
index 0c16a725663..ed80f5c1516 100644
--- a/spec/models/loose_foreign_keys/deleted_record_spec.rb
+++ b/spec/models/loose_foreign_keys/deleted_record_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
+RSpec.describe LooseForeignKeys::DeletedRecord, type: :model, feature_category: :database do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:table) { 'public.projects' }
describe 'class methods' do
@@ -14,14 +16,30 @@ RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
let(:records) { described_class.load_batch_for_table(table, 10) }
describe '.load_batch_for_table' do
- it 'loads records and orders them by creation date' do
- expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4])
+ where(:union_feature_flag_value) do
+ [true, false]
end
- it 'supports configurable batch size' do
- records = described_class.load_batch_for_table(table, 2)
+ with_them do
+ before do
+ stub_feature_flags('loose_foreign_keys_batch_load_using_union' => union_feature_flag_value)
+ end
+
+ it 'loads records and orders them by creation date' do
+ expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4])
+ end
+
+ it 'supports configurable batch size' do
+ records = described_class.load_batch_for_table(table, 2)
+
+ expect(records).to eq([deleted_record_1, deleted_record_2])
+ end
- expect(records).to eq([deleted_record_1, deleted_record_2])
+ it 'returns the partition number in each returned record' do
+ records = described_class.load_batch_for_table(table, 4)
+
+ expect(records).to all(have_attributes(partition: (a_value > 0)))
+ end
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index d21edea9751..f8aaae3edad 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -351,6 +351,19 @@ RSpec.describe Member, feature_category: :groups_and_projects do
it { is_expected.to include(expiring_tomorrow, not_expiring) }
end
+ describe '.expiring_and_not_notified' do
+ let_it_be(:expiring_in_5_days) { create(:group_member, expires_at: 5.days.from_now) }
+ let_it_be(:expiring_in_5_days_with_notified) { create(:group_member, expires_at: 5.days.from_now, expiry_notified_at: Date.today) }
+ let_it_be(:expiring_in_7_days) { create(:group_member, expires_at: 7.days.from_now) }
+ let_it_be(:expiring_in_10_days) { create(:group_member, expires_at: 10.days.from_now) }
+ let_it_be(:not_expiring) { create(:group_member) }
+
+ subject { described_class.expiring_and_not_notified(7.days.from_now.to_date) }
+
+ it { is_expected.not_to include(expiring_in_5_days_with_notified, expiring_in_10_days, not_expiring) }
+ it { is_expected.to include(expiring_in_5_days, expiring_in_7_days) }
+ end
+
describe '.created_today' do
let_it_be(:now) { Time.current }
let_it_be(:created_today) { create(:group_member, created_at: now.beginning_of_day) }
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 3eb34bf9493..a2b5bde8890 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectMember do
+RSpec.describe ProjectMember, feature_category: :groups_and_projects do
describe 'associations' do
it { is_expected.to belong_to(:project).with_foreign_key(:source_id) }
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index bf71d289105..da3f691b63a 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1869,16 +1869,25 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
describe '#committers' do
- it 'returns all the committers of every commit in the merge request' do
- users = subject.commits.without_merge_commits.map(&:committer_email).uniq.map do |email|
- create(:user, email: email)
- end
+ let(:commits) { double }
+ let(:committers) { double }
+
+ context 'when not given with_merge_commits' do
+ it 'calls committers on the commits object with the expected param' do
+ expect(subject).to receive(:commits).and_return(commits)
+ expect(commits).to receive(:committers).with(with_merge_commits: false).and_return(committers)
- expect(subject.committers).to match_array(users)
+ expect(subject.committers).to eq(committers)
+ end
end
- it 'returns an empty array if no committer is associated with a user' do
- expect(subject.committers).to be_empty
+ context 'when given with_merge_commits true' do
+ it 'calls committers on the commits object with the expected param' do
+ expect(subject).to receive(:commits).and_return(commits)
+ expect(commits).to receive(:committers).with(with_merge_commits: true).and_return(committers)
+
+ expect(subject.committers(with_merge_commits: true)).to eq(committers)
+ end
end
end
@@ -3257,6 +3266,15 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
end
+
+ context 'with check_mergeability_retry_lease option' do
+ it 'call check_mergeability with sync_retry_lease' do
+ allow(subject).to receive(:mergeable_state?) { true }
+ expect(subject).to receive(:check_mergeability).with(sync_retry_lease: true)
+
+ subject.mergeable?(check_mergeability_retry_lease: true)
+ end
+ end
end
describe '#skipped_mergeable_checks' do
@@ -3291,6 +3309,14 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
subject.check_mergeability
end
+ context 'when sync_retry_lease is true' do
+ it 'executes MergeabilityCheckService' do
+ expect(mergeability_service).to receive(:execute).with(retry_lease: true)
+
+ subject.check_mergeability(sync_retry_lease: true)
+ end
+ end
+
context 'when async is true' do
it 'executes MergeabilityCheckService asynchronously' do
expect(mergeability_service).to receive(:async_execute)
@@ -5124,24 +5150,39 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let(:merge_request) { build(:merge_request, source_project: create(:project, :repository)) }
- it 'does schedule MergeRequests::CleanupRefWorker' do
- expect(MergeRequests::CleanupRefWorker).to receive(:perform_async).with(merge_request.id, 'train')
+ it 'deletes refs asynchronously' do
+ expect(merge_request.target_project.repository)
+ .to receive(:async_delete_refs)
+ .with(merge_request.train_ref_path)
subject
end
- context 'when merge_request_cleanup_ref_worker_async is disabled' do
+ context 'when merge_request_delete_gitaly_refs_in_batches is disabled' do
before do
- stub_feature_flags(merge_request_cleanup_ref_worker_async: false)
+ stub_feature_flags(merge_request_delete_gitaly_refs_in_batches: false)
end
- it 'deletes all refs from the target project' do
- expect(merge_request.target_project.repository)
- .to receive(:delete_refs)
- .with(merge_request.train_ref_path)
+ it 'does schedule MergeRequests::CleanupRefWorker' do
+ expect(MergeRequests::CleanupRefWorker).to receive(:perform_async).with(merge_request.id, 'train')
subject
end
+
+ context 'when merge_request_cleanup_ref_worker_async is disabled' do
+ before do
+ stub_feature_flags(merge_request_delete_gitaly_refs_in_batches: false)
+ stub_feature_flags(merge_request_cleanup_ref_worker_async: false)
+ end
+
+ it 'deletes all refs from the target project' do
+ expect(merge_request.target_project.repository)
+ .to receive(:delete_refs)
+ .with(merge_request.train_ref_path)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/models/metrics/dashboard/annotation_spec.rb b/spec/models/metrics/dashboard/annotation_spec.rb
index 9b8601e4052..7c4f392fcdc 100644
--- a/spec/models/metrics/dashboard/annotation_spec.rb
+++ b/spec/models/metrics/dashboard/annotation_spec.rb
@@ -5,11 +5,6 @@ require 'spec_helper'
RSpec.describe Metrics::Dashboard::Annotation do
using RSpec::Parameterized::TableSyntax
- describe 'associations' do
- it { is_expected.to belong_to(:environment).inverse_of(:metrics_dashboard_annotations) }
- it { is_expected.to belong_to(:cluster).class_name('Clusters::Cluster').inverse_of(:metrics_dashboard_annotations) }
- end
-
describe 'validation' do
it { is_expected.to validate_presence_of(:description) }
it { is_expected.to validate_presence_of(:dashboard_path) }
@@ -18,18 +13,6 @@ RSpec.describe Metrics::Dashboard::Annotation do
it { is_expected.to validate_length_of(:panel_xid).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(255) }
- context 'orphaned annotation' do
- subject { build(:metrics_dashboard_annotation, environment: nil) }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error about both missing relations' do
- subject.valid?
-
- expect(subject.errors.full_messages).to include(/Annotation must belong to a cluster or an environment/)
- end
- end
-
context 'ending_at_after_starting_at' do
where(:starting_at, :ending_at, :valid?, :message) do
2.days.ago.beginning_of_day | 1.day.ago.beginning_of_day | true | nil
@@ -49,28 +32,6 @@ RSpec.describe Metrics::Dashboard::Annotation do
end
end
end
-
- context 'environments annotation' do
- subject { build(:metrics_dashboard_annotation) }
-
- it { is_expected.to be_valid }
- end
-
- context 'clusters annotation' do
- subject { build(:metrics_dashboard_annotation, :with_cluster) }
-
- it { is_expected.to be_valid }
- end
-
- context 'annotation with shared ownership' do
- subject { build(:metrics_dashboard_annotation, :with_cluster, environment: build(:environment)) }
-
- it 'reports error about both shared ownership' do
- subject.valid?
-
- expect(subject.errors.full_messages).to include(/Annotation can't belong to both a cluster and an environment at the same time/)
- end
- end
end
describe 'scopes' do
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 1f0f89fea60..15bcbb3962c 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -322,7 +322,7 @@ RSpec.describe Milestone, feature_category: :team_planning do
let_it_be(:group) { create(:group) }
let_it_be(:group_other) { create(:group) }
- before(:all) do
+ before_all do
create(:milestone, project: project)
create(:milestone, project: project_other)
create(:milestone, group: group)
diff --git a/spec/models/ml/experiment_spec.rb b/spec/models/ml/experiment_spec.rb
index 1ee35d6da03..36bdb611833 100644
--- a/spec/models/ml/experiment_spec.rb
+++ b/spec/models/ml/experiment_spec.rb
@@ -79,6 +79,45 @@ RSpec.describe Ml::Experiment, feature_category: :mlops do
end
end
+ describe '.find_or_create' do
+ let(:name) { exp.name }
+ let(:project) { exp.project }
+
+ subject(:find_or_create) { described_class.find_or_create(project, name, exp.user) }
+
+ context 'when experiments exists' do
+ it 'fetches existing experiment', :aggregate_failures do
+ expect { find_or_create }.not_to change { Ml::Experiment.count }
+
+ expect(find_or_create).to eq(exp)
+ end
+ end
+
+ context 'when experiments does not exist' do
+ let(:name) { 'a new experiment' }
+
+ it 'creates the experiment', :aggregate_failures do
+ expect { find_or_create }.to change { Ml::Experiment.count }.by(1)
+
+ expect(find_or_create.name).to eq(name)
+ expect(find_or_create.user).to eq(exp.user)
+ expect(find_or_create.project).to eq(project)
+ end
+ end
+
+ context 'when experiment name exists but project is different' do
+ let(:project) { create(:project) }
+
+ it 'creates a model', :aggregate_failures do
+ expect { find_or_create }.to change { Ml::Experiment.count }.by(1)
+
+ expect(find_or_create.name).to eq(name)
+ expect(find_or_create.user).to eq(exp.user)
+ expect(find_or_create.project).to eq(project)
+ end
+ end
+ end
+
describe '#with_candidate_count' do
let_it_be(:exp3) do
create(:ml_experiments, project: exp.project).tap do |e|
diff --git a/spec/models/ml/model_spec.rb b/spec/models/ml/model_spec.rb
index 397ea23dd85..42d8ed5c0c5 100644
--- a/spec/models/ml/model_spec.rb
+++ b/spec/models/ml/model_spec.rb
@@ -3,24 +3,27 @@
require 'spec_helper'
RSpec.describe Ml::Model, feature_category: :mlops do
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:existing_model) { create(:ml_models, name: 'an_existing_model', project: project1) }
+ let_it_be(:another_existing_model) { create(:ml_models, name: 'an_existing_model', project: project2) }
+ let_it_be(:valid_name) { 'a_valid_name' }
+ let_it_be(:default_experiment) { create(:ml_experiments, name: valid_name, project: project1) }
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to have_one(:default_experiment) }
it { is_expected.to have_many(:versions) }
+ it { is_expected.to have_one(:latest_version).class_name('Ml::ModelVersion').inverse_of(:model) }
end
describe '#valid?' do
using RSpec::Parameterized::TableSyntax
- let_it_be(:project) { create(:project) }
- let_it_be(:existing_model) { create(:ml_models, name: 'an_existing_model', project: project) }
- let_it_be(:valid_name) { 'a_valid_name' }
- let_it_be(:default_experiment) { create(:ml_experiments, name: valid_name, project: project) }
-
let(:name) { valid_name }
subject(:errors) do
- m = described_class.new(name: name, project: project, default_experiment: default_experiment)
+ m = described_class.new(name: name, project: project1, default_experiment: default_experiment)
m.validate
m.errors
end
@@ -52,11 +55,67 @@ RSpec.describe Ml::Model, feature_category: :mlops do
context 'when model version project is different than model project' do
before do
- allow(default_experiment).to receive(:project_id).and_return(project.id + 1)
+ allow(default_experiment).to receive(:project_id).and_return(project1.id + 1)
end
it { expect(errors).to include(:default_experiment) }
end
end
+
+ describe '.by_project' do
+ subject { described_class.by_project(project1) }
+
+ it { is_expected.to match_array([existing_model]) }
+ end
+
+ describe '.including_latest_version' do
+ subject { described_class.including_latest_version }
+
+ it 'loads latest version' do
+ expect(subject.first.association_cached?(:latest_version)).to be(true)
+ end
+ end
+ end
+
+ describe '.find_or_create' do
+ subject(:find_or_create) { described_class.find_or_create(project, name, experiment) }
+
+ let(:name) { existing_model.name }
+ let(:project) { existing_model.project }
+ let(:experiment) { default_experiment }
+
+ context 'when model name does not exist in the project' do
+ let(:name) { 'new_model' }
+ let(:experiment) { build(:ml_experiments, name: name, project: project) }
+
+ it 'creates a model', :aggregate_failures do
+ expect { find_or_create }.to change { Ml::Model.count }.by(1)
+
+ expect(find_or_create.name).to eq(name)
+ expect(find_or_create.project).to eq(project)
+ expect(find_or_create.default_experiment).to eq(experiment)
+ end
+ end
+
+ context 'when model name exists but project is different' do
+ let(:project) { create(:project) }
+ let(:experiment) { build(:ml_experiments, name: name, project: project) }
+
+ it 'creates a model', :aggregate_failures do
+ expect { find_or_create }.to change { Ml::Model.count }.by(1)
+
+ expect(find_or_create.name).to eq(name)
+ expect(find_or_create.project).to eq(project)
+ expect(find_or_create.default_experiment).to eq(experiment)
+ end
+ end
+
+ context 'when model exists' do
+ it 'fetches existing model', :aggregate_failures do
+ expect { find_or_create }.not_to change { Ml::Model.count }
+
+ expect(find_or_create).to eq(existing_model)
+ end
+ end
end
end
diff --git a/spec/models/ml/model_version_spec.rb b/spec/models/ml/model_version_spec.rb
index ef53a1ac3a0..4bb272fef5d 100644
--- a/spec/models/ml/model_version_spec.rb
+++ b/spec/models/ml/model_version_spec.rb
@@ -6,6 +6,13 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
using RSpec::Parameterized::TableSyntax
let_it_be(:base_project) { create(:project) }
+ let_it_be(:model1) { create(:ml_models, project: base_project) }
+ let_it_be(:model2) { create(:ml_models, project: base_project) }
+
+ let_it_be(:model_version1) { create(:ml_model_versions, model: model1) }
+ let_it_be(:model_version2) { create(:ml_model_versions, model: model_version1.model) }
+ let_it_be(:model_version3) { create(:ml_model_versions, model: model2) }
+ let_it_be(:model_version4) { create(:ml_model_versions, model: model_version3.model) }
describe 'associations' do
it { is_expected.to belong_to(:project) }
@@ -14,17 +21,16 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
end
describe 'validation' do
- let_it_be(:valid_version) { 'valid_version' }
- let_it_be(:model) { create(:ml_models, project: base_project) }
+ let_it_be(:valid_version) { '1.0.0' }
let_it_be(:valid_package) do
- build_stubbed(:ml_model_package, project: base_project, version: valid_version, name: model.name)
+ build_stubbed(:ml_model_package, project: base_project, version: valid_version, name: model1.name)
end
let(:package) { valid_package }
let(:version) { valid_version }
subject(:errors) do
- mv = described_class.new(version: version, model: model, package: package, project: model.project)
+ mv = described_class.new(version: version, model: model1, package: package, project: model1.project)
mv.validate
mv.errors
end
@@ -45,7 +51,7 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
context 'when version is not unique in project+name' do
let_it_be(:existing_model_version) do
- create(:ml_model_versions, model: model)
+ create(:ml_model_versions, model: model1)
end
let(:version) { existing_model_version.version }
@@ -57,7 +63,7 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
describe 'model' do
context 'when project is different' do
before do
- allow(model).to receive(:project_id).and_return(non_existing_record_id)
+ allow(model1).to receive(:project_id).and_return(non_existing_record_id)
end
it { expect(errors[:model]).to include('model project must be the same') }
@@ -80,11 +86,57 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
context 'when package is not ml_model' do
let(:package) do
- build_stubbed(:generic_package, project: base_project, name: model.name, version: valid_version)
+ build_stubbed(:generic_package, project: base_project, name: model1.name, version: valid_version)
end
it { expect(errors[:package]).to include('package must be ml_model') }
end
end
end
+
+ describe '#find_or_create!' do
+ let_it_be(:existing_model_version) { create(:ml_model_versions, model: model1, version: '1.0.0') }
+
+ let(:version) { existing_model_version.version }
+ let(:package) { nil }
+
+ subject(:find_or_create) { described_class.find_or_create!(model1, version, package) }
+
+ context 'if model version exists' do
+ it 'returns the model version', :aggregate_failures do
+ expect { find_or_create }.not_to change { Ml::ModelVersion.count }
+ is_expected.to eq(existing_model_version)
+ end
+ end
+
+ context 'if model version does not exist' do
+ let(:version) { '2.0.0' }
+ let(:package) { create(:ml_model_package, project: model1.project, name: model1.name, version: version) }
+
+ it 'creates another model version', :aggregate_failures do
+ expect { find_or_create }.to change { Ml::ModelVersion.count }.by(1)
+ model_version = find_or_create
+
+ expect(model_version.version).to eq(version)
+ expect(model_version.model).to eq(model1)
+ expect(model_version.package).to eq(package)
+ end
+ end
+ end
+
+ describe '.order_by_model_id_id_desc' do
+ subject { described_class.order_by_model_id_id_desc }
+
+ it 'orders by (model_id, id desc)' do
+ is_expected.to match_array([model_version2, model_version1, model_version4, model_version3])
+ end
+ end
+
+ describe '.latest_by_model' do
+ subject { described_class.latest_by_model }
+
+ it 'returns only the latest model version per model id' do
+ is_expected.to match_array([model_version4, model_version2])
+ end
+ end
end
diff --git a/spec/models/namespace/aggregation_schedule_spec.rb b/spec/models/namespace/aggregation_schedule_spec.rb
index ea9dddf2513..c05344ff729 100644
--- a/spec/models/namespace/aggregation_schedule_spec.rb
+++ b/spec/models/namespace/aggregation_schedule_spec.rb
@@ -16,21 +16,11 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
aggregation_schedule.save!
end
- context 'when reduce_aggregation_schedule_lease FF is enabled' do
- it 'returns namespace_aggregation_schedule_lease_duration value from Gitlabsettings' do
- allow(::Gitlab::CurrentSettings).to receive(:namespace_aggregation_schedule_lease_duration_in_seconds)
- .and_return(240)
- stub_feature_flags(reduce_aggregation_schedule_lease: true)
+ it 'returns namespace_aggregation_schedule_lease_duration value from Gitlab CurrentSettings' do
+ allow(::Gitlab::CurrentSettings).to receive(:namespace_aggregation_schedule_lease_duration_in_seconds)
+ .and_return(240)
- expect(aggregation_schedule.default_lease_timeout).to eq 4.minutes.to_i
- end
- end
-
- context 'when reduce_aggregation_schedule_lease FF is disabled' do
- it 'is 30 minutes' do
- stub_feature_flags(reduce_aggregation_schedule_lease: false)
- expect(aggregation_schedule.default_lease_timeout).to eq 30.minutes.to_i
- end
+ expect(aggregation_schedule.default_lease_timeout).to eq 4.minutes.to_i
end
end
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
index 9dfb58301b1..f3fda200fda 100644
--- a/spec/models/namespace/package_setting_spec.rb
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespace::PackageSetting do
+RSpec.describe Namespace::PackageSetting, feature_category: :package_registry do
describe 'relationships' do
it { is_expected.to belong_to(:namespace) }
end
@@ -15,6 +15,9 @@ RSpec.describe Namespace::PackageSetting do
it { is_expected.not_to allow_value(nil).for(:maven_duplicates_allowed) }
it { is_expected.to allow_value(true, false).for(:generic_duplicates_allowed) }
it { is_expected.not_to allow_value(nil).for(:generic_duplicates_allowed) }
+ it { is_expected.to allow_value(true).for(:nuget_duplicates_allowed) }
+ it { is_expected.to allow_value(false).for(:nuget_duplicates_allowed) }
+ it { is_expected.not_to allow_value(nil).for(:nuget_duplicates_allowed) }
end
describe 'regex values' do
@@ -25,7 +28,7 @@ RSpec.describe Namespace::PackageSetting do
valid_regexps = %w[SNAPSHOT .* v.+ v10.1.* (?:v.+|SNAPSHOT|TEMP)]
invalid_regexps = ['[', '(?:v.+|SNAPSHOT|TEMP']
- [:maven_duplicate_exception_regex, :generic_duplicate_exception_regex].each do |attribute|
+ %i[maven_duplicate_exception_regex generic_duplicate_exception_regex nuget_duplicate_exception_regex].each do |attribute|
valid_regexps.each do |valid_regexp|
it { is_expected.to allow_value(valid_regexp).for(attribute) }
end
@@ -44,18 +47,18 @@ RSpec.describe Namespace::PackageSetting do
context 'package types with package_settings' do
# As more package types gain settings they will be added to this list
- [:maven_package, :generic_package].each do |format|
+ %i[maven_package generic_package nuget_package].each do |format|
context "with package_type:#{format}" do
- let_it_be(:package) { create(format, name: 'foo', version: 'beta') } # rubocop:disable Rails/SaveBang
+ let_it_be(:package) { create(format, name: 'foo', version: '1.0.0-beta') }
let_it_be(:package_type) { package.package_type }
let_it_be(:package_setting) { package.project.namespace.package_settings }
where(:duplicates_allowed, :duplicate_exception_regex, :result) do
- true | '' | true
- false | '' | false
- false | '.*' | true
- false | 'fo.*' | true
- false | 'be.*' | true
+ true | '' | true
+ false | '' | false
+ false | '.*' | true
+ false | 'fo.*' | true
+ false | '.*be.*' | true
end
with_them do
@@ -75,7 +78,7 @@ RSpec.describe Namespace::PackageSetting do
end
context 'package types without package_settings' do
- [:npm_package, :conan_package, :nuget_package, :pypi_package, :composer_package, :golang_package, :debian_package].each do |format|
+ %i[npm_package conan_package pypi_package composer_package golang_package debian_package].each do |format|
context "with package_type:#{format}" do
let_it_be(:package) { create(format) } # rubocop:disable Rails/SaveBang
let_it_be(:package_setting) { package.project.namespace.package_settings }
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index f2c661c1cfb..4b66b7532a7 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
total_lfs_objects_size = project_stat1.lfs_objects_size + project_stat2.lfs_objects_size
total_build_artifacts_size = project_stat1.build_artifacts_size + project_stat2.build_artifacts_size
total_packages_size = project_stat1.packages_size + project_stat2.packages_size
- total_storage_size = project_stat1.storage_size + project_stat2.storage_size
+ total_storage_size = project_stat1.reload.storage_size + project_stat2.reload.storage_size
total_snippets_size = project_stat1.snippets_size + project_stat2.snippets_size
total_pipeline_artifacts_size = project_stat1.pipeline_artifacts_size + project_stat2.pipeline_artifacts_size
total_uploads_size = project_stat1.uploads_size + project_stat2.uploads_size
@@ -64,7 +64,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.reload
- total_storage_size = project_stat1.storage_size + project_stat2.storage_size + 999
+ total_storage_size = project_stat1.reload.storage_size + project_stat2.reload.storage_size + 999
expect(root_storage_statistics.container_registry_size).to eq(999)
expect(root_storage_statistics.storage_size).to eq(total_storage_size)
@@ -162,7 +162,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
total_dependency_proxy_size = root_namespace_stat.dependency_proxy_size +
group1_namespace_stat.dependency_proxy_size + group2_namespace_stat.dependency_proxy_size +
subgroup1_namespace_stat.dependency_proxy_size
- total_storage_size = project_stat1.storage_size + project_stat2.storage_size +
+ total_storage_size = project_stat1.reload.storage_size + project_stat2.reload.storage_size +
root_namespace_stat.storage_size + group1_namespace_stat.storage_size +
group2_namespace_stat.storage_size + subgroup1_namespace_stat.storage_size
@@ -183,7 +183,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- total_storage_size = project_stat1.storage_size + project_stat2.storage_size
+ total_storage_size = project_stat1.reload.storage_size + project_stat2.reload.storage_size
expect(root_storage_statistics.storage_size).to eq(total_storage_size)
end
@@ -204,7 +204,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.storage_size).to eq(project_stat1.storage_size + project_stat2.storage_size)
+ expect(root_storage_statistics.storage_size)
+ .to eq(project_stat1.reload.storage_size + project_stat2.reload.storage_size)
expect(root_storage_statistics.dependency_proxy_size).to eq(0)
end
@@ -249,7 +250,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.reload.private_forks_storage_size)
+ .to eq(project_fork.statistics.reload.storage_size)
end
it 'aggregates total public forks size' do
@@ -258,7 +260,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.public_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.reload.public_forks_storage_size)
+ .to eq(project_fork.statistics.reload.storage_size)
end
it 'aggregates total internal forks size' do
@@ -267,7 +270,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.reload.internal_forks_storage_size)
+ .to eq(project_fork.statistics.reload.storage_size)
end
it 'aggregates multiple forks' do
@@ -277,7 +281,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- total_size = fork_a.statistics.storage_size + fork_b.statistics.storage_size
+ total_size = fork_a.statistics.reload.storage_size + fork_b.statistics.reload.storage_size
expect(root_storage_statistics.reload.private_forks_storage_size).to eq(total_size)
end
@@ -289,7 +293,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.private_forks_storage_size).to eq(fork_a.statistics.storage_size)
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(fork_a.statistics.reload.storage_size)
end
it 'aggregates forks in subgroups' do
@@ -299,7 +303,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.reload.private_forks_storage_size)
+ .to eq(project_fork.statistics.reload.storage_size)
end
it 'aggregates forks along with total storage size' do
@@ -309,9 +314,9 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
root_storage_statistics.reload
- expect(root_storage_statistics.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(project_fork.statistics.reload.storage_size)
- total = project.statistics.storage_size + project_fork.statistics.storage_size
+ total = project.statistics.storage_size + project_fork.statistics.reload.storage_size
expect(root_storage_statistics.storage_size).to eq(total)
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 1c02b4754fa..623c9c7e07c 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -504,6 +504,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
it { is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy).to(:namespace_settings).allow_nil }
it { is_expected.to delegate_method(:runner_registration_enabled).to(:namespace_settings) }
it { is_expected.to delegate_method(:runner_registration_enabled?).to(:namespace_settings) }
+ it { is_expected.to delegate_method(:default_branch_protection_defaults).to(:namespace_settings) }
it { is_expected.to delegate_method(:allow_runner_registration_token).to(:namespace_settings) }
it { is_expected.to delegate_method(:maven_package_requests_forwarding).to(:package_settings) }
it { is_expected.to delegate_method(:pypi_package_requests_forwarding).to(:package_settings) }
@@ -555,6 +556,22 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
end
end
+
+ describe '#default_branch_protection_defaults' do
+ context 'when namespace_settings is nil' do
+ before do
+ allow(subject).to receive(:namespace_settings).and_return(nil)
+ end
+
+ it 'does not raise an error' do
+ expect { subject.default_branch_protection_defaults }.not_to raise_error
+ end
+
+ it 'returns nil' do
+ expect(subject.default_branch_protection_defaults).to be_nil
+ end
+ end
+ end
end
describe "Respond to" do
@@ -2389,7 +2406,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
context 'when parent has shared runners disabled but allows override' do
- let(:parent) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+ let(:parent) { create(:group, :shared_runners_disabled_and_overridable) }
let(:group) { build(:group, shared_runners_enabled: true, parent_id: parent.id) }
it 'is valid' do
@@ -2415,7 +2432,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
context 'when namespace is a group' do
context 'without a parent' do
context 'with shared runners disabled' do
- let(:namespace) { build(:group, :allow_descendants_override_disabled_shared_runners, :shared_runners_disabled) }
+ let(:namespace) { build(:group, :shared_runners_disabled_and_overridable) }
it 'is valid' do
expect(namespace).to be_valid
@@ -2423,13 +2440,13 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
context 'with shared runners enabled' do
- let(:namespace) { create(:namespace) }
+ let(:namespace) { build(:group) }
it 'is invalid' do
namespace.allow_descendants_override_disabled_shared_runners = true
expect(namespace).to be_invalid
- expect(namespace.errors[:allow_descendants_override_disabled_shared_runners]).to include('cannot be changed if shared runners are enabled')
+ expect(namespace.errors[:allow_descendants_override_disabled_shared_runners]).to include('can not be true if shared runners are enabled')
end
end
end
@@ -2437,7 +2454,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
context 'with a parent' do
context 'when parent does not allow shared runners' do
let(:parent) { create(:group, :shared_runners_disabled) }
- let(:group) { build(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent_id: parent.id) }
+ let(:group) { build(:group, :shared_runners_disabled_and_overridable, parent_id: parent.id) }
it 'is invalid' do
expect(group).to be_invalid
@@ -2447,7 +2464,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
context 'when parent allows shared runners and setting to true' do
let(:parent) { create(:group, shared_runners_enabled: true) }
- let(:group) { build(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent_id: parent.id) }
+ let(:group) { build(:group, :shared_runners_disabled_and_overridable, parent_id: parent.id) }
it 'is valid' do
expect(group).to be_valid
diff --git a/spec/models/namespaces/project_namespace_spec.rb b/spec/models/namespaces/project_namespace_spec.rb
index 78403db7fa8..c635d6e54e7 100644
--- a/spec/models/namespaces/project_namespace_spec.rb
+++ b/spec/models/namespaces/project_namespace_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Namespaces::ProjectNamespace, type: :model do
describe 'relationships' do
- it { is_expected.to have_one(:project).with_foreign_key(:project_namespace_id).inverse_of(:project_namespace) }
+ it { is_expected.to have_one(:project).inverse_of(:project_namespace) }
specify do
project = create(:project)
@@ -32,4 +32,79 @@ RSpec.describe Namespaces::ProjectNamespace, type: :model do
expect { project.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ describe '.create_from_project!' do
+ context 'when namespace does not exist' do
+ it 'new project_namespace is not saved' do
+ expect_any_instance_of(described_class) do |instance|
+ expect(instance).not_to receive(:save!)
+ end
+
+ project = Project.new(namespace: nil)
+ described_class.create_from_project!(project)
+ end
+ end
+
+ context 'for new record when namespace exists' do
+ let(:project) { build(:project) }
+ let(:project_namespace) { project.project_namespace }
+
+ it 'syncs the project attributes to project namespace' do
+ project_name = 'project 1 name'
+ project.name = project_name
+
+ described_class.create_from_project!(project)
+ expect(project.project_namespace.name).to eq(project_name)
+ end
+
+ context 'when project has an unsaved project namespace' do
+ it 'saves the same project namespace' do
+ described_class.create_from_project!(project)
+
+ expect(project_namespace).to be_persisted
+ end
+ end
+ end
+ end
+
+ describe '#sync_attributes_from_project' do
+ context 'with existing project' do
+ let(:project) { create(:project) }
+ let(:project_namespace) { project.project_namespace }
+ let(:project_new_namespace) { create(:namespace) }
+ let(:project_new_path) { 'project-new-path' }
+ let(:project_new_name) { project_new_path.titleize }
+ let(:project_new_visibility_level) { Gitlab::VisibilityLevel::INTERNAL }
+ let(:project_shared_runners_enabled) { !project.shared_runners_enabled }
+
+ before do
+ project.name = project_new_name
+ project.path = project_new_path
+ project.visibility_level = project_new_visibility_level
+ project.namespace = project_new_namespace
+ project.shared_runners_enabled = project_shared_runners_enabled
+ end
+
+ it 'syncs the relevant keys from the project' do
+ project_namespace.sync_attributes_from_project(project)
+
+ expect(project_namespace.name).to eq(project_new_name)
+ expect(project_namespace.path).to eq(project_new_path)
+ expect(project_namespace.visibility_level).to eq(project_new_visibility_level)
+ expect(project_namespace.namespace).to eq(project_new_namespace)
+ expect(project_namespace.namespace_id).to eq(project_new_namespace.id)
+ expect(project_namespace.shared_runners_enabled).to eq(project_shared_runners_enabled)
+ end
+ end
+
+ it 'syncs visibility_level if project is new' do
+ project = build(:project)
+ project_namespace = project.project_namespace
+ project_namespace.visibility_level = Gitlab::VisibilityLevel::PUBLIC
+
+ project_namespace.sync_attributes_from_project(project)
+
+ expect(project_namespace.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
end
diff --git a/spec/models/network/graph_spec.rb b/spec/models/network/graph_spec.rb
index 16894bf28f1..d0c73d6285c 100644
--- a/spec/models/network/graph_spec.rb
+++ b/spec/models/network/graph_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Network::Graph do
+RSpec.describe Network::Graph, feature_category: :source_code_management do
let(:project) { create(:project, :repository) }
let!(:note_on_commit) { create(:note_on_commit, project: project) }
@@ -36,6 +36,12 @@ RSpec.describe Network::Graph do
expect(commits).to all(be_kind_of(Network::Commit))
end
+ it 'only fetches the commits once', :request_store do
+ expect(Gitlab::Git::Commit).to receive(:find_all).once.and_call_original
+
+ graph
+ end
+
it 'sorts commits by commit date (descending)' do
# Remove duplicate timestamps because they make it harder to
# assert that the commits are sorted as expected.
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index e99d77dc0a0..0fc689b9f6c 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -795,6 +795,24 @@ RSpec.describe Note, feature_category: :team_planning do
expect(note.system_note_visible_for?(nil)).to be_truthy
end
end
+
+ context 'when referenced resource is not present' do
+ let(:note) do
+ create :note, noteable: ext_issue, project: ext_proj, note: "mentioned in merge request !1", system: true
+ end
+
+ it "returns true for other users" do
+ expect(note.system_note_visible_for?(private_user)).to be_truthy
+ end
+
+ it "returns true if user visible reference count set" do
+ note.user_visible_reference_count = 0
+ note.total_reference_count = 0
+
+ expect(note).not_to receive(:reference_mentionables)
+ expect(note.system_note_visible_for?(ext_issue.author)).to be_truthy
+ end
+ end
end
describe '#system_note_with_references?' do
@@ -1588,6 +1606,24 @@ RSpec.describe Note, feature_category: :team_planning do
.with("/#{noteable.project.namespace.to_param}/#{noteable.project.to_param}/noteable/#{noteable.class.name.underscore}/#{noteable.id}/notes")
end
+ it 'broadcasts an Action Cable event for the noteable' do
+ expect(Noteable::NotesChannel).to receive(:broadcast_to).with(note.noteable, event: 'updated')
+
+ note.save!
+ end
+
+ context 'when action_cable_notes is disabled' do
+ before do
+ stub_feature_flags(action_cable_notes: false)
+ end
+
+ it 'does not broadcast an Action Cable event' do
+ expect(Noteable::NotesChannel).not_to receive(:broadcast_to)
+
+ note.save!
+ end
+ end
+
it "expires cache for note's issue when note is saved" do
expect_expiration(note.noteable)
diff --git a/spec/models/operations/feature_flags/strategy_spec.rb b/spec/models/operations/feature_flags/strategy_spec.rb
index 91a465025a2..6d55f94b496 100644
--- a/spec/models/operations/feature_flags/strategy_spec.rb
+++ b/spec/models/operations/feature_flags/strategy_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:name]).to eq(['strategy name is invalid'])
+ expect(strategy.errors[:name]).to eq([s_('Validation|strategy name is invalid')])
expect(strategy.errors[:parameters]).to be_empty
end
end
@@ -51,7 +51,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq(['parameters are invalid'])
+ expect(strategy.errors[:parameters]).to eq([s_('Validation|parameters are invalid')])
end
end
@@ -83,7 +83,9 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq(['percentage must be a string between 0 and 100 inclusive'])
+ expect(strategy.errors[:parameters]).to eq([
+ s_('Validation|percentage must be a string between 0 and 100 inclusive')
+ ])
end
end
@@ -120,7 +122,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq(['groupId parameter is invalid'])
+ expect(strategy.errors[:parameters]).to eq([s_('Validation|groupId parameter is invalid')])
end
end
@@ -164,7 +166,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq(['parameters are invalid'])
+ expect(strategy.errors[:parameters]).to eq([s_('Validation|parameters are invalid')])
end
end
@@ -201,7 +203,9 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq(['rollout must be a string between 0 and 100 inclusive'])
+ expect(strategy.errors[:parameters]).to eq([
+ s_('Validation|rollout must be a string between 0 and 100 inclusive')
+ ])
end
end
@@ -307,7 +311,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq(['parameters are invalid'])
+ expect(strategy.errors[:parameters]).to eq([s_('Validation|parameters are invalid')])
end
end
@@ -365,7 +369,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq(['parameters are invalid'])
+ expect(strategy.errors[:parameters]).to eq([s_('Validation|parameters are invalid')])
end
end
@@ -394,7 +398,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq(['parameters are invalid'])
+ expect(strategy.errors[:parameters]).to eq([s_('Validation|parameters are invalid')])
end
end
@@ -435,7 +439,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:user_list]).to eq(['must belong to the same project'])
+ expect(strategy.errors[:user_list]).to eq([s_('Validation|must belong to the same project')])
end
end
diff --git a/spec/models/organizations/organization_spec.rb b/spec/models/organizations/organization_spec.rb
index a9cac30e9a1..7838fc1c5a4 100644
--- a/spec/models/organizations/organization_spec.rb
+++ b/spec/models/organizations/organization_spec.rb
@@ -162,4 +162,22 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
expect(described_class.where(id: organization)).not_to exist
end
end
+
+ describe '#user?' do
+ let_it_be(:user) { create :user }
+
+ subject { organization.user?(user) }
+
+ context 'when user is an organization user' do
+ before do
+ create :organization_user, organization: organization, user: user
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when user is not an organization user' do
+ it { is_expected.to eq false }
+ end
+ end
end
diff --git a/spec/models/packages/nuget/metadatum_spec.rb b/spec/models/packages/nuget/metadatum_spec.rb
index 4b02353d6e8..e1520c0782f 100644
--- a/spec/models/packages/nuget/metadatum_spec.rb
+++ b/spec/models/packages/nuget/metadatum_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::Metadatum, type: :model, feature_category: :package_registry do
+ it { is_expected.to be_a Packages::Nuget::VersionNormalizable }
+
describe 'relationships' do
it { is_expected.to belong_to(:package).inverse_of(:nuget_metadatum) }
end
@@ -15,6 +17,18 @@ RSpec.describe Packages::Nuget::Metadatum, type: :model, feature_category: :pack
it { is_expected.to validate_presence_of(:description) }
it { is_expected.to validate_length_of(:description).is_at_most(described_class::MAX_DESCRIPTION_LENGTH) }
+ context 'for normalized_version presence' do
+ it { is_expected.to validate_presence_of(:normalized_version) }
+
+ context 'when nuget_normalized_version feature flag is disabled' do
+ before do
+ stub_feature_flags(nuget_normalized_version: false)
+ end
+
+ it { is_expected.not_to validate_presence_of(:normalized_version) }
+ end
+ end
+
%i[license_url project_url icon_url].each do |url|
describe "##{url}" do
it { is_expected.to allow_value('http://sandbox.com').for(url) }
@@ -36,4 +50,54 @@ RSpec.describe Packages::Nuget::Metadatum, type: :model, feature_category: :pack
end
end
end
+
+ it { is_expected.to delegate_method(:version).to(:package).with_prefix }
+
+ describe '.normalized_version_in' do
+ let_it_be(:nuget_metadatums) { create_list(:nuget_metadatum, 2) }
+
+ subject { described_class.normalized_version_in(nuget_metadatums.first.normalized_version) }
+
+ it { is_expected.to contain_exactly(nuget_metadatums.first) }
+ end
+
+ describe 'callbacks' do
+ describe '#set_normalized_version' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:nuget_metadatum) { create(:nuget_metadatum) }
+
+ where(:version, :normalized_version) do
+ '1.0' | '1.0.0'
+ '1.0.0.0' | '1.0.0'
+ '0.1' | '0.1.0'
+ '1.0.7+r3456' | '1.0.7'
+ '8.0.0.00+RC.54' | '8.0.0'
+ '1.0.0-Alpha' | '1.0.0-alpha'
+ '1.0.00-RC-02' | '1.0.0-rc-02'
+ '8.0.000-preview.0.546.0' | '8.0.0-preview.0.546.0'
+ '0.1.0-dev.37+0999370' | '0.1.0-dev.37'
+ '1.2.3' | '1.2.3'
+ end
+
+ with_them do
+ it 'saves the normalized version' do
+ nuget_metadatum.package.update_column(:version, version)
+ nuget_metadatum.save!
+
+ expect(nuget_metadatum.normalized_version).to eq(normalized_version)
+ end
+
+ context 'when the nuget_normalized_version feature flag is disabled' do
+ before do
+ stub_feature_flags(nuget_normalized_version: false)
+ end
+
+ it 'does not save the normalized version' do
+ expect(nuget_metadatum.normalized_version).not_to eq(normalized_version)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 120b7d72cd9..381b5af117e 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -867,6 +867,24 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
+ describe '.with_npm_scope' do
+ let_it_be(:package1) { create(:npm_package, name: '@test/foobar') }
+ let_it_be(:package2) { create(:npm_package, name: '@test2/foobar') }
+ let_it_be(:package3) { create(:npm_package, name: 'foobar') }
+
+ subject { described_class.with_npm_scope('test') }
+
+ it { is_expected.to contain_exactly(package1) }
+
+ context 'when npm_package_registry_fix_group_path_validation is disabled' do
+ before do
+ stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
+ end
+
+ it { is_expected.to contain_exactly(package1) }
+ end
+ end
+
describe '.without_nuget_temporary_name' do
let!(:package1) { create(:nuget_package) }
let!(:package2) { create(:nuget_package, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) }
@@ -958,6 +976,35 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it { is_expected.to match_array([nuget_package]) }
end
+ describe '.with_case_insensitive_name' do
+ let_it_be(:nuget_package) { create(:nuget_package, name: 'TestPackage') }
+
+ subject { described_class.with_case_insensitive_name('testpackage') }
+
+ it { is_expected.to match_array([nuget_package]) }
+ end
+
+ describe '.with_nuget_version_or_normalized_version' do
+ let_it_be(:nuget_package) { create(:nuget_package, :with_metadatum, version: '1.0.7+r3456') }
+
+ before do
+ nuget_package.nuget_metadatum.update_column(:normalized_version, '1.0.7')
+ end
+
+ subject { described_class.with_nuget_version_or_normalized_version(version, with_normalized: with_normalized) }
+
+ where(:version, :with_normalized, :expected) do
+ '1.0.7' | true | [ref(:nuget_package)]
+ '1.0.7' | false | []
+ '1.0.7+r3456' | true | [ref(:nuget_package)]
+ '1.0.7+r3456' | false | [ref(:nuget_package)]
+ end
+
+ with_them do
+ it { is_expected.to match_array(expected) }
+ end
+ end
+
context 'status scopes' do
let_it_be(:default_package) { create(:maven_package, :default) }
let_it_be(:hidden_package) { create(:maven_package, :hidden) }
@@ -1014,32 +1061,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
- describe '.select_only_first_by_name' do
- let_it_be(:project) { create(:project) }
- let_it_be(:package1) { create(:package, name: 'p1', created_at: 1000, project: project) }
- let_it_be(:package2) { create(:package, name: 'p1', created_at: 1001, project: project) }
- let_it_be(:package3) { create(:package, name: 'p2', project: project) }
-
- subject { described_class.order_name_desc_version_desc.select_only_first_by_name }
-
- it 'returns only the most recent package by name' do
- is_expected.to eq([package3, package2])
- end
- end
-
- describe '.order_name_desc_version_desc' do
- let_it_be(:project) { create(:project) }
- let_it_be(:package1) { create(:package, name: 'p1', created_at: 1000, project: project) }
- let_it_be(:package2) { create(:package, name: 'p1', created_at: 1001, project: project) }
- let_it_be(:package3) { create(:package, name: 'p2', project: project) }
-
- subject { described_class.order_name_desc_version_desc }
-
- it 'sorts packages by name desc and created desc' do
- is_expected.to eq([package3, package2, package1])
- end
- end
-
context 'sorting' do
let_it_be(:project) { create(:project, name: 'aaa') }
let_it_be(:project2) { create(:project, name: 'bbb') }
@@ -1440,6 +1461,19 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
+ describe '#normalized_nuget_version' do
+ let_it_be(:package) { create(:nuget_package, :with_metadatum, version: '1.0') }
+ let(:normalized_version) { '1.0.0' }
+
+ subject { package.normalized_nuget_version }
+
+ before do
+ package.nuget_metadatum.update_column(:normalized_version, normalized_version)
+ end
+
+ it { is_expected.to eq(normalized_version) }
+ end
+
describe "#publish_creation_event" do
let_it_be(:project) { create(:project) }
diff --git a/spec/models/pages_deployment_spec.rb b/spec/models/pages_deployment_spec.rb
index 553491f6eff..bff69485e43 100644
--- a/spec/models/pages_deployment_spec.rb
+++ b/spec/models/pages_deployment_spec.rb
@@ -64,58 +64,20 @@ RSpec.describe PagesDeployment, feature_category: :pages do
stub_pages_object_storage(::Pages::DeploymentUploader)
end
- describe '#store_after_commit?' do
- context 'when feature flag pages_deploy_upload_file_outside_transaction is disabled' do
- it 'returns false' do
- Feature.disable(:pages_deploy_upload_file_outside_transaction)
-
- deployment = create(:pages_deployment, project: project)
- expect(deployment.store_after_commit?).to eq(false)
- end
+ it 'stores the file outsize of the transaction' do
+ expect_next_instance_of(PagesDeployment) do |deployment|
+ expect(deployment).to receive(:store_file_now!)
end
- context 'when feature flag pages_deploy_upload_file_outside_transaction is enabled' do
- it 'returns true' do
- deployment = create(:pages_deployment, project: project)
- expect(deployment.store_after_commit?).to eq(true)
- end
- end
- end
-
- context 'when feature flag pages_deploy_upload_file_outside_transaction is disabled' do
- before do
- Feature.disable(:pages_deploy_upload_file_outside_transaction)
- end
-
- it 'stores the file within the transaction' do
- expect_next_instance_of(PagesDeployment) do |deployment|
- expect(deployment).not_to receive(:store_file_now!)
- end
-
- create(:pages_deployment, project: project)
- end
+ create(:pages_deployment, project: project)
end
- context 'when feature flag pages_deploy_upload_file_outside_transaction is enabled' do
- before do
- Feature.enable(:pages_deploy_upload_file_outside_transaction)
- end
-
- it 'stores the file outsize of the transaction' do
- expect_next_instance_of(PagesDeployment) do |deployment|
- expect(deployment).to receive(:store_file_now!)
- end
+ it 'does nothing when the file did not change' do
+ deployment = create(:pages_deployment, project: project)
- create(:pages_deployment, project: project)
- end
+ expect(deployment).not_to receive(:store_file_now!)
- it 'does nothing when the file did not change' do
- deployment = create(:pages_deployment, project: project)
-
- expect(deployment).not_to receive(:store_file_now!)
-
- deployment.touch
- end
+ deployment.touch
end
end
diff --git a/spec/models/performance_monitoring/prometheus_dashboard_spec.rb b/spec/models/performance_monitoring/prometheus_dashboard_spec.rb
deleted file mode 100644
index f338e5439ad..00000000000
--- a/spec/models/performance_monitoring/prometheus_dashboard_spec.rb
+++ /dev/null
@@ -1,277 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PerformanceMonitoring::PrometheusDashboard do
- let(:json_content) do
- {
- "dashboard" => "Dashboard Title",
- "templating" => {
- "variables" => {
- "variable1" => %w(value1 value2 value3)
- }
- },
- "panel_groups" => [{
- "group" => "Group Title",
- "panels" => [{
- "type" => "area-chart",
- "title" => "Chart Title",
- "y_label" => "Y-Axis",
- "metrics" => [{
- "id" => "metric_of_ages",
- "unit" => "count",
- "label" => "Metric of Ages",
- "query_range" => "http_requests_total"
- }]
- }]
- }]
- }
- end
-
- describe '.from_json' do
- subject { described_class.from_json(json_content) }
-
- it 'creates a PrometheusDashboard object' do
- expect(subject).to be_a described_class
- expect(subject.dashboard).to eq(json_content['dashboard'])
- expect(subject.panel_groups).to all(be_a PerformanceMonitoring::PrometheusPanelGroup)
- end
-
- describe 'validations' do
- shared_examples 'validation failed' do |errors_messages|
- it 'raises error with corresponding messages', :aggregate_failures do
- expect { subject }.to raise_error do |error|
- expect(error).to be_kind_of(ActiveModel::ValidationError)
- expect(error.model.errors.messages).to eq(errors_messages)
- end
- end
- end
-
- context 'dashboard content is missing' do
- let(:json_content) { nil }
-
- it_behaves_like 'validation failed', panel_groups: ["should be an array of panel_groups objects"], dashboard: ["can't be blank"]
- end
-
- context 'dashboard content is NOT a hash' do
- let(:json_content) { YAML.safe_load("'test'") }
-
- it_behaves_like 'validation failed', panel_groups: ["should be an array of panel_groups objects"], dashboard: ["can't be blank"]
- end
-
- context 'content is an array' do
- let(:json_content) { [{ "dashboard" => "Dashboard Title" }] }
-
- it_behaves_like 'validation failed', panel_groups: ["should be an array of panel_groups objects"], dashboard: ["can't be blank"]
- end
-
- context 'dashboard definition is missing panels_groups and dashboard keys' do
- let(:json_content) do
- {
- "dashboard" => nil
- }
- end
-
- it_behaves_like 'validation failed', panel_groups: ["should be an array of panel_groups objects"], dashboard: ["can't be blank"]
- end
-
- context 'group definition is missing panels and group keys' do
- let(:json_content) do
- {
- "dashboard" => "Dashboard Title",
- "templating" => {
- "variables" => {
- "variable1" => %w(value1 value2 value3)
- }
- },
- "panel_groups" => [{ "group" => nil }]
- }
- end
-
- it_behaves_like 'validation failed', panels: ["should be an array of panels objects"], group: ["can't be blank"]
- end
-
- context 'panel definition is missing metrics and title keys' do
- let(:json_content) do
- {
- "dashboard" => "Dashboard Title",
- "templating" => {
- "variables" => {
- "variable1" => %w(value1 value2 value3)
- }
- },
- "panel_groups" => [{
- "group" => "Group Title",
- "panels" => [{
- "type" => "area-chart",
- "y_label" => "Y-Axis"
- }]
- }]
- }
- end
-
- it_behaves_like 'validation failed', metrics: ["should be an array of metrics objects"], title: ["can't be blank"]
- end
-
- context 'metrics definition is missing unit, query and query_range keys' do
- let(:json_content) do
- {
- "dashboard" => "Dashboard Title",
- "templating" => {
- "variables" => {
- "variable1" => %w(value1 value2 value3)
- }
- },
- "panel_groups" => [{
- "group" => "Group Title",
- "panels" => [{
- "type" => "area-chart",
- "title" => "Chart Title",
- "y_label" => "Y-Axis",
- "metrics" => [{
- "id" => "metric_of_ages",
- "label" => "Metric of Ages",
- "query_range" => nil
- }]
- }]
- }]
- }
- end
-
- it_behaves_like 'validation failed', unit: ["can't be blank"], query_range: ["can't be blank"], query: ["can't be blank"]
- end
-
- # for each parent entry validation first is done to its children,
- # whole execution is stopped on first encountered error
- # which is the one that is reported
- context 'multiple offences on different levels' do
- let(:json_content) do
- {
- "dashboard" => nil,
- "panel_groups" => [{
- "group" => nil,
- "panels" => [{
- "type" => "area-chart",
- "title" => nil,
- "y_label" => "Y-Axis",
- "metrics" => [{
- "id" => "metric_of_ages",
- "label" => "Metric of Ages",
- "query_range" => 'query'
- }, {
- "id" => "metric_of_ages",
- "unit" => "count",
- "label" => "Metric of Ages",
- "query_range" => nil
- }]
- }]
- }, {
- "group" => 'group',
- "panels" => nil
- }]
- }
- end
-
- it_behaves_like 'validation failed', unit: ["can't be blank"]
- end
- end
- end
-
- describe '.find_for' do
- let(:project) { build_stubbed(:project) }
- let(:user) { build_stubbed(:user) }
- let(:environment) { build_stubbed(:environment, project: project) }
- let(:path) { ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH }
-
- context 'dashboard has been found' do
- it 'uses dashboard finder to find and load dashboard data and returns dashboard instance', :aggregate_failures do
- expect(Gitlab::Metrics::Dashboard::Finder).to receive(:find).with(project, user, { environment: environment, dashboard_path: path }).and_return(status: :success, dashboard: json_content)
-
- dashboard_instance = described_class.find_for(project: project, user: user, path: path, options: { environment: environment })
-
- expect(dashboard_instance).to be_instance_of described_class
- expect(dashboard_instance.environment).to eq environment
- expect(dashboard_instance.path).to eq path
- end
- end
-
- context 'dashboard has NOT been found' do
- it 'returns nil' do
- allow(Gitlab::Metrics::Dashboard::Finder).to receive(:find).and_return(http_status: :not_found)
-
- dashboard_instance = described_class.find_for(project: project, user: user, path: path, options: { environment: environment })
-
- expect(dashboard_instance).to be_nil
- end
- end
-
- context 'dashboard has invalid schema', :aggregate_failures do
- it 'still returns dashboard object' do
- expect(Gitlab::Metrics::Dashboard::Finder).to receive(:find).and_return(http_status: :unprocessable_entity)
-
- dashboard_instance = described_class.find_for(project: project, user: user, path: path, options: { environment: environment })
-
- expect(dashboard_instance).to be_instance_of described_class
- expect(dashboard_instance.environment).to eq environment
- expect(dashboard_instance.path).to eq path
- end
- end
- end
-
- describe '#schema_validation_warnings' do
- let(:environment) { create(:environment, project: project) }
- let(:path) { '.gitlab/dashboards/test.yml' }
- let(:project) { create(:project, :repository, :custom_repo, files: { path => dashboard_schema.to_yaml }) }
-
- subject(:schema_validation_warnings) { described_class.new(dashboard_schema.merge(path: path, environment: environment)).schema_validation_warnings }
-
- before do
- allow(Gitlab::Metrics::Dashboard::Finder).to receive(:find_raw).with(project, dashboard_path: path).and_call_original
- end
-
- context 'when schema is valid' do
- let(:dashboard_schema) { YAML.safe_load(fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml')) }
-
- it 'returns empty array' do
- expect(described_class).to receive(:from_json).with(dashboard_schema)
-
- expect(schema_validation_warnings).to eq []
- end
- end
-
- context 'when schema is invalid' do
- let(:dashboard_schema) { YAML.safe_load(fixture_file('lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml')) }
-
- it 'returns array with errors messages' do
- instance = described_class.new
- instance.errors.add(:test, 'test error')
-
- expect(described_class).to receive(:from_json).and_raise(ActiveModel::ValidationError.new(instance))
- expect(described_class.new.schema_validation_warnings).to eq ['test: test error']
- end
- end
-
- context 'when YAML has wrong syntax' do
- let(:project) { create(:project, :repository, :custom_repo, files: { path => fixture_file('lib/gitlab/metrics/dashboard/broken_yml_syntax.yml') }) }
-
- subject(:schema_validation_warnings) { described_class.new(path: path, environment: environment).schema_validation_warnings }
-
- it 'returns array with errors messages' do
- expect(described_class).not_to receive(:from_json)
-
- expect(schema_validation_warnings).to eq ['Invalid yaml']
- end
- end
- end
-
- describe '#to_yaml' do
- subject { prometheus_dashboard.to_yaml }
-
- let(:prometheus_dashboard) { described_class.from_json(json_content) }
- let(:expected_yaml) do
- "---\npanel_groups:\n- panels:\n - metrics:\n - id: metric_of_ages\n unit: count\n label: Metric of Ages\n query: \n query_range: http_requests_total\n type: area-chart\n title: Chart Title\n y_label: Y-Axis\n weight: \n group: Group Title\n priority: \ndashboard: Dashboard Title\n"
- end
-
- it { is_expected.to eq(expected_yaml) }
- end
-end
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index bee1c4f47b0..d10f375788a 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -248,6 +248,7 @@ RSpec.describe PlanLimits do
ci_max_artifact_size_requirements_v2
ci_max_artifact_size_coverage_fuzzing
ci_max_artifact_size_api_fuzzing
+ ci_max_artifact_size_annotations
]
end
@@ -270,7 +271,7 @@ RSpec.describe PlanLimits do
end
let(:datetime_columns) do
- %w[dashboard_limit_enabled_at]
+ %w[dashboard_limit_enabled_at updated_at]
end
let(:history_columns) do
diff --git a/spec/models/pool_repository_spec.rb b/spec/models/pool_repository_spec.rb
index 9861e832bef..93c1e59458d 100644
--- a/spec/models/pool_repository_spec.rb
+++ b/spec/models/pool_repository_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PoolRepository do
+RSpec.describe PoolRepository, feature_category: :source_code_management do
describe 'associations' do
it { is_expected.to belong_to(:shard) }
it { is_expected.to belong_to(:source_project) }
@@ -16,12 +16,43 @@ RSpec.describe PoolRepository do
it { is_expected.to validate_presence_of(:source_project) }
end
+ describe 'scopes' do
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:new_shard) { create(:shard, name: 'new') }
+ let_it_be(:pool_repository1) { create(:pool_repository, source_project: project1) }
+ let_it_be(:pool_repository2) { create(:pool_repository, source_project: project1, shard: new_shard) }
+ let_it_be(:another_pool_repository) { create(:pool_repository, source_project: project2) }
+
+ describe '.by_source_project' do
+ subject { described_class.by_source_project(project1) }
+
+ it 'returns pool repositories per source project from all shards' do
+ is_expected.to match_array([pool_repository1, pool_repository2])
+ end
+ end
+
+ describe '.by_source_project_and_shard_name' do
+ subject { described_class.by_source_project_and_shard_name(project1, new_shard.name) }
+
+ it 'returns only a requested pool repository' do
+ is_expected.to match_array([pool_repository2])
+ end
+ end
+ end
+
describe '#disk_path' do
it 'sets the hashed disk_path' do
pool = create(:pool_repository)
expect(pool.disk_path).to match(%r{\A@pools/\h{2}/\h{2}/\h{64}})
end
+
+ it 'keeps disk_path if already provided' do
+ pool = create(:pool_repository, disk_path: '@pools/aa/bbbb')
+
+ expect(pool.disk_path).to eq('@pools/aa/bbbb')
+ end
end
describe '#unlink_repository' do
diff --git a/spec/models/postgresql/replication_slot_spec.rb b/spec/models/postgresql/replication_slot_spec.rb
index 35c166ab064..ae338864af3 100644
--- a/spec/models/postgresql/replication_slot_spec.rb
+++ b/spec/models/postgresql/replication_slot_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe Postgresql::ReplicationSlot do
context 'with enough slots available' do
skip_examples = described_class.max_replication_slots <= described_class.count
- before(:all) do
+ before_all do
skip('max_replication_slots too small') if skip_examples
@current_slot_count = described_class
diff --git a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
index 3fba2ac003b..5befa3ab66f 100644
--- a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Preloaders::UserMaxAccessLevelInGroupsPreloader do
+RSpec.describe Preloaders::UserMaxAccessLevelInGroupsPreloader, feature_category: :system_access do
let_it_be(:user) { create(:user) }
let_it_be(:group1) { create(:group, :private).tap { |g| g.add_developer(user) } }
let_it_be(:group2) { create(:group, :private).tap { |g| g.add_developer(user) } }
diff --git a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
index 17db284c61e..2070d6d167d 100644
--- a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
+RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader, feature_category: :system_access do
let_it_be(:user) { create(:user) }
let_it_be(:project_1) { create(:project) }
let_it_be(:project_2) { create(:project) }
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index dc4922d8114..2ba7f5c4ca4 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectAuthorization do
+RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
describe 'unique user, project authorizations' do
let_it_be(:user) { create(:user) }
let_it_be(:project_1) { create(:project) }
@@ -85,231 +85,4 @@ RSpec.describe ProjectAuthorization do
expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
end
end
-
- shared_examples_for 'does not log any detail' do
- it 'does not log any detail' do
- expect(Gitlab::AppLogger).not_to receive(:info)
-
- execute
- end
- end
-
- shared_examples_for 'logs the detail' do |batch_size:|
- it 'logs the detail' do
- expect(Gitlab::AppLogger).to receive(:info).with(
- entire_size: 3,
- message: 'Project authorizations refresh performed with delay',
- total_delay: (3 / batch_size.to_f).ceil * ProjectAuthorization::SLEEP_DELAY,
- **Gitlab::ApplicationContext.current
- )
-
- execute
- end
- end
-
- describe '.insert_all_in_batches' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project_1) { create(:project) }
- let_it_be(:project_2) { create(:project) }
- let_it_be(:project_3) { create(:project) }
-
- let(:attributes) do
- [
- { user_id: user.id, project_id: project_1.id, access_level: Gitlab::Access::MAINTAINER },
- { user_id: user.id, project_id: project_2.id, access_level: Gitlab::Access::MAINTAINER },
- { user_id: user.id, project_id: project_3.id, access_level: Gitlab::Access::MAINTAINER }
- ]
- end
-
- subject(:execute) { described_class.insert_all_in_batches(attributes, per_batch_size) }
-
- before do
- # Configure as if a replica database is enabled
- allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
- end
-
- shared_examples_for 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch' do
- specify do
- expect(described_class).not_to receive(:sleep)
-
- execute
-
- expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
- end
- end
-
- context 'when the total number of records to be inserted is greater than the batch size' do
- let(:per_batch_size) { 2 }
-
- it 'inserts the rows in batches, as per the `per_batch` size, with a delay between each batch' do
- expect(described_class).to receive(:insert_all).twice.and_call_original
- expect(described_class).to receive(:sleep).twice
-
- execute
-
- expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
- end
-
- it_behaves_like 'logs the detail', batch_size: 2
-
- context 'when the GitLab installation does not have a replica database configured' do
- before do
- # Configure as if a replica database is not enabled
- allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
- end
-
- it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch'
- it_behaves_like 'does not log any detail'
- end
- end
-
- context 'when the total number of records to be inserted is less than the batch size' do
- let(:per_batch_size) { 5 }
-
- it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch'
- it_behaves_like 'does not log any detail'
- end
- end
-
- describe '.delete_all_in_batches_for_project' do
- let_it_be(:project) { create(:project) }
- let_it_be(:user_1) { create(:user) }
- let_it_be(:user_2) { create(:user) }
- let_it_be(:user_3) { create(:user) }
- let_it_be(:user_4) { create(:user) }
-
- let(:user_ids) { [user_1.id, user_2.id, user_3.id] }
-
- subject(:execute) do
- described_class.delete_all_in_batches_for_project(
- project: project,
- user_ids: user_ids,
- per_batch: per_batch_size
- )
- end
-
- before do
- # Configure as if a replica database is enabled
- allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
- end
-
- before_all do
- create(:project_authorization, user: user_1, project: project)
- create(:project_authorization, user: user_2, project: project)
- create(:project_authorization, user: user_3, project: project)
- create(:project_authorization, user: user_4, project: project)
- end
-
- shared_examples_for 'removes the project authorizations of the specified users in the current project, without a delay between each batch' do
- specify do
- expect(described_class).not_to receive(:sleep)
-
- execute
-
- expect(project.project_authorizations.pluck(:user_id)).not_to include(*user_ids)
- end
- end
-
- context 'when the total number of records to be removed is greater than the batch size' do
- let(:per_batch_size) { 2 }
-
- it 'removes the project authorizations of the specified users in the current project, with a delay between each batch' do
- expect(described_class).to receive(:sleep).twice
-
- execute
-
- expect(project.project_authorizations.pluck(:user_id)).not_to include(*user_ids)
- end
-
- it_behaves_like 'logs the detail', batch_size: 2
-
- context 'when the GitLab installation does not have a replica database configured' do
- before do
- # Configure as if a replica database is not enabled
- allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
- end
-
- it_behaves_like 'removes the project authorizations of the specified users in the current project, without a delay between each batch'
- it_behaves_like 'does not log any detail'
- end
- end
-
- context 'when the total number of records to be removed is less than the batch size' do
- let(:per_batch_size) { 5 }
-
- it_behaves_like 'removes the project authorizations of the specified users in the current project, without a delay between each batch'
- it_behaves_like 'does not log any detail'
- end
- end
-
- describe '.delete_all_in_batches_for_user' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project_1) { create(:project) }
- let_it_be(:project_2) { create(:project) }
- let_it_be(:project_3) { create(:project) }
- let_it_be(:project_4) { create(:project) }
-
- let(:project_ids) { [project_1.id, project_2.id, project_3.id] }
-
- subject(:execute) do
- described_class.delete_all_in_batches_for_user(
- user: user,
- project_ids: project_ids,
- per_batch: per_batch_size
- )
- end
-
- before do
- # Configure as if a replica database is enabled
- allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
- end
-
- before_all do
- create(:project_authorization, user: user, project: project_1)
- create(:project_authorization, user: user, project: project_2)
- create(:project_authorization, user: user, project: project_3)
- create(:project_authorization, user: user, project: project_4)
- end
-
- shared_examples_for 'removes the project authorizations of the specified projects from the current user, without a delay between each batch' do
- specify do
- expect(described_class).not_to receive(:sleep)
-
- execute
-
- expect(user.project_authorizations.pluck(:project_id)).not_to include(*project_ids)
- end
- end
-
- context 'when the total number of records to be removed is greater than the batch size' do
- let(:per_batch_size) { 2 }
-
- it 'removes the project authorizations of the specified projects from the current user, with a delay between each batch' do
- expect(described_class).to receive(:sleep).twice
-
- execute
-
- expect(user.project_authorizations.pluck(:project_id)).not_to include(*project_ids)
- end
-
- it_behaves_like 'logs the detail', batch_size: 2
-
- context 'when the GitLab installation does not have a replica database configured' do
- before do
- # Configure as if a replica database is not enabled
- allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
- end
-
- it_behaves_like 'removes the project authorizations of the specified projects from the current user, without a delay between each batch'
- it_behaves_like 'does not log any detail'
- end
- end
-
- context 'when the total number of records to be removed is less than the batch size' do
- let(:per_batch_size) { 5 }
-
- it_behaves_like 'removes the project authorizations of the specified projects from the current user, without a delay between each batch'
- it_behaves_like 'does not log any detail'
- end
- end
end
diff --git a/spec/models/project_authorizations/changes_spec.rb b/spec/models/project_authorizations/changes_spec.rb
new file mode 100644
index 00000000000..d0718153d16
--- /dev/null
+++ b/spec/models/project_authorizations/changes_spec.rb
@@ -0,0 +1,326 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_projects do
+ describe '.apply!' do
+ subject(:apply_project_authorization_changes) { project_authorization_changes.apply! }
+
+ shared_examples_for 'does not log any detail' do
+ it 'does not log any detail' do
+ expect(Gitlab::AppLogger).not_to receive(:info)
+
+ apply_project_authorization_changes
+ end
+ end
+
+ shared_examples_for 'logs the detail' do |batch_size:|
+ it 'logs the detail' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ entire_size: 3,
+ message: 'Project authorizations refresh performed with delay',
+ total_delay: (3 / batch_size.to_f).ceil * ProjectAuthorizations::Changes::SLEEP_DELAY,
+ **Gitlab::ApplicationContext.current
+ )
+
+ apply_project_authorization_changes
+ end
+ end
+
+ shared_examples_for 'publishes AuthorizationsChangedEvent' do
+ it 'publishes an AuthorizationsChangedEvent event with project id' do
+ project_ids.each do |project_id|
+ project_data = { project_id: project_id }
+ project_event = instance_double('::ProjectAuthorizations::AuthorizationsChangedEvent', data: project_data)
+
+ allow(::ProjectAuthorizations::AuthorizationsChangedEvent).to receive(:new)
+ .with(data: project_data)
+ .and_return(project_event)
+
+ expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
+ end
+
+ apply_project_authorization_changes
+ end
+ end
+
+ shared_examples_for 'does not publishes AuthorizationsChangedEvent' do
+ it 'does not publishes a AuthorizationsChangedEvent event' do
+ expect(::Gitlab::EventStore).not_to receive(:publish)
+
+ apply_project_authorization_changes
+ end
+ end
+
+ context 'when new authorizations should be added' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+ let(:project_ids) { [project_1.id, project_2.id, project_3.id] }
+
+ let(:authorizations_to_add) do
+ [
+ { user_id: user.id, project_id: project_1.id, access_level: Gitlab::Access::MAINTAINER },
+ { user_id: user.id, project_id: project_2.id, access_level: Gitlab::Access::MAINTAINER },
+ { user_id: user.id, project_id: project_3.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
+ end
+
+ let(:project_authorization_changes) do
+ ProjectAuthorizations::Changes.new do |changes|
+ changes.add(authorizations_to_add)
+ end
+ end
+
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ end
+
+ shared_examples_for 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches' do
+ specify do
+ expect(project_authorization_changes).not_to receive(:sleep)
+
+ apply_project_authorization_changes
+
+ expect(user.project_authorizations.pluck(:user_id, :project_id,
+ :access_level)).to match_array(authorizations_to_add.map(&:values))
+ end
+ end
+
+ context 'when the total number of records to be inserted is greater than the batch size' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+ it 'inserts the rows in batches, as per the `per_batch` size, with a delay between each batch' do
+ expect(ProjectAuthorization).to receive(:insert_all).twice.and_call_original
+ expect(project_authorization_changes).to receive(:sleep).twice
+
+ apply_project_authorization_changes
+
+ expect(user.project_authorizations.pluck(:user_id, :project_id,
+ :access_level)).to match_array(authorizations_to_add.map(&:values))
+ end
+
+ it_behaves_like 'logs the detail', batch_size: 2
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches'
+ it_behaves_like 'does not log any detail'
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+ end
+ end
+
+ context 'when the total number of records to be inserted is less than the batch size' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 5)
+ end
+
+ it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches'
+ it_behaves_like 'does not log any detail'
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+ end
+ end
+
+ context 'when authorizations should be deleted for a project' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user_1) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:user_3) { create(:user) }
+ let_it_be(:user_4) { create(:user) }
+
+ let(:user_ids) { [user_1.id, user_2.id, user_3.id] }
+ let(:project_ids) { [project.id] }
+
+ let(:project_authorization_changes) do
+ ProjectAuthorizations::Changes.new do |changes|
+ changes.remove_users_in_project(project, user_ids)
+ end
+ end
+
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ end
+
+ before_all do
+ create(:project_authorization, user: user_1, project: project)
+ create(:project_authorization, user: user_2, project: project)
+ create(:project_authorization, user: user_3, project: project)
+ create(:project_authorization, user: user_4, project: project)
+ end
+
+ shared_examples_for 'removes project authorizations of the users in the current project, without a delay' do
+ specify do
+ expect(project_authorization_changes).not_to receive(:sleep)
+
+ apply_project_authorization_changes
+
+ expect(project.project_authorizations.pluck(:user_id)).not_to include(*user_ids)
+ end
+ end
+
+ shared_examples_for 'does not removes project authorizations of the users in the current project' do
+ it 'does not delete any project authorization' do
+ expect { apply_project_authorization_changes }.not_to change { project.project_authorizations.count }
+ end
+ end
+
+ context 'when the total number of records to be removed is greater than the batch size' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+ it 'removes project authorizations of the users in the current project, with a delay' do
+ expect(project_authorization_changes).to receive(:sleep).twice
+
+ apply_project_authorization_changes
+
+ expect(project.project_authorizations.pluck(:user_id)).not_to include(*user_ids)
+ end
+
+ it_behaves_like 'logs the detail', batch_size: 2
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'removes project authorizations of the users in the current project, without a delay'
+ it_behaves_like 'does not log any detail'
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+ end
+ end
+
+ context 'when the total number of records to be removed is less than the batch size' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 5)
+ end
+
+ it_behaves_like 'removes project authorizations of the users in the current project, without a delay'
+ it_behaves_like 'does not log any detail'
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+ end
+
+ context 'when the user_ids list is empty' do
+ let(:user_ids) { [] }
+
+ it_behaves_like 'does not removes project authorizations of the users in the current project'
+ it_behaves_like 'does not publishes AuthorizationsChangedEvent'
+ end
+
+ context 'when the user_ids list is nil' do
+ let(:user_ids) { nil }
+
+ it_behaves_like 'does not removes project authorizations of the users in the current project'
+ it_behaves_like 'does not publishes AuthorizationsChangedEvent'
+ end
+ end
+
+ describe 'when authorizations should be deleted for a user' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+ let_it_be(:project_4) { create(:project) }
+
+ let(:project_ids) { [project_1.id, project_2.id, project_3.id] }
+
+ let(:project_authorization_changes) do
+ ProjectAuthorizations::Changes.new do |changes|
+ changes.remove_projects_for_user(user, project_ids)
+ end
+ end
+
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ end
+
+ before_all do
+ create(:project_authorization, user: user, project: project_1)
+ create(:project_authorization, user: user, project: project_2)
+ create(:project_authorization, user: user, project: project_3)
+ create(:project_authorization, user: user, project: project_4)
+ end
+
+ shared_examples_for 'removes project authorizations of projects from the current user, without a delay' do
+ specify do
+ expect(project_authorization_changes).not_to receive(:sleep)
+
+ apply_project_authorization_changes
+
+ expect(user.project_authorizations.pluck(:project_id)).not_to include(*project_ids)
+ end
+ end
+
+ shared_examples_for 'does not removes any project authorizations from the current user' do
+ it 'does not delete any project authorization' do
+ expect { apply_project_authorization_changes }.not_to change { user.project_authorizations.count }
+ end
+ end
+
+ context 'when the total number of records to be removed is greater than the batch size' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+ it 'removes the project authorizations of projects from the current user, with a delay between each batch' do
+ expect(project_authorization_changes).to receive(:sleep).twice
+
+ apply_project_authorization_changes
+
+ expect(user.project_authorizations.pluck(:project_id)).not_to include(*project_ids)
+ end
+
+ it_behaves_like 'logs the detail', batch_size: 2
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'removes project authorizations of projects from the current user, without a delay'
+ it_behaves_like 'does not log any detail'
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+ end
+ end
+
+ context 'when the total number of records to be removed is less than the batch size' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 5)
+ end
+
+ it_behaves_like 'removes project authorizations of projects from the current user, without a delay'
+ it_behaves_like 'does not log any detail'
+ it_behaves_like 'publishes AuthorizationsChangedEvent'
+ end
+
+ context 'when the project_ids list is empty' do
+ let(:project_ids) { [] }
+
+ it_behaves_like 'does not removes any project authorizations from the current user'
+ it_behaves_like 'does not publishes AuthorizationsChangedEvent'
+ end
+
+ context 'when the project_ids list is nil' do
+ let(:project_ids) { nil }
+
+ it_behaves_like 'does not removes any project authorizations from the current user'
+ it_behaves_like 'does not publishes AuthorizationsChangedEvent'
+ end
+ end
+ end
+end
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index 5d06b30a529..3b890e75064 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -135,7 +135,7 @@ RSpec.describe ProjectSetting, type: :model, feature_category: :groups_and_proje
end
describe '#show_diff_preview_in_email?' do
- context 'when a project is a top-level namespace' do
+ context 'when a project has no parent group' do
let(:project_settings) { create(:project_setting, show_diff_preview_in_email: false) }
let(:project) { create(:project, project_setting: project_settings) }
@@ -157,75 +157,75 @@ RSpec.describe ProjectSetting, type: :model, feature_category: :groups_and_proje
end
end
- describe '#emails_enabled?' do
- context "when a project does not have a parent group" do
- let(:project_settings) { create(:project_setting, emails_enabled: true) }
- let(:project) { create(:project, project_setting: project_settings) }
-
- it "returns true" do
- expect(project.emails_enabled?).to be_truthy
- end
+ context 'when a parent group overrides project settings' do
+ let(:namespace_settings) { create(:namespace_settings, show_diff_preview_in_email: false) }
+ let(:project_settings) { create(:project_setting, show_diff_preview_in_email: true) }
+ let(:group) { create(:group, namespace_settings: namespace_settings) }
+ let(:project) { create(:project, namespace_id: group.id, project_setting: project_settings) }
- it "returns false when updating project settings" do
- project.update_attribute(:emails_disabled, false)
- expect(project.emails_enabled?).to be_truthy
+ context 'when show_diff_preview_in_email is disabled for the parent group' do
+ it 'returns false' do
+ expect(project).not_to be_show_diff_preview_in_email
end
end
- context "when a project has a parent group" do
- let(:namespace_settings) { create(:namespace_settings, emails_enabled: true) }
- let(:project_settings) { create(:project_setting, emails_enabled: true) }
- let(:group) { create(:group, namespace_settings: namespace_settings) }
- let(:project) do
- create(:project, namespace_id: group.id,
- project_setting: project_settings)
- end
-
- context 'when emails have been disabled in parent group' do
- it 'returns false' do
- group.update_attribute(:emails_disabled, true)
+ context 'when all ancestors have enabled diff previews' do
+ let(:namespace_settings) { create(:namespace_settings, show_diff_preview_in_email: true) }
- expect(project.emails_enabled?).to be_falsey
- end
+ it 'returns true' do
+ expect(project).to be_show_diff_preview_in_email
end
+ end
+ end
+ end
- context 'when emails are enabled in parent group' do
- before do
- allow(project.namespace).to receive(:emails_enabled?).and_return(true)
- end
+ describe '#emails_enabled?' do
+ context "when a project does not have a parent group" do
+ let_it_be(:project_settings) { create(:project_setting, emails_enabled: true) }
+ let_it_be(:project) { create(:project, project_setting: project_settings) }
- it 'returns true' do
- expect(project.emails_enabled?).to be_truthy
- end
+ it "returns true" do
+ expect(project.emails_enabled?).to be_truthy
+ end
- it 'returns false when disabled at the project' do
- project.update_attribute(:emails_disabled, true)
+ it "returns false when project_settings are set to false" do
+ project.project_setting.clear_memoization(:emails_enabled?)
+ project.update_attribute(:emails_enabled, false)
- expect(project.emails_enabled?).to be_falsey
- end
- end
+ expect(project.emails_enabled?).to be_falsey
end
end
- context 'when a parent group has a parent group' do
- let(:namespace_settings) { create(:namespace_settings, show_diff_preview_in_email: false) }
- let(:project_settings) { create(:project_setting, show_diff_preview_in_email: true) }
+ context "when a project has a parent group" do
+ let(:namespace_settings) { create(:namespace_settings, emails_enabled: true) }
+ let(:project_settings) { create(:project_setting, emails_enabled: true) }
let(:group) { create(:group, namespace_settings: namespace_settings) }
- let!(:project) { create(:project, namespace_id: group.id, project_setting: project_settings) }
+ let(:project) do
+ create(:project, namespace_id: group.id,
+ project_setting: project_settings)
+ end
- context 'when show_diff_preview_in_email is disabled for the parent group' do
+ context 'when emails have been disabled in parent group' do
it 'returns false' do
- expect(project).not_to be_show_diff_preview_in_email
+ group.update_attribute(:emails_disabled, true)
+
+ expect(project.emails_enabled?).to be_falsey
end
end
- context 'when all ancestors have enabled diff previews' do
- let(:namespace_settings) { create(:namespace_settings, show_diff_preview_in_email: true) }
+ context 'when emails are enabled in parent group' do
+ before do
+ allow(project.namespace).to receive(:emails_disabled?).and_return(false)
+ end
it 'returns true' do
- group.update_attribute(:show_diff_preview_in_email, true)
+ expect(project.emails_enabled?).to be_truthy
+ end
- expect(project).to be_show_diff_preview_in_email
+ it 'returns false when disabled at the project' do
+ project.update_attribute(:emails_enabled, false)
+
+ expect(project.emails_enabled?).to be_falsey
end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 538f6b363e9..5d622b8eccd 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:namespace) }
- it { is_expected.to belong_to(:project_namespace).class_name('Namespaces::ProjectNamespace').with_foreign_key('project_namespace_id') }
+ it { is_expected.to belong_to(:project_namespace).class_name('Namespaces::ProjectNamespace').with_foreign_key('project_namespace_id').inverse_of(:project) }
it { is_expected.to belong_to(:creator).class_name('User') }
it { is_expected.to belong_to(:pool_repository) }
it { is_expected.to have_many(:users) }
@@ -46,6 +46,8 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to have_one(:design_management_repository).class_name('DesignManagement::Repository').inverse_of(:project) }
it { is_expected.to have_one(:slack_integration) }
it { is_expected.to have_one(:catalog_resource) }
+ it { is_expected.to have_many(:ci_components).class_name('Ci::Catalog::Resources::Component') }
+ it { is_expected.to have_many(:catalog_resource_versions).class_name('Ci::Catalog::Resources::Version') }
it { is_expected.to have_one(:microsoft_teams_integration) }
it { is_expected.to have_one(:mattermost_integration) }
it { is_expected.to have_one(:hangouts_chat_integration) }
@@ -633,8 +635,8 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
it 'validates the visibility' do
- expect_any_instance_of(described_class).to receive(:visibility_level_allowed_as_fork).and_call_original
- expect_any_instance_of(described_class).to receive(:visibility_level_allowed_by_group).and_call_original
+ expect_any_instance_of(described_class).to receive(:visibility_level_allowed_as_fork).twice.and_call_original
+ expect_any_instance_of(described_class).to receive(:visibility_level_allowed_by_group).twice.and_call_original
create(:project)
end
@@ -1121,6 +1123,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
'group_runners_enabled' => '',
'default_git_depth' => 'ci_',
'forward_deployment_enabled' => 'ci_',
+ 'forward_deployment_rollback_allowed' => 'ci_',
'keep_latest_artifact' => '',
'restrict_user_defined_variables' => '',
'runner_token_expiration_interval' => '',
@@ -1147,6 +1150,12 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
+ describe '#ci_forward_deployment_rollback_allowed?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ let(:delegated_method) { :forward_deployment_rollback_allowed? }
+ end
+ end
+
describe '#ci_allow_fork_pipelines_to_run_in_parent_project?' do
it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
let(:delegated_method) { :allow_fork_pipelines_to_run_in_parent_project? }
@@ -3037,6 +3046,34 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
shard_name: 'foo'
)
end
+
+ it 'refreshes a memoized repository value' do
+ previous_repository = project.repository
+
+ allow(project).to receive(:disk_path).and_return('fancy/new/path')
+ allow(project).to receive(:repository_storage).and_return('foo')
+
+ project.track_project_repository
+
+ expect(project.repository).not_to eq(previous_repository)
+ end
+
+ context 'when "replicate_object_pool_on_move" FF is disabled' do
+ before do
+ stub_feature_flags(replicate_object_pool_on_move: false)
+ end
+
+ it 'does not update a memoized repository value' do
+ previous_repository = project.repository
+
+ allow(project).to receive(:disk_path).and_return('fancy/new/path')
+ allow(project).to receive(:repository_storage).and_return('foo')
+
+ project.track_project_repository
+
+ expect(project.repository).to eq(previous_repository)
+ end
+ end
end
end
@@ -3982,51 +4019,10 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
describe '#emails_disabled?' do
- let_it_be(:namespace) { create(:namespace) }
-
- let(:project) { build(:project, namespace: namespace, emails_disabled: false) }
-
- context 'emails disabled in group' do
- it 'returns true' do
- allow(project.namespace).to receive(:emails_disabled?) { true }
-
- expect(project.emails_disabled?).to be_truthy
- end
- end
-
- context 'emails enabled in group' do
- before do
- allow(project.namespace).to receive(:emails_disabled?) { false }
- end
-
- it 'returns false' do
- expect(project.emails_disabled?).to be_falsey
- end
-
- it 'returns true' do
- project.update_attribute(:emails_disabled, true)
-
- expect(project.emails_disabled?).to be_truthy
- end
- end
- end
-
- describe '#emails_enabled?' do
- context 'without a persisted project_setting object' do
- let(:project) { build(:project, emails_disabled: false) }
-
- it "is the opposite of emails_disabled" do
- expect(project.emails_enabled?).to be_truthy
- end
- end
-
- context 'with a persisted project_setting object' do
- let(:project_settings) { create(:project_setting, emails_enabled: true) }
- let(:project) { build(:project, emails_disabled: false, project_setting: project_settings) }
+ let(:project) { build(:project, emails_enabled: true) }
- it "is the opposite of emails_disabled" do
- expect(project.emails_enabled?).to be_truthy
- end
+ it "is the opposite of emails_disabled" do
+ expect(project.emails_disabled?).to be_falsey
end
end
@@ -6984,6 +6980,73 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
+ describe '#swap_pool_repository!' do
+ subject(:swap_pool_repository!) { project.swap_pool_repository! }
+
+ let_it_be_with_reload(:project) { create(:project, :empty_repo) }
+ let_it_be(:shard_to) { create(:shard, name: 'test_second_storage') }
+
+ let!(:pool1) { create(:pool_repository, source_project: project) }
+ let!(:pool2) { create(:pool_repository, shard: shard_to, source_project: project) }
+ let(:project_pool) { pool1 }
+ let(:repository_storage) { shard_to.name }
+
+ before do
+ stub_storage_settings(
+ 'test_second_storage' => {
+ 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
+ 'path' => TestEnv::SECOND_STORAGE_PATH
+ }
+ )
+
+ project.update!(pool_repository: project_pool, repository_storage: repository_storage)
+ end
+
+ shared_examples 'no pool repository swap' do
+ it 'does not change pool repository for the project' do
+ expect { swap_pool_repository! }.not_to change { project.reload.pool_repository }
+ end
+ end
+
+ it 'moves project to the new pool repository' do
+ expect { swap_pool_repository! }.to change { project.reload.pool_repository }.from(pool1).to(pool2)
+ end
+
+ context 'when feature flag replicate_object_pool_on_move is disabled' do
+ before do
+ stub_feature_flags(replicate_object_pool_on_move: false)
+ end
+
+ it_behaves_like 'no pool repository swap'
+ end
+
+ context 'when repository does not exist' do
+ let(:project) { build(:project) }
+
+ it_behaves_like 'no pool repository swap'
+ end
+
+ context 'when project does not have a pool repository' do
+ let(:project_pool) { nil }
+
+ it_behaves_like 'no pool repository swap'
+ end
+
+ context 'when project pool is on the same shard as repository' do
+ let(:project_pool) { pool2 }
+
+ it_behaves_like 'no pool repository swap'
+ end
+
+ context 'when pool repository for shard is missing' do
+ let(:pool2) { nil }
+
+ it 'raises record not found error' do
+ expect { swap_pool_repository! }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+
describe '#leave_pool_repository' do
let(:pool) { create(:pool_repository) }
let(:project) { create(:project, :repository, pool_repository: pool) }
@@ -7011,6 +7074,53 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
+ describe '#link_pool_repository' do
+ let(:pool) { create(:pool_repository) }
+ let(:project) { build(:project, :empty_repo, pool_repository: pool) }
+
+ subject { project.link_pool_repository }
+
+ it 'links pool repository to project repository' do
+ expect(pool).to receive(:link_repository).with(project.repository)
+
+ subject
+ end
+
+ context 'when pool repository is missing' do
+ let(:pool) { nil }
+
+ it 'does not link anything' do
+ allow_next_instance_of(PoolRepository) do |pool_repository|
+ expect(pool_repository).not_to receive(:link_repository)
+ end
+
+ subject
+ end
+ end
+
+ context 'when pool repository is on a different shard than the project repository' do
+ let(:pool) { create(:pool_repository, shard: create(:shard, name: 'new')) }
+
+ it 'does not link anything' do
+ expect(pool).not_to receive(:link_repository)
+
+ subject
+ end
+
+ context 'when feature flag replicate_object_pool_on_move is disabled' do
+ before do
+ stub_feature_flags(replicate_object_pool_on_move: false)
+ end
+
+ it 'links pool repository to project repository' do
+ expect(pool).to receive(:link_repository).with(project.repository)
+
+ subject
+ end
+ end
+ end
+ end
+
describe '#check_personal_projects_limit' do
context 'when creating a project for a group' do
it 'does nothing' do
@@ -7084,10 +7194,10 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
where(:shared_runners_setting, :project_shared_runners_enabled, :valid_record) do
:shared_runners_enabled | true | true
:shared_runners_enabled | false | true
- :disabled_and_overridable | true | true
- :disabled_and_overridable | false | true
- :disabled_and_unoverridable | true | false
- :disabled_and_unoverridable | false | true
+ :shared_runners_disabled_and_overridable | true | true
+ :shared_runners_disabled_and_overridable | false | true
+ :shared_runners_disabled_and_unoverridable | true | false
+ :shared_runners_disabled_and_unoverridable | false | true
end
with_them do
@@ -7333,6 +7443,32 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
+ describe '#pages_variables' do
+ let(:group) { build(:group, path: 'group') }
+ let(:project) { build(:project, path: 'project', namespace: group) }
+
+ it 'returns the pages variables' do
+ expect(project.pages_variables.to_hash).to eq({
+ 'CI_PAGES_DOMAIN' => 'example.com',
+ 'CI_PAGES_URL' => 'http://group.example.com/project'
+ })
+ end
+
+ it 'returns the pages variables when a unique domain is enabled' do
+ build(
+ :project_setting,
+ project: project,
+ pages_unique_domain_enabled: true,
+ pages_unique_domain: 'unique-domain'
+ )
+
+ expect(project.pages_variables.to_hash).to eq({
+ 'CI_PAGES_DOMAIN' => 'example.com',
+ 'CI_PAGES_URL' => 'http://unique-domain.example.com'
+ })
+ end
+ end
+
describe '#closest_setting' do
shared_examples_for 'fetching closest setting' do
let!(:namespace) { create(:namespace) }
@@ -8992,6 +9128,12 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to eq false }
end
+
+ context 'when there is no creator' do
+ let_it_be(:project) { build_stubbed(:project, creator: nil) }
+
+ it { is_expected.to eq false }
+ end
end
it_behaves_like 'something that has web-hooks' do
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 71c205fca7c..dd7989244d4 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -25,6 +25,55 @@ RSpec.describe ProjectStatistics do
end
end
+ describe 'callbacks' do
+ context 'on after_commit' do
+ context 'when storage size components are updated' do
+ it 'updates the correct storage size for relevant attributes' do
+ statistics.update!(repository_size: 10)
+
+ expect(statistics.reload.storage_size).to eq(10)
+ end
+ end
+
+ context 'when storage size components are not updated' do
+ it 'does not affect the storage_size total' do
+ statistics.update!(pipeline_artifacts_size: 3, container_registry_size: 50)
+
+ expect(statistics.reload.storage_size).to eq(0)
+ end
+ end
+ end
+
+ describe 'with race conditions' do
+ before do
+ statistics.update!(storage_size: 14621247)
+ end
+
+ it 'handles concurrent updates correctly' do
+ # Concurrently update the statistics in two different processes
+ t1 = Thread.new do
+ stats_1 = ProjectStatistics.find(statistics.id)
+ stats_1.snippets_size = 530
+ stats_1.save!
+ end
+
+ t2 = Thread.new do
+ stats_2 = ProjectStatistics.find(statistics.id)
+ ProjectStatistics.update_counters(stats_2.id, packages_size: 1000)
+ stats_2.refresh_storage_size!
+ end
+
+ [t1, t2].each(&:join)
+
+ # Reload the statistics object
+ statistics.reload
+
+ # The final storage size should be correctly updated
+ expect(statistics.storage_size).to eq(1530) # Final value is correct (snippets_size + packages_size)
+ end
+ end
+ end
+
describe 'statistics columns' do
it "supports bigint values" do
expect do
@@ -260,12 +309,13 @@ RSpec.describe ProjectStatistics do
describe '#update_repository_size' do
before do
- allow(project.repository).to receive(:size).and_return(12)
+ allow(project.repository).to receive(:recent_objects_size).and_return(5)
+
statistics.update_repository_size
end
- it "stores the size of the repository" do
- expect(statistics.repository_size).to eq 12.megabytes
+ it 'stores the size of the repository' do
+ expect(statistics.repository_size).to eq 5.megabytes
end
end
@@ -353,75 +403,6 @@ RSpec.describe ProjectStatistics do
end
end
- describe '#update_storage_size' do
- it "sums the relevant storage counters" do
- statistics.update!(
- repository_size: 2,
- wiki_size: 4,
- lfs_objects_size: 3,
- snippets_size: 2,
- build_artifacts_size: 3,
- packages_size: 6,
- uploads_size: 5
- )
-
- statistics.reload
-
- expect(statistics.storage_size).to eq 25
- end
-
- it 'excludes the container_registry_size' do
- statistics.update!(
- repository_size: 2,
- uploads_size: 5,
- container_registry_size: 10
- )
-
- statistics.reload
-
- expect(statistics.storage_size).to eq 7
- end
-
- it 'excludes the pipeline_artifacts_size' do
- statistics.update!(
- repository_size: 2,
- uploads_size: 5,
- pipeline_artifacts_size: 10
- )
-
- statistics.reload
-
- expect(statistics.storage_size).to eq 7
- end
-
- it 'works during wiki_size backfill' do
- statistics.update!(
- repository_size: 2,
- wiki_size: nil,
- lfs_objects_size: 3
- )
-
- statistics.reload
-
- expect(statistics.storage_size).to eq 5
- end
-
- context 'when nullable columns are nil' do
- it 'does not raise any error' do
- expect do
- statistics.update!(
- repository_size: 2,
- wiki_size: nil,
- lfs_objects_size: 3,
- snippets_size: nil
- )
- end.not_to raise_error
-
- expect(statistics.storage_size).to eq 5
- end
- end
- end
-
describe '#refresh_storage_size!' do
subject(:refresh_storage_size) { statistics.refresh_storage_size! }
@@ -447,6 +428,7 @@ RSpec.describe ProjectStatistics do
statistics.update_columns(
repository_size: 2,
wiki_size: nil,
+ snippets_size: nil,
storage_size: 0
)
end
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index f3139e72113..e557990c7e9 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -176,10 +176,13 @@ RSpec.describe ProjectTeam, feature_category: :groups_and_projects do
let_it_be(:source_project_owner) { source_project.first_owner }
let_it_be(:source_project_developer) { create(:user) { |user| source_project.add_developer(user) } }
let_it_be(:current_user) { create(:user) { |user| target_project.add_maintainer(user) } }
+ let(:imported_members) { [source_project_owner.members.last, source_project_developer.members.last] }
subject(:import) { target_project.team.import(source_project, current_user) }
- it { is_expected.to be_truthy }
+ it 'matches the imported members' do
+ is_expected.to match_array(imported_members)
+ end
it 'target project includes source member with the same access' do
import
@@ -219,6 +222,12 @@ RSpec.describe ProjectTeam, feature_category: :groups_and_projects do
let(:target_access_level) { Gitlab::Access::OWNER }
end
end
+
+ context 'when source_project does not exist' do
+ let_it_be(:source_project) { nil }
+
+ it { is_expected.to eq(false) }
+ end
end
describe '#find_member' do
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 446ef4180d2..164cef95cb6 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Release, feature_category: :release_orchestration do
it { is_expected.to have_many(:milestones) }
it { is_expected.to have_many(:milestone_releases) }
it { is_expected.to have_many(:evidences).class_name('Releases::Evidence') }
+ it { is_expected.to have_one(:catalog_resource_version).class_name('Ci::Catalog::Resources::Version') }
end
describe 'validation' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index aa2ac52a9ab..ea229ddf31f 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -297,6 +297,8 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
context 'with a commit with invalid UTF-8 path' do
+ let(:project) { create(:project, :empty_repo) }
+
it 'does not raise an error' do
response = create_file_in_repo(project, 'master', 'master', "hello\x80world", 'some contents')
@@ -319,6 +321,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
context 'with filename with pathspec characters' do
+ let(:project) { create(:project, :empty_repo) }
let(:filename) { ':wq' }
let(:newrev) { project.repository.commit('master').sha }
@@ -358,6 +361,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
context 'with filename with pathspec characters' do
+ let(:project) { create(:project, :empty_repo) }
let(:filename) { ':wq' }
let(:newrev) { project.repository.commit('master').sha }
@@ -1105,7 +1109,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
describe "#delete_file" do
- let_it_be(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository) }
it 'removes file successfully' do
expect do
@@ -2400,6 +2404,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
expect(repository).to receive(:expire_method_caches).with(
[
:size,
+ :recent_objects_size,
:commit_count,
:readme_path,
:contribution_guide,
@@ -2874,7 +2879,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
describe '#expire_statistics_caches' do
it 'expires the caches' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(size commit_count))
+ .with(%i(size recent_objects_size commit_count))
repository.expire_statistics_caches
end
@@ -3006,6 +3011,22 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
+ describe '#recent_objects_size' do
+ context 'with a non-existing repository' do
+ it 'returns 0' do
+ expect(repository).to receive(:exists?).and_return(false)
+
+ expect(repository.recent_objects_size).to eq(0.0)
+ end
+ end
+
+ context 'with an existing repository' do
+ it 'returns the repository recent_objects_size as a Float' do
+ expect(repository.recent_objects_size).to be_an_instance_of(Float)
+ end
+ end
+ end
+
describe '#local_branches' do
it 'returns the local branches' do
masterrev = repository.find_branch('master').dereferenced_target
@@ -3787,4 +3808,113 @@ RSpec.describe Repository, feature_category: :source_code_management do
include_examples 'does not delete branch'
end
end
+
+ describe '#get_patch_id' do
+ let(:project) { create(:project, :repository) }
+
+ it 'returns patch_id of given revisions' do
+ expect(repository.get_patch_id('HEAD~', 'HEAD')).to eq('45435e5d7b339dd76d939508c7687701d0c17fff')
+ end
+
+ context 'when one of the param is invalid' do
+ it 'raises an ArgumentError' do
+ expect { repository.get_patch_id('HEAD', nil) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when two revisions are the same' do
+ it 'raises a Gitlab::Git::CommandError' do
+ expect { repository.get_patch_id('HEAD', 'HEAD') }.to raise_error(Gitlab::Git::CommandError)
+ end
+ end
+ end
+
+ describe '#object_pool' do
+ let_it_be(:primary_project) { create(:project, :empty_repo) }
+ let_it_be(:forked_project) { create(:project, :empty_repo) }
+
+ let(:repository) { primary_project.repository }
+
+ subject { repository.object_pool }
+
+ context 'without object pool' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when pool repository exists' do
+ let!(:pool) { create(:pool_repository, :ready, source_project: primary_project) }
+
+ context 'when the current repository is a primary repository' do
+ it { is_expected.to be_nil }
+
+ context 'when repository is linked to the pool repository' do
+ before do
+ pool.link_repository(repository)
+ end
+
+ after do
+ pool.unlink_repository(repository)
+ end
+
+ it 'returns an object pool for the repository' do
+ is_expected.to be_kind_of(Gitlab::Git::ObjectPool)
+
+ expect(subject).to have_attributes(
+ relative_path: "#{pool.disk_path}.git",
+ source_repository: repository,
+ storage: repository.shard
+ )
+ end
+ end
+ end
+
+ context 'when the current repository is not a primary repository' do
+ let(:repository) { forked_project.repository }
+
+ it { is_expected.to be_nil }
+
+ context 'when repository is linked to the pool repository' do
+ before do
+ pool.link_repository(repository)
+ forked_project.update!(pool_repository: pool)
+ end
+
+ after do
+ pool.unlink_repository(repository)
+ forked_project.update!(pool_repository: nil)
+ end
+
+ it 'returns an object pool with correct links to the primary repository' do
+ is_expected.to be_kind_of(Gitlab::Git::ObjectPool)
+
+ expect(subject).to have_attributes(
+ relative_path: "#{pool.disk_path}.git",
+ source_repository: primary_project.repository,
+ storage: primary_project.repository.shard
+ )
+ end
+ end
+
+ context 'when repository is linked to the pool repository in Gitaly only' do
+ before do
+ pool.link_repository(repository)
+ end
+
+ after do
+ pool.unlink_repository(repository)
+ end
+
+ it 'returns an object pool without a link to the primary repository' do
+ is_expected.to be_kind_of(Gitlab::Git::ObjectPool)
+
+ expect(subject).to have_attributes(
+ relative_path: "#{pool.disk_path}.git",
+ source_repository: nil,
+ storage: primary_project.repository.shard
+ )
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/service_desk/custom_email_verification_spec.rb b/spec/models/service_desk/custom_email_verification_spec.rb
index f114367cfbf..2598eb06c8e 100644
--- a/spec/models/service_desk/custom_email_verification_spec.rb
+++ b/spec/models/service_desk/custom_email_verification_spec.rb
@@ -96,6 +96,20 @@ RSpec.describe ServiceDesk::CustomEmailVerification, feature_category: :service_
end
end
+ describe 'scopes' do
+ let_it_be(:verification) { create(:service_desk_custom_email_verification, project: project) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:overdue_verification) do
+ create(:service_desk_custom_email_verification, :overdue, project: other_project)
+ end
+
+ describe '.overdue' do
+ it 'returns verifications that are overdue' do
+ expect(described_class.overdue).to eq([overdue_verification])
+ end
+ end
+ end
+
describe '#accepted_until' do
it 'returns nil' do
expect(subject.accepted_until).to be_nil
diff --git a/spec/models/broadcast_message_spec.rb b/spec/models/system/broadcast_message_spec.rb
index 7485496cf90..b50dfc35eea 100644
--- a/spec/models/broadcast_message_spec.rb
+++ b/spec/models/system/broadcast_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BroadcastMessage, feature_category: :onboarding do
+RSpec.describe System::BroadcastMessage, feature_category: :onboarding do
subject { build(:broadcast_message) }
it { is_expected.to be_valid }
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 059cbac638b..788600194a5 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -124,6 +124,9 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to delegate_method(:organization).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:organization=).to(:user_detail).with_arguments(:args).allow_nil }
+
+ it { is_expected.to delegate_method(:email_reset_offered_at).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:email_reset_offered_at=).to(:user_detail).with_arguments(:args).allow_nil }
end
describe 'associations' do
@@ -4076,32 +4079,6 @@ RSpec.describe User, feature_category: :user_profile do
expect(user.following?(followee)).to be_falsey
end
-
- context 'when disable_follow_users feature flag is off' do
- before do
- stub_feature_flags(disable_follow_users: false)
- end
-
- it 'follows user even if user disabled following' do
- user = create(:user)
- user.enabled_following = false
-
- followee = create(:user)
-
- expect(user.follow(followee)).to be_truthy
- expect(user.following?(followee)).to be_truthy
- end
-
- it 'follows user even if followee user disabled following' do
- user = create(:user)
-
- followee = create(:user)
- followee.enabled_following = false
-
- expect(user.follow(followee)).to be_truthy
- expect(user.following?(followee)).to be_truthy
- end
- end
end
describe '#unfollow' do
@@ -4148,15 +4125,11 @@ RSpec.describe User, feature_category: :user_profile do
let_it_be(:user) { create(:user) }
let_it_be(:followee) { create(:user) }
- where(:user_enabled_following, :followee_enabled_following, :feature_flag_status, :result) do
- true | true | false | true
- true | false | false | true
- true | true | true | true
- true | false | true | false
- false | true | false | true
- false | true | true | false
- false | false | false | true
- false | false | true | false
+ where(:user_enabled_following, :followee_enabled_following, :result) do
+ true | true | true
+ true | false | false
+ false | true | false
+ false | false | false
end
with_them do
@@ -4164,7 +4137,6 @@ RSpec.describe User, feature_category: :user_profile do
user.enabled_following = user_enabled_following
followee.enabled_following = followee_enabled_following
followee.save!
- stub_feature_flags(disable_follow_users: feature_flag_status)
end
it { expect(user.following_users_allowed?(followee)).to eq result }
@@ -7028,31 +7000,6 @@ RSpec.describe User, feature_category: :user_profile do
end
end
- describe '#dismissed_callout_before?' do
- let_it_be(:user, refind: true) { create(:user) }
- let_it_be(:feature_name) { Users::Callout.feature_names.each_key.first }
-
- context 'when no callout dismissal record exists' do
- it 'returns false' do
- expect(user.dismissed_callout_before?(feature_name, 1.day.ago)).to eq false
- end
- end
-
- context 'when dismissed callout exists' do
- before_all do
- create(:callout, user: user, feature_name: feature_name, dismissed_at: 4.months.ago)
- end
-
- it 'returns false when dismissed_before is earlier than dismissed_at' do
- expect(user.dismissed_callout_before?(feature_name, 6.months.ago)).to eq false
- end
-
- it 'returns true when dismissed_before is later than dismissed_at' do
- expect(user.dismissed_callout_before?(feature_name, 3.months.ago)).to eq true
- end
- end
- end
-
describe '#find_or_initialize_callout' do
let_it_be(:user, refind: true) { create(:user) }
let_it_be(:feature_name) { Users::Callout.feature_names.each_key.first }
diff --git a/spec/models/users/calloutable_spec.rb b/spec/models/users/calloutable_spec.rb
index a50debd84d4..457431019f8 100644
--- a/spec/models/users/calloutable_spec.rb
+++ b/spec/models/users/calloutable_spec.rb
@@ -23,15 +23,4 @@ RSpec.describe Users::Calloutable, feature_category: :shared do
expect(callout_dismissed_day_ago.dismissed_after?(15.days.ago)).to eq(true)
end
end
-
- describe '#dismissed_before?' do
- let(:some_feature_name) { Users::Callout.feature_names.keys.second }
- let(:callout_dismissed_hour_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.hour.ago) }
- let(:callout_dismissed_minute_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.minute.ago) }
-
- it 'returns whether a callout dismissed before specified date' do
- expect(callout_dismissed_hour_ago.dismissed_before?(30.minutes.ago)).to eq(true)
- expect(callout_dismissed_minute_ago.dismissed_before?(30.minutes.ago)).to eq(false)
- end
- end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index c626f98f874..ee61f191f05 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe WikiPage do
+RSpec.describe WikiPage, feature_category: :wiki do
let(:user) { create(:user) }
let(:container) { create(:project) }
let(:wiki) { container.wiki }
@@ -830,13 +830,21 @@ RSpec.describe WikiPage do
expect(subject.content_changed?).to be(true)
end
- it 'returns false if only the newline format has changed' do
+ it 'returns false if only the newline format has changed from LF to CRLF' do
expect(subject.page).to receive(:text_data).and_return("foo\nbar")
subject.attributes[:content] = "foo\r\nbar"
expect(subject.content_changed?).to be(false)
end
+
+ it 'returns false if only the newline format has changed from CRLF to LF' do
+ expect(subject.page).to receive(:text_data).and_return("foo\r\nbar")
+
+ subject.attributes[:content] = "foo\nbar"
+
+ expect(subject.content_changed?).to be(false)
+ end
end
end
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 7963c0898b3..541199e08cb 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -79,6 +79,31 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
end
end
+ describe '.in_namespaces' do
+ let(:group) { create(:group) }
+ let!(:group_work_item) { create(:work_item, namespace: group) }
+ let!(:project_work_item) { create(:work_item, project: reusable_project) }
+
+ subject { described_class.in_namespaces(group) }
+
+ it { is_expected.to contain_exactly(group_work_item) }
+ end
+
+ describe '.with_confidentiality_check' do
+ let(:user) { create(:user) }
+ let!(:authored_work_item) { create(:work_item, :confidential, project: reusable_project, author: user) }
+ let!(:assigned_work_item) { create(:work_item, :confidential, project: reusable_project, assignees: [user]) }
+ let!(:public_work_item) { create(:work_item, project: reusable_project) }
+
+ before do
+ create(:work_item, :confidential, project: reusable_project)
+ end
+
+ subject { described_class.with_confidentiality_check(user) }
+
+ it { is_expected.to contain_exactly(public_work_item, authored_work_item, assigned_work_item) }
+ end
+
describe '#noteable_target_type_name' do
it 'returns `issue` as the target name' do
work_item = build(:work_item)
diff --git a/spec/models/work_items/parent_link_spec.rb b/spec/models/work_items/parent_link_spec.rb
index d7f87da1965..3fcfa856db4 100644
--- a/spec/models/work_items/parent_link_spec.rb
+++ b/spec/models/work_items/parent_link_spec.rb
@@ -18,9 +18,9 @@ RSpec.describe WorkItems::ParentLink, feature_category: :portfolio_management do
it { is_expected.to validate_uniqueness_of(:work_item) }
describe 'hierarchy' do
- let_it_be(:issue) { build(:work_item, project: project) }
+ let_it_be(:issue) { create(:work_item, project: project) }
let_it_be(:incident) { build(:work_item, :incident, project: project) }
- let_it_be(:task1) { build(:work_item, :task, project: project) }
+ let_it_be(:task1) { create(:work_item, :task, project: project) }
let_it_be(:task2) { build(:work_item, :task, project: project) }
it 'is valid if issue parent has task child' do
@@ -158,6 +158,38 @@ RSpec.describe WorkItems::ParentLink, feature_category: :portfolio_management do
end
end
end
+
+ context 'when parent is already linked' do
+ shared_examples 'invalid link' do |link_factory|
+ let_it_be(:parent_link) { build(:parent_link, work_item_parent: issue, work_item: task1) }
+ let(:error_msg) { 'cannot assign a linked work item as a parent' }
+
+ context 'when parent is the link target' do
+ before do
+ create(link_factory, source_id: task1.id, target_id: issue.id)
+ end
+
+ it do
+ expect(parent_link).not_to be_valid
+ expect(parent_link.errors[:work_item]).to include(error_msg)
+ end
+ end
+
+ context 'when parent is the link source' do
+ before do
+ create(link_factory, source_id: issue.id, target_id: task1.id)
+ end
+
+ it do
+ expect(parent_link).not_to be_valid
+ expect(parent_link.errors[:work_item]).to include(error_msg)
+ end
+ end
+ end
+
+ it_behaves_like 'invalid link', :work_item_link
+ it_behaves_like 'invalid link', :issue_link
+ end
end
end
@@ -178,6 +210,14 @@ RSpec.describe WorkItems::ParentLink, feature_category: :portfolio_management do
expect(result).to include(link1, link2)
end
end
+
+ describe 'for_children' do
+ it 'includes the correct records' do
+ result = described_class.for_children([task1.id, task2.id])
+
+ expect(result).to include(link1, link2)
+ end
+ end
end
context 'with confidential work items' do
diff --git a/spec/models/work_items/related_work_item_link_spec.rb b/spec/models/work_items/related_work_item_link_spec.rb
new file mode 100644
index 00000000000..349e4c0ba49
--- /dev/null
+++ b/spec/models/work_items/related_work_item_link_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::RelatedWorkItemLink, type: :model, feature_category: :portfolio_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:work_item, :issue, project: project) }
+
+ it_behaves_like 'issuable link' do
+ let_it_be_with_reload(:issuable_link) { create(:work_item_link) }
+ let_it_be(:issuable) { issue }
+ let(:issuable_class) { 'WorkItem' }
+ let(:issuable_link_factory) { :work_item_link }
+ end
+
+ it_behaves_like 'includes LinkableItem concern' do
+ let_it_be(:item) { create(:work_item, project: project) }
+ let_it_be(:item1) { create(:work_item, project: project) }
+ let_it_be(:item2) { create(:work_item, project: project) }
+ let_it_be(:link_factory) { :work_item_link }
+ let_it_be(:item_type) { described_class.issuable_name }
+ end
+
+ describe '.issuable_type' do
+ it { expect(described_class.issuable_type).to eq(:issue) }
+ end
+
+ describe '.issuable_name' do
+ it { expect(described_class.issuable_name).to eq('work item') }
+ end
+end
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index f5806c296ac..e4d2ccdfc5a 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Type do
+RSpec.describe WorkItems::Type, feature_category: :team_planning do
describe 'modules' do
it { is_expected.to include_module(CacheMarkdownField) }
end
@@ -49,10 +49,10 @@ RSpec.describe WorkItems::Type do
it 'deletes type but not unrelated issues' do
type = create(:work_item_type)
- expect(described_class.count).to eq(8)
+ expect(described_class.count).to eq(10)
expect { type.destroy! }.not_to change(Issue, :count)
- expect(described_class.count).to eq(7)
+ expect(described_class.count).to eq(9)
end
end
diff --git a/spec/models/work_items/widget_definition_spec.rb b/spec/models/work_items/widget_definition_spec.rb
index a33e08a1bf2..da772eec39c 100644
--- a/spec/models/work_items/widget_definition_spec.rb
+++ b/spec/models/work_items/widget_definition_spec.rb
@@ -14,7 +14,8 @@ RSpec.describe WorkItems::WidgetDefinition, feature_category: :team_planning do
::WorkItems::Widgets::Notes,
::WorkItems::Widgets::Notifications,
::WorkItems::Widgets::CurrentUserTodos,
- ::WorkItems::Widgets::AwardEmoji
+ ::WorkItems::Widgets::AwardEmoji,
+ ::WorkItems::Widgets::LinkedItems
]
if Gitlab.ee?
diff --git a/spec/models/work_items/widgets/linked_items_spec.rb b/spec/models/work_items/widgets/linked_items_spec.rb
new file mode 100644
index 00000000000..b4a53b75561
--- /dev/null
+++ b/spec/models/work_items/widgets/linked_items_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::LinkedItems, feature_category: :portfolio_management do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:work_item) { create(:work_item) }
+ let_it_be(:work_item_link) { create(:work_item_link, source: work_item) }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:linked_items) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:linked_items) }
+ end
+
+ describe '#related_issues' do
+ it { expect(described_class.new(work_item).related_issues(user)).to eq(work_item.related_issues(user)) }
+ end
+end
diff --git a/spec/policies/ci/bridge_policy_spec.rb b/spec/policies/ci/bridge_policy_spec.rb
index e598e2f7626..d23355b4c1e 100644
--- a/spec/policies/ci/bridge_policy_spec.rb
+++ b/spec/policies/ci/bridge_policy_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe Ci::BridgePolicy do
described_class.new(user, bridge)
end
+ it_behaves_like 'a deployable job policy', :ci_bridge
+
describe '#play_job' do
before do
fake_access = double('Gitlab::UserAccess')
diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb
index ec3b3fde719..6ab89daff82 100644
--- a/spec/policies/ci/build_policy_spec.rb
+++ b/spec/policies/ci/build_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::BuildPolicy do
+RSpec.describe Ci::BuildPolicy, feature_category: :continuous_integration do
let(:user) { create(:user) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
@@ -11,6 +11,8 @@ RSpec.describe Ci::BuildPolicy do
described_class.new(user, build)
end
+ it_behaves_like 'a deployable job policy', :ci_build
+
shared_context 'public pipelines disabled' do
before do
project.update_attribute(:public_builds, false)
@@ -99,12 +101,15 @@ RSpec.describe Ci::BuildPolicy do
context 'when maintainer is allowed to push to pipeline branch' do
let(:project) { create(:project, :public) }
- let(:owner) { user }
- it 'enables update_build if user is maintainer' do
- allow_any_instance_of(Project).to receive(:empty_repo?).and_return(false)
- allow_any_instance_of(Project).to receive(:branch_allows_collaboration?).and_return(true)
+ before do
+ project.add_maintainer(user)
+ allow(project).to receive(:empty_repo?).and_return(false)
+ allow(project).to receive(:branch_allows_collaboration?).and_return(true)
+ end
+
+ it 'enables update_build if user is maintainer' do
expect(policy).to be_allowed :update_build
expect(policy).to be_allowed :update_commit_status
end
@@ -127,6 +132,16 @@ RSpec.describe Ci::BuildPolicy do
it 'does not include ability to update build' do
expect(policy).to be_disallowed :update_build
end
+
+ context 'when the user is admin', :enable_admin_mode do
+ before do
+ user.update!(admin: true)
+ end
+
+ it 'does not include ability to update build' do
+ expect(policy).to be_disallowed :update_build
+ end
+ end
end
context 'when developers can push to the branch' do
@@ -252,7 +267,7 @@ RSpec.describe Ci::BuildPolicy do
create(:protected_branch, :developers_can_push, name: build.ref, project: project)
end
- it { expect(policy).to be_allowed :erase_build }
+ it { expect(policy).to be_disallowed :erase_build }
end
context 'when the build was created for a protected tag' do
@@ -262,7 +277,7 @@ RSpec.describe Ci::BuildPolicy do
build.update!(tag: true)
end
- it { expect(policy).to be_allowed :erase_build }
+ it { expect(policy).to be_disallowed :erase_build }
end
context 'when the build was created for an unprotected ref' do
diff --git a/spec/policies/deploy_key_policy_spec.rb b/spec/policies/deploy_key_policy_spec.rb
index d84b80a8738..754f36ce3b0 100644
--- a/spec/policies/deploy_key_policy_spec.rb
+++ b/spec/policies/deploy_key_policy_spec.rb
@@ -2,69 +2,89 @@
require 'spec_helper'
-RSpec.describe DeployKeyPolicy do
+RSpec.describe DeployKeyPolicy, feature_category: :groups_and_projects do
subject { described_class.new(current_user, deploy_key) }
- describe 'updating a deploy_key' do
- context 'when a regular user' do
- let(:current_user) { create(:user) }
+ let_it_be(:current_user, refind: true) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
- context 'tries to update private deploy key attached to project' do
- let(:deploy_key) { create(:deploy_key, public: false) }
- let(:project) { create(:project_empty_repo) }
+ context 'when deploy key is public' do
+ let_it_be(:deploy_key) { create(:deploy_key, public: true) }
- before do
- project.add_maintainer(current_user)
- project.deploy_keys << deploy_key
- end
+ context 'and current_user is nil' do
+ let(:current_user) { nil }
- it { is_expected.to be_allowed(:update_deploy_key) }
- end
+ it { is_expected.to be_disallowed(:read_deploy_key) }
+
+ it { is_expected.to be_disallowed(:update_deploy_key) }
+ end
- context 'tries to update private deploy key attached to other project' do
- let(:deploy_key) { create(:deploy_key, public: false) }
- let(:other_project) { create(:project_empty_repo) }
+ context 'and current_user is present' do
+ it { is_expected.to be_allowed(:read_deploy_key) }
- before do
- other_project.deploy_keys << deploy_key
- end
+ it { is_expected.to be_disallowed(:update_deploy_key) }
+ end
- it { is_expected.to be_disallowed(:update_deploy_key) }
+ context 'when current_user is admin' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:read_deploy_key) }
+
+ it { is_expected.to be_allowed(:update_deploy_key) }
end
- context 'tries to update public deploy key' do
- let(:deploy_key) { create(:another_deploy_key, public: true) }
+ context 'when admin mode disabled' do
+ it { is_expected.to be_allowed(:read_deploy_key) }
it { is_expected.to be_disallowed(:update_deploy_key) }
end
end
+ end
+
+ context 'when deploy key is private' do
+ let_it_be(:deploy_key) { create(:deploy_key, :private) }
+
+ context 'and current_user is nil' do
+ let(:current_user) { nil }
- context 'when an admin user' do
- let(:current_user) { create(:user, :admin) }
+ it { is_expected.to be_disallowed(:read_deploy_key) }
- context 'tries to update private deploy key' do
- let(:deploy_key) { create(:deploy_key, public: false) }
+ it { is_expected.to be_disallowed(:update_deploy_key) }
+ end
+
+ context 'when current_user is admin' do
+ let(:current_user) { admin }
- context 'when admin mode enabled', :enable_admin_mode do
- it { is_expected.to be_allowed(:update_deploy_key) }
- end
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:read_deploy_key) }
- context 'when admin mode disabled' do
- it { is_expected.to be_disallowed(:update_deploy_key) }
- end
+ it { is_expected.to be_allowed(:update_deploy_key) }
end
- context 'when an admin user tries to update public deploy key' do
- let(:deploy_key) { create(:another_deploy_key, public: true) }
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:read_deploy_key) }
+
+ it { is_expected.to be_disallowed(:update_deploy_key) }
+ end
+ end
- context 'when admin mode enabled', :enable_admin_mode do
- it { is_expected.to be_allowed(:update_deploy_key) }
- end
+ context 'when assigned to the project' do
+ let_it_be(:deploy_keys_project) { create(:deploy_keys_project, deploy_key: deploy_key) }
- context 'when admin mode disabled' do
- it { is_expected.to be_disallowed(:update_deploy_key) }
- end
+ before_all do
+ deploy_keys_project.project.add_maintainer(current_user)
end
+
+ it { is_expected.to be_allowed(:read_deploy_key) }
+
+ it { is_expected.to be_allowed(:update_deploy_key) }
+ end
+
+ context 'when assigned to another project' do
+ it { is_expected.to be_disallowed(:read_deploy_key) }
+
+ it { is_expected.to be_disallowed(:update_deploy_key) }
end
end
end
diff --git a/spec/policies/metrics/dashboard/annotation_policy_spec.rb b/spec/policies/metrics/dashboard/annotation_policy_spec.rb
deleted file mode 100644
index 2d1ef0ee0cb..00000000000
--- a/spec/policies/metrics/dashboard/annotation_policy_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::AnnotationPolicy, :models do
- let(:policy) { described_class.new(user, annotation) }
-
- let_it_be(:user) { create(:user) }
-
- shared_examples 'metrics dashboard annotation policy' do
- context 'when guest' do
- before do
- project.add_guest(user)
- end
-
- it { expect(policy).to be_disallowed :read_metrics_dashboard_annotation }
- it { expect(policy).to be_disallowed :admin_metrics_dashboard_annotation }
- end
-
- context 'when reporter' do
- before do
- project.add_reporter(user)
- end
-
- it { expect(policy).to be_allowed :read_metrics_dashboard_annotation }
- it { expect(policy).to be_disallowed :admin_metrics_dashboard_annotation }
- end
-
- context 'when developer' do
- before do
- project.add_developer(user)
- end
-
- it { expect(policy).to be_allowed :read_metrics_dashboard_annotation }
- it { expect(policy).to be_allowed :admin_metrics_dashboard_annotation }
- end
-
- context 'when maintainer' do
- before do
- project.add_maintainer(user)
- end
-
- it { expect(policy).to be_allowed :read_metrics_dashboard_annotation }
- it { expect(policy).to be_allowed :admin_metrics_dashboard_annotation }
- end
- end
-
- describe 'rules' do
- context 'environments annotation' do
- let_it_be(:environment) { create(:environment) }
- let_it_be(:annotation) { create(:metrics_dashboard_annotation, environment: environment) }
-
- it_behaves_like 'metrics dashboard annotation policy' do
- let(:project) { environment.project }
- end
- end
-
- context 'cluster annotation' do
- let_it_be(:cluster) { create(:cluster, :project) }
- let_it_be(:annotation) { create(:metrics_dashboard_annotation, environment: nil, cluster: cluster) }
-
- it_behaves_like 'metrics dashboard annotation policy' do
- let(:project) { cluster.project }
- end
- end
- end
-end
diff --git a/spec/policies/organizations/organization_policy_spec.rb b/spec/policies/organizations/organization_policy_spec.rb
index 52d5a41aa7f..e51362227c9 100644
--- a/spec/policies/organizations/organization_policy_spec.rb
+++ b/spec/policies/organizations/organization_policy_spec.rb
@@ -7,21 +7,33 @@ RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
subject(:policy) { described_class.new(current_user, organization) }
+ context 'when the user is anonymous' do
+ let_it_be(:current_user) { nil }
+
+ it { is_expected.to be_allowed(:read_organization) }
+ end
+
context 'when the user is an admin' do
let_it_be(:current_user) { create(:user, :admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:admin_organization) }
+ it { is_expected.to be_allowed(:read_organization) }
end
context 'when admin mode is disabled' do
it { is_expected.to be_disallowed(:admin_organization) }
+ it { is_expected.to be_allowed(:read_organization) }
end
end
- context 'when the user is not an admin' do
- let_it_be(:current_user) { create(:user) }
+ context 'when the user is an organization user' do
+ let_it_be(:current_user) { create :user }
+
+ before do
+ create :organization_user, organization: organization, user: current_user
+ end
- it { is_expected.to be_disallowed(:admin_organization) }
+ it { is_expected.to be_allowed(:read_organization) }
end
end
diff --git a/spec/policies/packages/policies/project_policy_spec.rb b/spec/policies/packages/policies/project_policy_spec.rb
index 5c267ff5ac5..fde10f64be8 100644
--- a/spec/policies/packages/policies/project_policy_spec.rb
+++ b/spec/policies/packages/policies/project_policy_spec.rb
@@ -127,5 +127,30 @@ RSpec.describe Packages::Policies::ProjectPolicy do
it_behaves_like 'package access with repository disabled'
end
+
+ context 'with package_registry_allow_anyone_to_pull_option disabled' do
+ where(:project, :expect_to_be_allowed) do
+ ref(:private_project) | false
+ ref(:internal_project) | false
+ ref(:public_project) | true
+ end
+
+ with_them do
+ let(:current_user) { anonymous }
+
+ before do
+ stub_application_setting(package_registry_allow_anyone_to_pull_option: false)
+ project.project_feature.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it do
+ if expect_to_be_allowed
+ is_expected.to be_allowed(:read_package)
+ else
+ is_expected.to be_disallowed(:read_package)
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 602b7148d0e..2854d6daece 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -2736,26 +2736,62 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
expect_allowed(:build_read_container_image)
end
end
+ end
+
+ context 'with external guest users' do
+ where(:project_visibility, :access_level, :allowed) do
+ :public | ProjectFeature::ENABLED | true
+ :public | ProjectFeature::PRIVATE | false
+ :public | ProjectFeature::DISABLED | false
+
+ :internal | ProjectFeature::ENABLED | true
+ :internal | ProjectFeature::PRIVATE | false
+ :internal | ProjectFeature::DISABLED | false
+
+ :private | ProjectFeature::ENABLED | false
+ :private | ProjectFeature::PRIVATE | false
+ :private | ProjectFeature::DISABLED | false
+ end
+
+ with_them do
+ let(:current_user) { guest }
+ let(:project) { send("#{project_visibility}_project") }
+
+ before do
+ project.project_feature.update!(container_registry_access_level: access_level)
+ current_user.update_column(:external, true)
+ end
- def permissions_abilities(role)
- case role
- when :admin
- if project_visibility == :private || access_level == ProjectFeature::PRIVATE
- maintainer_operations_permissions - admin_excluded_permissions
+ it 'allows/disallows the abilities based on the container_registry feature access level' do
+ if allowed
+ expect_allowed(*permissions_abilities(:guest))
+ expect_disallowed(*(all_permissions - permissions_abilities(:guest)))
else
- maintainer_operations_permissions
+ expect_disallowed(*all_permissions)
end
- when :maintainer, :owner
- maintainer_operations_permissions
- when :developer
- developer_operations_permissions
- when :reporter, :guest
- guest_operations_permissions
- when :anonymous
- anonymous_operations_permissions
+ end
+ end
+ end
+
+ # Overrides `permissions_abilities` defined below to be suitable for container_image policies
+ def permissions_abilities(role)
+ case role
+ when :admin
+ if project_visibility == :private || access_level == ProjectFeature::PRIVATE
+ maintainer_operations_permissions - admin_excluded_permissions
else
- raise "Unknown role #{role}"
+ maintainer_operations_permissions
end
+ when :maintainer, :owner
+ maintainer_operations_permissions
+ when :developer
+ developer_operations_permissions
+ when :reporter, :guest
+ guest_operations_permissions
+ when :anonymous
+ anonymous_operations_permissions
+ else
+ raise "Unknown role #{role}"
end
end
end
diff --git a/spec/policies/work_item_policy_spec.rb b/spec/policies/work_item_policy_spec.rb
index 3d282271d60..bd8f5604eba 100644
--- a/spec/policies/work_item_policy_spec.rb
+++ b/spec/policies/work_item_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItemPolicy do
+RSpec.describe WorkItemPolicy, feature_category: :team_planning do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:public_project) { create(:project, :public, group: group) }
@@ -201,4 +201,24 @@ RSpec.describe WorkItemPolicy do
end
end
end
+
+ describe 'admin_work_item_link' do
+ context 'when user is not a member of the project' do
+ let(:current_user) { non_member_user }
+
+ it { is_expected.to be_disallowed(:admin_work_item_link) }
+ end
+
+ context 'when user is guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_allowed(:admin_work_item_link) }
+ end
+
+ context 'when user is reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:admin_work_item_link) }
+ end
+ end
end
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index d0febf64035..b4210099e14 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -121,8 +121,8 @@ RSpec.describe MergeRequestPresenter do
context 'issues links' do
let_it_be(:project) { create(:project, :private, :repository, creator: user, namespace: user.namespace) }
- let_it_be(:issue_a) { create(:issue, project: project) }
- let_it_be(:issue_b) { create(:issue, project: project) }
+ let_it_be(:issue_a) { create(:issue, project: project, iid: 1) }
+ let_it_be(:issue_b) { create(:issue, project: project, iid: 3) }
let_it_be(:resource) do
create(
@@ -143,6 +143,17 @@ RSpec.describe MergeRequestPresenter do
resource.cache_merge_request_closes_issues!
end
+ describe '#issues_sentence' do
+ let(:issue_c) { create(:issue, project: project, iid: 10) }
+ let(:issues) { [issue_b, issue_c, issue_a] }
+
+ subject { described_class.new(resource, current_user: user).send(:issues_sentence, project, issues) }
+
+ it 'orders issues numerically' do
+ is_expected.to eq("##{issue_a.iid}, ##{issue_b.iid}, and ##{issue_c.iid}")
+ end
+ end
+
describe '#closing_issues_links' do
subject { described_class.new(resource, current_user: user).closing_issues_links }
diff --git a/spec/presenters/ml/model_presenter_spec.rb b/spec/presenters/ml/model_presenter_spec.rb
new file mode 100644
index 00000000000..dbbd3b57033
--- /dev/null
+++ b/spec/presenters/ml/model_presenter_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::ModelPresenter, feature_category: :mlops do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:model1) { build_stubbed(:ml_models, project: project) }
+ let_it_be(:model2) { build_stubbed(:ml_models, :with_latest_version_and_package, project: project) }
+
+ describe '#latest_version_name' do
+ subject { model.present.latest_version_name }
+
+ context 'when model has version' do
+ let(:model) { model2 }
+
+ it 'is the version of latest_version' do
+ is_expected.to eq(model2.latest_version.version)
+ end
+ end
+
+ context 'when model has no versions' do
+ let(:model) { model1 }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#latest_package_path' do
+ subject { model.present.latest_package_path }
+
+ context 'when model version does not have package' do
+ let(:model) { model1 }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when latest model version has package' do
+ let(:model) { model2 }
+
+ it { is_expected.to eq("/#{project.full_path}/-/packages/#{model.latest_version.package_id}") }
+ end
+ end
+end
diff --git a/spec/presenters/ml/models_index_presenter_spec.rb b/spec/presenters/ml/models_index_presenter_spec.rb
deleted file mode 100644
index 697b57a51c1..00000000000
--- a/spec/presenters/ml/models_index_presenter_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ml::ModelsIndexPresenter, feature_category: :mlops do
- let_it_be(:project) { build_stubbed(:project) }
- let_it_be(:model1) { build_stubbed(:ml_model_package, project: project) }
- let_it_be(:model2) { build_stubbed(:ml_model_package, project: project) }
- let_it_be(:models) do
- [model1, model2]
- end
-
- describe '#execute' do
- subject { Gitlab::Json.parse(described_class.new(models).present)['models'] }
-
- it 'presents models correctly' do
- expected_models = [
- {
- 'name' => model1.name,
- 'version' => model1.version,
- 'path' => "/#{project.full_path}/-/packages/#{model1.id}"
- },
- {
- 'name' => model2.name,
- 'version' => model2.version,
- 'path' => "/#{project.full_path}/-/packages/#{model2.id}"
- }
- ]
-
- is_expected.to match_array(expected_models)
- end
- end
-end
diff --git a/spec/presenters/packages/npm/package_presenter_spec.rb b/spec/presenters/packages/npm/package_presenter_spec.rb
deleted file mode 100644
index fe4773a9cad..00000000000
--- a/spec/presenters/packages/npm/package_presenter_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Packages::Npm::PackagePresenter, feature_category: :package_registry do
- let_it_be(:metadata) do
- {
- name: 'foo',
- versions: { '1.0.0' => { 'dist' => { 'tarball' => 'http://localhost/tarball.tgz' } } },
- dist_tags: { 'latest' => '1.0.0' }
- }
- end
-
- subject { described_class.new(metadata) }
-
- describe '#name' do
- it 'returns the name' do
- expect(subject.name).to eq('foo')
- end
- end
-
- describe '#versions' do
- it 'returns the versions' do
- expect(subject.versions).to eq({ '1.0.0' => { 'dist' => { 'tarball' => 'http://localhost/tarball.tgz' } } })
- end
- end
-
- describe '#dist_tags' do
- it 'returns the dist_tags' do
- expect(subject.dist_tags).to eq({ 'latest' => '1.0.0' })
- end
- end
-end
diff --git a/spec/presenters/packages/nuget/v2/metadata_index_presenter_spec.rb b/spec/presenters/packages/nuget/v2/metadata_index_presenter_spec.rb
new file mode 100644
index 00000000000..598db641b75
--- /dev/null
+++ b/spec/presenters/packages/nuget/v2/metadata_index_presenter_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::V2::MetadataIndexPresenter, feature_category: :package_registry do
+ describe '#xml' do
+ let(:presenter) { described_class.new }
+
+ subject(:xml) { Nokogiri::XML(presenter.xml.to_xml) }
+
+ specify { expect(xml.root.name).to eq('Edmx') }
+
+ specify { expect(xml.at_xpath('//edmx:Edmx')).to be_present }
+
+ specify { expect(xml.at_xpath('//edmx:Edmx/edmx:DataServices')).to be_present }
+
+ specify do
+ expect(xml.css('*').map(&:name)).to include(
+ 'Schema', 'EntityType', 'Key', 'PropertyRef', 'EntityContainer', 'EntitySet', 'FunctionImport', 'Parameter'
+ )
+ end
+
+ specify do
+ expect(xml.css('*').select { |el| el.name == 'Property' }.map { |el| el.attribute_nodes.first.value })
+ .to match_array(
+ %w[Id Version Authors Dependencies Description DownloadCount IconUrl Published ProjectUrl Tags Title
+ LicenseUrl]
+ )
+ end
+
+ specify { expect(xml.css('*').detect { |el| el.name == 'EntityContainer' }.attr('Name')).to eq('V2FeedContext') }
+
+ specify { expect(xml.css('*').detect { |el| el.name == 'FunctionImport' }.attr('Name')).to eq('FindPackagesById') }
+ end
+end
diff --git a/spec/presenters/packages/nuget/v2/service_index_presenter_spec.rb b/spec/presenters/packages/nuget/v2/service_index_presenter_spec.rb
new file mode 100644
index 00000000000..09dd3ff7fe4
--- /dev/null
+++ b/spec/presenters/packages/nuget/v2/service_index_presenter_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::V2::ServiceIndexPresenter, feature_category: :package_registry do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:group) { build_stubbed(:group) }
+
+ describe '#xml' do
+ let(:project_or_group) { project }
+ let(:presenter) { described_class.new(project_or_group) }
+ let(:xml_doc) { Nokogiri::XML::Document.parse(presenter.xml.to_xml) }
+ let(:service_node) { xml_doc.at_xpath('//xmlns:service') }
+
+ it { expect(xml_doc.root.name).to eq('service') }
+
+ it 'includes the workspace and collection nodes' do
+ workspace = xml_doc.at_xpath('//xmlns:service/xmlns:workspace')
+ collection = xml_doc.at_xpath('//xmlns:service/xmlns:workspace/xmlns:collection')
+
+ expect(workspace).to be_present
+ expect(workspace.children).to include(collection)
+ expect(collection).to be_present
+ end
+
+ it 'sets the appropriate XML namespaces on the root node' do
+ expect(service_node.namespaces['xmlns']).to eq('http://www.w3.org/2007/app')
+ expect(service_node.namespaces['xmlns:atom']).to eq('http://www.w3.org/2005/Atom')
+ end
+
+ context 'when the presenter is initialized with a project' do
+ it 'sets the XML base path correctly for a project scope' do
+ expect(service_node['xml:base']).to include(expected_xml_base(project))
+ end
+ end
+
+ context 'when the presenter is initialized with a group' do
+ let(:project_or_group) { group }
+
+ it 'sets the XML base path correctly for a group scope' do
+ expect(service_node['xml:base']).to include(expected_xml_base(group))
+ end
+ end
+ end
+
+ def expected_xml_base(project_or_group)
+ case project_or_group
+ when Project
+ api_v4_projects_packages_nuget_v2_path(id: project_or_group.id)
+ when Group
+ api_v4_groups___packages_nuget_v2_path(id: project_or_group.id)
+ end
+ end
+end
diff --git a/spec/rack_servers/puma_spec.rb b/spec/rack_servers/puma_spec.rb
index 6729119fe92..1d7efe67564 100644
--- a/spec/rack_servers/puma_spec.rb
+++ b/spec/rack_servers/puma_spec.rb
@@ -6,7 +6,7 @@ require 'fileutils'
require 'excon'
RSpec.describe 'Puma' do
- before(:all) do
+ before_all do
project_root = Rails.root.to_s
config_lines = File.read(Rails.root.join('config/puma.example.development.rb'))
.gsub('config.ru', File.join(__dir__, 'configs/config.ru'))
diff --git a/spec/requests/admin/abuse_reports_controller_spec.rb b/spec/requests/admin/abuse_reports_controller_spec.rb
index 8d033a2e147..c443a441af8 100644
--- a/spec/requests/admin/abuse_reports_controller_spec.rb
+++ b/spec/requests/admin/abuse_reports_controller_spec.rb
@@ -53,16 +53,16 @@ RSpec.describe Admin::AbuseReportsController, type: :request, feature_category:
end
end
- describe 'PUT #update' do
+ shared_examples 'moderates user' do
let(:report) { create(:abuse_report) }
let(:params) { { user_action: 'block_user', close: 'true', reason: 'spam', comment: 'obvious spam' } }
let(:expected_params) { ActionController::Parameters.new(params).permit! }
let(:message) { 'Service response' }
- subject(:request) { put admin_abuse_report_path(report, params) }
+ subject(:request) { put path }
- it 'invokes the Admin::AbuseReportUpdateService' do
- expect_next_instance_of(Admin::AbuseReportUpdateService, report, admin, expected_params) do |service|
+ it 'invokes the Admin::AbuseReports::ModerateUserService' do
+ expect_next_instance_of(Admin::AbuseReports::ModerateUserService, report, admin, expected_params) do |service|
expect(service).to receive(:execute).and_call_original
end
@@ -71,7 +71,7 @@ RSpec.describe Admin::AbuseReportsController, type: :request, feature_category:
context 'when the service response is a success' do
before do
- allow_next_instance_of(Admin::AbuseReportUpdateService, report, admin, expected_params) do |service|
+ allow_next_instance_of(Admin::AbuseReports::ModerateUserService, report, admin, expected_params) do |service|
allow(service).to receive(:execute).and_return(ServiceResponse.success(message: message))
end
@@ -86,7 +86,7 @@ RSpec.describe Admin::AbuseReportsController, type: :request, feature_category:
context 'when the service response is an error' do
before do
- allow_next_instance_of(Admin::AbuseReportUpdateService, report, admin, expected_params) do |service|
+ allow_next_instance_of(Admin::AbuseReports::ModerateUserService, report, admin, expected_params) do |service|
allow(service).to receive(:execute).and_return(ServiceResponse.error(message: message))
end
@@ -100,6 +100,18 @@ RSpec.describe Admin::AbuseReportsController, type: :request, feature_category:
end
end
+ describe 'PUT #update' do
+ let(:path) { admin_abuse_report_path(report, params) }
+
+ it_behaves_like 'moderates user'
+ end
+
+ describe 'PUT #moderate_user' do
+ let(:path) { moderate_user_admin_abuse_report_path(report, params) }
+
+ it_behaves_like 'moderates user'
+ end
+
describe 'DELETE #destroy' do
let!(:report) { create(:abuse_report) }
let(:params) { {} }
diff --git a/spec/requests/api/admin/batched_background_migrations_spec.rb b/spec/requests/api/admin/batched_background_migrations_spec.rb
index 180b6c7abd6..2b205ca656f 100644
--- a/spec/requests/api/admin/batched_background_migrations_spec.rb
+++ b/spec/requests/api/admin/batched_background_migrations_spec.rb
@@ -100,6 +100,7 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations, feature_category: :datab
expect(json_response.first['id']).to eq(migration.id)
expect(json_response.first['job_class_name']).to eq(migration.job_class_name)
expect(json_response.first['table_name']).to eq(migration.table_name)
+ expect(json_response.first['column_name']).to eq(migration.column_name)
expect(json_response.first['status']).to eq(migration.status_name.to_s)
expect(json_response.first['progress']).to be_zero
end
@@ -151,6 +152,7 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations, feature_category: :datab
expect(json_response.first['id']).to eq(ci_database_migration.id)
expect(json_response.first['job_class_name']).to eq(ci_database_migration.job_class_name)
expect(json_response.first['table_name']).to eq(ci_database_migration.table_name)
+ expect(json_response.first['column_name']).to eq(ci_database_migration.column_name)
expect(json_response.first['status']).to eq(ci_database_migration.status_name.to_s)
expect(json_response.first['progress']).to be_zero
end
diff --git a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/admin/broadcast_messages_spec.rb
index 530c81364a8..58347104a7a 100644
--- a/spec/requests/api/broadcast_messages_spec.rb
+++ b/spec/requests/api/admin/broadcast_messages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::BroadcastMessages, :aggregate_failures, feature_category: :onboarding do
+RSpec.describe API::Admin::BroadcastMessages, :aggregate_failures, feature_category: :onboarding do
let_it_be(:admin) { create(:admin) }
let_it_be(:message) { create(:broadcast_message) }
let_it_be(:path) { '/broadcast_messages' }
@@ -17,7 +17,8 @@ RSpec.describe API::BroadcastMessages, :aggregate_failures, feature_category: :o
expect(response).to include_pagination_headers
expect(json_response).to be_kind_of(Array)
expect(json_response.first.keys)
- .to match_array(%w(id message starts_at ends_at color font active target_access_levels target_path broadcast_type dismissable))
+ .to match_array(%w[id message starts_at ends_at color font active target_access_levels target_path
+ broadcast_type dismissable])
end
end
@@ -30,7 +31,8 @@ RSpec.describe API::BroadcastMessages, :aggregate_failures, feature_category: :o
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq message.id
expect(json_response.keys)
- .to match_array(%w(id message starts_at ends_at color font active target_access_levels target_path broadcast_type dismissable))
+ .to match_array(%w[id message starts_at ends_at color font active target_access_levels target_path
+ broadcast_type dismissable])
end
end
@@ -130,6 +132,22 @@ RSpec.describe API::BroadcastMessages, :aggregate_failures, feature_category: :o
expect(response).to have_gitlab_http_status(:created)
expect(json_response['dismissable']).to eq true
end
+
+ context 'when create does not persist record' do
+ let_it_be(:message) { build(:broadcast_message) }.freeze
+ let_it_be(:stubbed_errors) { ActiveModel::Errors.new(double).tap { |e| e.add(:base, 'error') } }.freeze
+
+ before do
+ allow(System::BroadcastMessage).to receive(:create).and_return(message)
+ allow(message).to receive(:errors).and_return(stubbed_errors)
+ end
+
+ it 'calls render_validation_error!' do
+ post api(path, admin, admin_mode: true), params: { message: 'message' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
end
@@ -222,6 +240,23 @@ RSpec.describe API::BroadcastMessages, :aggregate_failures, feature_category: :o
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['dismissable']).to eq true
end
+
+ context 'when update fails' do
+ let_it_be(:message) { build(:broadcast_message) }.freeze
+ let_it_be(:stubbed_errors) { ActiveModel::Errors.new(double).tap { |e| e.add(:base, 'error') } }.freeze
+
+ before do
+ allow(System::BroadcastMessage).to receive(:find).and_return(message)
+ allow(message).to receive(:update).and_return(false)
+ allow(message).to receive(:errors).and_return(stubbed_errors)
+ end
+
+ it 'calls render_validation_error!' do
+ put api(path, admin, admin_mode: true), params: { message: 'message' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
end
@@ -246,7 +281,7 @@ RSpec.describe API::BroadcastMessages, :aggregate_failures, feature_category: :o
delete api(path, admin, admin_mode: true)
expect(response).to have_gitlab_http_status(:no_content)
- end.to change { BroadcastMessage.count }.by(-1)
+ end.to change { System::BroadcastMessage.count }.by(-1)
end
end
end
diff --git a/spec/requests/api/admin/plan_limits_spec.rb b/spec/requests/api/admin/plan_limits_spec.rb
index 97eb8a2b13f..f0bc90efa50 100644
--- a/spec/requests/api/admin/plan_limits_spec.rb
+++ b/spec/requests/api/admin/plan_limits_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :shared d
'ci_registered_group_runners': 107,
'ci_registered_project_runners': 108,
'conan_max_file_size': 10,
- 'enforcement_limit': 15,
+ 'enforcement_limit': 100,
'generic_packages_max_file_size': 20,
'helm_max_file_size': 25,
'maven_max_file_size': 30,
@@ -124,11 +124,11 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :shared d
expect(json_response['ci_registered_group_runners']).to eq(107)
expect(json_response['ci_registered_project_runners']).to eq(108)
expect(json_response['conan_max_file_size']).to eq(10)
- expect(json_response['enforcement_limit']).to eq(15)
+ expect(json_response['enforcement_limit']).to eq(100)
expect(json_response['generic_packages_max_file_size']).to eq(20)
expect(json_response['helm_max_file_size']).to eq(25)
expect(json_response['limits_history']).to eq(
- { "enforcement_limit" => [{ "user_id" => admin.id, "username" => admin.username, "timestamp" => current_timestamp, "value" => 15 }],
+ { "enforcement_limit" => [{ "user_id" => admin.id, "username" => admin.username, "timestamp" => current_timestamp, "value" => 100 }],
"notification_limit" => [{ "user_id" => admin.id, "username" => admin.username, "timestamp" => current_timestamp, "value" => 90 }],
"storage_size_limit" => [{ "user_id" => admin.id, "username" => admin.username, "timestamp" => current_timestamp, "value" => 80 }] }
)
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index 01bb8101f76..1d1d66ad125 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe API::API, feature_category: :system_access do
expect(data.stringify_keys).not_to include('meta.project', 'meta.root_namespace', 'meta.user')
end
- expect(BroadcastMessage).to receive(:all).and_raise('An error!')
+ expect(System::BroadcastMessage).to receive(:all).and_raise('An error!')
get(api('/broadcast_messages'))
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index ed0cec46a42..c7b7131a600 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -238,7 +238,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
]
end
- before(:all) do
+ before_all do
project.update!(group: group)
end
diff --git a/spec/requests/api/ci/pipeline_schedules_spec.rb b/spec/requests/api/ci/pipeline_schedules_spec.rb
index d5f60e62b06..d760e4ddf28 100644
--- a/spec/requests/api/ci/pipeline_schedules_spec.rb
+++ b/spec/requests/api/ci/pipeline_schedules_spec.rb
@@ -311,8 +311,7 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
end
end
- # Move this from `shared_context` to `describe` when `ci_refactoring_pipeline_schedule_create_service` is removed.
- shared_context 'POST /projects/:id/pipeline_schedules' do # rubocop:disable RSpec/ContextWording
+ describe 'POST /projects/:id/pipeline_schedules' do
let(:params) { attributes_for(:ci_pipeline_schedule) }
context 'authenticated user with valid permissions' do
@@ -369,8 +368,7 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
end
end
- # Move this from `shared_context` to `describe` when `ci_refactoring_pipeline_schedule_create_service` is removed.
- shared_context 'PUT /projects/:id/pipeline_schedules/:pipeline_schedule_id' do
+ describe 'PUT /projects/:id/pipeline_schedules/:pipeline_schedule_id' do
let(:pipeline_schedule) do
create(:ci_pipeline_schedule, project: project, owner: developer)
end
@@ -439,18 +437,6 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
end
end
- it_behaves_like 'POST /projects/:id/pipeline_schedules'
- it_behaves_like 'PUT /projects/:id/pipeline_schedules/:pipeline_schedule_id'
-
- context 'when the FF ci_refactoring_pipeline_schedule_create_service is disabled' do
- before do
- stub_feature_flags(ci_refactoring_pipeline_schedule_create_service: false)
- end
-
- it_behaves_like 'POST /projects/:id/pipeline_schedules'
- it_behaves_like 'PUT /projects/:id/pipeline_schedules/:pipeline_schedule_id'
- end
-
describe 'POST /projects/:id/pipeline_schedules/:pipeline_schedule_id/take_ownership' do
let(:pipeline_schedule) do
create(:ci_pipeline_schedule, project: project, owner: developer)
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index 869b0ec9dca..3544a6dd72a 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -43,19 +43,6 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
expect(json_response.first.keys).to contain_exactly(*%w[id iid project_id sha ref status web_url created_at updated_at source name])
end
-
- context 'when pipeline_name_in_api feature flag is off' do
- before do
- stub_feature_flags(pipeline_name_in_api: false)
- end
-
- it 'does not include pipeline name in response and ignores name parameter' do
- get api("/projects/#{project.id}/pipelines", user), params: { name: 'Chatops pipeline' }
-
- expect(json_response.length).to eq(1)
- expect(json_response.first.keys).not_to include('name')
- end
- end
end
it 'avoids N+1 queries' do
@@ -894,19 +881,6 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
expect(json_response["coverage"]).to eq('30.00')
end
end
-
- context 'with pipeline_name_in_api disabled' do
- before do
- stub_feature_flags(pipeline_name_in_api: false)
- end
-
- it 'does not return name', :aggregate_failures do
- get api("/projects/#{project.id}/pipelines/#{pipeline.id}", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.keys).not_to include('name')
- end
- end
end
context 'unauthorized user' do
@@ -971,19 +945,6 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
expect(json_response['sha']).to eq(second_branch.target)
end
end
-
- context 'with pipeline_name_in_api disabled' do
- before do
- stub_feature_flags(pipeline_name_in_api: false)
- end
-
- it 'does not return name', :aggregate_failures do
- get api("/projects/#{project.id}/pipelines/latest", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.keys).not_to include('name')
- end
- end
end
context 'unauthorized user' do
diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb
index 7540e19e278..2f0e64cd4da 100644
--- a/spec/requests/api/commit_statuses_spec.rb
+++ b/spec/requests/api/commit_statuses_spec.rb
@@ -152,6 +152,7 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
expect(json_response['ref']).not_to be_empty
expect(json_response['target_url']).to be_nil
expect(json_response['description']).to be_nil
+ expect(json_response['pipeline_id']).not_to be_nil
if status == 'failed'
expect(CommitStatus.find(json_response['id'])).to be_api_failure
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 28126f1bdc2..687ce333ca5 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -573,13 +573,9 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
subject
end
- it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ it_behaves_like 'internal event tracking' do
+ let(:action) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_WEB_IDE }
let(:namespace) { project.namespace.reload }
- let(:category) { 'Gitlab::UsageDataCounters::EditorUniqueCounter' }
- let(:action) { 'ide_edit' }
- let(:property) { 'g_edit_by_web_ide' }
- let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit' }
- let(:context) { [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context] }
end
context 'counts.web_ide_commits Snowplow event tracking' do
diff --git a/spec/requests/api/composer_packages_spec.rb b/spec/requests/api/composer_packages_spec.rb
index 2bb2ffa03c4..3652bee5e44 100644
--- a/spec/requests/api/composer_packages_spec.rb
+++ b/spec/requests/api/composer_packages_spec.rb
@@ -289,7 +289,7 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
let(:url) { "/projects/#{project.id}/packages/composer" }
let(:params) { {} }
- before(:all) do
+ before_all do
project.repository.add_tag(user, 'v1.2.99', 'master')
end
diff --git a/spec/requests/api/draft_notes_spec.rb b/spec/requests/api/draft_notes_spec.rb
index 3911bb8bc00..f15ed6e2d5f 100644
--- a/spec/requests/api/draft_notes_spec.rb
+++ b/spec/requests/api/draft_notes_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
let_it_be(:user) { create(:user) }
let_it_be(:user_2) { create(:user) }
- let_it_be(:project) { create(:project, :public) }
+ let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project, author: user) }
let_it_be(:private_project) { create(:project, :private) }
@@ -184,6 +184,24 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
end
end
+ context "when using a diff with position" do
+ let!(:draft_note) { create(:draft_note_on_text_diff, merge_request: merge_request, author: user) }
+
+ it_behaves_like 'diff draft notes API', 'iid'
+
+ context "when position is for a previous commit on the merge request" do
+ it "returns a 400 bad request error because the line_code is old" do
+ # SHA taken from an earlier commit listed in spec/factories/merge_requests.rb
+ position = draft_note.position.to_h.merge(new_line: 'c1acaa58bbcbc3eafe538cb8274ba387047b69f8')
+
+ post api("/projects/#{project.id}/merge_requests/#{merge_request['iid']}/draft_notes", user),
+ params: { body: 'hi!', position: position }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+
context "when attempting to resolve a disscussion" do
context "when providing a non-existant ID" do
it "returns a 400 Bad Request" do
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index ea341703301..01acb83360c 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -342,6 +342,23 @@ RSpec.describe API::Files, feature_category: :source_code_management do
expect(response).to have_gitlab_http_status(:ok)
end
+ context 'when a project is moved' do
+ let_it_be(:redirect_route) { 'new/project/location' }
+ let_it_be(:file_path) { 'files%2Fruby%2Fpopen.rb' }
+
+ it 'redirects to the new project location' do
+ project.route.create_redirect(redirect_route)
+
+ url = "/projects/#{CGI.escape(redirect_route)}/repository/files/#{file_path}"
+ get api(url, api_user, **options), params: params
+
+ expect(response).to have_gitlab_http_status(:moved_permanently)
+ expect(response.headers['Location']).to start_with(
+ "#{request.base_url}/api/v4/projects/#{project.id}/repository/files/#{file_path}"
+ )
+ end
+ end
+
it 'sets inline content disposition by default' do
url = route(file_path) + '/raw'
diff --git a/spec/requests/api/graphql/abuse_report_labels_spec.rb b/spec/requests/api/graphql/abuse_report_labels_spec.rb
new file mode 100644
index 00000000000..bae8a7937fa
--- /dev/null
+++ b/spec/requests/api/graphql/abuse_report_labels_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'abuse_report_labels', feature_category: :insider_threat do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:admin) }
+ let_it_be(:project_label) { create(:label) }
+ let_it_be(:label_one) { create(:abuse_report_label, title: 'Uno') }
+ let_it_be(:label_two) { create(:abuse_report_label, title: 'Dos') }
+
+ let(:fields) do
+ <<~GRAPHQL
+ nodes {
+ id
+ title
+ description
+ color
+ textColor
+ }
+ GRAPHQL
+ end
+
+ let(:arguments) { { searchTerm: '' } }
+ let(:query) { graphql_query_for('abuseReportLabels', arguments, fields) }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query that returns data'
+
+ it 'returns abuse report labels sorted by title in ascending order' do
+ expect(graphql_data_at('abuseReportLabels', 'nodes').size).to eq 2
+ expect(graphql_data_at('abuseReportLabels', 'nodes', 0)).to match(a_graphql_entity_for(label_two))
+ expect(graphql_data_at('abuseReportLabels', 'nodes', 1)).to match(a_graphql_entity_for(label_one))
+ end
+
+ context 'when current user is not an admin' do
+ let_it_be(:current_user) { create(:user) }
+
+ it_behaves_like 'a working graphql query'
+
+ it 'does not contain any data' do
+ expect(graphql_data_at('abuseReportLabels', 'nodes')).to be_empty
+ end
+ end
+
+ context 'with a search term param' do
+ let(:arguments) { { searchTerm: 'uno' } }
+
+ it 'returns only matching abuse report labels' do
+ expect(graphql_data_at('abuseReportLabels', 'nodes').size).to eq 1
+ expect(graphql_data_at('abuseReportLabels', 'nodes', 0)).to match(a_graphql_entity_for(label_one))
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/abuse_report_spec.rb b/spec/requests/api/graphql/abuse_report_spec.rb
new file mode 100644
index 00000000000..7d0b8b35763
--- /dev/null
+++ b/spec/requests/api/graphql/abuse_report_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'abuse_report', feature_category: :insider_threat do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:admin) }
+ let_it_be(:label) { create(:abuse_report_label, title: 'Uno') }
+ let_it_be(:report) { create(:abuse_report, labels: [label]) }
+
+ let(:report_gid) { Gitlab::GlobalId.build(report, id: report.id).to_s }
+
+ let(:fields) do
+ <<~GRAPHQL
+ labels {
+ nodes {
+ id
+ title
+ description
+ color
+ textColor
+ }
+ }
+ GRAPHQL
+ end
+
+ let(:arguments) { { id: report_gid } }
+ let(:query) { graphql_query_for('abuseReport', arguments, fields) }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query that returns data'
+
+ it 'returns abuse report with labels' do
+ expect(graphql_data_at('abuseReport', 'labels', 'nodes', 0)).to match(a_graphql_entity_for(label))
+ end
+
+ context 'when current user is not an admin' do
+ let_it_be(:current_user) { create(:user) }
+
+ it_behaves_like 'a working graphql query'
+
+ it 'does not contain any data' do
+ expect(graphql_data_at('abuseReportLabel')).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb b/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
index 080f375245d..fa47cf4988a 100644
--- a/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
+++ b/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
@@ -14,8 +14,10 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
<<~HEREDOC
id
achievements {
+ count
nodes {
userAchievements {
+ count
nodes {
id
achievement {
@@ -58,6 +60,11 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
)
end
+ it 'returns the correct achievement and user_achievement counts' do
+ expect(graphql_data_at(:namespace, :achievements, :count)).to be(1)
+ expect(graphql_data_at(:namespace, :achievements, :nodes, :userAchievements, :count)).to contain_exactly(1)
+ end
+
it 'can lookahead to eliminate N+1 queries', :use_clean_rails_memory_store_caching do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
post_graphql(query, current_user: user)
diff --git a/spec/requests/api/graphql/ci/application_setting_spec.rb b/spec/requests/api/graphql/ci/application_setting_spec.rb
index 42ab1786fee..a0c3bedd493 100644
--- a/spec/requests/api/graphql/ci/application_setting_spec.rb
+++ b/spec/requests/api/graphql/ci/application_setting_spec.rb
@@ -27,9 +27,7 @@ RSpec.describe 'getting Application Settings', feature_category: :continuous_int
post_graphql(query, current_user: user)
end
- it_behaves_like 'a working graphql query'
-
- specify { expect(settings_data).to be nil }
+ it_behaves_like 'a working graphql query that returns no data'
end
context 'with admin permissions' do
@@ -39,7 +37,7 @@ RSpec.describe 'getting Application Settings', feature_category: :continuous_int
post_graphql(query, current_user: user)
end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query that returns data'
it 'fetches the settings data' do
# assert against hash to ensure no additional fields are exposed
diff --git a/spec/requests/api/graphql/ci/group_environment_scopes_spec.rb b/spec/requests/api/graphql/ci/group_environment_scopes_spec.rb
index 13a3a128979..d224fdbdc32 100644
--- a/spec/requests/api/graphql/ci/group_environment_scopes_spec.rb
+++ b/spec/requests/api/graphql/ci/group_environment_scopes_spec.rb
@@ -33,36 +33,55 @@ RSpec.describe 'Query.group(fullPath).environmentScopes', feature_category: :sec
end
before do
- group.add_developer(user)
expected_environment_scopes.each_with_index do |env, index|
create(:ci_group_variable, group: group, key: "var#{index + 1}", environment_scope: env)
end
end
- context 'when query has no parameters' do
- let(:environment_scopes_params) { "" }
+ context 'when the user can administer the group' do
+ before do
+ group.add_owner(user)
+ end
- it 'returns all avaiable environment scopes' do
- post_graphql(query, current_user: user)
+ context 'when query has no parameters' do
+ let(:environment_scopes_params) { "" }
- expect(graphql_data.dig('group', 'environmentScopes', 'nodes')).to eq(
- expected_environment_scopes.map { |env_scope| { 'name' => env_scope } }
- )
+      it 'returns all available environment scopes' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('group', 'environmentScopes', 'nodes')).to eq(
+ expected_environment_scopes.map { |env_scope| { 'name' => env_scope } }
+ )
+ end
+ end
+
+ context 'when query has search parameters' do
+ let(:environment_scopes_params) { "(search: \"group1\")" }
+
+ it 'returns only environment scopes with group1 prefix' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('group', 'environmentScopes', 'nodes')).to eq(
+ [
+ { 'name' => 'group1_environment1' },
+ { 'name' => 'group1_environment2' }
+ ]
+ )
+ end
end
end
- context 'when query has search parameters' do
- let(:environment_scopes_params) { "(search: \"group1\")" }
+ context 'when the user cannot administer the group' do
+ let(:environment_scopes_params) { "" }
+
+ before do
+ group.add_developer(user)
+ end
- it 'returns only environment scopes with group1 prefix' do
+ it 'returns nothing' do
post_graphql(query, current_user: user)
- expect(graphql_data.dig('group', 'environmentScopes', 'nodes')).to eq(
- [
- { 'name' => 'group1_environment1' },
- { 'name' => 'group1_environment2' }
- ]
- )
+ expect(graphql_data.dig('group', 'environmentScopes')).to be_nil
end
end
end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 6acd705c982..3cfb98c57fd 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -109,9 +109,9 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
runner.maintainer_note.present? ? a_string_including('<strong>Test maintenance note</strong>') : '',
job_count: runner.builds.count,
jobs: a_hash_including(
- "count" => runner.builds.count,
- "nodes" => an_instance_of(Array),
- "pageInfo" => anything
+ 'count' => runner.builds.count,
+ 'nodes' => an_instance_of(Array),
+ 'pageInfo' => anything
),
project_count: nil,
admin_url: "http://localhost/admin/runners/#{runner.id}",
@@ -124,8 +124,21 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
'assignRunner' => true
},
managers: a_hash_including(
- "count" => runner.runner_managers.count,
- "nodes" => an_instance_of(Array),
+ 'count' => runner.runner_managers.count,
+ 'nodes' => runner.runner_managers.map do |runner_manager|
+ a_graphql_entity_for(
+ runner_manager,
+ system_id: runner_manager.system_xid,
+ version: runner_manager.version,
+ revision: runner_manager.revision,
+ ip_address: runner_manager.ip_address,
+ executor_name: runner_manager.executor_type&.dasherize,
+ architecture_name: runner_manager.architecture,
+ platform_name: runner_manager.platform,
+ status: runner_manager.status.to_s.upcase,
+ job_execution_status: runner_manager.builds.running.any? ? 'RUNNING' : 'IDLE'
+ )
+ end,
"pageInfo" => anything
)
)
@@ -215,11 +228,19 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
end
- context 'with build running' do
+ context 'with build running', :freeze_time do
+ let!(:pipeline) { create(:ci_pipeline, project: project1) }
+ let!(:runner_manager) do
+ create(:ci_runner_machine,
+ runner: runner, ip_address: '127.0.0.1', version: '16.3', revision: 'a', architecture: 'arm', platform: 'osx',
+ contacted_at: 1.second.ago, executor_type: 'docker')
+ end
+
+ let!(:runner) { create(:ci_runner) }
+ let!(:build) { create(:ci_build, :running, runner: runner, pipeline: pipeline) }
+
before do
- project = create(:project, :repository)
- pipeline = create(:ci_pipeline, project: project)
- create(:ci_build, :running, runner: runner, pipeline: pipeline)
+ create(:ci_runner_machine_build, runner_manager: runner_manager, build: build)
end
it_behaves_like 'runner details fetch'
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
index c8706ae9698..3f6d39435fd 100644
--- a/spec/requests/api/graphql/ci/runners_spec.rb
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -34,67 +34,116 @@ RSpec.describe 'Query.runners', feature_category: :runner_fleet do
QUERY
end
- let(:query) do
- %(
- query {
- runners(type:#{runner_type},status:#{status}) {
- #{fields}
+ context 'with filters' do
+ let(:query) do
+ %(
+ query {
+ runners(type: #{runner_type}, status: #{status}) {
+ #{fields}
+ }
}
- }
- )
- end
-
- before do
- allow_next_instance_of(::Gitlab::Ci::RunnerUpgradeCheck) do |instance|
- allow(instance).to receive(:check_runner_upgrade_suggestion)
+ )
end
- post_graphql(query, current_user: current_user)
- end
-
- shared_examples 'a working graphql query returning expected runner' do
- it_behaves_like 'a working graphql query'
+ before do
+ allow_next_instance_of(::Gitlab::Ci::RunnerUpgradeCheck) do |instance|
+ allow(instance).to receive(:check_runner_upgrade_suggestion)
+ end
- it 'returns expected runner' do
- expect(runners_graphql_data['nodes']).to contain_exactly(a_graphql_entity_for(expected_runner))
+ post_graphql(query, current_user: current_user)
end
- it 'does not execute more queries per runner', :aggregate_failures do
- # warm-up license cache and so on:
- personal_access_token = create(:personal_access_token, user: current_user)
- args = { current_user: current_user, token: { personal_access_token: personal_access_token } }
- post_graphql(query, **args)
- expect(graphql_data_at(:runners, :nodes)).not_to be_empty
+ shared_examples 'a working graphql query returning expected runner' do
+ it_behaves_like 'a working graphql query'
+
+ it 'returns expected runner' do
+ expect(runners_graphql_data['nodes']).to contain_exactly(a_graphql_entity_for(expected_runner))
+ end
+
+ it 'does not execute more queries per runner', :aggregate_failures do
+ # warm-up license cache and so on:
+ personal_access_token = create(:personal_access_token, user: current_user)
+ args = { current_user: current_user, token: { personal_access_token: personal_access_token } }
+ post_graphql(query, **args)
+ expect(graphql_data_at(:runners, :nodes)).not_to be_empty
- admin2 = create(:admin)
- personal_access_token = create(:personal_access_token, user: admin2)
- args = { current_user: admin2, token: { personal_access_token: personal_access_token } }
- control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) }
+ admin2 = create(:admin)
+ personal_access_token = create(:personal_access_token, user: admin2)
+ args = { current_user: admin2, token: { personal_access_token: personal_access_token } }
+ control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) }
- create(:ci_runner, :instance, version: '14.0.0', tag_list: %w[tag5 tag6], creator: admin2)
- create(:ci_runner, :project, version: '14.0.1', projects: [project], tag_list: %w[tag3 tag8],
- creator: current_user)
+ create(:ci_runner, :instance, version: '14.0.0', tag_list: %w[tag5 tag6], creator: admin2)
+ create(:ci_runner, :project, version: '14.0.1', projects: [project], tag_list: %w[tag3 tag8],
+ creator: current_user)
- expect { post_graphql(query, **args) }.not_to exceed_query_limit(control)
+ expect { post_graphql(query, **args) }.not_to exceed_query_limit(control)
+ end
end
- end
- context 'runner_type is INSTANCE_TYPE and status is ACTIVE' do
- let(:runner_type) { 'INSTANCE_TYPE' }
- let(:status) { 'ACTIVE' }
+ context 'runner_type is INSTANCE_TYPE and status is ACTIVE' do
+ let(:runner_type) { 'INSTANCE_TYPE' }
+ let(:status) { 'ACTIVE' }
- let!(:expected_runner) { instance_runner }
+ let!(:expected_runner) { instance_runner }
- it_behaves_like 'a working graphql query returning expected runner'
- end
+ it_behaves_like 'a working graphql query returning expected runner'
+ end
- context 'runner_type is PROJECT_TYPE and status is NEVER_CONTACTED' do
- let(:runner_type) { 'PROJECT_TYPE' }
- let(:status) { 'NEVER_CONTACTED' }
+ context 'runner_type is PROJECT_TYPE and status is NEVER_CONTACTED' do
+ let(:runner_type) { 'PROJECT_TYPE' }
+ let(:status) { 'NEVER_CONTACTED' }
- let!(:expected_runner) { project_runner }
+ let!(:expected_runner) { project_runner }
+
+ it_behaves_like 'a working graphql query returning expected runner'
+ end
+ end
- it_behaves_like 'a working graphql query returning expected runner'
+ context 'without filters' do
+ context 'with managers requested for multiple runners' do
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ managers {
+ nodes {
+ #{all_graphql_fields_for('CiRunnerManager', max_depth: 1)}
+ }
+ }
+ }
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+ runners {
+ #{fields}
+ }
+ }
+ )
+ end
+
+ it 'does not execute more queries per runner', :aggregate_failures do
+ # warm-up license cache and so on:
+ personal_access_token = create(:personal_access_token, user: current_user)
+ args = { current_user: current_user, token: { personal_access_token: personal_access_token } }
+ post_graphql(query, **args)
+ expect(graphql_data_at(:runners, :nodes)).not_to be_empty
+
+ admin2 = create(:admin)
+ personal_access_token = create(:personal_access_token, user: admin2)
+ args = { current_user: admin2, token: { personal_access_token: personal_access_token } }
+ control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) }
+
+ create(:ci_runner, :instance, :with_runner_manager, version: '14.0.0', tag_list: %w[tag5 tag6],
+ creator: admin2)
+ create(:ci_runner, :project, :with_runner_manager, version: '14.0.1', projects: [project],
+ tag_list: %w[tag3 tag8],
+ creator: current_user)
+
+ expect { post_graphql(query, **args) }.not_to exceed_query_limit(control)
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/current_user/todos_query_spec.rb b/spec/requests/api/graphql/current_user/todos_query_spec.rb
index ee019a99f8d..790ae4b955e 100644
--- a/spec/requests/api/graphql/current_user/todos_query_spec.rb
+++ b/spec/requests/api/graphql/current_user/todos_query_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'Query current user todos', feature_category: :source_code_manage
post_graphql(query, current_user: current_user)
end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query that returns data'
it 'contains the expected ids' do
is_expected.to contain_exactly(
diff --git a/spec/requests/api/graphql/current_user_query_spec.rb b/spec/requests/api/graphql/current_user_query_spec.rb
index aceef77920d..b4e570bcaaa 100644
--- a/spec/requests/api/graphql/current_user_query_spec.rb
+++ b/spec/requests/api/graphql/current_user_query_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'getting project information', feature_category: :system_access d
context 'when there is a current_user' do
let_it_be(:current_user) { create(:user) }
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query that returns data'
it { is_expected.to include('name' => current_user.name, 'namespace' => { 'id' => current_user.namespace.to_global_id.to_s }) }
end
@@ -33,8 +33,6 @@ RSpec.describe 'getting project information', feature_category: :system_access d
context 'when there is no current_user' do
let(:current_user) { nil }
- it_behaves_like 'a working graphql query'
-
- it { is_expected.to be_nil }
+ it_behaves_like 'a working graphql query that returns no data'
end
end
diff --git a/spec/requests/api/graphql/environments/deployments_spec.rb b/spec/requests/api/graphql/environments/deployments_spec.rb
index 0022a38d2d3..a4abf3f583a 100644
--- a/spec/requests/api/graphql/environments/deployments_spec.rb
+++ b/spec/requests/api/graphql/environments/deployments_spec.rb
@@ -314,14 +314,17 @@ RSpec.describe 'Environments Deployments query', feature_category: :continuous_d
end
def create_deployments
- create_list(:deployment, 3, environment: environment, project: project).each do |deployment|
- deployment.user = create(:user).tap { |u| project.add_developer(u) }
- deployment.deployable =
- create(:ci_build, project: project, environment: environment.name, deployment: deployment,
- user: deployment.user)
+ deployments = create_list(:deployment, 2, environment: environment, project: project)
+ set_deployment_attributes(deployments.first, :ci_build)
+ set_deployment_attributes(deployments.second, :ci_bridge)
+ deployments.each(&:save!)
+ end
- deployment.save!
- end
+ def set_deployment_attributes(deployment, factory_type)
+ deployment.user = create(:user).tap { |u| project.add_developer(u) }
+ deployment.deployable =
+ create(factory_type, project: project, environment: environment.name, deployment: deployment,
+ user: deployment.user)
end
end
@@ -432,7 +435,7 @@ RSpec.describe 'Environments Deployments query', feature_category: :continuous_d
deployments.each do |deployment|
deployment_in_record = project.deployments.find_by_iid(deployment['iid'])
- expect(deployment_in_record.build.to_global_id.to_s).to eq(deployment['job']['id'])
+ expect(deployment_in_record.job.to_global_id.to_s).to eq(deployment['job']['id'])
end
end
end
diff --git a/spec/requests/api/graphql/group/autocomplete_users_spec.rb b/spec/requests/api/graphql/group/autocomplete_users_spec.rb
new file mode 100644
index 00000000000..708604885c9
--- /dev/null
+++ b/spec/requests/api/graphql/group/autocomplete_users_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'autocomplete users for a group', feature_category: :team_planning do
+ include GraphqlHelpers
+
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+
+ let_it_be(:parent_group_member) { create(:user).tap { |u| parent_group.add_guest(u) } }
+ let_it_be(:group_member) { create(:user).tap { |u| group.add_guest(u) } }
+
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:other_group_member) { create(:user).tap { |u| other_group.add_guest(u) } }
+
+ let(:params) { {} }
+ let(:query) do
+ graphql_query_for(
+ 'group',
+ { 'fullPath' => group.full_path },
+ query_graphql_field('autocompleteUsers', params, 'id')
+ )
+ end
+
+ let(:response_user_ids) { graphql_data.dig('group', 'autocompleteUsers').pluck('id') }
+
+ it 'returns members of the group and its ancestors' do
+ post_graphql(query, current_user: group_member)
+
+ expected_user_ids = [
+ parent_group_member,
+ group_member
+ ].map { |u| u.to_global_id.to_s }
+
+ expect(response_user_ids).to match_array(expected_user_ids)
+ end
+
+ context 'with search param' do
+ let(:params) { { search: group_member.username } }
+
+ it 'only returns users matching the search query' do
+ post_graphql(query, current_user: group_member)
+
+ expect(response_user_ids).to contain_exactly(group_member.to_global_id.to_s)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/group/work_items_spec.rb b/spec/requests/api/graphql/group/work_items_spec.rb
new file mode 100644
index 00000000000..f6dad577b5e
--- /dev/null
+++ b/spec/requests/api/graphql/group/work_items_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting a work_item list for a group', feature_category: :team_planning do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:sub_group) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, :repository, :public, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:reporter) { create(:user).tap { |user| group.add_reporter(user) } }
+
+ let_it_be(:project_work_item) { create(:work_item, project: project) }
+ let_it_be(:sub_group_work_item) do
+ create(
+ :work_item,
+ namespace: sub_group,
+ author: reporter
+ )
+ end
+
+ let_it_be(:group_work_item) do
+ create(
+ :work_item,
+ namespace: group,
+ author: reporter
+ )
+ end
+
+ let_it_be(:confidential_work_item) do
+ create(:work_item, :confidential, namespace: group, author: reporter)
+ end
+
+ let_it_be(:other_work_item) { create(:work_item) }
+
+ let(:work_items_data) { graphql_data['group']['workItems']['nodes'] }
+ let(:work_item_filter_params) { {} }
+ let(:current_user) { user }
+ let(:query_group) { group }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ #{all_graphql_fields_for('workItems'.classify, max_depth: 2)}
+ }
+ QUERY
+ end
+
+ context 'when the user can not see confidential work_items' do
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ it 'does not return confidential issues' do
+ post_graphql(query, current_user: current_user)
+
+ expect(work_item_ids).to contain_exactly(
+ group_work_item.to_global_id.to_s
+ )
+ end
+ end
+
+ context 'when the user can see confidential work_items' do
+ let(:current_user) { reporter }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ it 'returns also confidential work_items' do
+ post_graphql(query, current_user: current_user)
+
+ expect(work_item_ids).to eq([
+ confidential_work_item.to_global_id.to_s, group_work_item.to_global_id.to_s
+ ])
+ end
+
+ context 'when the namespace_level_work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'returns null in the workItems field' do
+ expect(graphql_data['group']['workItems']).to be_nil
+ end
+ end
+ end
+
+ def work_item_ids
+ graphql_dig_at(work_items_data, :id)
+ end
+
+ def query(params = work_item_filter_params)
+ graphql_query_for(
+ 'group',
+ { 'fullPath' => query_group.full_path },
+ query_graphql_field('workItems', params, fields)
+ )
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
index 18cc85d36e0..dbace8f2b53 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe 'Adding an AwardEmoji', feature_category: :shared do
it_behaves_like 'a mutation that does not create an AwardEmoji'
it_behaves_like 'a mutation that returns top-level errors',
- errors: ['You cannot award emoji to this resource.']
+ errors: ['You cannot add emoji reactions to this resource.']
end
context 'when the given awardable is an Awardable' do
diff --git a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
index 7c6a487cdd0..65a5fb87f9a 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe 'Toggling an AwardEmoji', feature_category: :shared do
it_behaves_like 'a mutation that does not create or destroy an AwardEmoji'
it_behaves_like 'a mutation that returns top-level errors',
- errors: ['You cannot award emoji to this resource.']
+ errors: ['You cannot add emoji reactions to this resource.']
end
context 'when the given awardable is an Awardable' do
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb
index 0d5e5f5d2fb..b2fe2754198 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb
@@ -68,8 +68,7 @@ RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integrati
end
end
- # Move this from `shared_context` to `context` when `ci_refactoring_pipeline_schedule_create_service` is removed.
- shared_context 'when authorized' do # rubocop:disable RSpec/ContextWording
+ context 'when authorized' do
before_all do
project.add_developer(user)
end
@@ -149,14 +148,4 @@ RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integrati
end
end
end
-
- it_behaves_like 'when authorized'
-
- context 'when the FF ci_refactoring_pipeline_schedule_create_service is disabled' do
- before do
- stub_feature_flags(ci_refactoring_pipeline_schedule_create_service: false)
- end
-
- it_behaves_like 'when authorized'
- end
end
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb
new file mode 100644
index 00000000000..1af12d51e1e
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'PipelineTriggerCreate', feature_category: :continuous_integration do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project) }
+
+ let(:mutation) { graphql_mutation(:pipeline_trigger_create, params) }
+ let(:project_path) { project.full_path }
+ let(:description) { 'Ye old pipeline trigger token' }
+
+ let(:params) do
+ {
+ project_path: project_path,
+ description: description
+ }
+ end
+
+ subject { post_graphql_mutation(mutation, current_user: user) }
+
+ context 'when unauthorized' do
+ it 'returns an error' do
+ subject
+
+ expect(graphql_errors).not_to be_empty
+ expect(graphql_errors[0]['message'])
+ .to eq(
+ "The resource that you are attempting to access does not exist " \
+ "or you don't have permission to perform this action"
+ )
+ end
+ end
+
+ context 'when authorized' do
+ before_all do
+ project.add_owner(user)
+ end
+
+ context 'when the params are invalid' do
+ let(:description) { nil }
+
+ it 'does not create a pipeline trigger token and returns an error' do
+ expect { subject }.not_to change { project.triggers.count }
+ expect(response).to have_gitlab_http_status(:success)
+ expect(graphql_errors.to_s).to include('provided invalid value for description (Expected value to not be null)')
+ end
+ end
+
+ context 'when the params are valid' do
+ it 'creates a pipeline trigger token' do
+ expect { subject }.to change { project.triggers.count }.by(1)
+ expect(graphql_errors.to_s).to eql("")
+ end
+
+ it 'returns the new pipeline trigger token' do
+ subject
+
+ expect(graphql_data_at(:pipeline_trigger_create, :pipeline_trigger)).to match a_hash_including(
+ 'owner' => a_hash_including(
+ 'id' => user.to_global_id.to_s,
+ 'username' => user.username,
+ 'name' => user.name
+ ),
+ 'description' => description,
+ "canAccessProject" => true,
+ "hasTokenExposed" => true,
+ "lastUsed" => nil
+ )
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_trigger/delete_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_trigger/delete_spec.rb
new file mode 100644
index 00000000000..5ff2da30cb6
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_trigger/delete_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'PipelineTriggerDelete', feature_category: :continuous_integration do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { build(:user) }
+ let_it_be(:project) { build(:project) }
+
+ let(:mutation) { graphql_mutation(:pipeline_trigger_delete, params) }
+
+ let_it_be(:trigger) { create(:ci_trigger, owner: current_user, project: project) }
+ let(:id) { trigger.to_global_id.to_s }
+
+ let(:params) do
+ {
+ id: id
+ }
+ end
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ context 'when unauthorized' do
+ it_behaves_like 'a mutation on an unauthorized resource'
+ end
+
+ context 'when authorized' do
+ before_all do
+ project.add_owner(current_user)
+ end
+
+ context 'when the id is invalid' do
+ let(:id) { non_existing_record_id }
+
+ it_behaves_like 'an invalid argument to the mutation', argument_name: :id
+
+ it 'does not delete a pipeline trigger token' do
+ expect { subject }.not_to change { project.triggers.count }
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+
+ context 'when the id is nil' do
+ let(:id) { nil }
+
+ it_behaves_like 'an invalid argument to the mutation', argument_name: :id
+
+ it 'does not delete a pipeline trigger token' do
+ expect { subject }.not_to change { project.triggers.count }
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+
+ context 'when the params are valid' do
+ it_behaves_like 'a working GraphQL mutation'
+
+ it 'deletes the pipeline trigger token' do
+ expect { subject }.to change { project.triggers.count }.by(-1)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_trigger/update_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_trigger/update_spec.rb
new file mode 100644
index 00000000000..ce6e20c088e
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_trigger/update_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'PipelineTriggerUpdate', feature_category: :continuous_integration do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { build(:user) }
+ let_it_be(:project) { build(:project) }
+
+ let(:mutation) { graphql_mutation(:pipeline_trigger_update, params) }
+ let_it_be(:old_description) { "Boring old description." }
+ let(:new_description) { 'Awesome new description!' }
+ let_it_be(:trigger) { create(:ci_trigger, owner: current_user, project: project, description: old_description) }
+
+ let(:params) do
+ {
+ id: trigger.to_global_id.to_s,
+ description: new_description
+ }
+ end
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ context 'when unauthorized' do
+ it_behaves_like 'a mutation on an unauthorized resource'
+ end
+
+ context 'when authorized' do
+ before_all do
+ project.add_owner(current_user)
+ end
+
+ context 'when the params are invalid' do
+ let(:new_description) { nil }
+
+ it_behaves_like 'an invalid argument to the mutation', argument_name: 'description'
+
+ it 'does not update a pipeline trigger token' do
+ expect { subject }.not_to change { trigger }
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+
+ context 'when the params are valid' do
+ it_behaves_like 'a working GraphQL mutation'
+
+ it 'updates the pipeline trigger token' do
+ expect { subject }.to change { trigger.reload.description }.to(new_description)
+
+ expect(graphql_errors).to be_blank
+ end
+
+ it 'returns the updated trigger token' do
+ subject
+
+ expect(graphql_data_at(:pipeline_trigger_update, :pipeline_trigger)).to match a_hash_including(
+ 'owner' => a_hash_including(
+ 'id' => current_user.to_global_id.to_s,
+ 'username' => current_user.username,
+ 'name' => current_user.name
+ ),
+ 'description' => new_description,
+ "canAccessProject" => true,
+ "hasTokenExposed" => true,
+ "lastUsed" => nil
+ )
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/update_spec.rb b/spec/requests/api/graphql/mutations/issues/update_spec.rb
index 97ead687a82..ff100d99628 100644
--- a/spec/requests/api/graphql/mutations/issues/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/update_spec.rb
@@ -147,5 +147,10 @@ RSpec.describe 'Update of an existing issue', feature_category: :team_planning d
end
end
end
+
+ it_behaves_like 'updating time estimate' do
+ let(:resource) { issue }
+ let(:mutation_name) { 'updateIssue' }
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_time_estimate_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_time_estimate_spec.rb
new file mode 100644
index 00000000000..6bc130a97cf
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_time_estimate_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Setting time estimate of a merge request', feature_category: :code_review_workflow do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:input) do
+ {
+ iid: merge_request.iid.to_s
+ }
+ end
+
+ let(:extra_params) { { project_path: project.full_path } }
+ let(:input_params) { input.merge(extra_params) }
+ let(:mutation) { graphql_mutation(:merge_request_update, input_params, nil, ['productAnalyticsState']) }
+ let(:mutation_response) { graphql_mutation_response(:merge_request_update) }
+
+ context 'when the user is not allowed to update a merge request' do
+ before_all do
+ project.add_reporter(current_user)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when updating a time estimate' do
+ before_all do
+ project.add_developer(current_user)
+ end
+
+ it_behaves_like 'updating time estimate' do
+ let(:resource) { merge_request }
+ let(:mutation_name) { 'mergeRequestUpdate' }
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
index d81744abe1b..0e55b6f2c9f 100644
--- a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
@@ -43,9 +43,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
project.add_reporter(current_user)
end
- it_behaves_like 'a mutation that returns top-level errors',
- errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
-
it 'does not create the annotation' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
@@ -58,25 +55,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
project.add_developer(current_user)
end
- it 'creates the annotation' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- end.to change { Metrics::Dashboard::Annotation.count }.by(1)
- end
-
- it 'returns the created annotation' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- annotation = Metrics::Dashboard::Annotation.first
- annotation_id = GitlabSchema.id_from_object(annotation).to_s
-
- expect(mutation_response['annotation']['description']).to match(description)
- expect(mutation_response['annotation']['startingAt'].to_time).to match(starting_at.to_time)
- expect(mutation_response['annotation']['endingAt'].to_time).to match(ending_at.to_time)
- expect(mutation_response['annotation']['id']).to match(annotation_id)
- expect(annotation.environment_id).to eq(environment.id)
- end
-
context 'when environment_id is missing' do
let(:mutation) do
variables = {
@@ -137,25 +115,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
project.add_developer(current_user)
end
- it 'creates the annotation' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- end.to change { Metrics::Dashboard::Annotation.count }.by(1)
- end
-
- it 'returns the created annotation' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- annotation = Metrics::Dashboard::Annotation.first
- annotation_id = GitlabSchema.id_from_object(annotation).to_s
-
- expect(mutation_response['annotation']['description']).to match(description)
- expect(mutation_response['annotation']['startingAt'].to_time).to match(starting_at.to_time)
- expect(mutation_response['annotation']['endingAt'].to_time).to match(ending_at.to_time)
- expect(mutation_response['annotation']['id']).to match(annotation_id)
- expect(annotation.cluster_id).to eq(cluster.id)
- end
-
context 'when cluster_id is missing' do
let(:mutation) do
variables = {
@@ -177,9 +136,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
project.add_guest(current_user)
end
- it_behaves_like 'a mutation that returns top-level errors',
- errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
-
it 'does not create the annotation' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
diff --git a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb
index 09977cd19d7..c81f6381398 100644
--- a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb
+++ b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb
@@ -7,8 +7,7 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Delete, feature_categ
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :private, :repository) }
- let_it_be(:environment) { create(:environment, project: project) }
- let_it_be(:annotation) { create(:metrics_dashboard_annotation, environment: environment) }
+ let_it_be(:annotation) { create(:metrics_dashboard_annotation) }
let(:variables) { { id: GitlabSchema.id_from_object(annotation).to_s } }
let(:mutation) { graphql_mutation(:delete_annotation, variables) }
@@ -28,14 +27,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Delete, feature_categ
project.add_developer(current_user)
end
- context 'with valid params' do
- it 'deletes the annotation' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- end.to change { Metrics::Dashboard::Annotation.count }.by(-1)
- end
- end
-
context 'with invalid params' do
let(:variables) { { id: GitlabSchema.id_from_object(project).to_s } }
@@ -44,21 +35,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Delete, feature_categ
end
end
- context 'when the delete fails' do
- let(:service_response) { { message: 'Annotation has not been deleted', status: :error, last_step: :delete } }
-
- before do
- allow_next_instance_of(Metrics::Dashboard::Annotations::DeleteService) do |delete_service|
- allow(delete_service).to receive(:execute).and_return(service_response)
- end
- end
- it 'returns the error' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(mutation_response['errors']).to eq([service_response[:message]])
- end
- end
-
context 'when metrics dashboard feature is unavailable' do
before do
stub_feature_flags(remove_monitor_metrics: true)
diff --git a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
index 2f26a2f92b2..480e184a60c 100644
--- a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -15,6 +15,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
maven_duplicate_exception_regex: 'foo-.*',
generic_duplicates_allowed: false,
generic_duplicate_exception_regex: 'bar-.*',
+ nuget_duplicates_allowed: false,
+ nuget_duplicate_exception_regex: 'bar-.*',
maven_package_requests_forwarding: true,
lock_maven_package_requests_forwarding: true,
npm_package_requests_forwarding: true,
@@ -32,6 +34,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
mavenDuplicateExceptionRegex
genericDuplicatesAllowed
genericDuplicateExceptionRegex
+ nugetDuplicatesAllowed
+ nugetDuplicateExceptionRegex
mavenPackageRequestsForwarding
lockMavenPackageRequestsForwarding
npmPackageRequestsForwarding
@@ -58,6 +62,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
expect(package_settings_response['mavenDuplicateExceptionRegex']).to eq(params[:maven_duplicate_exception_regex])
expect(package_settings_response['genericDuplicatesAllowed']).to eq(params[:generic_duplicates_allowed])
expect(package_settings_response['genericDuplicateExceptionRegex']).to eq(params[:generic_duplicate_exception_regex])
+ expect(package_settings_response['nugetDuplicatesAllowed']).to eq(params[:nuget_duplicates_allowed])
+ expect(package_settings_response['nugetDuplicateExceptionRegex']).to eq(params[:nuget_duplicate_exception_regex])
expect(package_settings_response['mavenPackageRequestsForwarding']).to eq(params[:maven_package_requests_forwarding])
expect(package_settings_response['lockMavenPackageRequestsForwarding']).to eq(params[:lock_maven_package_requests_forwarding])
expect(package_settings_response['pypiPackageRequestsForwarding']).to eq(params[:pypi_package_requests_forwarding])
@@ -98,6 +104,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
maven_duplicate_exception_regex: 'SNAPSHOT',
generic_duplicates_allowed: true,
generic_duplicate_exception_regex: 'foo',
+ nuget_duplicates_allowed: true,
+ nuget_duplicate_exception_regex: 'foo',
maven_package_requests_forwarding: nil,
lock_maven_package_requests_forwarding: false,
npm_package_requests_forwarding: nil,
@@ -109,6 +117,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
maven_duplicate_exception_regex: 'foo-.*',
generic_duplicates_allowed: false,
generic_duplicate_exception_regex: 'bar-.*',
+ nuget_duplicates_allowed: false,
+ nuget_duplicate_exception_regex: 'bar-.*',
maven_package_requests_forwarding: true,
lock_maven_package_requests_forwarding: true,
npm_package_requests_forwarding: true,
@@ -119,6 +129,26 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
it_behaves_like 'returning a success'
it_behaves_like 'rejecting invalid regex'
+
+ context 'when nuget_duplicates_option FF is disabled' do
+ let(:params) do
+ {
+ namespace_path: namespace.full_path,
+ 'nugetDuplicatesAllowed' => false
+ }
+ end
+
+ before do
+ stub_feature_flags(nuget_duplicates_option: false)
+ end
+
+ it 'raises an error', :aggregate_failures do
+ subject
+
+ expect(graphql_errors.size).to eq(1)
+ expect(graphql_errors.first['message']).to include('feature flag is disabled')
+ end
+ end
end
RSpec.shared_examples 'accepting the mutation request creating the package settings' do
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 7c5ab691b51..06594d89338 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -188,16 +188,10 @@ RSpec.describe 'Updating a Snippet', feature_category: :source_code_management d
stub_session('warden.user.user.key' => [[current_user.id], current_user.authenticatable_salt])
end
- it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ it_behaves_like 'internal event tracking' do
+ let(:action) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_SNIPPET_EDITOR }
let(:user) { current_user }
- let(:property) { 'g_edit_by_snippet_ide' }
let(:namespace) { project.namespace }
- let(:category) { 'Gitlab::UsageDataCounters::EditorUniqueCounter' }
- let(:action) { 'ide_edit' }
- let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit' }
- let(:context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context]
- end
end
end
end
diff --git a/spec/requests/api/graphql/mutations/work_items/create_spec.rb b/spec/requests/api/graphql/mutations/work_items/create_spec.rb
index fca3c84e534..78b93c3210b 100644
--- a/spec/requests/api/graphql/mutations/work_items/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/create_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do
}
end
- before(:all) do
+ before_all do
create(:parent_link, work_item_parent: parent, work_item: adjacent, relative_position: 0)
end
@@ -264,6 +264,14 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do
let(:mutation) { graphql_mutation(:workItemCreate, input.merge('namespacePath' => project.full_path), fields) }
it_behaves_like 'creates work item'
+
+ context 'when the namespace_level_work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ end
+
+ it_behaves_like 'creates work item'
+ end
end
end
@@ -272,6 +280,16 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do
let(:mutation) { graphql_mutation(:workItemCreate, input.merge(namespacePath: group.full_path), fields) }
it_behaves_like 'creates work item'
+
+ context 'when the namespace_level_work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ end
+
+ it_behaves_like 'a mutation that returns top-level errors', errors: [
+ Mutations::WorkItems::Create::DISABLED_FF_ERROR
+ ]
+ end
end
context 'when both projectPath and namespacePath are passed' do
diff --git a/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb b/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb
new file mode 100644
index 00000000000..f18e0e44905
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb
@@ -0,0 +1,131 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "Add linked items to a work item", feature_category: :portfolio_management do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:reporter) { create(:user).tap { |user| project.add_reporter(user) } }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:related1) { create(:work_item, project: project) }
+ let_it_be(:related2) { create(:work_item, project: project) }
+
+ let(:mutation_response) { graphql_mutation_response(:work_item_add_linked_items) }
+ let(:mutation) { graphql_mutation(:workItemAddLinkedItems, input, fields) }
+
+ let(:ids_to_link) { [related1.to_global_id.to_s, related2.to_global_id.to_s] }
+ let(:input) { { 'id' => work_item.to_global_id.to_s, 'workItemsIds' => ids_to_link } }
+
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetLinkedItems {
+ linkedItems {
+ edges {
+ node {
+ linkType
+ workItem {
+ id
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ errors
+ message
+ FIELDS
+ end
+
+ context 'when the user is not allowed to read the work item' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to read the work item' do
+ let(:current_user) { reporter }
+
+ it 'links the work items' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { WorkItems::RelatedWorkItemLink.count }.by(2)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s)
+ expect(mutation_response['message']).to eq("Successfully linked ID(s): #{related1.id} and #{related2.id}.")
+ expect(mutation_response['workItem']['widgets']).to include(
+ {
+ 'linkedItems' => { 'edges' => match_array([
+ { 'node' => { 'linkType' => 'relates_to', 'workItem' => { 'id' => related1.to_global_id.to_s } } },
+ { 'node' => { 'linkType' => 'relates_to', 'workItem' => { 'id' => related2.to_global_id.to_s } } }
+ ]) },
+ 'type' => 'LINKED_ITEMS'
+ }
+ )
+ end
+
+ context 'when linking a work item fails' do
+ let_it_be(:private_project) { create(:project, :private) }
+ let_it_be(:related2) { create(:work_item, project: private_project) }
+
+ it 'adds valid items and returns an error message for failed item' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { WorkItems::RelatedWorkItemLink.count }.by(1)
+
+ expect(mutation_response['errors']).to contain_exactly(
+ "Item with ID: #{related2.id} cannot be added. " \
+ "You don't have permission to perform this action."
+ )
+ end
+
+ context 'when a work item does not exist' do
+ let(:input) do
+ {
+ 'id' => work_item.to_global_id.to_s,
+ 'workItemsIds' => ["gid://gitlab/WorkItem/#{non_existing_record_id}"]
+ }
+ end
+
+ it 'returns an error message' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.not_to change { WorkItems::RelatedWorkItemLink.count }
+
+ expect_graphql_errors_to_include("Couldn't find WorkItem with 'id'=#{non_existing_record_id}")
+ end
+ end
+
+ context 'when there are more than the max allowed items to link' do
+ let(:max_work_items) { Mutations::WorkItems::LinkedItems::Base::MAX_WORK_ITEMS }
+ let(:error_msg) { "No more than #{max_work_items} work items can be linked at the same time." }
+
+ before do
+ max_work_items.times { |i| ids_to_link.push("gid://gitlab/WorkItem/#{i}") }
+ end
+
+ it 'returns an error message' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.not_to change { WorkItems::RelatedWorkItemLink.count }
+
+ expect_graphql_errors_to_include("No more than #{max_work_items} work items can be linked at the same time.")
+ end
+ end
+ end
+
+ context 'when `linked_work_items` feature flag is disabled' do
+ before do
+ stub_feature_flags(linked_work_items: false)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/work_items/subscribe_spec.rb b/spec/requests/api/graphql/mutations/work_items/subscribe_spec.rb
new file mode 100644
index 00000000000..00672332082
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/work_items/subscribe_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Subscribe to a work item', feature_category: :team_planning do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:guest) { create(:user).tap { |user| project.add_guest(user) } }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+
+ let(:subscribed_state) { true }
+ let(:mutation_input) { { 'id' => work_item.to_global_id.to_s, 'subscribed' => subscribed_state } }
+ let(:mutation) { graphql_mutation(:workItemSubscribe, mutation_input, fields) }
+ let(:mutation_response) { graphql_mutation_response(:work_item_subscribe) }
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ widgets {
+ type
+ ... on WorkItemWidgetNotifications {
+ subscribed
+ }
+ }
+ }
+ errors
+ FIELDS
+ end
+
+ context 'when user is not allowed to update subscription work items' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to update its subscription to the work items' do
+ let(:current_user) { guest }
+
+ it "subscribe the user to the work item's notifications" do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { work_item.subscribed?(current_user, project) }.to(true)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']['widgets']).to include({
+ 'type' => 'NOTIFICATIONS',
+ 'subscribed' => true
+ })
+ end
+
+ context 'when unsubscribing' do
+ let(:subscribed_state) { false }
+
+ before do
+ create(:subscription, project: project, user: current_user, subscribable: work_item, subscribed: true)
+ end
+
+ it "unsubscribe the user from the work item's notifications" do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { work_item.subscribed?(current_user, project) }.to(false)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']['widgets']).to include({
+ 'type' => 'NOTIFICATIONS',
+ 'subscribed' => false
+ })
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
index ea9516f256c..cff21c10a5a 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
@@ -573,7 +573,7 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
end
context 'when updating relative position' do
- before(:all) do
+ before_all do
create(:parent_link, work_item_parent: valid_parent, work_item: valid_child1)
create(:parent_link, work_item_parent: valid_parent, work_item: valid_child2)
end
@@ -655,7 +655,7 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
let_it_be(:work_item, reload: true) { create(:work_item, :task, project: project) }
context "when parent is already assigned" do
- before(:all) do
+ before_all do
create(:parent_link, work_item_parent: valid_parent, work_item: work_item)
create(:parent_link, work_item_parent: valid_parent, work_item: valid_child1)
create(:parent_link, work_item_parent: valid_parent, work_item: valid_child2)
diff --git a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
index 7f586edd510..55d223daf27 100644
--- a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
@@ -74,6 +74,7 @@ RSpec.describe 'getting Alert Management Alerts', feature_category: :incident_ma
'details' => { 'custom.alert' => 'payload', 'runbook' => 'runbook' },
'createdAt' => triggered_alert.created_at.strftime('%Y-%m-%dT%H:%M:%SZ'),
'updatedAt' => triggered_alert.updated_at.strftime('%Y-%m-%dT%H:%M:%SZ'),
+ 'metricsDashboardUrl' => nil,
'detailsUrl' => triggered_alert.details_url,
'prometheusAlert' => nil,
'runbook' => 'runbook'
diff --git a/spec/requests/api/graphql/project/autocomplete_users_spec.rb b/spec/requests/api/graphql/project/autocomplete_users_spec.rb
new file mode 100644
index 00000000000..7c416465ed4
--- /dev/null
+++ b/spec/requests/api/graphql/project/autocomplete_users_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'autocomplete users for a project', feature_category: :team_planning do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, :public, group: group) }
+
+ let_it_be(:direct_member) { create(:user).tap { |u| project.add_guest(u) } }
+ let_it_be(:indirect_member) { create(:user).tap { |u| group.add_guest(u) } }
+
+ let_it_be(:group_invited_to_project) do
+ create(:group).tap { |g| create(:project_group_link, project: project, group: g) }
+ end
+
+ let_it_be(:member_from_project_share) { create(:user).tap { |u| group_invited_to_project.add_guest(u) } }
+
+ let_it_be(:group_invited_to_parent_group) do
+ create(:group).tap { |g| create(:group_group_link, shared_group: group, shared_with_group: g) }
+ end
+
+ let_it_be(:member_from_parent_group_share) { create(:user).tap { |u| group_invited_to_parent_group.add_guest(u) } }
+
+ let_it_be(:sibling_project) { create(:project, :repository, :public, group: group) }
+ let_it_be(:sibling_member) { create(:user).tap { |u| sibling_project.add_guest(u) } }
+
+ let(:params) { {} }
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('autocompleteUsers', params, 'id')
+ )
+ end
+
+ let(:response_user_ids) { graphql_data.dig('project', 'autocompleteUsers').pluck('id') }
+
+ it 'returns members of the project' do
+ post_graphql(query, current_user: direct_member)
+
+ expected_user_ids = [
+ direct_member,
+ indirect_member,
+ member_from_project_share,
+ member_from_parent_group_share
+ ].map { |u| u.to_global_id.to_s }
+
+ expect(response_user_ids).to match_array(expected_user_ids)
+ end
+
+ context 'with search param' do
+ let(:params) { { search: indirect_member.username } }
+
+ it 'only returns users matching the search query' do
+ post_graphql(query, current_user: direct_member)
+
+ expect(response_user_ids).to contain_exactly(indirect_member.to_global_id.to_s)
+ end
+ end
+
+ context 'with merge request interaction' do
+ let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:fields) do
+ <<~FIELDS
+ id
+ mergeRequestInteraction(id: "#{merge_request.to_global_id}") {
+ canMerge
+ }
+ FIELDS
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('autocompleteUsers', params, fields)
+ )
+ end
+
+ it 'returns MR state related to the users' do
+ project.add_maintainer(direct_member)
+
+ post_graphql(query, current_user: direct_member)
+
+ expect(graphql_data.dig('project', 'autocompleteUsers')).to include(
+ a_hash_including(
+ 'id' => direct_member.to_global_id.to_s,
+ 'mergeRequestInteraction' => { 'canMerge' => true }
+ ),
+ a_hash_including(
+ 'id' => indirect_member.to_global_id.to_s,
+ 'mergeRequestInteraction' => { 'canMerge' => false }
+ )
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index e3c4396e7d8..05ed0ed8729 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -396,6 +396,28 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
include_examples 'N+1 query check', skip_cached: false
end
+
+ context 'when requesting diffStats' do
+ let(:requested_fields) { ['diffStats { path }'] }
+
+ before do
+ create_list(:merge_request_diff, 2, merge_request: merge_request_a)
+ create_list(:merge_request_diff, 2, merge_request: merge_request_b)
+ create_list(:merge_request_diff, 2, merge_request: merge_request_c)
+ end
+
+ include_examples 'N+1 query check', skip_cached: false
+
+ context 'when each merge request diff has no head_commit_sha' do
+ before do
+ [merge_request_a, merge_request_b, merge_request_c].each do |mr|
+ mr.merge_request_diffs.update!(head_commit_sha: nil)
+ end
+ end
+
+ include_examples 'N+1 query check', skip_cached: false
+ end
+ end
end
describe 'performance' do
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
index 478112b687a..4aba83dae92 100644
--- a/spec/requests/api/graphql/project/work_items_spec.rb
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -361,6 +361,59 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
end
end
+ context 'when fetching work item linked items widget' do
+ let_it_be(:related_items) { create_list(:work_item, 3, project: project, milestone: milestone1) }
+
+ let(:fields) do
+ <<~GRAPHQL
+ nodes {
+ widgets {
+ type
+ ... on WorkItemWidgetLinkedItems {
+ linkedItems {
+ nodes {
+ linkId
+ linkType
+ linkCreatedAt
+ linkUpdatedAt
+ workItem {
+ id
+ widgets {
+ ... on WorkItemWidgetMilestone {
+ milestone {
+ id
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ before do
+ create(:work_item_link, source: item1, target: related_items[0], link_type: 'relates_to')
+ end
+
+ it 'executes limited number of N+1 queries', :use_sql_query_cache do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post_graphql(query, current_user: current_user)
+ end
+
+ create(:work_item_link, source: item1, target: related_items[1], link_type: 'relates_to')
+ create(:work_item_link, source: item1, target: related_items[2], link_type: 'relates_to')
+
+ expect_graphql_errors_to_be_empty
+ # TODO: Fix N+1 queries executed for the linked work item widgets
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/420605
+ expect { post_graphql(query, current_user: current_user) }
+ .not_to exceed_all_query_limit(control).with_threshold(11)
+ end
+ end
+
def item_ids
graphql_dig_at(items_data, :node, :id)
end
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index 54f141d9401..783e96861b1 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'getting project information', feature_category: :groups_and_proj
let_it_be(:group) { create(:group) }
let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
let_it_be(:current_user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
let(:project_fields) { all_graphql_fields_for('project'.to_s.classify, max_depth: 1) }
@@ -23,7 +24,60 @@ RSpec.describe 'getting project information', feature_category: :groups_and_proj
it 'includes the project', :use_clean_rails_memory_store_caching, :request_store do
post_graphql(query, current_user: current_user)
- expect(graphql_data['project']).not_to be_nil
+ expect(graphql_data['project']).to include('id' => global_id_of(project).to_s)
+ end
+
+ context 'when querying for pipeline triggers' do
+ let(:project_fields) { query_nodes(:pipeline_triggers) }
+ let(:pipeline_trigger) { project.triggers.first }
+
+ before do
+ create(:ci_trigger, project: project, owner: current_user)
+ end
+
+ it 'fetches the pipeline trigger tokens' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :pipeline_triggers, :nodes).first).to match({
+ 'id' => pipeline_trigger.to_global_id.to_s,
+ 'canAccessProject' => true,
+ 'description' => pipeline_trigger.description,
+ 'hasTokenExposed' => true,
+ 'lastUsed' => nil,
+ 'token' => pipeline_trigger.token
+ })
+ end
+
+ it 'does not produce N+1 queries' do
+ baseline = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) }
+
+ build_list(:ci_trigger, 2, owner: current_user, project: project)
+
+ expect { post_graphql(query, current_user: current_user) }.not_to exceed_query_limit(baseline)
+ end
+
+ context 'when other project member is not authorized to see the full token' do
+ before do
+ project.add_maintainer(other_user)
+ post_graphql(query, current_user: other_user)
+ end
+
+ it 'shows truncated token' do
+ expect(graphql_data_at(:project, :pipeline_triggers,
+ :nodes).first['token']).to eql pipeline_trigger.token[0, 4]
+ end
+ end
+
+ context 'when user is not a member of a public project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ post_graphql(query, current_user: other_user)
+ end
+
+ it 'cannot read the token' do
+ expect(graphql_data_at(:project, :pipeline_triggers, :nodes)).to eql([])
+ end
+ end
end
end
@@ -35,10 +89,10 @@ RSpec.describe 'getting project information', feature_category: :groups_and_proj
it 'includes the project' do
post_graphql(query, current_user: current_user)
- expect(graphql_data['project']).not_to be_nil
+ expect(graphql_data['project']).to include('id' => global_id_of(project).to_s)
end
- it_behaves_like 'a working graphql query' do
+ it_behaves_like 'a working graphql query that returns data' do
before do
post_graphql(query, current_user: current_user)
end
@@ -239,13 +293,7 @@ RSpec.describe 'getting project information', feature_category: :groups_and_proj
end
context 'when the user does not have access to the project' do
- it 'returns an empty field' do
- post_graphql(query, current_user: current_user)
-
- expect(graphql_data['project']).to be_nil
- end
-
- it_behaves_like 'a working graphql query' do
+ it_behaves_like 'a working graphql query that returns no data' do
before do
post_graphql(query, current_user: current_user)
end
diff --git a/spec/requests/api/graphql/user/user_achievements_query_spec.rb b/spec/requests/api/graphql/user/user_achievements_query_spec.rb
index 27d32d07372..2e6c3dcba61 100644
--- a/spec/requests/api/graphql/user/user_achievements_query_spec.rb
+++ b/spec/requests/api/graphql/user/user_achievements_query_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
let_it_be(:fields) do
<<~HEREDOC
userAchievements {
+ count
nodes {
id
achievement {
@@ -54,6 +55,10 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
)
end
+ it 'returns the correct user_achievement count' do
+ expect(graphql_data_at(:user, :userAchievements, :count)).to be(1)
+ end
+
it 'can lookahead to eliminate N+1 queries', :use_clean_rails_memory_store_caching do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
post_graphql(query, current_user: user)
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index 6702224f303..fa354bc1f66 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -539,6 +539,79 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
)
end
end
+
+ describe 'linked items widget' do
+ let_it_be(:related_item1) { create(:work_item, project: project) }
+ let_it_be(:related_item2) { create(:work_item, project: project) }
+ let_it_be(:related_item3) { create(:work_item) }
+ let_it_be(:link1) { create(:work_item_link, source: work_item, target: related_item1, link_type: 'relates_to') }
+ let_it_be(:link2) { create(:work_item_link, source: work_item, target: related_item2, link_type: 'relates_to') }
+ let_it_be(:link3) { create(:work_item_link, source: work_item, target: related_item3, link_type: 'relates_to') }
+
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetLinkedItems {
+ linkedItems {
+ nodes {
+ linkId
+ linkType
+ linkCreatedAt
+ linkUpdatedAt
+ workItem {
+ id
+ }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns widget information' do
+ expect(work_item_data).to include(
+ 'widgets' => include(
+ hash_including(
+ 'type' => 'LINKED_ITEMS',
+ 'linkedItems' => { 'nodes' => match_array(
+ [
+ hash_including(
+ 'linkId' => link1.to_gid.to_s, 'linkType' => 'relates_to',
+ 'linkCreatedAt' => link1.created_at.iso8601, 'linkUpdatedAt' => link1.updated_at.iso8601,
+ 'workItem' => { 'id' => related_item1.to_gid.to_s }
+ ),
+ hash_including(
+ 'linkId' => link2.to_gid.to_s, 'linkType' => 'relates_to',
+ 'linkCreatedAt' => link2.created_at.iso8601, 'linkUpdatedAt' => link2.updated_at.iso8601,
+ 'workItem' => { 'id' => related_item2.to_gid.to_s }
+ )
+ ]
+ ) }
+ )
+ )
+ )
+ end
+
+ context 'when `linked_work_items` feature flag is disabled' do
+ before do
+ stub_feature_flags(linked_work_items: false)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'returns empty result' do
+ expect(work_item_data).to include(
+ 'widgets' => include(
+ hash_including(
+ 'type' => 'LINKED_ITEMS',
+ 'linkedItems' => { "nodes" => [] }
+ )
+ )
+ )
+ end
+ end
+ end
end
describe 'notes widget' do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 12b7b8d7054..fa35e367420 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -1012,6 +1012,21 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
end
end
+ context 'when result is not ::Gitlab::GitAccessResult::Success or ::Gitlab::GitAccessResult::CustomAction' do
+ it 'responds with 500' do
+ personal_project = create(:project, namespace: user.namespace)
+
+ allow_next_instance_of(Gitlab::GitAccess) do |access|
+ allow(access).to receive(:check).and_return(nil)
+ end
+ push(key, personal_project)
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(json_response['status']).to be_falsey
+ expect(json_response['message']).to eq(::API::Helpers::InternalHelpers::UNKNOWN_CHECK_RESULT_ERROR)
+ end
+ end
+
context "archived project" do
before do
project.add_developer(user)
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 09170ca952f..ec30840dfd8 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -4,7 +4,11 @@ require 'spec_helper'
RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_management do
let(:jwt_auth_headers) do
- jwt_token = JWT.encode({ 'iss' => Gitlab::Kas::JWT_ISSUER }, Gitlab::Kas.secret, 'HS256')
+ jwt_token = JWT.encode(
+ { 'iss' => Gitlab::Kas::JWT_ISSUER, 'aud' => Gitlab::Kas::JWT_AUDIENCE },
+ Gitlab::Kas.secret,
+ 'HS256'
+ )
{ Gitlab::Kas::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb
index 1006319eabf..65aa2326af5 100644
--- a/spec/requests/api/internal/pages_spec.rb
+++ b/spec/requests/api/internal/pages_spec.rb
@@ -151,20 +151,6 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do
project.mark_pages_as_deployed
end
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(pages_unique_domain: false)
- end
-
- context 'when there are no pages deployed for the related project' do
- it 'responds with 204 No Content' do
- get api('/internal/pages'), headers: auth_header, params: { host: 'unique-domain.example.com' }
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
- end
-
context 'when the unique domain is disabled' do
before do
project.project_setting.update!(pages_unique_domain_enabled: false)
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index b5d7d564749..a9bc38ae77c 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -484,6 +484,18 @@ RSpec.describe API::Labels, feature_category: :team_planning do
let(:params) { { name: valid_label_title_1 } }
end
+ context 'when lock_on_merge' do
+ let(:label_locked) { create(:label, title: 'Locked label', project: project, lock_on_merge: true) }
+
+ it 'returns 400 because label could not be deleted' do
+ delete api("/projects/#{project.id}/labels", user), params: { label_id: label_locked.id }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq('Label is locked and was not removed')
+ expect(project.labels).to include(label_locked)
+ end
+ end
+
context 'with group label' do
let_it_be(:group) { create(:group) }
let_it_be(:group_label) { create(:group_label, title: valid_group_label_title_1, group: group) }
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index f4cac0854e7..4edcd66e91a 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -2,15 +2,14 @@
require "spec_helper"
-RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :source_code_management,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/418757' do
+RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :source_code_management do
include ProjectForksHelper
let_it_be(:base_time) { Time.now }
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
let_it_be(:admin) { create(:user, :admin) }
- let_it_be(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace, only_allow_merge_if_pipeline_succeeds: false) }
+ let_it_be_with_refind(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace, only_allow_merge_if_pipeline_succeeds: false) }
let(:milestone1) { create(:milestone, title: '0.9', project: project) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
@@ -167,42 +166,6 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
expect(merge_request.reload.merge_status).to eq('unchecked')
end
end
-
- context 'when restrict_merge_status_recheck FF is disabled' do
- before do
- stub_feature_flags(restrict_merge_status_recheck: false)
- end
-
- context 'with batched_api_mergeability_checks FF on' do
- it 'checks mergeability asynchronously in batch', :sidekiq_inline do
- get(api(endpoint_path, user2), params: { with_merge_status_recheck: true })
-
- expect_successful_response_with_paginated_array
-
- expect(merge_request.reload.merge_status).to eq('can_be_merged')
- end
- end
-
- context 'with batched_api_mergeability_checks FF off' do
- before do
- stub_feature_flags(batched_api_mergeability_checks: false)
- end
-
- context 'with merge status recheck projection' do
- it 'does enqueue a merge status recheck' do
- expect_next_instances_of(check_service_class, (1..2)) do |service|
- expect(service).not_to receive(:execute)
- expect(service).to receive(:async_execute).and_call_original
- end
-
- get(api(endpoint_path, user2), params: { with_merge_status_recheck: true })
-
- expect_successful_response_with_paginated_array
- expect(mr_entity['merge_status']).to eq('checking')
- end
- end
- end
- end
end
end
diff --git a/spec/requests/api/metrics/dashboard/annotations_spec.rb b/spec/requests/api/metrics/dashboard/annotations_spec.rb
index 250fe2a3ee3..6000fc2a6b7 100644
--- a/spec/requests/api/metrics/dashboard/annotations_spec.rb
+++ b/spec/requests/api/metrics/dashboard/annotations_spec.rb
@@ -21,77 +21,6 @@ RSpec.describe API::Metrics::Dashboard::Annotations, feature_category: :metrics
end
context "with :source_type == #{source_type.pluralize}" do
- context 'with correct permissions' do
- context 'with valid parameters' do
- it 'creates a new annotation', :aggregate_failures do
- post api(url, user), params: params
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response["#{source_type}_id"]).to eq(source.id)
- expect(json_response['starting_at'].to_time).to eq(starting_at.to_time)
- expect(json_response['ending_at'].to_time).to eq(ending_at.to_time)
- expect(json_response['description']).to eq(params[:description])
- expect(json_response['dashboard_path']).to eq(dashboard)
- end
- end
-
- context 'with invalid parameters' do
- it 'returns error message' do
- post api(url, user), params: { dashboard_path: '', starting_at: nil, description: nil }
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to include({ "starting_at" => ["can't be blank"], "description" => ["can't be blank"], "dashboard_path" => ["can't be blank"] })
- end
- end
-
- context 'with undeclared params' do
- before do
- params[:undeclared_param] = 'xyz'
- end
-
- it 'filters out undeclared params' do
- expect(::Metrics::Dashboard::Annotations::CreateService).to receive(:new).with(user, hash_excluding(:undeclared_param))
-
- post api(url, user), params: params
- end
- end
-
- context 'with special characers in dashboard_path in request body' do
- let(:dashboard_escaped) { 'config/prometheus/common_metrics%26copy.yml' }
- let(:dashboard_unescaped) { 'config/prometheus/common_metrics&copy.yml' }
-
- shared_examples 'special characters unescaped' do
- let(:expected_params) do
- {
- 'starting_at' => starting_at.to_time,
- 'ending_at' => ending_at.to_time,
- source_type.to_s => source,
- 'dashboard_path' => dashboard_unescaped,
- 'description' => params[:description]
- }
- end
-
- it 'unescapes the dashboard_path', :aggregate_failures do
- expect(::Metrics::Dashboard::Annotations::CreateService).to receive(:new).with(user, expected_params)
-
- post api(url, user), params: params
- end
- end
-
- context 'with escaped characters' do
- it_behaves_like 'special characters unescaped' do
- let(:dashboard) { dashboard_escaped }
- end
- end
-
- context 'with unescaped characers' do
- it_behaves_like 'special characters unescaped' do
- let(:dashboard) { dashboard_unescaped }
- end
- end
- end
- end
-
context 'without correct permissions' do
let_it_be(:guest) { create(:user) }
@@ -102,7 +31,7 @@ RSpec.describe API::Metrics::Dashboard::Annotations, feature_category: :metrics
it 'returns error message' do
post api(url, guest), params: params
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/requests/api/metrics/user_starred_dashboards_spec.rb b/spec/requests/api/metrics/user_starred_dashboards_spec.rb
index 6fc98de0777..bdeba777350 100644
--- a/spec/requests/api/metrics/user_starred_dashboards_spec.rb
+++ b/spec/requests/api/metrics/user_starred_dashboards_spec.rb
@@ -15,54 +15,13 @@ RSpec.describe API::Metrics::UserStarredDashboards, feature_category: :metrics d
}
end
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
describe 'POST /projects/:id/metrics/user_starred_dashboards' do
before do
project.add_reporter(user)
end
context 'with correct permissions' do
- context 'with valid parameters' do
- context 'dashboard_path as url param url escaped' do
- it 'creates a new user starred metrics dashboard', :aggregate_failures do
- post api(url, user), params: params
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['project_id']).to eq(project.id)
- expect(json_response['user_id']).to eq(user.id)
- expect(json_response['dashboard_path']).to eq(dashboard)
- end
- end
-
- context 'dashboard_path in request body unescaped' do
- let(:params) do
- {
- dashboard_path: dashboard
- }
- end
-
- it 'creates a new user starred metrics dashboard', :aggregate_failures do
- post api(url, user), params: params
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['project_id']).to eq(project.id)
- expect(json_response['user_id']).to eq(user.id)
- expect(json_response['dashboard_path']).to eq(dashboard)
- end
- end
- end
-
context 'with invalid parameters' do
- it 'returns error message' do
- post api(url, user), params: { dashboard_path: '' }
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq('dashboard_path is empty')
- end
-
context 'user is missing' do
it 'returns 404 not found' do
post api(url, nil), params: params
@@ -90,10 +49,6 @@ RSpec.describe API::Metrics::UserStarredDashboards, feature_category: :metrics d
end
context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
it 'returns 404 not found' do
post api(url, user), params: params
@@ -113,44 +68,6 @@ RSpec.describe API::Metrics::UserStarredDashboards, feature_category: :metrics d
end
context 'with correct permissions' do
- context 'with valid parameters' do
- context 'dashboard_path as url param url escaped' do
- it 'deletes given user starred metrics dashboard', :aggregate_failures do
- delete api(url, user), params: params
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['deleted_rows']).to eq(1)
- expect(::Metrics::UsersStarredDashboard.all.pluck(:dashboard_path)).not_to include(dashboard)
- end
- end
-
- context 'dashboard_path in request body unescaped' do
- let(:params) do
- {
- dashboard_path: dashboard
- }
- end
-
- it 'deletes given user starred metrics dashboard', :aggregate_failures do
- delete api(url, user), params: params
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['deleted_rows']).to eq(1)
- expect(::Metrics::UsersStarredDashboard.all.pluck(:dashboard_path)).not_to include(dashboard)
- end
- end
-
- context 'dashboard_path has not been specified' do
- it 'deletes all starred dashboards for that user within given project', :aggregate_failures do
- delete api(url, user), params: {}
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['deleted_rows']).to eq(2)
- expect(::Metrics::UsersStarredDashboard.all).to contain_exactly(other_user_starred_dashboard, other_project_starred_dashboard)
- end
- end
- end
-
context 'with invalid parameters' do
context 'user is missing' do
it 'returns 404 not found' do
@@ -179,10 +96,6 @@ RSpec.describe API::Metrics::UserStarredDashboards, feature_category: :metrics d
end
context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
it 'returns 404 not found' do
delete api(url, user), params: params
diff --git a/spec/requests/api/ml/mlflow/runs_spec.rb b/spec/requests/api/ml/mlflow/runs_spec.rb
index a85fe4d867a..45479666e9a 100644
--- a/spec/requests/api/ml/mlflow/runs_spec.rb
+++ b/spec/requests/api/ml/mlflow/runs_spec.rb
@@ -39,6 +39,11 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
response
end
+ before do
+ allow(Gitlab::Application.routes).to receive(:default_url_options)
+ .and_return(protocol: 'http', host: 'www.example.com', script_name: '')
+ end
+
RSpec.shared_examples 'MLflow|run_id param error cases' do
context 'when run id is not passed' do
let(:params) { {} }
@@ -162,6 +167,17 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
})
end
+ context 'with a relative root URL' do
+ before do
+ allow(Gitlab::Application.routes).to receive(:default_url_options)
+ .and_return(protocol: 'http', host: 'www.example.com', script_name: '/gitlab/root')
+ end
+
+ it 'gets a run including a valid artifact_uri' do
+ expect(json_response['run']['info']['artifact_uri']).to eql("http://www.example.com/gitlab/root/api/v4/projects/#{project_id}/packages/generic/ml_experiment_#{experiment.iid}/#{candidate.iid}/")
+ end
+ end
+
describe 'Error States' do
it_behaves_like 'MLflow|run_id param error cases'
it_behaves_like 'MLflow|shared error cases'
diff --git a/spec/requests/api/npm_group_packages_spec.rb b/spec/requests/api/npm_group_packages_spec.rb
index 431c59cf1b8..fe0bf1d8b46 100644
--- a/spec/requests/api/npm_group_packages_spec.rb
+++ b/spec/requests/api/npm_group_packages_spec.rb
@@ -2,8 +2,7 @@
require 'spec_helper'
-RSpec.describe API::NpmGroupPackages, feature_category: :package_registry,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/418757' do
+RSpec.describe API::NpmGroupPackages, feature_category: :package_registry do
using RSpec::Parameterized::TableSyntax
include_context 'npm api setup'
diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb
index 8c0b9572af3..340420e46e0 100644
--- a/spec/requests/api/npm_project_packages_spec.rb
+++ b/spec/requests/api/npm_project_packages_spec.rb
@@ -2,8 +2,7 @@
require 'spec_helper'
-RSpec.describe API::NpmProjectPackages, feature_category: :package_registry,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/418757' do
+RSpec.describe API::NpmProjectPackages, feature_category: :package_registry do
include ExclusiveLeaseHelpers
include_context 'npm api setup'
@@ -72,6 +71,27 @@ RSpec.describe API::NpmProjectPackages, feature_category: :package_registry,
it_behaves_like 'enqueue a worker to sync a metadata cache'
end
end
+
+ context 'when user is not authorized after exception was raised' do
+ let(:exception) { Rack::Timeout::RequestTimeoutException.new('Request ran for longer than 60000ms') }
+
+ subject { get(url) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'correctly reports an exception', :aggregate_failures do
+ allow_next_instance_of(Packages::Npm::GenerateMetadataService) do |instance|
+ allow(instance).to receive(:execute).and_raise(exception)
+ end
+
+ allow(Gitlab::Auth::UniqueIpsLimiter).to receive(:limit_user!)
+ .and_invoke(-> { nil }, -> { raise Gitlab::Auth::UnauthorizedError })
+
+ subject
+ end
+ end
end
describe 'GET /api/v4/projects/:id/packages/npm/-/package/*package_name/dist-tags' do
diff --git a/spec/requests/api/nuget_group_packages_spec.rb b/spec/requests/api/nuget_group_packages_spec.rb
index 07199119cb5..92eb869b871 100644
--- a/spec/requests/api/nuget_group_packages_spec.rb
+++ b/spec/requests/api/nuget_group_packages_spec.rb
@@ -31,6 +31,12 @@ RSpec.describe API::NugetGroupPackages, feature_category: :package_registry do
end
end
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/v2' do
+ it_behaves_like 'handling nuget service requests', v2: true do
+ let(:url) { "/groups/#{target.id}/-/packages/nuget/v2" }
+ end
+ end
+
describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/index' do
it_behaves_like 'handling nuget metadata requests with package name',
example_names_with_status:
diff --git a/spec/requests/api/nuget_project_packages_spec.rb b/spec/requests/api/nuget_project_packages_spec.rb
index 887dfd4beeb..da74409cd77 100644
--- a/spec/requests/api/nuget_project_packages_spec.rb
+++ b/spec/requests/api/nuget_project_packages_spec.rb
@@ -42,6 +42,52 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end
+ describe 'GET /api/v4/projects/:id/packages/nuget/v2' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/v2" }
+
+ it_behaves_like 'handling nuget service requests', v2: true
+
+ it_behaves_like 'accept get request on private project with access to package registry for everyone'
+ end
+
+ describe 'GET /api/v4/projects/:id/packages/nuget/v2/$metadata' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/v2/$metadata" }
+
+ subject(:api_request) { get api(url) }
+
+ it { is_expected.to have_request_urgency(:low) }
+
+ context 'with valid target' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:visibility_level, :user_role, :member, :expected_status) do
+ 'PUBLIC' | :developer | true | :success
+ 'PUBLIC' | :guest | true | :success
+ 'PUBLIC' | :developer | false | :success
+ 'PUBLIC' | :guest | false | :success
+ 'PUBLIC' | :anonymous | false | :success
+ 'PRIVATE' | :developer | true | :success
+ 'PRIVATE' | :guest | true | :success
+ 'PRIVATE' | :developer | false | :success
+ 'PRIVATE' | :guest | false | :success
+ 'PRIVATE' | :anonymous | false | :success
+ 'INTERNAL' | :developer | true | :success
+ 'INTERNAL' | :guest | true | :success
+ 'INTERNAL' | :developer | false | :success
+ 'INTERNAL' | :guest | false | :success
+ 'INTERNAL' | :anonymous | false | :success
+ end
+
+ with_them do
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
+ end
+
+ it_behaves_like 'process nuget v2 $metadata service request', params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
describe 'GET /api/v4/projects/:id/packages/nuget/metadata/*package_name/index' do
let(:url) { "/projects/#{target.id}/packages/nuget/metadata/#{package_name}/index.json" }
@@ -125,7 +171,7 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
end
describe 'GET /api/v4/projects/:id/packages/nuget/download/*package_name/*package_version/*package_filename' do
- let_it_be(:package) { create(:nuget_package, :with_symbol_package, project: project, name: package_name) }
+ let_it_be(:package) { create(:nuget_package, :with_symbol_package, :with_metadatum, project: project, name: package_name, version: '0.1') }
let(:format) { 'nupkg' }
let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.name}.#{package.version}.#{format}" }
@@ -183,75 +229,39 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
end
describe 'PUT /api/v4/projects/:id/packages/nuget/authorize' do
- include_context 'workhorse headers'
-
- let(:url) { "/projects/#{target.id}/packages/nuget/authorize" }
- let(:headers) { {} }
-
- subject { put api(url), headers: headers }
-
- it_behaves_like 'nuget authorize upload endpoint'
+ it_behaves_like 'nuget authorize upload endpoint' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/authorize" }
+ end
end
describe 'PUT /api/v4/projects/:id/packages/nuget' do
- include_context 'workhorse headers'
-
- let_it_be(:file_name) { 'package.nupkg' }
-
- let(:url) { "/projects/#{target.id}/packages/nuget" }
- let(:headers) { {} }
- let(:params) { { package: temp_file(file_name) } }
- let(:file_key) { :package }
- let(:send_rewritten_field) { true }
-
- subject do
- workhorse_finalize(
- api(url),
- method: :put,
- file_key: file_key,
- params: params,
- headers: headers,
- send_rewritten_field: send_rewritten_field
- )
+ it_behaves_like 'nuget upload endpoint' do
+ let(:url) { "/projects/#{target.id}/packages/nuget" }
end
-
- it_behaves_like 'nuget upload endpoint'
end
describe 'PUT /api/v4/projects/:id/packages/nuget/symbolpackage/authorize' do
- include_context 'workhorse headers'
-
- let(:url) { "/projects/#{target.id}/packages/nuget/symbolpackage/authorize" }
- let(:headers) { {} }
-
- subject { put api(url), headers: headers }
-
- it_behaves_like 'nuget authorize upload endpoint'
+ it_behaves_like 'nuget authorize upload endpoint' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/symbolpackage/authorize" }
+ end
end
describe 'PUT /api/v4/projects/:id/packages/nuget/symbolpackage' do
- include_context 'workhorse headers'
-
- let_it_be(:file_name) { 'package.snupkg' }
-
- let(:url) { "/projects/#{target.id}/packages/nuget/symbolpackage" }
- let(:headers) { {} }
- let(:params) { { package: temp_file(file_name) } }
- let(:file_key) { :package }
- let(:send_rewritten_field) { true }
-
- subject do
- workhorse_finalize(
- api(url),
- method: :put,
- file_key: file_key,
- params: params,
- headers: headers,
- send_rewritten_field: send_rewritten_field
- )
+ it_behaves_like 'nuget upload endpoint', symbol_package: true do
+ let(:url) { "/projects/#{target.id}/packages/nuget/symbolpackage" }
+ end
+ end
+
+ describe 'PUT /api/v4/projects/:id/packages/nuget/v2/authorize' do
+ it_behaves_like 'nuget authorize upload endpoint' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/v2/authorize" }
end
+ end
- it_behaves_like 'nuget upload endpoint', symbol_package: true
+ describe 'PUT /api/v4/projects/:id/packages/nuget/v2' do
+ it_behaves_like 'nuget upload endpoint' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/v2" }
+ end
end
def update_visibility_to(visibility)
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 86ff739da7e..aa8568d4951 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -60,6 +60,7 @@ itself: # project
- container_registry_image_prefix
- default_branch
- empty_repo
+ - emails_disabled
- forks_count
- http_url_to_repo
- import_status
@@ -98,6 +99,7 @@ ci_cd_settings:
remapped_attributes:
default_git_depth: ci_default_git_depth
forward_deployment_enabled: ci_forward_deployment_enabled
+ forward_deployment_rollback_allowed: ci_forward_deployment_rollback_allowed
job_token_scope_enabled: ci_job_token_scope_enabled
separated_caches: ci_separated_caches
allow_fork_pipelines_to_run_in_parent_project: ci_allow_fork_pipelines_to_run_in_parent_project
@@ -163,7 +165,6 @@ project_setting:
- jitsu_key
- mirror_branch_regex
- allow_pipeline_trigger_approve_deployment
- - emails_enabled
- pages_unique_domain_enabled
- pages_unique_domain
- runner_registration_enabled
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index f5d1bbbc7e8..26132215404 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -2583,7 +2583,6 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
link = create(:project_group_link, project: project, group: group)
get api(path, admin, admin_mode: true)
-
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(project.id)
expect(json_response['description']).to eq(project.description)
@@ -2634,6 +2633,8 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(json_response['feature_flags_access_level']).to be_present
expect(json_response['infrastructure_access_level']).to be_present
expect(json_response['monitor_access_level']).to be_present
+ expect(json_response).to have_key('emails_disabled')
+ expect(json_response).to have_key('emails_enabled')
end
it 'exposes all necessary attributes' do
@@ -2707,7 +2708,6 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(json_response['feature_flags_access_level']).to be_present
expect(json_response['infrastructure_access_level']).to be_present
expect(json_response['monitor_access_level']).to be_present
- expect(json_response).to have_key('emails_disabled')
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
expect(json_response['remove_source_branch_after_merge']).to be_truthy
expect(json_response['container_registry_enabled']).to be_present
@@ -2738,6 +2738,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
expect(json_response['ci_forward_deployment_enabled']).to eq(project.ci_forward_deployment_enabled)
+ expect(json_response['ci_forward_deployment_rollback_allowed']).to eq(project.ci_forward_deployment_rollback_allowed)
expect(json_response['ci_allow_fork_pipelines_to_run_in_parent_project']).to eq(project.ci_allow_fork_pipelines_to_run_in_parent_project)
expect(json_response['ci_separated_caches']).to eq(project.ci_separated_caches)
expect(json_response['merge_method']).to eq(project.merge_method.to_s)
@@ -2769,6 +2770,45 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(json_response['name']).to eq(project.name)
end
+ context 'when a project is moved' do
+ let(:redirect_route) { 'new/project/location' }
+ let(:perform_request) { get api("/projects/#{CGI.escape(redirect_route)}", user), params: { license: true } }
+
+ before do
+ project.route.create_redirect(redirect_route)
+ end
+
+ it 'redirects to the new project location' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:moved_permanently)
+
+ url = response.headers['Location']
+ expect(url).to start_with("#{request.base_url}/api/v4/projects/#{project.id}")
+ expect(CGI.parse(URI(url).query)).to include({ 'license' => ['true'] })
+ end
+
+ context 'when a user do not have access' do
+ let(:user) { create(:user) }
+
+ it 'returns a 404 error' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when api_redirect_moved_projects is disabled' do
+ it 'returns a 404 error' do
+ stub_feature_flags(api_redirect_moved_projects: false)
+
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
it 'returns a 404 error if not found' do
get api("/projects/#{non_existing_record_id}", user)
expect(response).to have_gitlab_http_status(:not_found)
@@ -3081,6 +3121,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(json_response).not_to include(
'ci_default_git_depth',
'ci_forward_deployment_enabled',
+ 'ci_forward_deployment_rollback_allowed',
'ci_job_token_scope_enabled',
'ci_separated_caches',
'ci_allow_fork_pipelines_to_run_in_parent_project',
@@ -3654,7 +3695,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
end.to change { project.members.count }.by(2)
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['message']).to eq('Successfully imported')
+ expect(json_response['status']).to eq('success')
end
it 'returns 404 if the source project does not exist' do
@@ -3712,6 +3753,22 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to eq('Import failed')
end
+
+ context 'when importing of members did not work for some or all members' do
+ it 'fails to import some members' do
+ project_bot = create(:user, :project_bot)
+ project2.add_developer(project_bot)
+
+ expect do
+ post api(path, user)
+ end.to change { project.members.count }.by(2)
+
+ expect(response).to have_gitlab_http_status(:created)
+ error_message = { project_bot.username => 'User project bots cannot be added to other groups / projects' }
+ expect(json_response['message']).to eq(error_message)
+ expect(json_response['total_members_count']).to eq(3)
+ end
+ end
end
describe 'PUT /projects/:id' do
@@ -3931,6 +3988,16 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(json_response['emails_disabled']).to eq(true)
end
+ it 'updates emails_enabled?' do
+ project_param = { emails_enabled: false }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(json_response['emails_enabled']).to eq(false)
+ end
+
it 'updates build_git_strategy' do
project_param = { build_git_strategy: 'clone' }
@@ -4150,6 +4217,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
merge_method: 'ff',
ci_default_git_depth: 20,
ci_forward_deployment_enabled: false,
+ ci_forward_deployment_rollback_allowed: false,
ci_allow_fork_pipelines_to_run_in_parent_project: false,
ci_separated_caches: false,
description: 'new description' }
@@ -4425,6 +4493,29 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(response).to have_gitlab_http_status(:forbidden)
end
end
+
+ context 'when a project is moved' do
+ let_it_be(:redirect_route) { 'new/project/location' }
+ let_it_be(:path) { "/projects/#{CGI.escape(redirect_route)}/archive" }
+
+ before do
+ project.route.create_redirect(redirect_route)
+ end
+
+ it 'returns 405 error' do
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(:method_not_allowed)
+ end
+
+ context 'when user do not have access to the project' do
+ it 'returns 404 error' do
+ post api(path, create(:user))
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
describe 'POST /projects/:id/unarchive' do
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 8853eff0b3e..a94ed63bf47 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -37,6 +37,52 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
end
end
+ context 'when path does not exist' do
+ let(:path) { 'bogus' }
+
+ context 'when handle_structured_gitaly_errors feature is disabled' do
+ before do
+ stub_feature_flags(handle_structured_gitaly_errors: false)
+ end
+
+ it 'returns an empty array' do
+ get api("#{route}?path=#{path}", current_user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an(Array)
+ expect(json_response).to be_an_empty
+ end
+ end
+
+ context 'when handle_structured_gitaly_errors feature is enabled' do
+ before do
+ stub_feature_flags(handle_structured_gitaly_errors: true)
+ end
+
+ it_behaves_like '404 response' do
+ let(:request) { get api("#{route}?path=#{path}", current_user) }
+ let(:message) { '404 invalid revision or path Not Found' }
+ end
+ end
+ end
+
+ context 'when path is empty directory ' do
+ context 'when handle_structured_gitaly_errors feature is disabled' do
+ before do
+ stub_feature_flags(handle_structured_gitaly_errors: false)
+ end
+
+ it 'returns an empty array' do
+ get api(route, current_user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an(Array)
+ end
+ end
+ end
+
context 'when repository is disabled' do
include_context 'disabled repository'
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index dfaba969153..12af1fc1b79 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -59,6 +59,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['group_runner_token_expiration_interval']).to be_nil
expect(json_response['project_runner_token_expiration_interval']).to be_nil
expect(json_response['max_export_size']).to eq(0)
+ expect(json_response['max_decompressed_archive_size']).to eq(25600)
expect(json_response['max_terraform_state_size_bytes']).to eq(0)
expect(json_response['pipeline_limit_per_project_user_sha']).to eq(0)
expect(json_response['delete_inactive_projects']).to be(false)
@@ -81,6 +82,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['ci_max_includes']).to eq(150)
expect(json_response['allow_account_deletion']).to eq(true)
expect(json_response['gitlab_shell_operation_limit']).to eq(600)
+ expect(json_response['namespace_aggregation_schedule_lease_duration_in_seconds']).to eq(300)
+ expect(json_response['default_branch_protection_defaults']).to be_kind_of(Hash)
end
end
@@ -154,6 +157,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
diff_max_files: 2000,
diff_max_lines: 50000,
default_branch_protection: ::Gitlab::Access::PROTECTION_DEV_CAN_MERGE,
+ default_branch_protection_defaults: ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys,
local_markdown_version: 3,
allow_local_requests_from_web_hooks_and_services: true,
allow_local_requests_from_system_hooks: false,
@@ -168,6 +172,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
mailgun_events_enabled: true,
mailgun_signing_key: 'MAILGUN_SIGNING_KEY',
max_export_size: 6,
+ max_decompressed_archive_size: 20000,
max_terraform_state_size_bytes: 1_000,
disabled_oauth_sign_in_sources: 'unknown',
import_sources: 'github,bitbucket',
@@ -186,6 +191,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
jira_connect_application_key: '123',
jira_connect_proxy_url: 'http://example.com',
bulk_import_enabled: false,
+ bulk_import_max_download_file_size: 1,
allow_runner_registration_token: true,
user_defaults_to_private_profile: true,
default_syntax_highlighting_theme: 2,
@@ -193,7 +199,9 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
silent_mode_enabled: true,
valid_runner_registrars: ['group'],
allow_account_deletion: false,
- gitlab_shell_operation_limit: 500
+ gitlab_shell_operation_limit: 500,
+ namespace_aggregation_schedule_lease_duration_in_seconds: 400,
+ max_import_remote_file_size: 2
}
expect(response).to have_gitlab_http_status(:ok)
@@ -230,6 +238,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['diff_max_files']).to eq(2000)
expect(json_response['diff_max_lines']).to eq(50000)
expect(json_response['default_branch_protection']).to eq(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+ expect(json_response['default_branch_protection_defaults']).to eq(::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys)
expect(json_response['local_markdown_version']).to eq(3)
expect(json_response['allow_local_requests_from_web_hooks_and_services']).to eq(true)
expect(json_response['allow_local_requests_from_system_hooks']).to eq(false)
@@ -244,6 +253,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['mailgun_events_enabled']).to be(true)
expect(json_response['mailgun_signing_key']).to eq('MAILGUN_SIGNING_KEY')
expect(json_response['max_export_size']).to eq(6)
+ expect(json_response['max_decompressed_archive_size']).to eq(20000)
expect(json_response['max_terraform_state_size_bytes']).to eq(1_000)
expect(json_response['disabled_oauth_sign_in_sources']).to eq([])
expect(json_response['import_sources']).to match_array(%w(github bitbucket))
@@ -270,6 +280,9 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['valid_runner_registrars']).to eq(['group'])
expect(json_response['allow_account_deletion']).to be(false)
expect(json_response['gitlab_shell_operation_limit']).to be(500)
+ expect(json_response['namespace_aggregation_schedule_lease_duration_in_seconds']).to be(400)
+ expect(json_response['max_import_remote_file_size']).to be(2)
+ expect(json_response['bulk_import_max_download_file_size']).to be(1)
end
end
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 4ba2a768e01..0b97bb5c443 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -13,7 +13,8 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
let_it_be_with_refind(:private_snippet) { create(:personal_snippet, :repository, :private, author: user) }
let_it_be(:internal_snippet) { create(:personal_snippet, :repository, :internal, author: user) }
- let_it_be(:user_token) { create(:personal_access_token, user: user) }
+ let_it_be(:user_token) { create(:personal_access_token, user: user) }
+ let_it_be(:admin_token) { create(:personal_access_token, :admin_mode, user: admin, scopes: [:sudo, :api]) }
let_it_be(:other_user_token) { create(:personal_access_token, user: other_user) }
let_it_be(:project) do
create_default(:project, :public).tap do |p|
@@ -21,9 +22,17 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
end
end
- describe 'GET /snippets/' do
+ shared_examples "returns unauthorized when not authenticated" do
+ it 'returns 401 for non-authenticated' do
+ get api(path)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ shared_examples "returns filtered snippets for user" do
it 'returns snippets available for user' do
- get api("/snippets/", personal_access_token: user_token)
+ get api(path, personal_access_token: user_token)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -38,8 +47,32 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
expect(json_response.last).to have_key('visibility')
end
+ context 'filtering snippets by created_after/created_before' do
+ let_it_be(:private_snippet_before_time_range) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-20T00:00:00Z")) }
+ let_it_be(:private_snippet_in_time_range1) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-22T00:00:00Z")) }
+ let_it_be(:private_snippet_in_time_range2) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-24T00:00:00Z")) }
+ let_it_be(:private_snippet_after_time_range) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-26T00:00:00Z")) }
+
+ let(:path) { "/snippets?created_after=2021-08-21T00:00:00Z&created_before=2021-08-25T00:00:00Z" }
+
+ it 'returns snippets available for user in given time range' do
+ get api(path, personal_access_token: user_token)
+
+ expect(json_response.map { |snippet| snippet['id'] }).to contain_exactly(
+ private_snippet_in_time_range1.id,
+ private_snippet_in_time_range2.id)
+ end
+ end
+ end
+
+ describe 'GET /snippets/' do
+ let(:path) { "/snippets" }
+
+ it_behaves_like "returns unauthorized when not authenticated"
+ it_behaves_like "returns filtered snippets for user"
+
it 'hides private snippets from regular user' do
- get api("/snippets/", personal_access_token: other_user_token)
+ get api(path, personal_access_token: other_user_token)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -47,39 +80,16 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
expect(json_response.size).to eq(0)
end
- it 'returns 401 for non-authenticated' do
- get api("/snippets/")
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
-
it 'does not return snippets related to a project with disable feature visibility' do
public_snippet = create(:project_snippet, :public, author: user, project: project)
project.project_feature.update_attribute(:snippets_access_level, 0)
- get api("/snippets/", personal_access_token: user_token)
+ get api(path, personal_access_token: user_token)
json_response.each do |snippet|
expect(snippet["id"]).not_to eq(public_snippet.id)
end
end
-
- context 'filtering snippets by created_after/created_before' do
- let_it_be(:private_snippet_before_time_range) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-20T00:00:00Z")) }
- let_it_be(:private_snippet_in_time_range1) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-22T00:00:00Z")) }
- let_it_be(:private_snippet_in_time_range2) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-24T00:00:00Z")) }
- let_it_be(:private_snippet_after_time_range) { create(:personal_snippet, :repository, :private, author: user, created_at: Time.parse("2021-08-26T00:00:00Z")) }
-
- let(:path) { "/snippets?created_after=2021-08-21T00:00:00Z&created_before=2021-08-25T00:00:00Z" }
-
- it 'returns snippets available for user in given time range' do
- get api(path, personal_access_token: user_token)
-
- expect(json_response.map { |snippet| snippet['id'] }).to contain_exactly(
- private_snippet_in_time_range1.id,
- private_snippet_in_time_range2.id)
- end
- end
end
describe 'GET /snippets/public' do
@@ -92,6 +102,8 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
let(:path) { "/snippets/public" }
+ it_behaves_like "returns unauthorized when not authenticated"
+
it 'returns only public snippets from all users when authenticated' do
get api(path, personal_access_token: user_token)
@@ -110,12 +122,6 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
end
end
- it 'requires authentication' do
- get api(path, nil)
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
-
context 'filtering public snippets by created_after/created_before' do
let_it_be(:public_snippet_before_time_range) { create(:personal_snippet, :repository, :public, author: other_user, created_at: Time.parse("2021-08-20T00:00:00Z")) }
let_it_be(:public_snippet_in_time_range) { create(:personal_snippet, :repository, :public, author: other_user, created_at: Time.parse("2021-08-22T00:00:00Z")) }
@@ -132,6 +138,49 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
end
end
+ describe 'GET /snippets/all' do
+ let(:path) { "/snippets/all" }
+
+ it_behaves_like "returns unauthorized when not authenticated"
+ it_behaves_like "returns filtered snippets for user"
+
+ context 'with additional snippets' do
+ let!(:hidden_snippet) { create(:personal_snippet, :repository, :private, author: other_user) }
+ let!(:viewable_snippet) { create(:personal_snippet, :repository, :internal, author: user) }
+
+ context 'and user is admin', :enable_admin_mode do
+ it 'returns all snippets' do
+ get api(path, personal_access_token: admin_token)
+
+ ids = json_response.map { |snippet| snippet['id'] }
+
+ expect(ids).to contain_exactly(
+ viewable_snippet.id,
+ hidden_snippet.id,
+ internal_snippet.id,
+ private_snippet.id,
+ public_snippet.id
+ )
+ end
+ end
+
+ context 'and user is not admin' do
+ it 'returns all internal and public snippets' do
+ get api(path, personal_access_token: user_token)
+
+ ids = json_response.map { |snippet| snippet['id'] }
+
+ expect(ids).to contain_exactly(
+ viewable_snippet.id,
+ internal_snippet.id,
+ private_snippet.id,
+ public_snippet.id
+ )
+ end
+ end
+ end
+ end
+
describe 'GET /snippets/:id/raw' do
let(:snippet) { private_snippet }
@@ -448,10 +497,8 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
end
context "when admin" do
- let_it_be(:token) { create(:personal_access_token, :admin_mode, user: admin, scopes: [:sudo]) }
-
subject do
- put api("/snippets/#{snippet.id}", personal_access_token: token), params: { visibility: 'private', sudo: user.id }
+ put api("/snippets/#{snippet.id}", personal_access_token: admin_token), params: { visibility: 'private', sudo: user.id }
end
context 'when sudo is defined' do
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 2bbcf6b3f38..81881532240 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -4789,7 +4789,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
}.as_json
end
- before :all do
+ before_all do
group.add_member(user, Gitlab::Access::OWNER)
project.add_member(user, Gitlab::Access::OWNER)
create(:merge_request, source_project: project, source_branch: "my-personal-branch-1", author: user)
diff --git a/spec/requests/groups/work_items_controller_spec.rb b/spec/requests/groups/work_items_controller_spec.rb
new file mode 100644
index 00000000000..c47b3f03ec1
--- /dev/null
+++ b/spec/requests/groups/work_items_controller_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Group Level Work Items', feature_category: :team_planning do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:developer) { create(:user).tap { |u| group.add_developer(u) } }
+
+ describe 'GET /groups/:group/-/work_items' do
+ let(:work_items_path) { url_for(controller: 'groups/work_items', action: :index, group_id: group.full_path) }
+
+ before do
+ sign_in(current_user)
+ end
+
+ context 'when the user can read the group' do
+ let(:current_user) { developer }
+
+ it 'renders index' do
+ get work_items_path
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when the namespace_level_work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ end
+
+ it 'returns not found' do
+ get work_items_path
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when the user cannot read the group' do
+ let(:current_user) { create(:user) }
+
+ it 'returns not found' do
+ get work_items_path
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/import/github_controller_spec.rb b/spec/requests/import/github_controller_spec.rb
index 0f28f5e93f3..2499ef859dd 100644
--- a/spec/requests/import/github_controller_spec.rb
+++ b/spec/requests/import/github_controller_spec.rb
@@ -12,31 +12,13 @@ RSpec.describe Import::GithubController, feature_category: :importers do
stub_application_setting(import_sources: ['github'])
login_as(user)
- end
-
- context 'with feature enabled' do
- before do
- stub_feature_flags(import_details_page: true)
-
- request
- end
- it 'responds with a 200 and shows the template', :aggregate_failures do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:details)
- end
+ request
end
- context 'with feature disabled' do
- before do
- stub_feature_flags(import_details_page: false)
-
- request
- end
-
- it 'responds with a 404' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ it 'responds with a 200 and shows the template', :aggregate_failures do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:details)
end
end
end
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 199138eb3a9..b07296a0df2 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -807,7 +807,7 @@ RSpec.describe 'Git LFS API and storage', feature_category: :source_code_managem
end
end
- describe 'to one project', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/418757' do
+ describe 'to one project' do
describe 'when user is authenticated' do
describe 'when user has push access to the project' do
before do
diff --git a/spec/requests/organizations/organizations_controller_spec.rb b/spec/requests/organizations/organizations_controller_spec.rb
index bd54b50de99..788d740504a 100644
--- a/spec/requests/organizations/organizations_controller_spec.rb
+++ b/spec/requests/organizations/organizations_controller_spec.rb
@@ -5,38 +5,60 @@ require 'spec_helper'
RSpec.describe Organizations::OrganizationsController, feature_category: :cell do
let_it_be(:organization) { create(:organization) }
- RSpec.shared_examples 'basic organization controller action' do
+ shared_examples 'successful response' do
+ it 'renders 200 OK' do
+ gitlab_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ shared_examples 'action disabled by `ui_for_organizations` feature flag' do
before do
- sign_in(user)
+ stub_feature_flags(ui_for_organizations: false)
end
- context 'when the user does not have authorization' do
- let_it_be(:user) { create(:user) }
+ it 'renders 404' do
+ gitlab_request
- it 'renders 404' do
- gitlab_request
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ shared_examples 'basic organization controller action' do
+ context 'when the user is not logged in' do
+ it_behaves_like 'successful response'
+ it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
end
- context 'when the user has authorization', :enable_admin_mode do
- let_it_be(:user) { create(:admin) }
+ context 'when the user is logged in' do
+ before do
+ sign_in(user)
+ end
- it 'renders 200 OK' do
- gitlab_request
+ context 'with no association to an organization' do
+ let_it_be(:user) { create(:user) }
- expect(response).to have_gitlab_http_status(:ok)
+ it_behaves_like 'successful response'
+ it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
end
- context 'when the feature flag `ui_for_organizations` is disabled' do
- it 'renders 404' do
- stub_feature_flags(ui_for_organizations: false)
+ context 'as as admin', :enable_admin_mode do
+ let_it_be(:user) { create(:admin) }
- gitlab_request
+ it_behaves_like 'successful response'
+ it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
+ end
- expect(response).to have_gitlab_http_status(:not_found)
+ context 'as an organization user' do
+ let_it_be(:user) { create :user }
+
+ before do
+ create :organization_user, organization: organization, user: user
end
+
+ it_behaves_like 'successful response'
+ it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
end
end
end
diff --git a/spec/requests/projects/blob_spec.rb b/spec/requests/projects/blob_spec.rb
deleted file mode 100644
index 7d62619e76a..00000000000
--- a/spec/requests/projects/blob_spec.rb
+++ /dev/null
@@ -1,87 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Blobs', feature_category: :source_code_management do
- let_it_be(:project) { create(:project, :public, :repository, lfs: true) }
-
- describe 'GET /:namespace_id/:project_id/-/blob/:id' do
- subject(:request) do
- get namespace_project_blob_path(namespace_id: project.namespace, project_id: project, id: id)
- end
-
- context 'with LFS file' do
- let(:id) { 'master/files/lfs/lfs_object.iso' }
- let(:object_store_host) { 'http://127.0.0.1:9000' }
- let(:connect_src) do
- csp = response.headers['Content-Security-Policy']
- csp.split('; ').find { |src| src.starts_with?('connect-src') }
- end
-
- let(:gitlab_config) do
- Gitlab.config.gitlab.deep_merge(
- 'content_security_policy' => {
- 'enabled' => content_security_policy_enabled
- }
- )
- end
-
- let(:lfs_config) do
- Gitlab.config.lfs.deep_merge(
- 'enabled' => lfs_enabled,
- 'object_store' => {
- 'remote_directory' => 'lfs-objects',
- 'enabled' => true,
- 'proxy_download' => proxy_download,
- 'connection' => {
- 'endpoint' => object_store_host,
- 'path_style' => true
- }
- }
- )
- end
-
- before do
- stub_config_setting(gitlab_config)
- stub_lfs_setting(lfs_config)
- stub_lfs_object_storage(proxy_download: proxy_download)
-
- request
- end
-
- describe 'directly downloading lfs file' do
- let(:lfs_enabled) { true }
- let(:proxy_download) { false }
- let(:content_security_policy_enabled) { true }
-
- it { expect(response).to have_gitlab_http_status(:success) }
-
- it { expect(connect_src).to include(object_store_host) }
-
- context 'when lfs is disabled' do
- let(:lfs_enabled) { false }
-
- it { expect(response).to have_gitlab_http_status(:success) }
-
- it { expect(connect_src).not_to include(object_store_host) }
- end
-
- context 'when content_security_policy is disabled' do
- let(:content_security_policy_enabled) { false }
-
- it { expect(response).to have_gitlab_http_status(:success) }
-
- it { expect(connect_src).not_to include(object_store_host) }
- end
-
- context 'when proxy download is enabled' do
- let(:proxy_download) { true }
-
- it { expect(response).to have_gitlab_http_status(:success) }
-
- it { expect(connect_src).not_to include(object_store_host) }
- end
- end
- end
- end
-end
diff --git a/spec/requests/projects/merge_requests/creations_spec.rb b/spec/requests/projects/merge_requests/creations_spec.rb
index e8a073fef5f..8f55aa90bee 100644
--- a/spec/requests/projects/merge_requests/creations_spec.rb
+++ b/spec/requests/projects/merge_requests/creations_spec.rb
@@ -10,6 +10,17 @@ RSpec.describe 'merge requests creations', feature_category: :code_review_workfl
let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:user) { create(:user) }
+ let(:get_params) do
+ {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ merge_request: {
+ source_branch: 'two-commits',
+ target_branch: 'master'
+ }
+ }
+ end
+
before_all do
group.add_developer(user)
end
@@ -18,16 +29,52 @@ RSpec.describe 'merge requests creations', feature_category: :code_review_workfl
login_as(user)
end
- def get_new
- get namespace_project_new_merge_request_path(namespace_id: project.namespace, project_id: project)
+ def get_new(params = get_params)
+ get namespace_project_new_merge_request_path(params)
end
- it 'avoids N+1 DB queries even with forked projects' do
- control = ActiveRecord::QueryRecorder.new(skip_cached: false) { get_new }
+ describe 'GET new' do
+ context 'without merge_request params' do
+ it 'avoids N+1 DB queries even with forked projects' do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { get_new }
+
+ 5.times { fork_project(project, user) }
+
+ expect { get_new }.not_to exceed_query_limit(control)
+ end
+
+ it 'renders branch selection screen' do
+ get_new(get_params.except(:merge_request))
+
+ expect(response).to be_successful
+ expect(response).to render_template(partial: '_new_compare')
+ end
+ end
+
+ context 'with merge_request params' do
+ it 'renders new merge request widget template' do
+ get_new
+
+ expect(response).to be_successful
+ expect(response).to render_template(partial: '_new_submit')
+ expect(response).not_to render_template(partial: '_new_compare')
+ end
- 5.times { fork_project(project, user) }
+ context 'when existing merge request with same target and source branches' do
+ let_it_be(:existing_mr) { create(:merge_request) }
- expect { get_new }.not_to exceed_query_limit(control)
+ it 'renders branch selection screen' do
+ allow_next_instance_of(MergeRequest) do |instance|
+ allow(instance).to receive(:existing_mrs_targeting_same_branch).and_return([existing_mr])
+ end
+
+ get_new
+
+ expect(response).to be_successful
+ expect(response).to render_template(partial: '_new_compare')
+ end
+ end
+ end
end
it_behaves_like "observability csp policy", Projects::MergeRequests::CreationsController do
diff --git a/spec/requests/projects/merge_requests_controller_spec.rb b/spec/requests/projects/merge_requests_controller_spec.rb
index 955b6e53686..e6a281d8d59 100644
--- a/spec/requests/projects/merge_requests_controller_spec.rb
+++ b/spec/requests/projects/merge_requests_controller_spec.rb
@@ -132,4 +132,44 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :source_code
end
end
end
+
+ describe 'GET #pipelines.json' do
+ before do
+ login_as(user)
+ end
+
+ it 'avoids N+1 queries', :use_sql_query_cache do
+ create_pipeline
+
+ # warm up
+ get pipelines_project_merge_request_path(project, merge_request, format: :json)
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get pipelines_project_merge_request_path(project, merge_request, format: :json)
+ end
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(Gitlab::Json.parse(response.body)['count']['all']).to eq(1)
+
+ create_pipeline
+
+ expect do
+ get pipelines_project_merge_request_path(project, merge_request, format: :json)
+ end.to issue_same_number_of_queries_as(control)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(Gitlab::Json.parse(response.body)['count']['all']).to eq(2)
+ end
+
+ private
+
+ def create_pipeline
+ create(
+ :ci_pipeline, :with_job, :success,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha
+ )
+ end
+ end
end
diff --git a/spec/requests/projects/merge_requests_discussions_spec.rb b/spec/requests/projects/merge_requests_discussions_spec.rb
index 644f26af006..24b6fb2f640 100644
--- a/spec/requests/projects/merge_requests_discussions_spec.rb
+++ b/spec/requests/projects/merge_requests_discussions_spec.rb
@@ -27,19 +27,15 @@ RSpec.describe 'merge requests discussions', feature_category: :source_code_mana
end
# rubocop:enable RSpec/InstanceVariable
- shared_examples 'N+1 queries' do
- it 'avoids N+1 DB queries', :request_store do
- send_request # warm up
+ it 'avoids N+1 DB queries', :request_store do
+ send_request # warm up
- create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.project)
- control = ActiveRecord::QueryRecorder.new { send_request }
+ create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.project)
+ control = ActiveRecord::QueryRecorder.new { send_request }
- create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.project)
+ create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.project)
- expect do
- send_request
- end.not_to exceed_query_limit(control).with_threshold(notes_metadata_threshold)
- end
+ expect { send_request }.not_to exceed_query_limit(control)
end
it 'returns 200' do
@@ -48,13 +44,6 @@ RSpec.describe 'merge requests discussions', feature_category: :source_code_mana
expect(response).to have_gitlab_http_status(:ok)
end
- # https://docs.gitlab.com/ee/development/query_recorder.html#use-request-specs-instead-of-controller-specs
- context 'with notes_metadata_threshold' do
- let(:notes_metadata_threshold) { 1 }
-
- it_behaves_like 'N+1 queries'
- end
-
it 'limits Gitaly queries', :request_store do
Gitlab::GitalyClient.allow_n_plus_1_calls do
create_list(:diff_note_on_merge_request, 7, noteable: merge_request, project: merge_request.project)
diff --git a/spec/requests/projects/metrics/dashboards/builder_spec.rb b/spec/requests/projects/metrics/dashboards/builder_spec.rb
deleted file mode 100644
index 8af2d1f1d25..00000000000
--- a/spec/requests/projects/metrics/dashboards/builder_spec.rb
+++ /dev/null
@@ -1,123 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects::Metrics::Dashboards::BuilderController', feature_category: :metrics do
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:valid_panel_yml) do
- <<~YML
- ---
- title: "Super Chart A1"
- type: "area-chart"
- y_label: "y_label"
- weight: 1
- max_value: 1
- metrics:
- - id: metric_a1
- query_range: |+
- avg(
- sum(
- container_memory_usage_bytes{
- container_name!="POD",
- pod_name=~"^{{ci_environment_slug}}-(.*)",
- namespace="{{kube_namespace}}",
- user_def_variable="{{user_def_variable}}"
- }
- ) by (job)
- ) without (job)
- /1024/1024/1024
- unit: unit
- label: Legend Label
- YML
- end
-
- let_it_be(:invalid_panel_yml) do
- <<~YML
- ---
- title: "Super Chart A1"
- type: "area-chart"
- y_label: "y_label"
- weight: 1
- max_value: 1
- YML
- end
-
- def send_request(params = {})
- post namespace_project_metrics_dashboards_builder_path(namespace_id: project.namespace, project_id: project, format: :json, **params)
- end
-
- describe 'POST /:namespace/:project/-/metrics/dashboards/builder' do
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- context 'as anonymous user' do
- it 'redirects user to sign in page' do
- send_request
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
-
- context 'as user with guest access' do
- before do
- project.add_guest(user)
- login_as(user)
- end
-
- it 'returns not found' do
- send_request
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'as logged in user' do
- before do
- project.add_developer(user)
- login_as(user)
- end
-
- context 'valid yaml panel is supplied' do
- it 'returns success' do
- send_request(panel_yaml: valid_panel_yml)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to include('title' => 'Super Chart A1', 'type' => 'area-chart')
- end
- end
-
- context 'invalid yaml panel is supplied' do
- it 'returns unprocessable entity' do
- send_request(panel_yaml: invalid_panel_yml)
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response['message']).to eq('Each "panel" must define an array :metrics')
- end
- end
-
- context 'invalid panel_yaml is not a yaml string' do
- it 'returns unprocessable entity' do
- send_request(panel_yaml: 1)
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response['message']).to eq('Invalid configuration format')
- end
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns not found' do
- send_request
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
-end
diff --git a/spec/requests/projects/ml/models_controller_spec.rb b/spec/requests/projects/ml/models_controller_spec.rb
index d03748c8dff..8569f2396d3 100644
--- a/spec/requests/projects/ml/models_controller_spec.rb
+++ b/spec/requests/projects/ml/models_controller_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe Projects::Ml::ModelsController, feature_category: :mlops do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
- let_it_be(:model1_a) { create(:ml_model_package, project: project) }
- let_it_be(:model1_b) { create(:ml_model_package, project: project, name: model1_a.name) }
- let_it_be(:model2) { create(:ml_model_package, project: project) }
+ let_it_be(:model1) { create(:ml_models, :with_versions, project: project) }
+ let_it_be(:model2) { create(:ml_models, project: project) }
+ let_it_be(:model_in_different_project) { create(:ml_models) }
let(:model_registry_enabled) { true }
@@ -36,16 +36,17 @@ RSpec.describe Projects::Ml::ModelsController, feature_category: :mlops do
index_request
end
- it 'prepares model view using the presenter' do
- expect(::Ml::ModelsIndexPresenter).to receive(:new).and_call_original
-
+ it 'fetches the correct models' do
index_request
+
+ expect(assigns(:models)).to match_array([model1, model2])
end
it 'does not perform N+1 sql queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { list_models }
- create_list(:ml_model_package, 4, project: project)
+ create_list(:ml_model_versions, 2, model: model1)
+ create_list(:ml_model_versions, 2, model: model2)
expect { list_models }.not_to exceed_all_query_limit(control_count)
end
diff --git a/spec/requests/projects/noteable_notes_spec.rb b/spec/requests/projects/noteable_notes_spec.rb
index 55540447da0..a490e059680 100644
--- a/spec/requests/projects/noteable_notes_spec.rb
+++ b/spec/requests/projects/noteable_notes_spec.rb
@@ -14,6 +14,8 @@ RSpec.describe 'Project noteable notes', feature_category: :team_planning do
let(:response_etag) { response.headers['ETag'] }
let(:stored_etag) { "W/\"#{etag_store.get(notes_path)}\"" }
+ let(:default_headers) { { 'X-Last-Fetched-At' => 0 } }
+
before do
login_as(user)
end
@@ -21,7 +23,7 @@ RSpec.describe 'Project noteable notes', feature_category: :team_planning do
it 'does not set a Gitlab::EtagCaching ETag if there is a note' do
create(:note_on_merge_request, noteable: merge_request, project: merge_request.project)
- get notes_path
+ get notes_path, headers: default_headers
expect(response).to have_gitlab_http_status(:ok)
@@ -31,7 +33,7 @@ RSpec.describe 'Project noteable notes', feature_category: :team_planning do
end
it 'sets a Gitlab::EtagCaching ETag if there is no note' do
- get notes_path
+ get notes_path, headers: default_headers
expect(response).to have_gitlab_http_status(:ok)
expect(response_etag).to eq(stored_etag)
@@ -68,7 +70,7 @@ RSpec.describe 'Project noteable notes', feature_category: :team_planning do
)
)
- get notes_path, headers: { "if-none-match": stored_etag }
+ get notes_path, headers: default_headers.merge("if-none-match": stored_etag)
expect(response).to have_gitlab_http_status(:not_modified)
end
diff --git a/spec/requests/projects/notes_controller_spec.rb b/spec/requests/projects/notes_controller_spec.rb
new file mode 100644
index 00000000000..9cd8ba364ea
--- /dev/null
+++ b/spec/requests/projects/notes_controller_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::NotesController, feature_category: :team_planning do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ describe '#index' do
+ def get_notes
+ get project_noteable_notes_path(project, target_type: 'issue', target_id: issue.id, format: :json),
+ headers: { 'X-Last-Fetched-At': 0 }
+ end
+
+ it 'does not execute N+1 queries' do
+ get_notes
+
+ create(:note_on_issue, project: project, noteable: issue)
+
+ control = ActiveRecord::QueryRecorder.new { get_notes }
+
+ create(:note_on_issue, project: project, noteable: issue)
+
+ expect { get_notes }.not_to exceed_query_limit(control)
+ end
+ end
+end
diff --git a/spec/requests/projects/service_desk_controller_spec.rb b/spec/requests/projects/service_desk_controller_spec.rb
index 54fe176e244..05e48c2c5c7 100644
--- a/spec/requests/projects/service_desk_controller_spec.rb
+++ b/spec/requests/projects/service_desk_controller_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Projects::ServiceDeskController, feature_category: :service_desk
it 'toggles services desk incoming email' do
project.update!(service_desk_enabled: false)
- put project_service_desk_refresh_path(project, format: :json), params: { service_desk_enabled: true }
+ put project_service_desk_path(project, format: :json), params: { service_desk_enabled: true }
expect(json_response["service_desk_address"]).to be_present
expect(json_response["service_desk_enabled"]).to be_truthy
@@ -79,7 +79,7 @@ RSpec.describe Projects::ServiceDeskController, feature_category: :service_desk
end
it 'sets issue_template_key' do
- put project_service_desk_refresh_path(project, format: :json), params: { issue_template_key: 'service_desk' }
+ put project_service_desk_path(project, format: :json), params: { issue_template_key: 'service_desk' }
settings = project.service_desk_setting
expect(settings).to be_present
@@ -89,7 +89,7 @@ RSpec.describe Projects::ServiceDeskController, feature_category: :service_desk
end
it 'returns an error when update of service desk settings fails' do
- put project_service_desk_refresh_path(project, format: :json), params: { issue_template_key: 'invalid key' }
+ put project_service_desk_path(project, format: :json), params: { issue_template_key: 'invalid key' }
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to eq('Issue template key is empty or does not exist')
@@ -100,7 +100,7 @@ RSpec.describe Projects::ServiceDeskController, feature_category: :service_desk
it 'renders 404' do
sign_in(other_user)
- put project_service_desk_refresh_path(project, format: :json), params: { service_desk_enabled: true }
+ put project_service_desk_path(project, format: :json), params: { service_desk_enabled: true }
expect(response).to have_gitlab_http_status(:not_found)
end
diff --git a/spec/requests/projects/tracing_controller_spec.rb b/spec/requests/projects/tracing_controller_spec.rb
index eecaa0d962a..8996ea7f8d6 100644
--- a/spec/requests/projects/tracing_controller_spec.rb
+++ b/spec/requests/projects/tracing_controller_spec.rb
@@ -14,14 +14,12 @@ RSpec.describe Projects::TracingController, feature_category: :tracing do
response
end
- describe 'GET #index' do
- before do
- stub_feature_flags(observability_tracing: observability_tracing_ff)
- sign_in(user)
- end
-
- let(:path) { project_tracing_index_path(project) }
+ before do
+ stub_feature_flags(observability_tracing: observability_tracing_ff)
+ sign_in(user)
+ end
+ shared_examples 'tracing route request' do
it_behaves_like 'observability csp policy' do
before_all do
project.add_developer(user)
@@ -45,6 +43,26 @@ RSpec.describe Projects::TracingController, feature_category: :tracing do
expect(subject).to have_gitlab_http_status(:ok)
end
+ context 'when feature is disabled' do
+ let(:observability_tracing_ff) { false }
+
+ it 'returns 404' do
+ expect(subject).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
+ describe 'GET #index' do
+ let(:path) { project_tracing_index_path(project) }
+
+ it_behaves_like 'tracing route request'
+
+ describe 'html response' do
+ before_all do
+ project.add_developer(user)
+ end
+
it 'renders the js-tracing element correctly' do
element = Nokogiri::HTML.parse(subject.body).at_css('#js-tracing')
@@ -55,13 +73,31 @@ RSpec.describe Projects::TracingController, feature_category: :tracing do
}.to_json
expect(element.attributes['data-view-model'].value).to eq(expected_view_model)
end
+ end
+ end
- context 'when feature is disabled' do
- let(:observability_tracing_ff) { false }
+ describe 'GET #show' do
+ let(:path) { project_tracing_path(project, id: "test-trace-id") }
- it 'returns 404' do
- expect(subject).to have_gitlab_http_status(:not_found)
- end
+ it_behaves_like 'tracing route request'
+
+ describe 'html response' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ it 'renders the js-tracing element correctly' do
+ element = Nokogiri::HTML.parse(subject.body).at_css('#js-tracing-details')
+
+ expected_view_model = {
+ tracingIndexUrl: project_tracing_index_path(project),
+ traceId: 'test-trace-id',
+ tracingUrl: Gitlab::Observability.tracing_url(project),
+ provisioningUrl: Gitlab::Observability.provisioning_url(project),
+ oauthUrl: Gitlab::Observability.oauth_url
+ }.to_json
+
+ expect(element.attributes['data-view-model'].value).to eq(expected_view_model)
end
end
end
diff --git a/spec/requests/sessions_spec.rb b/spec/requests/sessions_spec.rb
index 3bff9555834..8e069427678 100644
--- a/spec/requests/sessions_spec.rb
+++ b/spec/requests/sessions_spec.rb
@@ -38,6 +38,35 @@ RSpec.describe 'Sessions', feature_category: :system_access do
end
end
+ context 'when using two-factor authentication via OTP' do
+ let(:user) { create(:user, :two_factor, :invalid) }
+ let(:user_params) { { login: user.username, password: user.password } }
+
+ def authenticate_2fa(otp_attempt:)
+ post(user_session_path(params: { user: user_params })) # First sign-in request for password, second for OTP
+ post(user_session_path(params: { user: user_params.merge(otp_attempt: otp_attempt) }))
+ end
+
+ context 'with an invalid user' do
+ it 'raises StandardError when ActiveRecord::RecordInvalid is raised to return 500 instead of 422' do
+ otp = user.current_otp
+
+ expect { authenticate_2fa(otp_attempt: otp) }.to raise_error(StandardError)
+ end
+ end
+
+ context 'with an invalid record other than user' do
+ it 'raises ActiveRecord::RecordInvalid for invalid record to return 422' do
+ otp = user.current_otp
+ allow_next_instance_of(ActiveRecord::RecordInvalid) do |instance|
+ allow(instance).to receive(:record).and_return(nil) # Simulate it's not a user
+ end
+
+ expect { authenticate_2fa(otp_attempt: otp) }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
context 'when user signs out' do
before do
post user_session_path(user: { login: user.username, password: user.password })
diff --git a/spec/requests/verifies_with_email_spec.rb b/spec/requests/verifies_with_email_spec.rb
index f3f8e4a1a83..cc85ebc7ade 100644
--- a/spec/requests/verifies_with_email_spec.rb
+++ b/spec/requests/verifies_with_email_spec.rb
@@ -21,6 +21,16 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
expect(mail.to).to match_array([user.email])
expect(mail.subject).to eq(s_('IdentityVerification|Verify your identity'))
end
+
+ context 'when an unconfirmed verification email exists' do
+ let(:new_email) { 'new@email' }
+ let(:user) { create(:user, unconfirmed_email: new_email, confirmation_sent_at: 1.minute.ago) }
+
+ it 'sends a verification instructions email to the unconfirmed email address' do
+ mail = ActionMailer::Base.deliveries.find { |d| d.to.include?(new_email) }
+ expect(mail.subject).to eq(s_('IdentityVerification|Verify your identity'))
+ end
+ end
end
shared_examples_for 'prompt for email verification' do
@@ -147,12 +157,10 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
post(user_session_path(user: { verification_token: 'token' }))
end
- it_behaves_like 'prompt for email verification'
-
it 'adds a verification error message' do
- expect(response.body)
- .to include("You&#39;ve reached the maximum amount of tries. "\
- 'Wait 10 minutes or send a new code and try again.')
+ expect(json_response)
+ .to include('message' => "You've reached the maximum amount of tries. "\
+ 'Wait 10 minutes or send a new code and try again.')
end
end
@@ -161,11 +169,10 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
post(user_session_path(user: { verification_token: 'invalid_token' }))
end
- it_behaves_like 'prompt for email verification'
-
it 'adds a verification error message' do
- expect(response.body)
- .to include((s_('IdentityVerification|The code is incorrect. Enter it again, or send a new code.')))
+ expect(json_response)
+ .to include('message' => (s_('IdentityVerification|The code is incorrect. '\
+ 'Enter it again, or send a new code.')))
end
end
@@ -175,27 +182,56 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
post(user_session_path(user: { verification_token: 'token' }))
end
- it_behaves_like 'prompt for email verification'
-
it 'adds a verification error message' do
- expect(response.body)
- .to include((s_('IdentityVerification|The code has expired. Send a new code and try again.')))
+ expect(json_response)
+ .to include('message' => (s_('IdentityVerification|The code has expired. Send a new code and try again.')))
end
end
context 'when a valid verification_token param exists' do
- before do
- post(user_session_path(user: { verification_token: 'token' }))
+ subject(:submit_token) { post(user_session_path(user: { verification_token: 'token' })) }
+
+ it 'unlocks the user, creates logs and records the activity', :freeze_time do
+ expect { submit_token }.to change { user.reload.unlock_token }.to(nil)
+ .and change { user.locked_at }.to(nil)
+ .and change { AuditEvent.count }.by(1)
+ .and change { AuthenticationEvent.count }.by(1)
+ .and change { user.last_activity_on }.to(Date.today)
+ .and change { user.email_reset_offered_at }.to(Time.current)
+ end
+
+ it 'returns the success status and a redirect path' do
+ submit_token
+ expect(json_response).to eq('status' => 'success', 'redirect_path' => users_successful_verification_path)
end
- it 'unlocks the user' do
- user.reload
- expect(user.unlock_token).to be_nil
- expect(user.locked_at).to be_nil
+ context 'when an unconfirmed verification email exists' do
+ before do
+ user.update!(email: new_email)
+ end
+
+ let(:new_email) { 'new@email' }
+
+ it 'confirms the email' do
+ expect { submit_token }
+ .to change { user.reload.email }.to(new_email)
+ .and change { user.confirmed_at }
+ .and change { user.unconfirmed_email }.from(new_email).to(nil)
+ end
end
- it 'redirects to the successful verification path' do
- expect(response).to redirect_to(users_successful_verification_path)
+ context 'when email reset has already been offered' do
+ before do
+ user.update!(email_reset_offered_at: 1.hour.ago, email: 'new@email')
+ end
+
+ it 'does not change the email_reset_offered_at field' do
+ expect { submit_token }.not_to change { user.reload.email_reset_offered_at }
+ end
+
+ it 'does not confirm the email' do
+ expect { submit_token }.not_to change { user.reload.email }
+ end
end
end
@@ -206,8 +242,8 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
post user_session_path, params: { user: { login: another_user.username, password: another_user.password } }
end
- it 'does not redirect to the successful verification path' do
- expect(response).not_to redirect_to(users_successful_verification_path)
+ it 'redirects to the root path' do
+ expect(response).to redirect_to(root_path)
end
end
end
@@ -277,7 +313,6 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
end
it_behaves_like 'send verification instructions'
- it_behaves_like 'prompt for email verification'
end
context 'when exceeding the rate limit' do
@@ -301,8 +336,79 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
mail = find_email_for(user)
expect(mail).to be_nil
end
+ end
+ end
- it_behaves_like 'prompt for email verification'
+ describe 'update_email' do
+ let(:new_email) { 'new@email' }
+
+ subject(:do_request) { patch(users_update_email_path(user: { email: new_email })) }
+
+ context 'when no verification_user_id session variable exists' do
+ it 'returns 204 No Content' do
+ do_request
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_empty
+ end
+ end
+
+ context 'when a verification_user_id session variable exists' do
+ before do
+ stub_session(verification_user_id: user.id)
+ end
+
+ it 'locks the user' do
+ do_request
+
+ expect(user.reload.unlock_token).not_to be_nil
+ expect(user.locked_at).not_to be_nil
+ end
+
+ it 'sends a changed notification to the primary email and verification instructions to the unconfirmed email' do
+ perform_enqueued_jobs { do_request }
+
+ sent_mails = ActionMailer::Base.deliveries.map { |mail| { mail.to[0] => mail.subject } }
+
+ expect(sent_mails).to match_array([
+ { user.reload.unconfirmed_email => s_('IdentityVerification|Verify your identity') },
+ { user.email => 'Email Changed' }
+ ])
+ end
+
+ it 'calls the UpdateEmailService and returns a success response' do
+ expect_next_instance_of(Users::EmailVerification::UpdateEmailService, user: user) do |instance|
+ expect(instance).to receive(:execute).with(email: new_email).and_call_original
+ end
+
+ do_request
+
+ expect(json_response).to eq('status' => 'success')
+ end
+ end
+
+ context 'when failing to update the email address' do
+ let(:service_response) do
+ {
+ status: 'failure',
+ reason: 'the reason',
+ message: 'the message'
+ }
+ end
+
+ before do
+ stub_session(verification_user_id: user.id)
+ end
+
+ it 'calls the UpdateEmailService and returns an error response' do
+ expect_next_instance_of(Users::EmailVerification::UpdateEmailService, user: user) do |instance|
+ expect(instance).to receive(:execute).with(email: new_email).and_return(service_response)
+ end
+
+ do_request
+
+ expect(json_response).to eq(service_response.with_indifferent_access)
+ end
end
end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index c78adc2dcef..abc42d11c63 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -380,13 +380,14 @@ RSpec.describe 'project routing' do
it_behaves_like 'redirecting a legacy path', '/gitlab/gitlabhq/hooks/hook_logs/1', '/gitlab/gitlabhq/-/hooks/hook_logs/1'
end
- # project_commit GET /:project_id/commit/:id(.:format) commit#show {id: /\h{7,40}/, project_id: /[^\/]+/}
+ # project_commit GET /:project_id/commit/:id(.:format) commit#show {id: Gitlab::Git::Commit::SHA_PATTERN, project_id: /[^\/]+/}
describe Projects::CommitController, 'routing' do
it 'to #show' do
expect(get('/gitlab/gitlabhq/-/commit/4246fbd')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd')
expect(get('/gitlab/gitlabhq/-/commit/4246fbd.diff')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'diff')
expect(get('/gitlab/gitlabhq/-/commit/4246fbd.patch')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'patch')
expect(get('/gitlab/gitlabhq/-/commit/4246fbd13872934f72a8fd0d6fb1317b47b59cb5')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd13872934f72a8fd0d6fb1317b47b59cb5')
+ expect(get('/gitlab/gitlabhq/-/commit/6ef19b41225c5369f1c104d45d8d85efa9b057b53b14b4b9b939dd74decc5321')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '6ef19b41225c5369f1c104d45d8d85efa9b057b53b14b4b9b939dd74decc5321')
end
it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/commit/4246fbd", "/gitlab/gitlabhq/-/commit/4246fbd"
@@ -652,6 +653,10 @@ RSpec.describe 'project routing' do
it 'to #show' do
expect(get('/gitlab/gitlabhq/-/compare/master...stable')).to route_to('projects/compare#show', namespace_id: 'gitlab', project_id: 'gitlabhq', from: 'master', to: 'stable')
expect(get('/gitlab/gitlabhq/-/compare/issue/1234...stable')).to route_to('projects/compare#show', namespace_id: 'gitlab', project_id: 'gitlabhq', from: 'issue/1234', to: 'stable')
+ expect(get('/gitlab/gitlabhq/-/compare/257cc5642cb1a054f08cc83f2d943e56fd3ebe99...5716ca5987cbf97d6bb54920bea6adde242d87e6'))
+ .to route_to('projects/compare#show', namespace_id: 'gitlab', project_id: 'gitlabhq', from: '257cc5642cb1a054f08cc83f2d943e56fd3ebe99', to: '5716ca5987cbf97d6bb54920bea6adde242d87e6')
+ expect(get('/gitlab/gitlabhq/-/compare/47d6aca82756ff2e61e53520bfdf1faa6c86d933be4854eb34840c57d12e0c85...a52e146ac2ab2d0efbb768ab8ebd1e98a6055764c81fe424fbae4522f5b4cb92'))
+ .to route_to('projects/compare#show', namespace_id: 'gitlab', project_id: 'gitlabhq', from: '47d6aca82756ff2e61e53520bfdf1faa6c86d933be4854eb34840c57d12e0c85', to: 'a52e146ac2ab2d0efbb768ab8ebd1e98a6055764c81fe424fbae4522f5b4cb92')
end
it_behaves_like 'redirecting a legacy path', '/gitlab/gitlabhq/compare', '/gitlab/gitlabhq/-/compare'
diff --git a/spec/rubocop/cop/database/avoid_inheritance_column_spec.rb b/spec/rubocop/cop/database/avoid_inheritance_column_spec.rb
new file mode 100644
index 00000000000..e009cde8551
--- /dev/null
+++ b/spec/rubocop/cop/database/avoid_inheritance_column_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/database/avoid_inheritance_column'
+
+RSpec.describe RuboCop::Cop::Database::AvoidInheritanceColumn, feature_category: :shared do
+ it 'flags when :inheritance_column is used' do
+ src = <<~SRC
+ self.inheritance_column = 'some_column'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not use Single Table Inheritance https://docs.gitlab.com/ee/development/database/single_table_inheritance.html
+ SRC
+
+ expect_offense(src)
+ end
+
+ it 'does not flag when :inheritance_column is set to :_type_disabled' do
+ src = <<~SRC
+ self.inheritance_column = :_type_disabled
+ SRC
+
+ expect_no_offenses(src)
+ end
+end
diff --git a/spec/rubocop/cop/experiments_test_coverage_spec.rb b/spec/rubocop/cop/experiments_test_coverage_spec.rb
new file mode 100644
index 00000000000..eb1e672ef40
--- /dev/null
+++ b/spec/rubocop/cop/experiments_test_coverage_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../rubocop/cop/experiments_test_coverage'
+
+RSpec.describe RuboCop::Cop::ExperimentsTestCoverage, feature_category: :experimentation_conversion do
+ let(:class_offense) { described_class::CLASS_OFFENSE }
+ let(:block_offense) { described_class::BLOCK_OFFENSE }
+
+ before do
+ allow(File).to receive(:exist?).and_return(true)
+ allow(File).to receive(:new).and_return(instance_double(File, read: tests_code))
+ end
+
+ describe '#on_class' do
+ context 'when there are no tests' do
+ let(:tests_code) { '' }
+
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class ExperimentName < ApplicationExperiment
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{class_offense}
+ end
+ RUBY
+ end
+ end
+
+ context 'when there is no stub_experiments' do
+ let(:tests_code) { "candidate third" }
+
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class ExperimentName < ApplicationExperiment
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{class_offense}
+ candidate
+ variant(:third) { 'third option' }
+ end
+ RUBY
+ end
+ end
+
+ context 'when variant test is missing' do
+ let(:tests_code) { "\nstub_experiments(experiment_name: :candidate)" }
+
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class ExperimentName < ApplicationExperiment
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{class_offense}
+ candidate
+ variant(:third) { 'third option' }
+ end
+ RUBY
+ end
+ end
+
+ context 'when stub_experiments is commented out' do
+ let(:tests_code) do
+ "\n# stub_experiments(experiment_name: :candidate, experiment_name: :third)"
+ end
+
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class ExperimentName < ApplicationExperiment
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{class_offense}
+ candidate
+ variant(:third) { 'third option' }
+ end
+ RUBY
+ end
+ end
+
+ context 'when all tests are present' do
+ let(:tests_code) do
+ "#\nstub_experiments(experiment_name: :candidate, experiment_name: :third)"
+ end
+
+ before do
+ allow(cop).to receive(:filepath).and_return('app/experiments/experiment_name_experiment.rb')
+ end
+
+ it 'does not register an offense' do
+ expect_no_offenses(<<~RUBY)
+ class ExperimentName < ApplicationExperiment
+ candidate
+ variant(:third) { 'third option' }
+ end
+ RUBY
+ end
+ end
+ end
+
+ describe '#on_block' do
+ context 'when there are no tests' do
+ let(:tests_code) { '' }
+
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ experiment(:experiment_name) do |e|
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{block_offense}
+ end
+ RUBY
+ end
+ end
+
+ context 'when there is no stub_experiments' do
+ let(:tests_code) { "candidate third" }
+
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ experiment(:experiment_name) do |e|
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{block_offense}
+ e.candidate { 'candidate' }
+ e.variant(:third) { 'third option' }
+ e.run
+ end
+ RUBY
+ end
+ end
+
+ context 'when variant test is missing' do
+ let(:tests_code) { "\nstub_experiments(experiment_name: :candidate)" }
+
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ experiment(:experiment_name) do |e|
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{block_offense}
+ e.candidate { 'candidate' }
+ e.variant(:third) { 'third option' }
+ e.run
+ end
+ RUBY
+ end
+ end
+
+ context 'when stub_experiments is commented out' do
+ let(:tests_code) do
+ "\n# stub_experiments(experiment_name: :candidate, experiment_name: :third)"
+ end
+
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ experiment(:experiment_name) do |e|
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{block_offense}
+ e.candidate { 'candidate' }
+ e.variant(:third) { 'third option' }
+ e.run
+ end
+ RUBY
+ end
+ end
+
+ context 'when all tests are present' do
+ let(:tests_code) do
+ "#\nstub_experiments(experiment_name: :candidate, experiment_name: :third)"
+ end
+
+ it 'does not register an offense' do
+ expect_no_offenses(<<~RUBY)
+ experiment(:experiment_name) do |e|
+ e.candidate { 'candidate' }
+ e.variant(:third) { 'third option' }
+ e.run
+ end
+ RUBY
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/rspec/before_all_spec.rb b/spec/rubocop/cop/rspec/before_all_spec.rb
new file mode 100644
index 00000000000..5cf22bc4093
--- /dev/null
+++ b/spec/rubocop/cop/rspec/before_all_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../../rubocop/cop/rspec/before_all'
+
+RSpec.describe Rubocop::Cop::RSpec::BeforeAll, feature_category: :tooling do
+ context 'when using before(:all)' do
+ let(:source) do
+ <<~SRC
+ before(:all) do
+ ^^^^^^^^^^^^ Prefer using `before_all` over `before(:all)`. [...]
+ create_table_structure
+ end
+ SRC
+ end
+
+ let(:corrected_source) do
+ <<~SRC
+ before_all do
+ create_table_structure
+ end
+ SRC
+ end
+
+ it 'registers an offense and corrects', :aggregate_failures do
+ expect_offense(source)
+
+ expect_correction(corrected_source)
+ end
+ end
+
+ context 'when using before_all' do
+ let(:source) do
+ <<~SRC
+ before_all do
+ create_table_structure
+ end
+ SRC
+ end
+
+ it 'does not register an offense' do
+ expect_no_offenses(source)
+ end
+ end
+
+ context 'when using before(:each)' do
+ let(:source) do
+ <<~SRC
+ before(:each) do
+ create_table_structure
+ end
+ SRC
+ end
+
+ it 'does not register an offense' do
+ expect_no_offenses(source)
+ end
+ end
+
+ context 'when using before' do
+ let(:source) do
+ <<~SRC
+ before do
+ create_table_structure
+ end
+ SRC
+ end
+
+ it 'does not register an offense' do
+ expect_no_offenses(source)
+ end
+ end
+end
diff --git a/spec/scripts/database/migration_collision_checker_spec.rb b/spec/scripts/database/migration_collision_checker_spec.rb
new file mode 100644
index 00000000000..a3afbae50b6
--- /dev/null
+++ b/spec/scripts/database/migration_collision_checker_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require_relative '../../../scripts/database/migration_collision_checker'
+
+RSpec.describe MigrationCollisionChecker, feature_category: :database do
+ subject(:checker) { described_class.new }
+
+ before do
+ stub_const('MigrationCollisionChecker::MIGRATION_FOLDERS', [db_migration_path, elasticsearch_migration_path])
+ end
+
+ describe "#check" do
+ context "when there's no collision between migrations" do
+ let(:db_migration_path) { 'spec/fixtures/migrations/db/migrate/*.txt' }
+ let(:elasticsearch_migration_path) { 'spec/fixtures/migrations/elasticsearch/*.txt' }
+
+ it { expect(checker.check).to be_nil }
+ end
+
+ context 'when migration class name clashes' do
+ let(:db_migration_path) { 'spec/fixtures/migrations/db/*/*.txt' }
+ let(:elasticsearch_migration_path) { 'spec/fixtures/migrations/elasticsearch/*.txt' }
+
+ it 'returns the error code' do
+ expect(checker.check.error_code).to eq(1)
+ end
+
+ it 'returns the error message' do
+ expect(checker.check.error_message).to include(
+ 'Naming collisions were found between migrations', 'ClashMigration', 'Gitlab::ClashMigrationTwo'
+ )
+ end
+ end
+
+ context 'when migration class name clashes but they are marked to be skipped' do
+ let(:db_migration_path) { 'spec/fixtures/migrations/db/*/*.txt' }
+ let(:elasticsearch_migration_path) { 'spec/fixtures/migrations/elasticsearch/*.txt' }
+
+ before do
+ stub_const('MigrationCollisionChecker::SKIP_MIGRATIONS', %w[ClashMigration Gitlab::ClashMigrationTwo])
+ end
+
+ it { expect(checker.check).to be_nil }
+ end
+ end
+end
diff --git a/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb b/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
index 61307937101..2747e1ec811 100644
--- a/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
+++ b/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
@@ -222,15 +222,20 @@ RSpec.describe GenerateMessageToRunE2ePipeline, feature_category: :tooling do
let(:expected_content) do
<<~MARKDOWN
<!-- Run e2e warning begin -->
- :warning: @#{author_username} Some end-to-end (E2E) tests have been selected based on the stage label on this MR.
- If not run already, please run the `e2e:package-and-test-ee` job in the `qa` stage
- and review the results **before merging this MR**. (E2E tests are not run automatically on some MRs due to [runner resource constraints](https://gitlab.com/gitlab-org/gitlab-qa/-/issues/261).)
+ @#{author_username} Some end-to-end (E2E) tests should run based on the stage label.
- If you would like to run all e2e tests, please apply the ~"pipeline:run-all-e2e" label and restart the pipeline.
+ Please start the `trigger-omnibus-and-follow-up-e2e` job in the `qa` stage and ensure tests in the `follow-up-e2e:package-and-test-ee` pipeline
+ pass **before this MR is merged**.
+ (E2E tests are computationally intensive and don't run automatically for every push/rebase, so we ask you to run this job manually at least once.)
- Once done, please apply the ✅ emoji on this comment.
+ To run all E2E tests, apply the ~"pipeline:run-all-e2e" label and run a new pipeline.
- For any questions or help in reviewing the E2E test results, please reach out on the internal #quality Slack channel.
+ E2E test jobs are allowed to fail due to [flakiness](https://about.gitlab.com/handbook/engineering/quality/quality-engineering/test-metrics-dashboards/#package-and-test).
+ See current failures at the latest [pipeline triage issue](https://gitlab.com/gitlab-org/quality/pipeline-triage/-/issues).
+
+ Once done, apply the ✅ emoji on this comment.
+
+ For any questions or help, reach out on the internal #quality Slack channel.
<!-- Run e2e warning end -->
MARKDOWN
end
diff --git a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
index 78ea31c8e39..717a584f931 100644
--- a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
+++ b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
@@ -576,7 +576,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process', feature_category: :team
end
context 'with full processing of static and WYSIWYG HTML' do
- before(:all) do
+ before(:all) do # rubocop: disable RSpec/BeforeAll
# NOTE: It is a necessary to do a `yarn install` in order to ensure that
# `scripts/lib/glfm/render_wysiwyg_html_and_json.js` can be invoked successfully
# on the CI job (which will not be set up for frontend specs since this is
diff --git a/spec/scripts/trigger-build_spec.rb b/spec/scripts/trigger-build_spec.rb
index 3ac383e8d30..d3b520d385f 100644
--- a/spec/scripts/trigger-build_spec.rb
+++ b/spec/scripts/trigger-build_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Trigger, feature_category: :tooling do
}
end
- let(:com_api_endpoint) { 'https://gitlab.com/api/v4' }
+ let(:com_api_endpoint) { Trigger::Base.new.send(:endpoint) }
let(:com_api_token) { env['PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE'] }
let(:com_gitlab_client) { double('com_gitlab_client') }
@@ -237,8 +237,14 @@ RSpec.describe Trigger, feature_category: :tooling do
describe "TRIGGER_BRANCH" do
context 'when CNG_BRANCH is not set' do
it 'sets TRIGGER_BRANCH to master' do
+ stub_env('CI_PROJECT_NAMESPACE', 'gitlab-org')
expect(subject.variables['TRIGGER_BRANCH']).to eq('master')
end
+
+ it 'sets TRIGGER_BRANCH to main-jh on JH side' do
+ stub_env('CI_PROJECT_NAMESPACE', 'gitlab-cn')
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('main-jh')
+ end
end
context 'when CNG_BRANCH is set' do
@@ -261,6 +267,20 @@ RSpec.describe Trigger, feature_category: :tooling do
end
it 'sets TRIGGER_BRANCH to the corresponding stable branch' do
+ stub_env('CI_PROJECT_NAMESPACE', 'gitlab-org')
+ expect(subject.variables['TRIGGER_BRANCH']).to eq(ref)
+ end
+ end
+
+ context 'when CI_COMMIT_REF_NAME is a stable branch on JH side' do
+ let(:ref) { '14-10-stable' }
+
+ before do
+ stub_env('CI_COMMIT_REF_NAME', "#{ref}-jh")
+ end
+
+ it 'sets TRIGGER_BRANCH to the corresponding stable branch' do
+ stub_env('CI_PROJECT_NAMESPACE', 'gitlab-cn')
expect(subject.variables['TRIGGER_BRANCH']).to eq(ref)
end
end
diff --git a/spec/serializers/admin/abuse_report_details_entity_spec.rb b/spec/serializers/admin/abuse_report_details_entity_spec.rb
index 08bfa57b062..727716d76a4 100644
--- a/spec/serializers/admin/abuse_report_details_entity_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_entity_spec.rb
@@ -134,7 +134,8 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
:content,
:url,
:screenshot,
- :update_path
+ :update_path,
+ :moderate_user_path
])
end
end
diff --git a/spec/serializers/admin/abuse_report_entity_spec.rb b/spec/serializers/admin/abuse_report_entity_spec.rb
index 003d76a172f..c7f57258f40 100644
--- a/spec/serializers/admin/abuse_report_entity_spec.rb
+++ b/spec/serializers/admin/abuse_report_entity_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Admin::AbuseReportEntity, feature_category: :insider_threat do
:category,
:created_at,
:updated_at,
+ :count,
:reported_user,
:reporter,
:report_path
diff --git a/spec/serializers/base_discussion_entity_spec.rb b/spec/serializers/base_discussion_entity_spec.rb
index 334e71d23f4..d94dc702977 100644
--- a/spec/serializers/base_discussion_entity_spec.rb
+++ b/spec/serializers/base_discussion_entity_spec.rb
@@ -30,11 +30,23 @@ RSpec.describe BaseDiscussionEntity do
:id,
:individual_note,
:resolvable,
+ :resolved,
+ :resolved_by_push,
+ :resolved_by,
+ :resolved_at,
:resolve_path,
:resolve_with_issue_path
)
end
+ context 'when note is on an issue' do
+ let_it_be(:note) { create(:discussion_note_on_issue) }
+
+ it 'does not include resolve_with_issue_path' do
+ expect(subject.keys.sort).not_to include(:resolve_with_issue_path)
+ end
+ end
+
context 'when is LegacyDiffDiscussion' do
let(:project) { create(:project) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/serializers/deployment_entity_spec.rb b/spec/serializers/deployment_entity_spec.rb
index 0746e68d7c5..b0f3f328a4f 100644
--- a/spec/serializers/deployment_entity_spec.rb
+++ b/spec/serializers/deployment_entity_spec.rb
@@ -116,20 +116,28 @@ RSpec.describe DeploymentEntity do
describe 'playable_build' do
before do
- deployment.update!(deployable: build)
+ deployment.update!(deployable: job)
end
context 'when the deployment has a playable deployable' do
- context 'when this build is ready to be played' do
- let(:build) { create(:ci_build, :playable, :scheduled, pipeline: pipeline) }
+ context 'when this job is build and ready to be played' do
+ let(:job) { create(:ci_build, :playable, :scheduled, pipeline: pipeline) }
+
+ it 'exposes only the play_path' do
+ expect(subject[:playable_build].keys).to contain_exactly(:play_path)
+ end
+ end
+
+ context 'when this job is bridge and ready to be played' do
+ let(:job) { create(:ci_bridge, :playable, :manual, pipeline: pipeline, downstream: project) }
it 'exposes only the play_path' do
expect(subject[:playable_build].keys).to contain_exactly(:play_path)
end
end
- context 'when this build has failed' do
- let(:build) { create(:ci_build, :playable, :failed, pipeline: pipeline) }
+ context 'when this job has failed' do
+ let(:job) { create(:ci_build, :playable, :failed, pipeline: pipeline) }
it 'exposes the play_path and the retry_path' do
expect(subject[:playable_build].keys).to contain_exactly(:play_path, :retry_path)
@@ -138,7 +146,7 @@ RSpec.describe DeploymentEntity do
end
context 'when the deployment does not have a playable deployable' do
- let(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
it 'is not exposed' do
expect(subject[:playable_build]).to be_nil
diff --git a/spec/serializers/integrations/event_entity_spec.rb b/spec/serializers/integrations/event_entity_spec.rb
index 1b72b5d290c..a15c1bea61a 100644
--- a/spec/serializers/integrations/event_entity_spec.rb
+++ b/spec/serializers/integrations/event_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::EventEntity do
+RSpec.describe Integrations::EventEntity, feature_category: :integrations do
let(:request) { EntityRequest.new(integration: integration) }
subject { described_class.new(event, request: request, integration: integration).as_json }
@@ -38,5 +38,24 @@ RSpec.describe Integrations::EventEntity do
expect(subject[:field][:placeholder]).to eq('#general, #development')
end
end
+
+ context 'with integration with fields when channels are masked' do
+ let(:integration) { create(:integrations_slack, note_events: false, note_channel: 'note-channel') }
+ let(:event) { 'note' }
+
+ before do
+ allow(integration).to receive(:mask_configurable_channels?).and_return(true)
+ end
+
+ it 'exposes correct attributes' do
+ expect(subject[:description]).to eq('Trigger event for new comments.')
+ expect(subject[:name]).to eq('note_events')
+ expect(subject[:title]).to eq('Note')
+ expect(subject[:value]).to eq(false)
+ expect(subject[:field][:name]).to eq('note_channel')
+ expect(subject[:field][:value]).to eq(Integrations::BaseChatNotification::SECRET_MASK)
+ expect(subject[:field][:placeholder]).to eq('#general, #development')
+ end
+ end
end
end
diff --git a/spec/serializers/profile/event_entity_spec.rb b/spec/serializers/profile/event_entity_spec.rb
index dbd748d3b11..b1246e7e47d 100644
--- a/spec/serializers/profile/event_entity_spec.rb
+++ b/spec/serializers/profile/event_entity_spec.rb
@@ -140,6 +140,17 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
expect(subject[:target][:issue_type]).to eq('incident')
end
end
+
+ context 'when target is an issue' do
+ let(:issue) { build_stubbed(:issue, author: target_user, project: project) }
+ let(:event) do
+ build(:event, :created, author: target_user, project: project, target: issue)
+ end
+
+ it 'exposes `issue_type`' do
+ expect(subject[:target][:issue_type]).to eq('issue')
+ end
+ end
end
context 'with resource parent' do
diff --git a/spec/serializers/project_note_entity_spec.rb b/spec/serializers/project_note_entity_spec.rb
index aaf7764a123..ffa9cf54e10 100644
--- a/spec/serializers/project_note_entity_spec.rb
+++ b/spec/serializers/project_note_entity_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
RSpec.describe ProjectNoteEntity do
include Gitlab::Routing
- let(:request) { double('request', current_user: user, noteable: note.noteable) }
+ let_it_be(:note) { create(:note_on_merge_request) }
+ let_it_be(:user) { create(:user) }
+ let(:request) { double('request', current_user: user, noteable: note.noteable) }
let(:entity) { described_class.new(note, request: request) }
- let(:note) { create(:note) }
- let(:user) { create(:user) }
subject { entity.as_json }
@@ -28,5 +28,13 @@ RSpec.describe ProjectNoteEntity do
it 'exposes paths to resolve note' do
expect(subject).to include(:resolve_path, :resolve_with_issue_path)
end
+
+ context 'when note is on an issue' do
+ let(:note) { create(:note_on_issue) }
+
+ it 'does not include resolve_with_issue_path' do
+ expect(subject).not_to include(:resolve_with_issue_path)
+ end
+ end
end
end
diff --git a/spec/serializers/stage_entity_spec.rb b/spec/serializers/stage_entity_spec.rb
index fe8ee027245..81cb89d0c38 100644
--- a/spec/serializers/stage_entity_spec.rb
+++ b/spec/serializers/stage_entity_spec.rb
@@ -76,8 +76,8 @@ RSpec.describe StageEntity, feature_category: :continuous_integration do
context 'with a skipped stage ' do
let(:stage) { create(:ci_stage, status: 'skipped') }
- it 'does not contain play_all_manual' do
- expect(subject[:status][:action]).not_to be_present
+ it 'contains play_all_manual' do
+ expect(subject[:status][:action]).to be_present
end
end
diff --git a/spec/services/admin/abuse_report_update_service_spec.rb b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
index 7069d8ee5c1..6e8a59f4e49 100644
--- a/spec/services/admin/abuse_report_update_service_spec.rb
+++ b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Admin::AbuseReportUpdateService, feature_category: :instance_resiliency do
+RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :instance_resiliency do
let_it_be_with_reload(:abuse_report) { create(:abuse_report) }
let(:action) { 'ban_user' }
let(:close) { true }
diff --git a/spec/services/admin/plan_limits/update_service_spec.rb b/spec/services/admin/plan_limits/update_service_spec.rb
index 718367fadc2..e57c234780c 100644
--- a/spec/services/admin/plan_limits/update_service_spec.rb
+++ b/spec/services/admin/plan_limits/update_service_spec.rb
@@ -16,10 +16,10 @@ RSpec.describe Admin::PlanLimits::UpdateService, feature_category: :shared do
ci_registered_group_runners: 107,
ci_registered_project_runners: 108,
conan_max_file_size: 10,
- enforcement_limit: 15,
+ enforcement_limit: 100,
generic_packages_max_file_size: 20,
helm_max_file_size: 25,
- notification_limit: 30,
+ notification_limit: 95,
maven_max_file_size: 40,
npm_max_file_size: 60,
nuget_max_file_size: 60,
@@ -50,10 +50,10 @@ RSpec.describe Admin::PlanLimits::UpdateService, feature_category: :shared do
expect(limits.limits_history).to eq(
{ "enforcement_limit" =>
[{ "user_id" => user.id, "username" => user.username,
- "timestamp" => current_timestamp, "value" => 15 }],
+ "timestamp" => current_timestamp, "value" => 100 }],
"notification_limit" =>
[{ "user_id" => user.id, "username" => user.username,
- "timestamp" => current_timestamp, "value" => 30 }],
+ "timestamp" => current_timestamp, "value" => 95 }],
"storage_size_limit" =>
[{ "user_id" => user.id, "username" => user.username,
"timestamp" => current_timestamp, "value" => 90 }] }
@@ -68,13 +68,122 @@ RSpec.describe Admin::PlanLimits::UpdateService, feature_category: :shared do
end
context 'when the update is unsuccessful' do
- let(:params) { { notification_limit: 'abc' } }
+ context 'when notification_limit is less than storage_size_limit' do
+ let(:params) { { notification_limit: 2 } }
+
+ before do
+ limits.update!(
+ storage_size_limit: 5,
+ enforcement_limit: 10
+ )
+ end
- it 'returns an error' do
- response = update_plan_limits
+ it 'returns an error' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :error
+ expect(response[:message]).to eq ["Notification limit must be greater than or equal to " \
+ "storage_size_limit (Dashboard limit): 5 " \
+ "and less than or equal to enforcement_limit: 10"]
+ end
+ end
+
+ context 'when notification_limit is greater than enforcement_limit' do
+ let(:params) { { notification_limit: 11 } }
+
+ before do
+ limits.update!(
+ storage_size_limit: 5,
+ enforcement_limit: 10
+ )
+ end
+
+ it 'returns an error' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :error
+ expect(response[:message]).to eq ["Notification limit must be greater than or equal to " \
+ "storage_size_limit (Dashboard limit): 5 " \
+ "and less than or equal to enforcement_limit: 10"]
+ end
+ end
+
+ context 'when enforcement_limit is less than storage_size_limit' do
+ let(:params) { { enforcement_limit: 9 } }
+
+ before do
+ limits.update!(
+ storage_size_limit: 12,
+ notification_limit: 12
+ )
+ end
+
+ it 'returns an error' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :error
+ expect(response[:message]).to eq ["Enforcement limit must be greater than " \
+ "or equal to storage_size_limit (Dashboard limit): " \
+ "12 and greater than or equal to notification_limit: 12"]
+ end
+ end
- expect(response[:status]).to eq :error
- expect(response[:message]).to include 'Notification limit is not a number'
+ context 'when enforcement_limit is less than notification_limit' do
+ let(:params) { { enforcement_limit: 9 } }
+
+ before do
+ limits.update!(
+ storage_size_limit: 10,
+ notification_limit: 10
+ )
+ end
+
+ it 'returns an error' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :error
+ expect(response[:message]).to eq ["Enforcement limit must be greater than or equal to " \
+ "storage_size_limit (Dashboard limit): " \
+ "10 and greater than or equal to notification_limit: 10"]
+ end
+ end
+
+ context 'when storage_size_limit is greater than notification_limit' do
+ let(:params) { { storage_size_limit: 11 } }
+
+ before do
+ limits.update!(
+ enforcement_limit: 12,
+ notification_limit: 10
+ )
+ end
+
+ it 'returns an error' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :error
+ expect(response[:message]).to eq ["Storage size limit (Dashboard limit) must be less than or " \
+ "equal to enforcement_limit: 12 and notification_limit: 10"]
+ end
+ end
+
+ context 'when storage_size_limit is greater than enforcement_limit' do
+ let(:params) { { storage_size_limit: 11 } }
+
+ before do
+ limits.update!(
+ enforcement_limit: 10,
+ notification_limit: 11
+ )
+ end
+
+ it 'returns an error' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :error
+ expect(response[:message]).to eq ["Storage size limit (Dashboard limit) must be less than or " \
+ "equal to enforcement_limit: 10 and notification_limit: 11"]
+ end
end
end
end
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
index eb5f3808021..9fe77bf2b17 100644
--- a/spec/services/alert_management/process_prometheus_alert_service_spec.rb
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -66,22 +66,6 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService, feature_category:
expect(alert.environment).to eq(environment)
end
end
-
- context 'prometheus alert given' do
- let(:prometheus_alert) { create(:prometheus_alert, project: project) }
- let(:alert) { project.alert_management_alerts.last }
-
- before do
- payload['labels']['gitlab_alert_id'] = prometheus_alert.prometheus_metric_id
- end
-
- it 'sets the prometheus alert and environment' do
- execute
-
- expect(alert.prometheus_alert).to eq(prometheus_alert)
- expect(alert.environment).to eq(prometheus_alert.environment)
- end
- end
end
context 'when alert payload is invalid' do
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index a05219a0a49..9d73a4a6cee 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -147,11 +147,13 @@ RSpec.describe ApplicationSettings::UpdateService do
describe 'performance bar settings', feature_category: :application_performance do
using RSpec::Parameterized::TableSyntax
- where(:params_performance_bar_enabled,
- :params_performance_bar_allowed_group_path,
- :previous_performance_bar_allowed_group_id,
- :expected_performance_bar_allowed_group_id,
- :expected_valid) do
+ where(
+ :params_performance_bar_enabled,
+ :params_performance_bar_allowed_group_path,
+ :previous_performance_bar_allowed_group_id,
+ :expected_performance_bar_allowed_group_id,
+ :expected_valid
+ ) do
true | '' | nil | nil | true
true | '' | 42_000_000 | nil | true
true | nil | nil | nil | true
diff --git a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
index ca766590ada..56bc67fe42c 100644
--- a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
+++ b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
@@ -17,8 +17,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService, feature
context 'incorrect_auth_found_callback callback' do
let(:user) { create(:user) }
let(:service) do
- described_class.new(user,
- incorrect_auth_found_callback: callback)
+ described_class.new(user, incorrect_auth_found_callback: callback)
end
it 'is called' do
@@ -33,8 +32,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService, feature
context 'missing_auth_found_callback callback' do
let(:service) do
- described_class.new(user,
- missing_auth_found_callback: callback)
+ described_class.new(user, missing_auth_found_callback: callback)
end
it 'is called' do
diff --git a/spec/services/auto_merge/base_service_spec.rb b/spec/services/auto_merge/base_service_spec.rb
index 7afe5d406ba..d14470df9ee 100644
--- a/spec/services/auto_merge/base_service_spec.rb
+++ b/spec/services/auto_merge/base_service_spec.rb
@@ -97,8 +97,8 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
it 'tracks the exception' do
expect(Gitlab::ErrorTracking)
- .to receive(:track_exception).with(kind_of(ActiveRecord::RecordInvalid),
- merge_request_id: merge_request.id)
+ .to receive(:track_exception)
+ .with(kind_of(ActiveRecord::RecordInvalid), merge_request_id: merge_request.id)
subject
end
@@ -122,8 +122,8 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
it 'tracks the exception' do
expect(Gitlab::ErrorTracking)
- .to receive(:track_exception).with(kind_of(RuntimeError),
- merge_request_id: merge_request.id)
+ .to receive(:track_exception)
+ .with(kind_of(RuntimeError), merge_request_id: merge_request.id)
execute_with_error_in_yield
end
@@ -242,8 +242,8 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
it 'tracks the exception' do
expect(Gitlab::ErrorTracking)
- .to receive(:track_exception).with(kind_of(RuntimeError),
- merge_request_id: merge_request.id)
+ .to receive(:track_exception)
+ .with(kind_of(RuntimeError), merge_request_id: merge_request.id)
cancel_with_error_in_yield
end
@@ -289,8 +289,8 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
it 'tracks the exception' do
expect(Gitlab::ErrorTracking)
- .to receive(:track_exception).with(kind_of(RuntimeError),
- merge_request_id: merge_request.id)
+ .to receive(:track_exception)
+ .with(kind_of(RuntimeError), merge_request_id: merge_request.id)
abort_with_error_in_yield
end
diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
index 79c931990bb..0644efd11bf 100644
--- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
@@ -11,9 +11,13 @@ RSpec.describe AutoMerge::MergeWhenPipelineSucceedsService, feature_category: :c
let(:pipeline_status) { :running }
before do
- create(:ci_pipeline, pipeline_status, ref: mr_merge_if_green_enabled.source_branch,
- sha: mr_merge_if_green_enabled.diff_head_sha,
- project: mr_merge_if_green_enabled.source_project)
+ create(
+ :ci_pipeline,
+ pipeline_status,
+ ref: mr_merge_if_green_enabled.source_branch,
+ sha: mr_merge_if_green_enabled.diff_head_sha,
+ project: mr_merge_if_green_enabled.source_project
+ )
mr_merge_if_green_enabled.update_head_pipeline
end
@@ -73,13 +77,18 @@ RSpec.describe AutoMerge::MergeWhenPipelineSucceedsService, feature_category: :c
end
let!(:build) do
- create(:ci_build, :created, pipeline: pipeline, ref: ref,
- name: 'build', ci_stage: build_stage)
+ create(
+ :ci_build,
+ :created,
+ pipeline: pipeline,
+ ref: ref,
+ name: 'build',
+ ci_stage: build_stage
+ )
end
let!(:test) do
- create(:ci_build, :created, pipeline: pipeline, ref: ref,
- name: 'test')
+ create(:ci_build, :created, pipeline: pipeline, ref: ref, name: 'test')
end
before do
diff --git a/spec/services/auto_merge_service_spec.rb b/spec/services/auto_merge_service_spec.rb
index 64473884b13..0ed8db5296a 100644
--- a/spec/services/auto_merge_service_spec.rb
+++ b/spec/services/auto_merge_service_spec.rb
@@ -19,9 +19,10 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
if Gitlab.ee?
is_expected.to contain_exactly(
AutoMergeService::STRATEGY_MERGE_TRAIN,
- AutoMergeService::STRATEGY_ADD_TO_MERGE_TRAIN_WHEN_PIPELINE_SUCCEEDS,
- AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS,
- AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
+ AutoMergeService::STRATEGY_ADD_TO_MERGE_TRAIN_WHEN_PIPELINE_SUCCEEDS,
+ AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS,
+ AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS
+ )
else
is_expected.to eq([AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS])
end
@@ -38,9 +39,13 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
let(:pipeline_status) { :running }
before do
- create(:ci_pipeline, pipeline_status, ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha,
- project: merge_request.source_project)
+ create(
+ :ci_pipeline,
+ pipeline_status,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha,
+ project: merge_request.source_project
+ )
merge_request.update_head_pipeline
end
@@ -68,11 +73,17 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
let(:pipeline_status) { :running }
before do
- create(:ci_pipeline, pipeline_status, ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha,
- project: merge_request.source_project)
+ create(
+ :ci_pipeline,
+ pipeline_status,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha,
+ project: merge_request.source_project
+ )
merge_request.update_head_pipeline
+
+ stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
end
it 'returns preferred strategy', if: Gitlab.ee? do
@@ -121,9 +132,13 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
let(:strategy) { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
before do
- create(:ci_pipeline, pipeline_status, ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha,
- project: merge_request.source_project)
+ create(
+ :ci_pipeline,
+ pipeline_status,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha,
+ project: merge_request.source_project
+ )
merge_request.update_head_pipeline
end
@@ -147,6 +162,10 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
context 'when strategy is not specified' do
let(:strategy) {}
+ before do
+ stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
+ end
+
it 'chooses the most preferred strategy', if: Gitlab.ee? do
is_expected.to eq(:merge_when_checks_pass)
end
diff --git a/spec/services/award_emojis/destroy_service_spec.rb b/spec/services/award_emojis/destroy_service_spec.rb
index fbadee87f45..51b8914c8f0 100644
--- a/spec/services/award_emojis/destroy_service_spec.rb
+++ b/spec/services/award_emojis/destroy_service_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe AwardEmojis::DestroyService, feature_category: :team_planning do
context 'when user is not authorized' do
it_behaves_like 'a service that does not authorize the user',
- error: 'User cannot destroy emoji on the awardable'
+ error: 'User cannot destroy emoji on the awardable'
end
context 'when the user is authorized' do
@@ -56,7 +56,7 @@ RSpec.describe AwardEmojis::DestroyService, feature_category: :team_planning do
let!(:award_from_user) { create(:award_emoji, name: name, user: user) }
it_behaves_like 'a service that does not authorize the user',
- error: 'User has not awarded emoji of type thumbsup on the awardable'
+ error: 'User has not awarded emoji of type thumbsup on the awardable'
end
context 'when user has awarded an emoji to the awardable' do
diff --git a/spec/services/batched_git_ref_updates/cleanup_scheduler_service_spec.rb b/spec/services/batched_git_ref_updates/cleanup_scheduler_service_spec.rb
new file mode 100644
index 00000000000..50081a5e9e7
--- /dev/null
+++ b/spec/services/batched_git_ref_updates/cleanup_scheduler_service_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BatchedGitRefUpdates::CleanupSchedulerService, feature_category: :gitaly do
+ let(:service) { described_class.new }
+
+ describe '#execute' do
+ before do
+ BatchedGitRefUpdates::Deletion.create!(project_id: 123, ref: 'ref1')
+ BatchedGitRefUpdates::Deletion.create!(project_id: 123, ref: 'ref2')
+ BatchedGitRefUpdates::Deletion.create!(project_id: 456, ref: 'ref3')
+ BatchedGitRefUpdates::Deletion.create!(project_id: 789, ref: 'ref4', status: :processed)
+ end
+
+ it 'schedules ProjectCleanupWorker for each project in pending BatchedGitRefUpdates::Deletion' do
+ project_ids = []
+ expect(BatchedGitRefUpdates::ProjectCleanupWorker)
+ .to receive(:bulk_perform_async_with_contexts) do |deletions, arguments_proc:, context_proc:| # rubocop:disable Lint/UnusedBlockArgument
+ project_ids += deletions.map(&arguments_proc)
+ end
+
+ service.execute
+
+ expect(project_ids).to contain_exactly(123, 456)
+ end
+
+ it 'returns stats' do
+ stats = service.execute
+
+ expect(stats).to eq({
+ total_projects: 2
+ })
+ end
+
+ it 'acquires a lock to avoid running duplicate instances' do
+ expect(service).to receive(:in_lock) # Mock and don't yield
+ .with(described_class.name, retries: 0, ttl: described_class::LOCK_TIMEOUT)
+ expect(BatchedGitRefUpdates::ProjectCleanupWorker).not_to receive(:bulk_perform_async_with_contexts)
+
+ service.execute
+ end
+
+ it 'limits to MAX_PROJECTS before it stops' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ stub_const("#{described_class}::MAX_PROJECTS", 1)
+
+ stats = service.execute
+
+ expect(stats).to eq({
+ total_projects: 1
+ })
+ end
+ end
+end
diff --git a/spec/services/batched_git_ref_updates/project_cleanup_service_spec.rb b/spec/services/batched_git_ref_updates/project_cleanup_service_spec.rb
new file mode 100644
index 00000000000..dcdfdfade3c
--- /dev/null
+++ b/spec/services/batched_git_ref_updates/project_cleanup_service_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BatchedGitRefUpdates::ProjectCleanupService, feature_category: :gitaly do
+ let(:service) { described_class.new(project1.id) }
+ let_it_be(:project1) { create(:project, :repository) }
+ let_it_be(:project2) { create(:project, :repository) }
+ let!(:project1_ref1) do
+ BatchedGitRefUpdates::Deletion.create!(project_id: project1.id, ref: 'refs/test/project1-ref1')
+ end
+
+ let!(:project1_ref2) do
+ BatchedGitRefUpdates::Deletion.create!(project_id: project1.id, ref: 'refs/test/project1-ref2')
+ end
+
+ let!(:project1_ref3) do
+ BatchedGitRefUpdates::Deletion.create!(project_id: project1.id, ref: 'refs/test/already-processed',
+ status: :processed)
+ end
+
+ let!(:project2_ref1) do
+ BatchedGitRefUpdates::Deletion.create!(project_id: project2.id, ref: 'refs/test/project2-ref1')
+ end
+
+ describe '#execute' do
+ before do
+ project1.repository.create_ref('HEAD', 'refs/test/ref-to-not-be-deleted')
+ project1.repository.create_ref('HEAD', project1_ref1.ref)
+ project1.repository.create_ref('HEAD', project1_ref2.ref)
+ project1.repository.create_ref('HEAD', 'refs/test/already-processed')
+ project2.repository.create_ref('HEAD', project2_ref1.ref)
+ end
+
+ it 'deletes the named refs in batches for the given project only' do
+ expect(test_refs(project1)).to include(
+ 'refs/test/ref-to-not-be-deleted',
+ 'refs/test/already-processed',
+ 'refs/test/project1-ref1',
+ 'refs/test/project1-ref1',
+ 'refs/test/project1-ref2')
+
+ service.execute
+
+ expect(test_refs(project1)).to include(
+ 'refs/test/already-processed',
+ 'refs/test/ref-to-not-be-deleted')
+
+ expect(test_refs(project1)).not_to include(
+ 'refs/test/project1-ref1',
+ 'refs/test/project1-ref2')
+
+ expect(test_refs(project2)).to include('refs/test/project2-ref1')
+ end
+
+ it 'marks the processed BatchedGitRefUpdates::Deletion as processed' do
+ service.execute
+
+ expect(BatchedGitRefUpdates::Deletion.status_pending.map(&:ref)).to contain_exactly('refs/test/project2-ref1')
+ expect(BatchedGitRefUpdates::Deletion.status_processed.map(&:ref)).to contain_exactly(
+ 'refs/test/project1-ref1',
+ 'refs/test/project1-ref2',
+ 'refs/test/already-processed')
+ end
+
+ it 'returns stats' do
+ result = service.execute
+
+ expect(result[:total_deletes]).to eq(2)
+ end
+
+ it 'acquires a lock for the given project_id to avoid running duplicate instances' do
+ expect(service).to receive(:in_lock) # Mock and don't yield
+ .with("#{described_class}/#{project1.id}", retries: 0, ttl: described_class::LOCK_TIMEOUT)
+
+ expect { service.execute }.not_to change { BatchedGitRefUpdates::Deletion.status_pending.count }
+ end
+
+ it 'does nothing when the project does not exist' do
+ result = described_class.new(non_existing_record_id).execute
+
+ expect(result[:total_deletes]).to eq(0)
+ end
+
+ it 'stops after it reaches limit of deleted refs' do
+ stub_const("#{described_class}::QUERY_BATCH_SIZE", 1)
+ stub_const("#{described_class}::MAX_DELETES", 1)
+
+ result = service.execute
+
+ expect(result[:total_deletes]).to eq(1)
+ end
+
+ def test_refs(project)
+ project.repository.list_refs(['refs/test/']).map(&:name)
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index ff4afd6abd0..93feab97f44 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -219,8 +219,10 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
context 'when gitlab version is lower than 15.5' do
let(:source_version) do
- Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
- ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ Gitlab::VersionInfo.new(
+ ::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT
+ )
end
before do
diff --git a/spec/services/bulk_imports/get_importable_data_service_spec.rb b/spec/services/bulk_imports/get_importable_data_service_spec.rb
index 570f5199f01..3c2c08e9214 100644
--- a/spec/services/bulk_imports/get_importable_data_service_spec.rb
+++ b/spec/services/bulk_imports/get_importable_data_service_spec.rb
@@ -35,8 +35,10 @@ RSpec.describe BulkImports::GetImportableDataService, feature_category: :importe
end
let(:source_version) do
- Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
- ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ Gitlab::VersionInfo.new(
+ ::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT
+ )
end
before do
diff --git a/spec/services/bulk_imports/relation_batch_export_service_spec.rb b/spec/services/bulk_imports/relation_batch_export_service_spec.rb
index c3abd02aff8..09f55f14a96 100644
--- a/spec/services/bulk_imports/relation_batch_export_service_spec.rb
+++ b/spec/services/bulk_imports/relation_batch_export_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe BulkImports::RelationBatchExportService, feature_category: :impor
subject(:service) { described_class.new(user.id, batch.id) }
- before(:all) do
+ before_all do
Gitlab::Cache::Import::Caching.set_add(cache_key, label.id)
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 7e471bf39a1..a23ba250daf 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -321,6 +321,45 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state
end
end
+ shared_examples_for 'handling annotations' do |storage_type|
+ context 'when artifact type is annotations' do
+ let(:params) do
+ {
+ 'artifact_type' => 'annotations',
+ 'artifact_format' => 'gzip'
+ }.with_indifferent_access
+ end
+
+ if storage_type == :object_storage
+ let(:object_body) { File.read('spec/fixtures/gl-annotations.json.gz') }
+ let(:upload_filename) { 'gl-annotations.json.gz' }
+
+ before do
+ stub_request(:get, %r{s3.amazonaws.com/#{remote_path}})
+ .to_return(status: 200, body: File.read('spec/fixtures/gl-annotations.json.gz'))
+ end
+ else
+ let(:artifacts_file) do
+ file_to_upload('spec/fixtures/gl-annotations.json.gz', sha256: artifacts_sha256)
+ end
+ end
+
+ it 'calls parse service' do
+ expect_any_instance_of(Ci::ParseAnnotationsArtifactService) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ expect(execute[:status]).to eq(:success)
+ expect(job.job_annotations.as_json).to contain_exactly(
+ hash_including('name' => 'external_links', 'data' => [
+ hash_including('external_link' => hash_including('label' => 'URL 1', 'url' => 'https://url1.example.com/')),
+ hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/'))
+ ])
+ )
+ end
+ end
+ end
+
shared_examples_for 'handling object storage errors' do
shared_examples 'rescues object storage error' do |klass, message, expected_message|
it "handles #{klass}" do
@@ -495,6 +534,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state
it_behaves_like 'handling uploads'
it_behaves_like 'handling dotenv', :object_storage
+ it_behaves_like 'handling annotations', :object_storage
it_behaves_like 'handling object storage errors'
it_behaves_like 'validating requirements'
end
@@ -506,6 +546,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state
it_behaves_like 'handling uploads'
it_behaves_like 'handling dotenv', :local_storage
+ it_behaves_like 'handling annotations', :local_storage
it_behaves_like 'validating requirements'
end
end
diff --git a/spec/services/ci/parse_annotations_artifact_service_spec.rb b/spec/services/ci/parse_annotations_artifact_service_spec.rb
new file mode 100644
index 00000000000..4847447230b
--- /dev/null
+++ b/spec/services/ci/parse_annotations_artifact_service_spec.rb
@@ -0,0 +1,182 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ParseAnnotationsArtifactService, feature_category: :build_artifacts do
+ let_it_be(:project) { create(:project) }
+
+ let_it_be_with_reload(:build) { create(:ci_build, project: project) }
+ let(:service) { described_class.new(project, nil) }
+
+ describe '#execute' do
+ subject { service.execute(artifact) }
+
+ context 'when build has an annotations artifact' do
+ let_it_be(:artifact) { create(:ci_job_artifact, :annotations, job: build) }
+
+ context 'when artifact does not have the specified blob' do
+ before do
+ allow(artifact).to receive(:each_blob)
+ end
+
+ it 'parses nothing' do
+ expect(subject[:status]).to eq(:success)
+
+ expect(build.job_annotations).to be_empty
+ end
+ end
+
+ context 'when artifact has the specified blob' do
+ let(:blob) { data.to_json }
+
+ before do
+ allow(artifact).to receive(:each_blob).and_yield(blob)
+ end
+
+ context 'when valid annotations are given' do
+ let(:data) do
+ {
+ external_links: [
+ {
+ external_link: {
+ label: 'URL 1',
+ url: 'https://url1.example.com/'
+ }
+ },
+ {
+ external_link: {
+ label: 'URL 2',
+ url: 'https://url2.example.com/'
+ }
+ }
+ ]
+ }
+ end
+
+ it 'parses the artifact' do
+ subject
+
+ expect(build.job_annotations.as_json).to contain_exactly(
+ hash_including('name' => 'external_links', 'data' => [
+ hash_including('external_link' => hash_including('label' => 'URL 1', 'url' => 'https://url1.example.com/')),
+ hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/'))
+ ])
+ )
+ end
+ end
+
+ context 'when valid annotations are given and annotation list name is the same' do
+ before do
+ build.job_annotations.create!(name: 'external_links', data: [
+ {
+ external_link: {
+ label: 'URL 1',
+ url: 'https://url1.example.com/'
+ }
+ }
+ ])
+ end
+
+ let(:data) do
+ {
+ external_links: [
+ {
+ external_link: {
+ label: 'URL 2',
+ url: 'https://url2.example.com/'
+ }
+ }
+ ]
+ }
+ end
+
+ it 'parses the artifact' do
+ subject
+
+ expect(build.job_annotations.as_json).to contain_exactly(
+ hash_including('name' => 'external_links', 'data' => [
+ hash_including('external_link' => hash_including('label' => 'URL 2', 'url' => 'https://url2.example.com/'))
+ ])
+ )
+ end
+ end
+
+ context 'when invalid JSON is given' do
+ let(:blob) { 'Invalid JSON!' }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when root is not an object' do
+ let(:data) { [] }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Annotations files must be a JSON object')
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when item is not a valid annotation list' do
+ let(:data) { { external_links: {} } }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Validation failed: Data must be a valid json schema')
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+      context 'when more annotation lists than the limit are specified' do
+ let(:data) do
+ {
+ external_links_1: [
+ {
+ external_link: {
+ label: 'URL',
+ url: 'https://example.com/'
+ }
+ }
+ ],
+ external_links_2: [
+ {
+ external_link: {
+ label: 'URL',
+ url: 'https://example.com/'
+ }
+ }
+ ]
+ }
+ end
+
+ before do
+ allow(service).to receive(:annotations_num_limit).and_return(1)
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq(
+ "Annotations files cannot have more than #{service.send(:annotations_num_limit)} annotation lists")
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+ end
+
+ context 'when artifact size is too big' do
+ before do
+ allow(artifact.file).to receive(:size) { service.send(:annotations_size_limit) + 1.kilobyte }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq(
+ "Annotations Artifact Too Big. Maximum Allowable Size: #{service.send(:annotations_size_limit)}")
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index 905ccf164ca..82a8e425cd0 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -20,6 +20,38 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
create(:ci_build, :interruptible, pipeline: pipeline)
end
+ shared_examples 'time limits pipeline cancellation' do
+ context 'with old pipelines' do
+ let(:old_pipeline) { create(:ci_pipeline, project: project, created_at: 5.days.ago) }
+
+ before do
+ create(:ci_build, :interruptible, :pending, pipeline: old_pipeline)
+ end
+
+ it 'ignores old pipelines' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ expect(build_statuses(old_pipeline)).to contain_exactly('pending')
+ end
+
+ context 'with lower_interval_for_canceling_redundant_pipelines disabled' do
+ before do
+ stub_feature_flags(lower_interval_for_canceling_redundant_pipelines: false)
+ end
+
+ it 'cancels pipelines created more than 3 days ago' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ expect(build_statuses(old_pipeline)).to contain_exactly('canceled')
+ end
+ end
+ end
+ end
+
describe '#execute!' do
subject(:execute) { service.execute }
@@ -218,6 +250,8 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
expect(build_statuses(pipeline.reload)).to contain_exactly('pending')
end
+
+ it_behaves_like 'time limits pipeline cancellation'
end
context 'when auto-cancel is disabled' do
@@ -452,6 +486,8 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
expect(build_statuses(pipeline.reload)).to contain_exactly('pending')
end
+
+ it_behaves_like 'time limits pipeline cancellation'
end
context 'when auto-cancel is disabled' do
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 15f2cc0990c..93dc9481bf0 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category: :continuous_integration do
include RepoHelpers
+ include ExclusiveLeaseHelpers
describe 'Pipeline Processing Service Tests With Yaml' do
let_it_be(:project) { create(:project, :repository) }
@@ -1233,6 +1234,19 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category
end
end
+ context 'when the exclusive lease is taken' do
+ let(:lease_key) { "ci/pipeline_processing/atomic_processing_service::pipeline_id:#{pipeline.id}" }
+
+ it 'skips pipeline processing' do
+ create_build('linux', stage_idx: 0)
+
+ stub_exclusive_lease_taken(lease_key)
+
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(a_hash_including(message: /^Cannot obtain an exclusive lease/))
+ expect(process_pipeline).to be_falsy
+ end
+ end
+
private
def all_builds
diff --git a/spec/services/ci/pipeline_schedules/create_service_spec.rb b/spec/services/ci/pipeline_schedules/create_service_spec.rb
index a01c71432c3..3fc093c13da 100644
--- a/spec/services/ci/pipeline_schedules/create_service_spec.rb
+++ b/spec/services/ci/pipeline_schedules/create_service_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
RSpec.describe Ci::PipelineSchedules::CreateService, feature_category: :continuous_integration do
- let_it_be(:user) { create(:user) }
let_it_be(:reporter) { create(:user) }
- let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be_with_reload(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :public, :repository) }
+
+ subject(:service) { described_class.new(project, user, params) }
before_all do
project.add_maintainer(user)
@@ -82,5 +84,7 @@ RSpec.describe Ci::PipelineSchedules::CreateService, feature_category: :continuo
end
end
end
+
+ it_behaves_like 'pipeline schedules checking variables permission'
end
end
diff --git a/spec/services/ci/pipeline_schedules/update_service_spec.rb b/spec/services/ci/pipeline_schedules/update_service_spec.rb
index 5c1354bd5aa..834bbcfcfeb 100644
--- a/spec/services/ci/pipeline_schedules/update_service_spec.rb
+++ b/spec/services/ci/pipeline_schedules/update_service_spec.rb
@@ -3,18 +3,18 @@
require 'spec_helper'
RSpec.describe Ci::PipelineSchedules::UpdateService, feature_category: :continuous_integration do
- let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :public, :repository) }
+ let_it_be_with_reload(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
let_it_be(:reporter) { create(:user) }
- let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:pipeline_schedule) do
- create(:ci_pipeline_schedule, project: project, owner: user, ref: 'master')
- end
let_it_be(:pipeline_schedule_variable) do
create(:ci_pipeline_schedule_variable,
key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule)
end
+ subject(:service) { described_class.new(pipeline_schedule, user, params) }
+
before_all do
project.add_maintainer(user)
project.add_reporter(reporter)
@@ -125,5 +125,7 @@ RSpec.describe Ci::PipelineSchedules::UpdateService, feature_category: :continuo
end
end
end
+
+ it_behaves_like 'pipeline schedules checking variables permission'
end
end
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index f15f4a16d4f..caed9815fb3 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -208,6 +208,45 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
end
end
+ shared_examples_for 'creates associations for a deployable job' do |factory_type|
+ context 'when a job with a deployment is retried' do
+ let!(:job) do
+ create(factory_type, :with_deployment, :deploy_to_production, pipeline: pipeline, ci_stage: stage)
+ end
+
+ it 'creates a new deployment' do
+ expect { new_job }.to change { Deployment.count }.by(1)
+ end
+
+ it 'does not create a new environment' do
+ expect { new_job }.not_to change { Environment.count }
+ end
+ end
+
+ context 'when a job with a dynamic environment is retried' do
+ let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(u) } }
+
+ let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
+
+ let!(:job) do
+ create(factory_type, :with_deployment,
+ environment: environment_name,
+ options: { environment: { name: environment_name } },
+ pipeline: pipeline,
+ ci_stage: stage,
+ user: other_developer)
+ end
+
+ it 'creates a new deployment' do
+ expect { new_job }.to change { Deployment.count }.by(1)
+ end
+
+ it 'does not create a new environment' do
+ expect { new_job }.not_to change { Environment.count }
+ end
+ end
+ end
+
describe '#clone!' do
let(:new_job) { service.clone!(job) }
@@ -219,6 +258,7 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
include_context 'retryable bridge'
it_behaves_like 'clones the job'
+ it_behaves_like 'creates associations for a deployable job', :ci_bridge
context 'when given variables' do
let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
@@ -235,43 +275,7 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
let(:job) { job_to_clone }
it_behaves_like 'clones the job'
-
- context 'when a build with a deployment is retried' do
- let!(:job) do
- create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline, ci_stage: stage)
- end
-
- it 'creates a new deployment' do
- expect { new_job }.to change { Deployment.count }.by(1)
- end
-
- it 'does not create a new environment' do
- expect { new_job }.not_to change { Environment.count }
- end
- end
-
- context 'when a build with a dynamic environment is retried' do
- let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(u) } }
-
- let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
-
- let!(:job) do
- create(:ci_build, :with_deployment,
- environment: environment_name,
- options: { environment: { name: environment_name } },
- pipeline: pipeline,
- ci_stage: stage,
- user: other_developer)
- end
-
- it 'creates a new deployment' do
- expect { new_job }.to change { Deployment.count }.by(1)
- end
-
- it 'does not create a new environment' do
- expect { new_job }.not_to change { Environment.count }
- end
- end
+ it_behaves_like 'creates associations for a deployable job', :ci_build
context 'when given variables' do
let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index fc2c66e7f73..6d991baafd0 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -451,22 +451,18 @@ RSpec.describe Ci::RetryPipelineService, '#execute', feature_category: :continuo
before do
project.add_maintainer(user)
- create(:merge_request,
- source_project: forked_project,
- target_project: project,
- source_branch: 'fixes',
- allow_collaboration: true)
- create_build('rspec 1', :failed, test_stage)
- end
- it 'allows to retry failed pipeline' do
- allow_any_instance_of(Project).to receive(:branch_allows_collaboration?).and_return(true)
+ create_build('rspec 1', :failed, test_stage, project: project, ref: pipeline.ref)
+
allow_any_instance_of(Project).to receive(:empty_repo?).and_return(false)
+ allow_any_instance_of(Project).to receive(:branch_allows_collaboration?).and_return(true)
+ end
+ it 'allows to retry failed pipeline' do
service.execute(pipeline)
expect(build('rspec 1')).to be_pending
- expect(pipeline.reload).to be_running
+ expect(pipeline).to be_running
end
end
diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
index 48941792c4b..f05aa533628 100644
--- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
@@ -113,10 +113,12 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute',
context 'when kubernetes namespace is not persisted' do
let(:kubernetes_namespace) do
- build(:cluster_kubernetes_namespace,
- cluster: cluster,
- project: project,
- environment: environment)
+ build(
+ :cluster_kubernetes_namespace,
+ cluster: cluster,
+ project: project,
+ environment: environment
+ )
end
it_behaves_like 'successful creation of kubernetes namespace'
@@ -126,11 +128,13 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute',
context 'project clusters' do
context 'when kubernetes namespace is not persisted' do
let(:kubernetes_namespace) do
- build(:cluster_kubernetes_namespace,
- cluster: cluster,
- project: cluster_project.project,
- cluster_project: cluster_project,
- environment: environment)
+ build(
+ :cluster_kubernetes_namespace,
+ cluster: cluster,
+ project: cluster_project.project,
+ cluster_project: cluster_project,
+ environment: environment
+ )
end
it_behaves_like 'successful creation of kubernetes namespace'
@@ -140,11 +144,13 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute',
let(:namespace) { "new-namespace-#{environment.slug}" }
let(:kubernetes_namespace) do
- create(:cluster_kubernetes_namespace,
- cluster: cluster,
- project: cluster_project.project,
- cluster_project: cluster_project,
- environment: environment)
+ create(
+ :cluster_kubernetes_namespace,
+ cluster: cluster,
+ project: cluster_project.project,
+ cluster_project: cluster_project,
+ environment: environment
+ )
end
before do
diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
index ab0c5691b06..e8e0174fe40 100644
--- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
@@ -9,9 +9,12 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService, featur
let(:cluster_project) { cluster.cluster_project }
let(:project) { cluster_project.project }
let(:cluster) do
- create(:cluster,
- :project, :provided_by_gcp,
- platform_kubernetes: create(:cluster_platform_kubernetes, :configured))
+ create(
+ :cluster,
+ :project,
+ :provided_by_gcp,
+ platform_kubernetes: create(:cluster_platform_kubernetes, :configured)
+ )
end
let(:kubeclient) do
diff --git a/spec/services/cohorts_service_spec.rb b/spec/services/cohorts_service_spec.rb
index ab53bcf8657..fe7383d7f5d 100644
--- a/spec/services/cohorts_service_spec.rb
+++ b/spec/services/cohorts_service_spec.rb
@@ -94,8 +94,7 @@ RSpec.describe CohortsService, feature_category: :shared do
}
]
- expect(described_class.new.execute).to eq(months_included: 12,
- cohorts: expected_cohorts)
+ expect(described_class.new.execute).to eq(months_included: 12, cohorts: expected_cohorts)
end
end
end
diff --git a/spec/services/commits/commit_patch_service_spec.rb b/spec/services/commits/commit_patch_service_spec.rb
index a9d61be23be..3edebc0c153 100644
--- a/spec/services/commits/commit_patch_service_spec.rb
+++ b/spec/services/commits/commit_patch_service_spec.rb
@@ -62,8 +62,7 @@ RSpec.describe Commits::CommitPatchService, feature_category: :source_code_manag
context 'when the user does not have access' do
let(:user) { create(:user) }
- it_behaves_like 'an error response',
- 'You are not allowed to push into this branch'
+ it_behaves_like 'an error response', 'You are not allowed to push into this branch'
end
context 'when the patches are not valid' do
diff --git a/spec/services/deployments/create_for_job_service_spec.rb b/spec/services/deployments/create_for_job_service_spec.rb
new file mode 100644
index 00000000000..f5a30078b2d
--- /dev/null
+++ b/spec/services/deployments/create_for_job_service_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deployments::CreateForJobService, feature_category: :continuous_delivery do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:service) { described_class.new }
+
+ it_behaves_like 'create deployment for job' do
+ let(:factory_type) { :ci_build }
+ end
+
+ it_behaves_like 'create deployment for job' do
+ let(:factory_type) { :ci_bridge }
+ end
+end
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
index a468af90ffb..011bd3f0214 100644
--- a/spec/services/deployments/link_merge_requests_service_spec.rb
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -158,11 +158,9 @@ RSpec.describe Deployments::LinkMergeRequestsService, feature_category: :continu
end
it "doesn't link the same merge_request twice" do
- create(:merge_request, :merged, merge_commit_sha: mr1_merge_commit_sha,
- source_project: project)
+ create(:merge_request, :merged, merge_commit_sha: mr1_merge_commit_sha, source_project: project)
- picked_mr = create(:merge_request, :merged, merge_commit_sha: '123abc',
- source_project: project)
+ picked_mr = create(:merge_request, :merged, merge_commit_sha: '123abc', source_project: project)
# the first MR includes c1c67abba which is a cherry-pick of the fake picked_mr merge request
create(:track_mr_picking_note, noteable: picked_mr, project: project, commit_id: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
diff --git a/spec/services/deployments/older_deployments_drop_service_spec.rb b/spec/services/deployments/older_deployments_drop_service_spec.rb
deleted file mode 100644
index 7e3074a1688..00000000000
--- a/spec/services/deployments/older_deployments_drop_service_spec.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Deployments::OlderDeploymentsDropService, feature_category: :continuous_delivery do
- let(:environment) { create(:environment) }
- let(:deployment) { create(:deployment, environment: environment) }
- let(:service) { described_class.new(deployment) }
-
- describe '#execute' do
- subject { service.execute }
-
- shared_examples 'it does not drop any build' do
- it do
- expect { subject }.to not_change(Ci::Build.failed, :count)
- end
- end
-
- context 'when deployment is nil' do
- let(:deployment) { nil }
-
- it_behaves_like 'it does not drop any build'
- end
-
- context 'when a deployment is passed in' do
- context 'and there is no active deployment for the related environment' do
- let(:deployment) { create(:deployment, :canceled, environment: environment) }
- let(:deployment2) { create(:deployment, :canceled, environment: environment) }
-
- before do
- deployment
- deployment2
- end
-
- it_behaves_like 'it does not drop any build'
- end
-
- context 'and there are active deployment for the related environment' do
- let(:deployment) { create(:deployment, :running, environment: environment) }
- let(:deployment2) { create(:deployment, :running, environment: environment) }
-
- context 'and there is no older deployment than "deployment"' do
- before do
- deployment
- deployment2
- end
-
- it_behaves_like 'it does not drop any build'
- end
-
- context 'and there is an older deployment than "deployment"' do
- let(:older_deployment) { create(:deployment, :running, environment: environment) }
-
- before do
- older_deployment
- deployment
- deployment2
- end
-
- it 'drops that older deployment' do
- deployable = older_deployment.deployable
- expect(deployable.failed?).to be_falsey
-
- subject
-
- expect(deployable.reload.failed?).to be_truthy
- end
-
- context 'when older deployable is a manual job' do
- let(:older_deployment) { create(:deployment, :created, environment: environment, deployable: build) }
- let(:build) { create(:ci_build, :manual) }
-
- # Manual jobs should not be accounted as outdated deployment jobs.
- # See https://gitlab.com/gitlab-org/gitlab/-/issues/255978 for more information.
- it 'does not drop any builds nor track the exception' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
-
- expect { subject }.not_to change { Ci::Build.failed.count }
- end
- end
-
- context 'when deployable.drop raises RuntimeError' do
- before do
- allow_any_instance_of(Ci::Build).to receive(:drop).and_raise(RuntimeError)
- end
-
- it 'does not drop an older deployment and tracks the exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(kind_of(RuntimeError), subject_id: deployment.id, build_id: older_deployment.deployable_id)
-
- expect { subject }.not_to change { Ci::Build.failed.count }
- end
- end
-
- context 'when ActiveRecord::StaleObjectError is raised' do
- before do
- allow_any_instance_of(Ci::Build)
- .to receive(:drop).and_raise(ActiveRecord::StaleObjectError)
- end
-
- it 'resets the object via Gitlab::OptimisticLocking' do
- allow_any_instance_of(Ci::Build).to receive(:reset).at_least(:once)
-
- subject
- end
- end
-
- context 'and there is no deployable for that older deployment' do
- let(:older_deployment) { create(:deployment, :running, environment: environment, deployable: nil) }
-
- it_behaves_like 'it does not drop any build'
- end
- end
- end
- end
- end
-end
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 33c9c9ed592..0a93e300eb6 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -120,10 +120,12 @@ RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continu
it 'tracks an exception' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(an_instance_of(described_class::EnvironmentUpdateFailure),
- project_id: project.id,
- environment_id: environment.id,
- reason: %q{External url javascript scheme is not allowed})
+ .with(
+ an_instance_of(described_class::EnvironmentUpdateFailure),
+ project_id: project.id,
+ environment_id: environment.id,
+ reason: %q{External url javascript scheme is not allowed}
+ )
.once
subject.execute
@@ -249,13 +251,15 @@ RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continu
context 'when yaml environment uses $CI_COMMIT_REF_NAME' do
let(:job) do
- create(:ci_build,
- :with_deployment,
- pipeline: pipeline,
- ref: 'master',
- environment: 'production',
- project: project,
- options: { environment: { name: 'production', url: 'http://review/$CI_COMMIT_REF_NAME' } })
+ create(
+ :ci_build,
+ :with_deployment,
+ pipeline: pipeline,
+ ref: 'master',
+ environment: 'production',
+ project: project,
+ options: { environment: { name: 'production', url: 'http://review/$CI_COMMIT_REF_NAME' } }
+ )
end
it { is_expected.to eq('http://review/master') }
@@ -263,13 +267,15 @@ RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continu
context 'when yaml environment uses $CI_ENVIRONMENT_SLUG' do
let(:job) do
- create(:ci_build,
- :with_deployment,
- pipeline: pipeline,
- ref: 'master',
- environment: 'prod-slug',
- project: project,
- options: { environment: { name: 'prod-slug', url: 'http://review/$CI_ENVIRONMENT_SLUG' } })
+ create(
+ :ci_build,
+ :with_deployment,
+ pipeline: pipeline,
+ ref: 'master',
+ environment: 'prod-slug',
+ project: project,
+ options: { environment: { name: 'prod-slug', url: 'http://review/$CI_ENVIRONMENT_SLUG' } }
+ )
end
it { is_expected.to eq('http://review/prod-slug') }
@@ -277,13 +283,15 @@ RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continu
context 'when yaml environment uses yaml_variables containing symbol keys' do
let(:job) do
- create(:ci_build,
- :with_deployment,
- pipeline: pipeline,
- yaml_variables: [{ key: :APP_HOST, value: 'host' }],
- environment: 'production',
- project: project,
- options: { environment: { name: 'production', url: 'http://review/$APP_HOST' } })
+ create(
+ :ci_build,
+ :with_deployment,
+ pipeline: pipeline,
+ yaml_variables: [{ key: :APP_HOST, value: 'host' }],
+ environment: 'production',
+ project: project,
+ options: { environment: { name: 'production', url: 'http://review/$APP_HOST' } }
+ )
end
it { is_expected.to eq('http://review/host') }
@@ -291,13 +299,15 @@ RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continu
context 'when job variables are generated during runtime' do
let(:job) do
- create(:ci_build,
- :with_deployment,
- pipeline: pipeline,
- environment: 'review/$CI_COMMIT_REF_NAME',
- project: project,
- job_variables: [job_variable],
- options: { environment: { name: 'review/$CI_COMMIT_REF_NAME', url: 'http://$DYNAMIC_ENV_URL' } })
+ create(
+ :ci_build,
+ :with_deployment,
+ pipeline: pipeline,
+ environment: 'review/$CI_COMMIT_REF_NAME',
+ project: project,
+ job_variables: [job_variable],
+ options: { environment: { name: 'review/$CI_COMMIT_REF_NAME', url: 'http://$DYNAMIC_ENV_URL' } }
+ )
end
let(:job_variable) do
@@ -319,14 +329,16 @@ RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continu
end
let(:job) do
- create(:ci_build,
- :with_deployment,
- pipeline: pipeline,
- ref: 'master',
- environment: 'production',
- project: project,
- yaml_variables: yaml_variables,
- options: { environment: { name: 'production', url: 'http://$MAIN_DOMAIN' } })
+ create(
+ :ci_build,
+ :with_deployment,
+ pipeline: pipeline,
+ ref: 'master',
+ environment: 'production',
+ project: project,
+ yaml_variables: yaml_variables,
+ options: { environment: { name: 'production', url: 'http://$MAIN_DOMAIN' } }
+ )
end
it { is_expected.to eq('http://appname-master.example.com') }
diff --git a/spec/services/environments/create_for_job_service_spec.rb b/spec/services/environments/create_for_job_service_spec.rb
new file mode 100644
index 00000000000..0d459301f19
--- /dev/null
+++ b/spec/services/environments/create_for_job_service_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::CreateForJobService, feature_category: :continuous_delivery do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:service) { described_class.new }
+
+ it_behaves_like 'create environment for job' do
+ let(:factory_type) { :ci_build }
+ end
+
+ it_behaves_like 'create environment for job' do
+ let(:factory_type) { :ci_bridge }
+ end
+end
diff --git a/spec/services/environments/create_service_spec.rb b/spec/services/environments/create_service_spec.rb
index c7d32f9111a..db427f3a8cd 100644
--- a/spec/services/environments/create_service_spec.rb
+++ b/spec/services/environments/create_service_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Environments::CreateService, feature_category: :environment_manag
describe '#execute' do
subject { service.execute }
- let(:params) { { name: 'production', external_url: 'https://gitlab.com', tier: :production, kubernetes_namespace: 'default' } }
+ let(:params) { { name: 'production', external_url: 'https://gitlab.com', tier: :production, kubernetes_namespace: 'default', flux_resource_path: 'path/to/flux/resource' } }
it 'creates an environment' do
expect { subject }.to change { ::Environment.count }.by(1)
@@ -28,6 +28,7 @@ RSpec.describe Environments::CreateService, feature_category: :environment_manag
expect(response.payload[:environment].external_url).to eq('https://gitlab.com')
expect(response.payload[:environment].tier).to eq('production')
expect(response.payload[:environment].kubernetes_namespace).to eq('default')
+ expect(response.payload[:environment].flux_resource_path).to eq('path/to/flux/resource')
end
context 'with a cluster agent' do
diff --git a/spec/services/environments/update_service_spec.rb b/spec/services/environments/update_service_spec.rb
index 808d6340314..ef12e9b2ffd 100644
--- a/spec/services/environments/update_service_spec.rb
+++ b/spec/services/environments/update_service_spec.rb
@@ -43,6 +43,21 @@ RSpec.describe Environments::UpdateService, feature_category: :environment_manag
end
end
+ context 'when setting a flux resource path to the environment' do
+ let(:params) { { flux_resource_path: 'path/to/flux/resource' } }
+
+ it 'updates the flux resource path' do
+ expect { subject }.to change { environment.reload.flux_resource_path }.to('path/to/flux/resource')
+ end
+
+ it 'returns successful response' do
+ response = subject
+
+ expect(response).to be_success
+ expect(response.payload[:environment]).to eq(environment)
+ end
+ end
+
context 'when setting a cluster agent to the environment' do
let_it_be(:agent_management_project) { create(:project) }
let_it_be(:cluster_agent) { create(:cluster_agent, project: agent_management_project) }
diff --git a/spec/services/git/base_hooks_service_spec.rb b/spec/services/git/base_hooks_service_spec.rb
index 60883db0cd5..e083c8d7316 100644
--- a/spec/services/git/base_hooks_service_spec.rb
+++ b/spec/services/git/base_hooks_service_spec.rb
@@ -363,17 +363,5 @@ RSpec.describe Git::BaseHooksService, feature_category: :source_code_management
subject.execute
end
end
-
- context 'when :notify_kas_on_git_push feature flag is disabled' do
- before do
- stub_feature_flags(notify_kas_on_git_push: false)
- end
-
- it do
- expect(Clusters::Agents::NotifyGitPushWorker).not_to receive(:perform_async)
-
- subject.execute
- end
- end
end
end
diff --git a/spec/services/grafana/proxy_service_spec.rb b/spec/services/grafana/proxy_service_spec.rb
deleted file mode 100644
index 7029bab379a..00000000000
--- a/spec/services/grafana/proxy_service_spec.rb
+++ /dev/null
@@ -1,169 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Grafana::ProxyService, feature_category: :metrics do
- include ReactiveCachingHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
-
- let(:proxy_path) { 'api/v1/query_range' }
- let(:datasource_id) { '1' }
- let(:query_params) do
- {
- 'query' => 'rate(relevant_metric)',
- 'start' => '1570441248',
- 'end' => '1570444848',
- 'step' => '900'
- }
- end
-
- let(:cache_params) { [project.id, datasource_id, proxy_path, query_params] }
-
- let(:service) do
- described_class.new(project, datasource_id, proxy_path, query_params)
- end
-
- shared_examples_for 'initializes an instance' do
- it 'initializes an instance of ProxyService class' do
- expect(subject).to be_an_instance_of(described_class)
- expect(subject.project).to eq(project)
- expect(subject.datasource_id).to eq('1')
- expect(subject.proxy_path).to eq('api/v1/query_range')
- expect(subject.query_params).to eq(query_params)
- end
- end
-
- describe '.from_cache' do
- subject { described_class.from_cache(*cache_params) }
-
- it_behaves_like 'initializes an instance'
- end
-
- describe '#initialize' do
- subject { service }
-
- it_behaves_like 'initializes an instance'
- end
-
- describe '#execute' do
- subject(:result) { service.execute }
-
- shared_examples 'missing proxy support' do
- it 'returns API not supported error' do
- expect(result).to eq(
- status: :error,
- message: 'Proxy support for this API is not available currently'
- )
- end
- end
-
- context 'with unsupported proxy path' do
- where(:proxy_path) do
- %w[
- /api/vl/query_range
- api/vl/query_range/
- api/vl/labels
- api/v2/query_range
- ../../../org/users
- ]
- end
-
- with_them do
- include_examples 'missing proxy support'
- end
- end
-
- context 'with unsupported datasource_id' do
- where(:datasource_id) do
- ['', '-1', '1str', 'str1', '../../1', '1/../..', "1\n1"]
- end
-
- with_them do
- include_examples 'missing proxy support'
- end
- end
-
- context 'when grafana integration is not configured' do
- before do
- allow(project).to receive(:grafana_integration).and_return(nil)
- end
-
- include_examples 'missing proxy support'
- end
-
- context 'with caching', :use_clean_rails_memory_store_caching do
- context 'when value not present in cache' do
- it 'returns nil' do
- expect(ExternalServiceReactiveCachingWorker)
- .to receive(:perform_async)
- .with(service.class, service.id, *cache_params)
-
- expect(result).to eq(nil)
- end
- end
-
- context 'when value present in cache' do
- let(:return_value) { { 'http_status' => 200, 'body' => 'body' } }
-
- before do
- stub_reactive_cache(service, return_value, cache_params)
- end
-
- it 'returns cached value' do
- expect(ReactiveCachingWorker)
- .not_to receive(:perform_async)
- .with(service.class, service.id, *cache_params)
-
- expect(result[:http_status]).to eq(return_value[:http_status])
- expect(result[:body]).to eq(return_value[:body])
- end
- end
- end
-
- context 'call prometheus api' do
- let(:client) { service.send(:client) }
-
- before do
- synchronous_reactive_cache(service)
- end
-
- context 'connection to grafana datasource succeeds' do
- let(:response) { instance_double(Gitlab::HTTP::Response) }
- let(:status_code) { 400 }
- let(:body) { 'body' }
-
- before do
- allow(client).to receive(:proxy_datasource).and_return(response)
-
- allow(response).to receive(:code).and_return(status_code)
- allow(response).to receive(:body).and_return(body)
- end
-
- it 'returns the http status code and body from prometheus' do
- expect(result).to eq(
- http_status: status_code,
- body: body,
- status: :success
- )
- end
- end
-
- context 'connection to grafana datasource fails' do
- before do
- allow(client).to receive(:proxy_datasource)
- .and_raise(Grafana::Client::Error, 'Network connection error')
- end
-
- it 'returns error' do
- expect(result).to eq(
- status: :error,
- message: 'Network connection error',
- http_status: :service_unavailable
- )
- end
- end
- end
- end
-end
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index 2317c6fba61..b2b27a1a075 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -59,6 +59,33 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
end
end
+ context 'creating a group with `default_branch_protection_defaults` attribute' do
+ let(:branch_protection) { ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys }
+ let(:params) { group_params.merge(default_branch_protection_defaults: branch_protection) }
+ let(:service) { described_class.new(user, params) }
+ let(:created_group) { service.execute }
+
+ context 'for users who have the ability to create a group with `default_branch_protection`' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :update_default_branch_protection, an_instance_of(Group)).and_return(true)
+ end
+
+ it 'creates group with the specified default branch protection settings' do
+ expect(created_group.default_branch_protection_defaults).to eq(branch_protection)
+ end
+ end
+
+ context 'for users who do not have the ability to create a group with `default_branch_protection_defaults`' do
+ it 'does not create the group with the specified default branch protection settings' do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection) { false }
+
+ expect(created_group.default_branch_protection_defaults).not_to eq(Gitlab::Access::PROTECTION_NONE)
+ end
+ end
+ end
+
context 'creating a group with `allow_mfa_for_subgroups` attribute' do
let(:params) { group_params.merge(allow_mfa_for_subgroups: false) }
let(:service) { described_class.new(user, params) }
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
index 8acbcdc77af..9ba664212b8 100644
--- a/spec/services/groups/group_links/create_service_spec.rb
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute', feature_category:
it_behaves_like 'shareable'
context 'when sharing outside the hierarchy is disabled' do
- let_it_be(:group_parent) do
+ let_it_be_with_refind(:group_parent) do
create(:group,
namespace_settings: create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: true))
end
diff --git a/spec/services/groups/participants_service_spec.rb b/spec/services/groups/participants_service_spec.rb
index 0b370ca9fd8..8359bf1670f 100644
--- a/spec/services/groups/participants_service_spec.rb
+++ b/spec/services/groups/participants_service_spec.rb
@@ -66,15 +66,7 @@ RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projec
subject(:usernames) { service_result.pluck(:username) }
- context 'when current_user is not a member' do
- let(:service) { described_class.new(group, create(:user)) }
-
- it { is_expected.not_to include(private_group_member.username) }
- end
-
- context 'when current_user is a member' do
- it { is_expected.to include(private_group_member.username) }
- end
+ it { is_expected.to include(private_group_member.username) }
end
end
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 3819bcee36d..861728f00c6 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -9,33 +9,10 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
let!(:public_group) { create(:group, :public) }
describe "#execute" do
- shared_examples 'with packages' do
- before do
- group.add_owner(user)
- end
-
- context 'with npm packages' do
- let!(:package) { create(:npm_package, project: project) }
-
- it 'does not allow a path update' do
- expect(update_group(group, user, path: 'updated')).to be false
- expect(group.errors[:path]).to include('cannot change when group contains projects with NPM packages')
- end
-
- it 'allows name update' do
- expect(update_group(group, user, name: 'Updated')).to be true
- expect(group.errors).to be_empty
- expect(group.name).to eq('Updated')
- end
- end
- end
-
context 'with project' do
let!(:group) { create(:group, :public) }
let(:project) { create(:project, namespace: group) }
- it_behaves_like 'with packages'
-
context 'located in a subgroup' do
let(:subgroup) { create(:group, parent: group) }
let!(:project) { create(:project, namespace: subgroup) }
@@ -44,8 +21,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
subgroup.add_owner(user)
end
- it_behaves_like 'with packages'
-
it 'does allow a path update if there is not a root namespace change' do
expect(update_group(subgroup, user, path: 'updated')).to be true
expect(subgroup.errors[:path]).to be_empty
@@ -251,6 +226,163 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
end
end
+ context "path change validation" do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, namespace: subgroup) }
+
+ subject(:execute_update) { update_group(target_group, user, update_params) }
+
+ shared_examples 'not allowing a path update' do
+ let(:update_params) { { path: 'updated' } }
+
+ it 'does not allow a path update' do
+ target_group.add_maintainer(user)
+
+ expect(execute_update).to be false
+ expect(target_group.errors[:path]).to include('cannot change when group contains projects with NPM packages')
+ end
+ end
+
+ shared_examples 'allowing an update' do |on:|
+ let(:update_params) { { on => 'updated' } }
+
+ it "allows an update on #{on}" do
+ target_group.reload.add_maintainer(user)
+
+ expect(execute_update).to be true
+ expect(target_group.errors).to be_empty
+ expect(target_group[on]).to eq('updated')
+ end
+ end
+
+ context 'with namespaced npm packages' do
+ let_it_be(:package) { create(:npm_package, project: project, name: "@#{group.path}/test") }
+
+ context 'updating the root group' do
+ let_it_be_with_refind(:target_group) { group }
+
+ it_behaves_like 'not allowing a path update'
+ it_behaves_like 'allowing an update', on: :name
+
+ context 'when npm_package_registry_fix_group_path_validation is disabled' do
+ before do
+ stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
+ expect_next_instance_of(::Groups::UpdateService) do |service|
+ expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
+ end
+ end
+
+ it_behaves_like 'not allowing a path update'
+ it_behaves_like 'allowing an update', on: :name
+ end
+ end
+
+ context 'updating the subgroup' do
+ let_it_be_with_refind(:target_group) { subgroup }
+
+ it_behaves_like 'allowing an update', on: :path
+ it_behaves_like 'allowing an update', on: :name
+
+ context 'when npm_package_registry_fix_group_path_validation is disabled' do
+ before do
+ stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
+ expect_next_instance_of(::Groups::UpdateService) do |service|
+ expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
+ end
+ end
+
+ it_behaves_like 'not allowing a path update'
+ it_behaves_like 'allowing an update', on: :name
+ end
+ end
+ end
+
+ context 'with scoped npm packages' do
+ let_it_be(:package) { create(:npm_package, project: project, name: '@any_scope/test') }
+
+ context 'updating the root group' do
+ let_it_be_with_refind(:target_group) { group }
+
+ it_behaves_like 'allowing an update', on: :path
+ it_behaves_like 'allowing an update', on: :name
+
+ context 'when npm_package_registry_fix_group_path_validation is disabled' do
+ before do
+ stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
+ expect_next_instance_of(::Groups::UpdateService) do |service|
+ expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
+ end
+ end
+
+ it_behaves_like 'not allowing a path update'
+ it_behaves_like 'allowing an update', on: :name
+ end
+ end
+
+ context 'updating the subgroup' do
+ let_it_be_with_refind(:target_group) { subgroup }
+
+ it_behaves_like 'allowing an update', on: :path
+ it_behaves_like 'allowing an update', on: :name
+
+ context 'when npm_package_registry_fix_group_path_validation is disabled' do
+ before do
+ stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
+ expect_next_instance_of(::Groups::UpdateService) do |service|
+ expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
+ end
+ end
+
+ it_behaves_like 'not allowing a path update'
+ it_behaves_like 'allowing an update', on: :name
+ end
+ end
+ end
+
+ context 'with unscoped npm packages' do
+ let_it_be(:package) { create(:npm_package, project: project, name: 'test') }
+
+ context 'updating the root group' do
+ let_it_be_with_refind(:target_group) { group }
+
+ it_behaves_like 'allowing an update', on: :path
+ it_behaves_like 'allowing an update', on: :name
+
+ context 'when npm_package_registry_fix_group_path_validation is disabled' do
+ before do
+ stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
+ expect_next_instance_of(::Groups::UpdateService) do |service|
+ expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
+ end
+ end
+
+ it_behaves_like 'not allowing a path update'
+ it_behaves_like 'allowing an update', on: :name
+ end
+ end
+
+ context 'updating the subgroup' do
+ let_it_be_with_refind(:target_group) { subgroup }
+
+ it_behaves_like 'allowing an update', on: :path
+ it_behaves_like 'allowing an update', on: :name
+
+ context 'when npm_package_registry_fix_group_path_validation is disabled' do
+ before do
+ stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
+ expect_next_instance_of(::Groups::UpdateService) do |service|
+ expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
+ end
+ end
+
+ it_behaves_like 'not allowing a path update'
+ it_behaves_like 'allowing an update', on: :name
+ end
+ end
+ end
+ end
+
context 'when user is not group owner' do
context 'when group is private' do
before do
@@ -358,6 +490,32 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
end
end
+ context 'updating default_branch_protection_defaults' do
+ let(:branch_protection) { ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys }
+
+ let(:service) do
+ described_class.new(internal_group, user, default_branch_protection_defaults: branch_protection)
+ end
+
+ let(:settings) { internal_group.namespace_settings }
+ let(:expected_settings) { branch_protection }
+
+ context 'for users who have the ability to update default_branch_protection_defaults' do
+ it 'updates default_branch_protection attribute' do
+ internal_group.add_owner(user)
+
+ expect { service.execute }.to change { internal_group.default_branch_protection_defaults }.from({}).to(expected_settings)
+ end
+ end
+
+ context 'for users who do not have the ability to update default_branch_protection_defaults' do
+ it 'does not update the attribute' do
+ expect { service.execute }.not_to change { internal_group.default_branch_protection_defaults }
+ expect { service.execute }.not_to change { internal_group.namespace_settings.default_branch_protection_defaults }
+ end
+ end
+ end
+
context 'EventStore' do
let(:service) { described_class.new(group, user, **params) }
let(:root_group) { create(:group, path: 'root') }
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index 00eabb5c875..dc0d50bdab4 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -67,6 +67,21 @@ RSpec.describe Groups::UpdateSharedRunnersService, feature_category: :groups_and
.and change { sub_group.shared_runners_enabled }.from(false).to(true)
.and change { project.shared_runners_enabled }.from(false).to(true)
end
+
+ context 'when already allowing descendants to override' do
+ let(:group) { create(:group, :shared_runners_disabled_and_overridable) }
+
+ it 'enables shared Runners for itself and descendants' do
+ expect do
+ expect(subject[:status]).to eq(:success)
+
+ reload_models(group, sub_group, project)
+ end.to change { group.shared_runners_enabled }.from(false).to(true)
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
+ .and change { sub_group.shared_runners_enabled }.from(false).to(true)
+ .and change { project.shared_runners_enabled }.from(false).to(true)
+ end
+ end
end
context 'when group has pending builds' do
@@ -101,7 +116,7 @@ RSpec.describe Groups::UpdateSharedRunnersService, feature_category: :groups_and
context 'disable shared Runners' do
let!(:group) { create(:group) }
- let!(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
+ let!(:sub_group) { create(:group, :shared_runners_disabled_and_overridable, parent: group) }
let!(:sub_group2) { create(:group, parent: group) }
let!(:project) { create(:project, group: group, shared_runners_enabled: true) }
let!(:project2) { create(:project, group: sub_group2, shared_runners_enabled: true) }
@@ -124,7 +139,7 @@ RSpec.describe Groups::UpdateSharedRunnersService, feature_category: :groups_and
end
context 'with override on self' do
- let(:group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+ let(:group) { create(:group, :shared_runners_disabled_and_overridable) }
it 'disables it' do
expect do
@@ -172,7 +187,7 @@ RSpec.describe Groups::UpdateSharedRunnersService, feature_category: :groups_and
end
context 'when ancestor disables shared Runners but allows to override' do
- let!(:parent) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+ let!(:parent) { create(:group, :shared_runners_disabled_and_overridable) }
let!(:group) { create(:group, :shared_runners_disabled, parent: parent) }
let!(:project) { create(:project, shared_runners_enabled: false, group: group) }
diff --git a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb
index 147bfccbfb7..12fafe3a318 100644
--- a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb
+++ b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFileS3
let(:secret_access_key) { 'secret_access_key' }
let(:file_exists) { true }
let(:content_type) { 'application/x-tar' }
- let(:content_length) { 2.gigabytes }
+ let(:content_length) { 10.megabytes }
let(:presigned_url) { 'https://external.file.path/file.tar.gz?PRESIGNED=true&TOKEN=some-token' }
let(:s3_double) do
@@ -39,6 +39,8 @@ RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFileS3
# Avoid network requests
expect(Aws::S3::Client).to receive(:new).and_return(double)
expect(Aws::S3::Object).to receive(:new).and_return(s3_double)
+
+ stub_application_setting(max_import_remote_file_size: 10)
end
describe 'validation' do
@@ -59,12 +61,12 @@ RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFileS3
end
context 'content-length validation' do
- let(:content_length) { 11.gigabytes }
+ let(:content_length) { 11.megabytes }
it 'validates the remote content-length' do
expect(subject).not_to be_valid
expect(subject.errors.full_messages)
- .to include('Content length is too big (should be at most 10 GiB)')
+ .to include('Content length is too big (should be at most 10 MiB)')
end
end
diff --git a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb
index 0807a0e9d05..a1df6357eca 100644
--- a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb
+++ b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb
@@ -9,8 +9,10 @@ RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFile,
subject { described_class.new(params: params) }
before do
+ stub_application_setting(max_import_remote_file_size: 10)
+
stub_headers_for(remote_url, {
- 'content-length' => 10.gigabytes,
+ 'content-length' => 10.megabytes,
'content-type' => 'application/gzip'
})
end
@@ -54,11 +56,11 @@ RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFile,
context 'when request is not from an S3 server' do
it 'validates the remote content-length' do
- stub_headers_for(remote_url, { 'content-length' => 11.gigabytes })
+ stub_application_setting(max_import_remote_file_size: 1)
expect(subject).not_to be_valid
expect(subject.errors.full_messages)
- .to include('Content length is too big (should be at most 10 GiB)')
+ .to include('Content length is too big (should be at most 1 MiB)')
end
it 'validates the remote content-type' do
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index a76d575a1e0..769e8adc750 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -31,6 +31,23 @@ RSpec.describe Issuable::BulkUpdateService, feature_category: :team_planning do
end
end
+ shared_examples 'updates confidentiality' do
+ it 'succeeds' do
+ result = bulk_update(issuables, confidential: true)
+
+ expect(result.success?).to be_truthy
+ expect(result.payload[:count]).to eq(issuables.count)
+ end
+
+ it 'updates the issuables confidentiality' do
+ bulk_update(issuables, confidential: true)
+
+ issuables.each do |issuable|
+ expect(issuable.reload.confidential).to be(true)
+ end
+ end
+ end
+
shared_examples 'updating labels' do
def create_issue_with_labels(labels)
create(:labeled_issue, project: project, labels: labels)
@@ -303,6 +320,16 @@ RSpec.describe Issuable::BulkUpdateService, feature_category: :team_planning do
end
end
+ describe 'updating confidentiality' do
+ let(:issuables) { create_list(:issue, 2, project: project) }
+
+ it_behaves_like 'updates confidentiality'
+
+ it_behaves_like 'not scheduling cached group count clear' do
+ let(:params) { { confidential: true } }
+ end
+ end
+
describe 'updating labels' do
let(:bug) { create(:label, project: project) }
let(:regression) { create(:label, project: project) }
@@ -390,6 +417,30 @@ RSpec.describe Issuable::BulkUpdateService, feature_category: :team_planning do
end
end
+ describe 'updating confidentiality' do
+ let_it_be(:project) { create(:project, :repository, group: group) }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ context 'with issues' do
+ let(:issuables) { create_list(:issue, 2, project: project) }
+
+ it_behaves_like 'updates confidentiality'
+ end
+
+ context 'with merge requests' do
+ let(:issuables) { [create(:merge_request, source_project: project, target_project: project)] }
+
+ it 'does not throw an error' do
+ result = bulk_update(issuables, confidential: true)
+
+ expect(result.success?).to be_truthy
+ end
+ end
+ end
+
describe 'updating labels' do
let(:project) { create(:project, :repository, group: group) }
let(:bug) { create(:group_label, group: group) }
diff --git a/spec/services/issues/import_csv_service_spec.rb b/spec/services/issues/import_csv_service_spec.rb
index 6a147782209..660686cf805 100644
--- a/spec/services/issues/import_csv_service_spec.rb
+++ b/spec/services/issues/import_csv_service_spec.rb
@@ -22,6 +22,8 @@ RSpec.describe Issues::ImportCsvService, feature_category: :team_planning do
describe '#execute' do
subject { service.execute }
+ it_behaves_like 'performs a spam check', true
+
it 'sets all issueable attributes and executes quick actions' do
project.add_developer(user)
project.add_developer(assignee)
@@ -38,5 +40,13 @@ RSpec.describe Issues::ImportCsvService, feature_category: :team_planning do
)
)
end
+
+ context 'when user is an admin' do
+ before do
+ allow(user).to receive(:can_admin_all_resources?).and_return(true)
+ end
+
+ it_behaves_like 'performs a spam check', false
+ end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index a5151925c52..c677dc0315c 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
let_it_be(:label) { create(:label, title: 'a', project: project) }
let_it_be(:label2) { create(:label, title: 'b', project: project) }
+ let_it_be(:label3) { create(:label, title: 'c', project: project) }
let_it_be(:milestone) { create(:milestone, project: project) }
let(:issue) do
@@ -992,75 +993,18 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
end
context 'updating labels' do
- let(:label3) { create(:label, project: project) }
- let(:result) { described_class.new(container: project, current_user: user, params: params).execute(issue).reload }
-
- context 'when add_label_ids and label_ids are passed' do
- let(:params) { { label_ids: [label.id], add_label_ids: [label3.id] } }
-
- before do
- issue.update!(labels: [label2])
- end
-
- it 'replaces the labels with the ones in label_ids and adds those in add_label_ids' do
- expect(result.label_ids).to contain_exactly(label.id, label3.id)
- end
- end
-
- context 'when remove_label_ids and label_ids are passed' do
- let(:params) { { label_ids: [label.id, label2.id, label3.id], remove_label_ids: [label.id] } }
-
- before do
- issue.update!(labels: [label, label3])
- end
-
- it 'replaces the labels with the ones in label_ids and removes those in remove_label_ids' do
- expect(result.label_ids).to contain_exactly(label2.id, label3.id)
- end
- end
-
- context 'when add_label_ids and remove_label_ids are passed' do
- let(:params) { { add_label_ids: [label3.id], remove_label_ids: [label.id] } }
-
- before do
- issue.update!(labels: [label])
- end
-
- it 'adds the passed labels' do
- expect(result.label_ids).to include(label3.id)
- end
-
- it 'removes the passed labels' do
- expect(result.label_ids).not_to include(label.id)
- end
- end
-
- context 'when same id is passed as add_label_ids and remove_label_ids' do
- let(:params) { { add_label_ids: [label.id], remove_label_ids: [label.id] } }
-
- context 'for a label assigned to an issue' do
- it 'removes the label' do
- issue.update!(labels: [label])
-
- expect(result.label_ids).to be_empty
- end
- end
-
- context 'for a label not assigned to an issue' do
- it 'does not add the label' do
- expect(result.label_ids).to be_empty
- end
- end
- end
+ let(:label_a) { label }
+ let(:label_b) { label2 }
+ let(:label_c) { label3 }
+ let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) }
+ let(:issuable) { issue }
- context 'when duplicate label titles are given' do
- let(:params) do
- { labels: [label3.title, label3.title] }
- end
+ it_behaves_like 'updating issuable labels'
+ it_behaves_like 'keeps issuable labels sorted after update'
+ it_behaves_like 'broadcasting issuable labels updates'
- it 'assigns the label once' do
- expect(result.labels).to contain_exactly(label3)
- end
+ def update_issuable(update_params)
+ update_issue(update_params)
end
end
@@ -1513,19 +1457,6 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
end
end
- context 'labels are updated' do
- let(:label_a) { label }
- let(:label_b) { label2 }
- let(:issuable) { issue }
-
- it_behaves_like 'keeps issuable labels sorted after update'
- it_behaves_like 'broadcasting issuable labels updates'
-
- def update_issuable(update_params)
- update_issue(update_params)
- end
- end
-
it_behaves_like 'issuable record that supports quick actions' do
let(:existing_issue) { create(:issue, project: project) }
let(:issuable) { described_class.new(container: project, current_user: user, params: params).execute(existing_issue) }
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
index 51314c2c226..c9f75283c75 100644
--- a/spec/services/labels/available_labels_service_spec.rb
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -2,15 +2,18 @@
require 'spec_helper'
RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:project) { create(:project, :public, group: group) }
let(:group) { create(:group) }
let(:project_label) { create(:label, project: project) }
+ let(:project_label_locked) { create(:label, project: project, lock_on_merge: true) }
let(:other_project_label) { create(:label) }
+ let(:other_project_label_locked) { create(:label, lock_on_merge: true) }
let(:group_label) { create(:group_label, group: group) }
+ let(:group_label_locked) { create(:group_label, group: group, lock_on_merge: true) }
let(:other_group_label) { create(:group_label) }
- let!(:labels) { [project_label, other_project_label, group_label, other_group_label] }
+ let!(:labels) { [project_label, other_project_label, group_label, other_group_label, project_label_locked, other_project_label_locked, group_label_locked] }
describe '#find_or_create_by_titles' do
let(:label_titles) { labels.map(&:title).push('non existing title') }
@@ -20,7 +23,7 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'returns only relevant label ids' do
result = described_class.new(user, project, labels: label_titles).find_or_create_by_titles
- expect(result).to match_array([project_label, group_label])
+ expect(result).to match_array([project_label, group_label, project_label_locked, group_label_locked])
end
end
@@ -32,7 +35,7 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'creates new labels for not found titles' do
result = described_class.new(user, project, labels: label_titles).find_or_create_by_titles
- expect(result.count).to eq(5)
+ expect(result.count).to eq(8)
expect(result).to include(project_label, group_label)
expect(result).not_to include(other_project_label, other_group_label)
end
@@ -53,7 +56,7 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'returns only relevant label ids' do
result = described_class.new(user, group, labels: label_titles).find_or_create_by_titles
- expect(result).to match_array([group_label])
+ expect(result).to match_array([group_label, group_label_locked])
end
end
@@ -65,9 +68,9 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'creates new labels for not found titles' do
result = described_class.new(user, group, labels: label_titles).find_or_create_by_titles
- expect(result.count).to eq(5)
- expect(result).to include(group_label)
- expect(result).not_to include(project_label, other_project_label, other_group_label)
+ expect(result.count).to eq(8)
+ expect(result).to include(group_label, group_label_locked)
+ expect(result).not_to include(project_label, other_project_label, other_group_label, project_label_locked, other_project_label_locked)
end
end
end
@@ -80,13 +83,13 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'returns only relevant label ids' do
result = described_class.new(user, project, ids: label_ids).filter_labels_ids_in_param(:ids)
- expect(result).to match_array([project_label.id, group_label.id])
+ expect(result).to match_array([project_label.id, group_label.id, project_label_locked.id, group_label_locked.id])
end
it 'returns labels in preserved order' do
result = described_class.new(user, project, ids: label_ids.reverse).filter_labels_ids_in_param(:ids)
- expect(result).to eq([group_label.id, project_label.id])
+ expect(result).to eq([group_label_locked.id, project_label_locked.id, group_label.id, project_label.id])
end
end
@@ -94,7 +97,7 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'returns only relevant label ids' do
result = described_class.new(user, group, ids: label_ids).filter_labels_ids_in_param(:ids)
- expect(result).to match_array([group_label.id])
+ expect(result).to match_array([group_label.id, group_label_locked.id])
end
end
@@ -105,14 +108,46 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
end
end
+ describe '#filter_locked_labels_ids_in_param' do
+ let(:label_ids) { labels.map(&:id).push(non_existing_record_id) }
+
+ context 'when parent is a project' do
+ it 'returns only locked label ids' do
+ result = described_class.new(user, project, ids: label_ids).filter_locked_labels_ids_in_param(:ids)
+
+ expect(result).to match_array([project_label_locked.id, group_label_locked.id])
+ end
+
+ it 'returns labels in preserved order' do
+ result = described_class.new(user, project, ids: label_ids.reverse).filter_locked_labels_ids_in_param(:ids)
+
+ expect(result).to eq([group_label_locked.id, project_label_locked.id])
+ end
+ end
+
+ context 'when parent is a group' do
+ it 'returns only locked label ids' do
+ result = described_class.new(user, group, ids: label_ids).filter_locked_labels_ids_in_param(:ids)
+
+ expect(result).to match_array([group_label_locked.id])
+ end
+ end
+
+ it 'accepts a single id parameter' do
+ result = described_class.new(user, project, label_id: project_label_locked.id).filter_locked_labels_ids_in_param(:label_id)
+
+ expect(result).to match_array([project_label_locked.id])
+ end
+ end
+
describe '#available_labels' do
context 'when parent is a project' do
it 'returns only relevant labels' do
result = described_class.new(user, project, {}).available_labels
- expect(result.count).to eq(2)
- expect(result).to include(project_label, group_label)
- expect(result).not_to include(other_project_label, other_group_label)
+ expect(result.count).to eq(4)
+ expect(result).to include(project_label, group_label, project_label_locked, group_label_locked)
+ expect(result).not_to include(other_project_label, other_group_label, other_project_label_locked)
end
end
@@ -120,9 +155,9 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'returns only relevant labels' do
result = described_class.new(user, group, {}).available_labels
- expect(result.count).to eq(1)
- expect(result).to include(group_label)
- expect(result).not_to include(project_label, other_project_label, other_group_label)
+ expect(result.count).to eq(2)
+ expect(result).to include(group_label, group_label_locked)
+ expect(result).not_to include(project_label, other_project_label, other_group_label, project_label_locked, other_project_label_locked)
end
end
end
diff --git a/spec/services/labels/create_service_spec.rb b/spec/services/labels/create_service_spec.rb
index 9be611490cf..8dbe050990c 100644
--- a/spec/services/labels/create_service_spec.rb
+++ b/spec/services/labels/create_service_spec.rb
@@ -176,6 +176,42 @@ RSpec.describe Labels::CreateService, feature_category: :team_planning do
end
end
end
+
+ describe 'lock_on_merge' do
+ let_it_be(:params) { { title: 'Locked label', lock_on_merge: true } }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
+ end
+
+ it 'does not allow setting lock_on_merge' do
+ label = described_class.new(params).execute(project: project)
+ label2 = described_class.new(params).execute(group: group)
+ label3 = described_class.new(params).execute(template: true)
+
+ expect(label.lock_on_merge).to be_falsey
+ expect(label2.lock_on_merge).to be_falsey
+ expect(label3).not_to be_persisted
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ it 'allows setting lock_on_merge' do
+ label = described_class.new(params).execute(project: project)
+ label2 = described_class.new(params).execute(group: group)
+
+ expect(label.lock_on_merge).to be_truthy
+ expect(label2.lock_on_merge).to be_truthy
+ end
+
+ it 'does not allow setting lock_on_merge for templates' do
+ label = described_class.new(params).execute(template: true)
+
+ expect(label).not_to be_persisted
+ end
+ end
+ end
end
def params_with(color)
diff --git a/spec/services/labels/update_service_spec.rb b/spec/services/labels/update_service_spec.rb
index b9ac5282d10..9a8868dac10 100644
--- a/spec/services/labels/update_service_spec.rb
+++ b/spec/services/labels/update_service_spec.rb
@@ -71,6 +71,42 @@ RSpec.describe Labels::UpdateService, feature_category: :team_planning do
expect(label).not_to be_valid
end
end
+
+ describe 'lock_on_merge' do
+ let_it_be(:params) { { lock_on_merge: true } }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
+ end
+
+ it 'does not allow setting lock_on_merge' do
+ label = described_class.new(params).execute(@label)
+
+ expect(label.reload.lock_on_merge).to be_falsey
+
+ template_label = Labels::CreateService.new(title: 'Initial').execute(template: true)
+ label = described_class.new(params).execute(template_label)
+
+ expect(label.reload.lock_on_merge).to be_falsey
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ it 'allows setting lock_on_merge' do
+ label = described_class.new(params).execute(@label)
+
+ expect(label.reload.lock_on_merge).to be_truthy
+ end
+
+ it 'does not allow setting lock_on_merge for templates' do
+ template_label = Labels::CreateService.new(title: 'Initial').execute(template: true)
+ label = described_class.new(params).execute(template_label)
+
+ expect(label.reload.lock_on_merge).to be_falsey
+ end
+ end
+ end
end
def params_with(color)
diff --git a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
index 6eee83d5ee9..86f528d1ea7 100644
--- a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
+++ b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe LooseForeignKeys::BatchCleanerService, feature_category: :databas
let(:parent_record_1) { loose_fk_parent_table.create! }
let(:other_parent_record) { loose_fk_parent_table.create! }
- before(:all) do
+ before_all do
create_table_structure
end
diff --git a/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb b/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
index af010547cc9..b59339b24b4 100644
--- a/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
+++ b/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe LooseForeignKeys::ProcessDeletedRecordsService, feature_category:
let(:loose_fk_child_table_1_2) { table(:_test_loose_fk_child_table_1_2) }
let(:loose_fk_child_table_2_1) { table(:_test_loose_fk_child_table_2_1) }
- before(:all) do
+ before_all do
create_table_structure
end
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index c9dee0aadda..96fa8ab278d 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -121,7 +121,12 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
source.group.add_developer(member)
end
- it 'triggers the members added event' do
+ it 'triggers the members added and authorizations changed events' do
+ expect(Gitlab::EventStore)
+ .to receive(:publish)
+ .with(an_instance_of(ProjectAuthorizations::AuthorizationsChangedEvent))
+ .and_call_original
+
expect(Gitlab::EventStore)
.to receive(:publish)
.with(an_instance_of(Members::MembersAddedEvent))
diff --git a/spec/services/members/import_project_team_service_spec.rb b/spec/services/members/import_project_team_service_spec.rb
index 7dcdb70f2cd..e0657cfa8cc 100644
--- a/spec/services/members/import_project_team_service_spec.rb
+++ b/spec/services/members/import_project_team_service_spec.rb
@@ -8,7 +8,10 @@ RSpec.describe Members::ImportProjectTeamService, feature_category: :groups_and_
let_it_be(:target_project) { create(:project) }
let_it_be(:user) { create(:user) }
- subject { described_class.new(user, { id: target_project_id, project_id: source_project_id }) }
+ let(:source_project_id) { source_project.id }
+ let(:target_project_id) { target_project.id }
+
+ subject(:import) { described_class.new(user, { id: target_project_id, project_id: source_project_id }) }
before_all do
source_project.add_guest(user)
@@ -16,74 +19,110 @@ RSpec.describe Members::ImportProjectTeamService, feature_category: :groups_and_
end
context 'when project team members are imported successfully' do
- let(:source_project_id) { source_project.id }
- let(:target_project_id) { target_project.id }
+ it 'returns a successful response' do
+ result = import.execute
- it 'returns true' do
- expect(subject.execute).to be(true)
+ expect(result).to be_a(ServiceResponse)
+ expect(result.success?).to be(true)
+ expect(result.message).to eq('Successfully imported')
end
end
context 'when the project team import fails' do
context 'when the target project cannot be found' do
- let(:source_project_id) { source_project.id }
let(:target_project_id) { non_existing_record_id }
- it 'returns false' do
- expect(subject.execute).to be(false)
+ it 'returns unsuccessful response' do
+ result = import.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq('Target project does not exist')
+ expect(result.reason).to eq(:unprocessable_entity)
end
end
context 'when the source project cannot be found' do
let(:source_project_id) { non_existing_record_id }
- let(:target_project_id) { target_project.id }
- it 'returns false' do
- expect(subject.execute).to be(false)
+ it 'returns unsuccessful response' do
+ result = import.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq('Source project does not exist')
+ expect(result.reason).to eq(:unprocessable_entity)
end
end
context 'when the user doing the import does not exist' do
let(:user) { nil }
- let(:source_project_id) { source_project.id }
- let(:target_project_id) { target_project.id }
- it 'returns false' do
- expect(subject.execute).to be(false)
+ it 'returns unsuccessful response' do
+ result = import.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq('Forbidden')
+ expect(result.reason).to eq(:unprocessable_entity)
end
end
context 'when the user does not have permission to read the source project members' do
- let(:user) { create(:user) }
let(:source_project_id) { create(:project, :private).id }
- let(:target_project_id) { target_project.id }
- it 'returns false' do
- expect(subject.execute).to be(false)
+ it 'returns unsuccessful response' do
+ result = import.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq('Forbidden')
+ expect(result.reason).to eq(:unprocessable_entity)
end
end
context 'when the user does not have permission to admin the target project' do
- let(:source_project_id) { source_project.id }
let(:target_project_id) { create(:project).id }
- it 'returns false' do
- expect(subject.execute).to be(false)
+ it 'returns unsuccessful response' do
+ result = import.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq('Forbidden')
+ expect(result.reason).to eq(:unprocessable_entity)
end
end
context 'when the source and target project are valid but the ProjectTeam#import command fails' do
- let(:source_project_id) { source_project.id }
- let(:target_project_id) { target_project.id }
-
before do
allow_next_instance_of(ProjectTeam) do |project_team|
allow(project_team).to receive(:import).and_return(false)
end
end
- it 'returns false' do
- expect(subject.execute).to be(false)
+ it 'returns unsuccessful response' do
+ result = import.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq('Import failed')
+ expect(result.reason).to eq(:unprocessable_entity)
+ end
+ end
+
+ context 'when one of the imported project members is invalid' do
+ it 'returns unsuccessful response' do
+ project_bot = create(:user, :project_bot)
+ source_project.add_developer(project_bot)
+
+ result = import.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ message = { project_bot.username => 'User project bots cannot be added to other groups / projects' }
+ expect(result.message).to eq(message)
+ expect(result.payload[:total_members_count]).to eq(2)
end
end
end
diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb
index 1c4b1abcfdb..3860543a85e 100644
--- a/spec/services/members/update_service_spec.rb
+++ b/spec/services/members/update_service_spec.rb
@@ -219,6 +219,25 @@ RSpec.describe Members::UpdateService, feature_category: :groups_and_projects do
end
end
end
+
+ context 'when project members expiration date is updated with expiry_notified_at' do
+ let_it_be(:params) { { expires_at: 20.days.from_now } }
+
+ before do
+ group_project.group.add_owner(current_user)
+ members.each do |member|
+ member.update!(expiry_notified_at: Date.today)
+ end
+ end
+
+ it "clears expiry_notified_at" do
+ subject
+
+ members.each do |member|
+ expect(member.reload.expiry_notified_at).to be_nil
+ end
+ end
+ end
end
shared_examples 'updating a group' do
@@ -250,6 +269,24 @@ RSpec.describe Members::UpdateService, feature_category: :groups_and_projects do
subject
end
end
+
+ context 'when group members expiration date is updated with expiry_notified_at' do
+ let_it_be(:params) { { expires_at: 20.days.from_now } }
+
+ before do
+ members.each do |member|
+ member.update!(expiry_notified_at: Date.today)
+ end
+ end
+
+ it "clears expiry_notified_at" do
+ subject
+
+ members.each do |member|
+ expect(member.reload.expiry_notified_at).to be_nil
+ end
+ end
+ end
end
subject { update_service.execute(members, permission: permission) }
diff --git a/spec/services/merge_requests/cleanup_refs_service_spec.rb b/spec/services/merge_requests/cleanup_refs_service_spec.rb
index efb6265e3d8..c818c60ad5f 100644
--- a/spec/services/merge_requests/cleanup_refs_service_spec.rb
+++ b/spec/services/merge_requests/cleanup_refs_service_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe MergeRequests::CleanupRefsService, feature_category: :code_review
describe '#execute' do
before do
+ stub_feature_flags(merge_request_delete_gitaly_refs_in_batches: false)
stub_feature_flags(merge_request_cleanup_ref_worker_async: false)
# Need to re-enable this as it's being stubbed in spec_helper for
diff --git a/spec/services/merge_requests/create_ref_service_spec.rb b/spec/services/merge_requests/create_ref_service_spec.rb
new file mode 100644
index 00000000000..85ac651c1fa
--- /dev/null
+++ b/spec/services/merge_requests/create_ref_service_spec.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#execute' do
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:user) { project.creator }
+ let_it_be(:first_parent_ref) { project.default_branch_or_main }
+ let_it_be(:source_branch) { 'branch' }
+ let(:target_ref) { "refs/merge-requests/#{merge_request.iid}/train" }
+ let(:source_sha) { project.commit(source_branch).sha }
+ let(:squash) { false }
+
+ let(:merge_request) do
+ create(
+ :merge_request,
+ title: 'Merge request ref test',
+ author: user,
+ source_project: project,
+ target_project: project,
+ source_branch: source_branch,
+ target_branch: first_parent_ref,
+ squash: squash
+ )
+ end
+
+ subject(:result) do
+ described_class.new(
+ current_user: user,
+ merge_request: merge_request,
+ target_ref: target_ref,
+ source_sha: source_sha,
+ first_parent_ref: first_parent_ref
+ ).execute
+ end
+
+ context 'when there is a user-caused gitaly error' do
+ let(:source_sha) { '123' }
+
+ it 'returns an error response' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ context 'with valid inputs' do
+ before_all do
+ # ensure first_parent_ref is created before source_sha
+ project.repository.create_file(
+ user,
+ 'README.md',
+ '',
+ message: 'Base parent commit 1',
+ branch_name: first_parent_ref
+ )
+ project.repository.create_branch(source_branch, first_parent_ref)
+
+ # create two commits source_branch to test squashing
+ project.repository.create_file(
+ user,
+ '.gitlab-ci.yml',
+ '',
+ message: 'Feature branch commit 1',
+ branch_name: source_branch
+ )
+
+ project.repository.create_file(
+ user,
+ '.gitignore',
+ '',
+ message: 'Feature branch commit 2',
+ branch_name: source_branch
+ )
+
+ # create an extra commit not present on source_branch
+ project.repository.create_file(
+ user,
+ 'EXTRA',
+ '',
+ message: 'Base parent commit 2',
+ branch_name: first_parent_ref
+ )
+ end
+
+ it 'writes the merged result into target_ref', :aggregate_failures do
+ expect(result[:status]).to eq :success
+ expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
+ expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
+ match(
+ [
+ a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
+ 'Feature branch commit 2',
+ 'Feature branch commit 1',
+ 'Base parent commit 2',
+ 'Base parent commit 1'
+ ]
+ )
+ )
+ end
+
+ context 'when squash is requested' do
+ let(:squash) { true }
+
+ it 'writes the squashed result', :aggregate_failures do
+ expect(result[:status]).to eq :success
+ expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
+ expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
+ match(
+ [
+ a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
+ "#{merge_request.title}\n",
+ 'Base parent commit 2',
+ 'Base parent commit 1'
+ ]
+ )
+ )
+ end
+ end
+
+ context 'when semi-linear merges are enabled' do
+ before do
+ project.merge_method = :rebase_merge
+ project.save!
+ end
+
+ it 'writes the semi-linear merged result', :aggregate_failures do
+ expect(result[:status]).to eq :success
+ expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
+ expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
+ match(
+ [
+ a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
+ 'Feature branch commit 2',
+ 'Feature branch commit 1',
+ 'Base parent commit 2',
+ 'Base parent commit 1'
+ ]
+ )
+ )
+ end
+ end
+
+ context 'when fast-forward merges are enabled' do
+ before do
+ project.merge_method = :ff
+ project.save!
+ end
+
+ it 'writes the rebased merged result', :aggregate_failures do
+ expect(result[:status]).to eq :success
+ expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
+ eq(
+ [
+ 'Feature branch commit 2',
+ 'Feature branch commit 1',
+ 'Base parent commit 2',
+ 'Base parent commit 1'
+ ]
+ )
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index f2dbc02f12c..c48ed19e40d 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe MergeRequests::FfMergeService, feature_category: :code_review_wor
let(:valid_merge_params) { { sha: merge_request.diff_head_sha } }
before do
+ stub_feature_flags(refactor_merge_service: false)
project.add_maintainer(user)
project.add_developer(user2)
end
diff --git a/spec/services/merge_requests/merge_orchestration_service_spec.rb b/spec/services/merge_requests/merge_orchestration_service_spec.rb
index b9bf936eddd..7e6eeec2a2d 100644
--- a/spec/services/merge_requests/merge_orchestration_service_spec.rb
+++ b/spec/services/merge_requests/merge_orchestration_service_spec.rb
@@ -35,6 +35,8 @@ RSpec.describe MergeRequests::MergeOrchestrationService, feature_category: :code
before do
create(:ci_pipeline, :detached_merge_request_pipeline, project: project, merge_request: merge_request)
merge_request.update_head_pipeline
+
+ stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
end
it 'schedules auto merge' do
@@ -116,6 +118,8 @@ RSpec.describe MergeRequests::MergeOrchestrationService, feature_category: :code
before do
create(:ci_pipeline, :detached_merge_request_pipeline, project: project, merge_request: merge_request)
merge_request.update_head_pipeline
+
+ stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
end
it 'fetches preferred auto merge strategy', if: Gitlab.ee? do
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index c77cf288f56..1faa1fd3644 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -8,67 +8,62 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
- let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
- let(:project) { merge_request.project }
+ where(:ff_refactor_merge_service_enabled) { [true, false] }
- before do
- project.add_maintainer(user)
- project.add_developer(user2)
- end
-
- describe '#execute' do
- let(:service) { described_class.new(project: project, current_user: user, params: merge_params) }
- let(:merge_params) do
- { commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
- end
+ with_them do
+ let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
+ let(:project) { merge_request.project }
- let(:lease_key) { "merge_requests_merge_service:#{merge_request.id}" }
- let!(:lease) { stub_exclusive_lease(lease_key) }
+ before do
+ stub_feature_flags(refactor_merge_service: ff_refactor_merge_service_enabled)
- context 'valid params' do
- before do
- allow(service).to receive(:execute_hooks)
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
+ project.add_maintainer(user)
+ project.add_developer(user2)
+ end
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
+ describe '#execute' do
+ let(:service) { described_class.new(project: project, current_user: user, params: merge_params) }
+ let(:merge_params) do
+ { commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
end
- it { expect(merge_request).to be_valid }
- it { expect(merge_request).to be_merged }
+ let(:lease_key) { "merge_requests_merge_service:#{merge_request.id}" }
+ let!(:lease) { stub_exclusive_lease(lease_key) }
- it 'persists merge_commit_sha and nullifies in_progress_merge_commit_sha' do
- expect(merge_request.merge_commit_sha).not_to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ shared_examples 'with valid params' do
+ before do
+ allow(service).to receive(:execute_hooks)
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- it 'does not update squash_commit_sha if it is not a squash' do
- expect(merge_request.squash_commit_sha).to be_nil
- end
+ perform_enqueued_jobs do
+ service.execute(merge_request)
+ end
+ end
- it 'sends email to user2 about merge of new merge_request' do
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
+ it { expect(merge_request).to be_valid }
+ it { expect(merge_request).to be_merged }
- context 'note creation' do
- it 'creates resource state event about merge_request merge' do
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
+ it 'does not update squash_commit_sha if it is not a squash' do
+ expect(merge_request.squash_commit_sha).to be_nil
end
- end
- context 'when squashing' do
- let(:merge_params) do
- { commit_message: 'Merge commit message',
- squash_commit_message: 'Squash commit message',
- sha: merge_request.diff_head_sha }
+ it 'sends email to user2 about merge of new merge_request' do
+ email = ActionMailer::Base.deliveries.last
+ expect(email.to.first).to eq(user2.email)
+ expect(email.subject).to include(merge_request.title)
end
+ context 'note creation' do
+ it 'creates resource state event about merge_request merge' do
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('merged')
+ end
+ end
+ end
+
+ shared_examples 'squashing' do
+ # A merge request with 5 commits
let(:merge_request) do
- # A merge request with 5 commits
create(
:merge_request,
:simple,
@@ -80,6 +75,21 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
end
+ let(:merge_params) do
+ { commit_message: 'Merge commit message',
+ squash_commit_message: 'Squash commit message',
+ sha: merge_request.diff_head_sha }
+ end
+
+ before do
+ allow(service).to receive(:execute_hooks)
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
+
+ perform_enqueued_jobs do
+ service.execute(merge_request)
+ end
+ end
+
it 'merges the merge request with squashed commits' do
expect(merge_request).to be_merged
@@ -96,357 +106,339 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
expect(merge_request.squash_commit_sha).to eq(squash_commit.id)
end
end
- end
- context 'running the service once' do
- let(:ref) { merge_request.to_reference(full: true) }
- let(:jid) { SecureRandom.hex }
-
- let(:messages) do
- [
- /#{ref} - Git merge started on JID #{jid}/,
- /#{ref} - Git merge finished on JID #{jid}/,
- /#{ref} - Post merge started on JID #{jid}/,
- /#{ref} - Post merge finished on JID #{jid}/,
- /#{ref} - Merge process finished on JID #{jid}/
- ]
- end
-
- before do
- merge_request.update!(merge_jid: jid)
- ::Gitlab::ApplicationContext.push(caller_id: 'MergeWorker')
- end
-
- it 'logs status messages' do
- allow(Gitlab::AppLogger).to receive(:info).and_call_original
+ context 'when merge strategy is merge commit' do
+ it 'persists merge_commit_sha and nullifies in_progress_merge_commit_sha' do
+ service.execute(merge_request)
- messages.each do |message|
- expect(Gitlab::AppLogger).to receive(:info).with(
- hash_including(
- 'meta.caller_id' => 'MergeWorker',
- message: message,
- merge_request_info: ref
- )
- ).and_call_original
+ expect(merge_request.merge_commit_sha).not_to be_nil
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
- service.execute(merge_request)
- end
- end
-
- context 'running the service multiple time' do
- it 'is idempotent' do
- 2.times { service.execute(merge_request) }
-
- expect(merge_request.merge_error).to be_falsey
- expect(merge_request).to be_valid
- expect(merge_request).to be_merged
-
- commit_messages = project.repository.commits('master', limit: 2).map(&:message)
- expect(commit_messages.uniq.size).to eq(2)
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
- end
-
- context 'when an invalid sha is passed' do
- let(:merge_request) do
- create(
- :merge_request,
- :simple,
- author: user2,
- assignees: [user2],
- squash: true,
- source_branch: 'improve/awesome',
- target_branch: 'fix'
- )
- end
+ it_behaves_like 'with valid params'
- let(:merge_params) do
- { sha: merge_request.commits.second.sha }
+ it_behaves_like 'squashing'
end
- it 'does not merge the MR' do
- service.execute(merge_request)
+ context 'when merge strategy is fast forward' do
+ before do
+ project.update!(merge_requests_ff_only_enabled: true)
+ end
- expect(merge_request).not_to be_merged
- expect(merge_request.merge_error).to match(/Branch has been updated/)
- end
- end
+ let(:merge_request) do
+ create(
+ :merge_request,
+ source_branch: 'flatten-dir',
+ target_branch: 'improve/awesome',
+ assignees: [user2],
+ author: create(:user)
+ )
+ end
- context 'when the `sha` param is missing' do
- let(:merge_params) { {} }
+ it 'does not create merge_commit_sha and nullifies in_progress_merge_commit_sha' do
+ service.execute(merge_request)
- it 'returns the error' do
- merge_error = 'Branch has been updated since the merge was requested. '\
- 'Please review the changes.'
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
- expect { service.execute(merge_request) }
- .to change { merge_request.merge_error }
- .from(nil).to(merge_error)
- end
- end
+ it_behaves_like 'with valid params'
- context 'closes related issues' do
- before do
- allow(project).to receive(:default_branch).and_return(merge_request.target_branch)
- end
+ it 'updates squash_commit_sha if it is a squash' do
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- it 'closes GitLab issue tracker issues', :sidekiq_inline do
- issue = create :issue, project: project
- commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
- allow(merge_request).to receive(:commits).and_return([commit])
- merge_request.cache_merge_request_closes_issues!
+ merge_request.update!(squash: true)
- service.execute(merge_request)
+ expect { service.execute(merge_request) }
+ .to change { merge_request.squash_commit_sha }
+ .from(nil)
- expect(issue.reload.closed?).to be_truthy
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
end
- context 'with Jira integration' do
- include JiraIntegrationHelpers
+ context 'running the service once' do
+ let(:ref) { merge_request.to_reference(full: true) }
+ let(:jid) { SecureRandom.hex }
- let(:jira_tracker) { project.create_jira_integration }
- let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
- let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
+ let(:messages) do
+ [
+ /#{ref} - Git merge started on JID #{jid}/,
+ /#{ref} - Git merge finished on JID #{jid}/,
+ /#{ref} - Post merge started on JID #{jid}/,
+ /#{ref} - Post merge finished on JID #{jid}/,
+ /#{ref} - Merge process finished on JID #{jid}/
+ ]
+ end
before do
- stub_jira_integration_test
- project.update!(has_external_issue_tracker: true)
- jira_integration_settings
- stub_jira_urls(jira_issue.id)
- allow(merge_request).to receive(:commits).and_return([commit])
+ merge_request.update!(merge_jid: jid)
+ ::Gitlab::ApplicationContext.push(caller_id: 'MergeWorker')
end
- it 'closes issues on Jira issue tracker' do
- jira_issue = ExternalIssue.new('JIRA-123', project)
- stub_jira_urls(jira_issue)
- commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
- allow(merge_request).to receive(:commits).and_return([commit])
+ it 'logs status messages' do
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
- expect_any_instance_of(Integrations::Jira).to receive(:close_issue).with(merge_request, jira_issue, user).once
+ messages.each do |message|
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ 'meta.caller_id' => 'MergeWorker',
+ message: message,
+ merge_request_info: ref
+ )
+ ).and_call_original
+ end
service.execute(merge_request)
end
+ end
- context 'wrong issue markdown' do
- it 'does not close issues on Jira issue tracker' do
- jira_issue = ExternalIssue.new('#JIRA-123', project)
- stub_jira_urls(jira_issue)
- commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
- allow(merge_request).to receive(:commits).and_return([commit])
+ context 'running the service multiple time' do
+ it 'is idempotent' do
+ 2.times { service.execute(merge_request) }
- expect_any_instance_of(Integrations::Jira).not_to receive(:close_issue)
+ expect(merge_request.merge_error).to be_falsey
+ expect(merge_request).to be_valid
+ expect(merge_request).to be_merged
- service.execute(merge_request)
- end
+ commit_messages = project.repository.commits('master', limit: 2).map(&:message)
+ expect(commit_messages.uniq.size).to eq(2)
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
end
- end
- context 'closes related todos' do
- let(:merge_request) { create(:merge_request, assignees: [user], author: user) }
- let(:project) { merge_request.project }
-
- let!(:todo) do
- create(:todo, :assigned,
- project: project,
- author: user,
- user: user,
- target: merge_request)
- end
+ context 'when an invalid sha is passed' do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :simple,
+ author: user2,
+ assignees: [user2],
+ squash: true,
+ source_branch: 'improve/awesome',
+ target_branch: 'fix'
+ )
+ end
- before do
- allow(service).to receive(:execute_hooks)
+ let(:merge_params) do
+ { sha: merge_request.commits.second.sha }
+ end
- perform_enqueued_jobs do
+ it 'does not merge the MR' do
service.execute(merge_request)
- todo.reload
+
+ expect(merge_request).not_to be_merged
+ expect(merge_request.merge_error).to match(/Branch has been updated/)
end
end
- it { expect(todo).to be_done }
- end
+ context 'when the `sha` param is missing' do
+ let(:merge_params) { {} }
+
+ it 'returns the error' do
+ merge_error = 'Branch has been updated since the merge was requested. '\
+ 'Please review the changes.'
- context 'source branch removal' do
- context 'when the source branch is protected' do
- let(:service) do
- described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
+ expect { service.execute(merge_request) }
+ .to change { merge_request.merge_error }
+ .from(nil).to(merge_error)
end
+ end
+ context 'closes related issues' do
before do
- create(:protected_branch, project: project, name: merge_request.source_branch)
+ allow(project).to receive(:default_branch).and_return(merge_request.target_branch)
end
- it 'does not delete the source branch' do
- expect(::Branches::DeleteService).not_to receive(:new)
+ it 'closes GitLab issue tracker issues', :sidekiq_inline do
+ issue = create :issue, project: project
+ commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
+ allow(merge_request).to receive(:commits).and_return([commit])
+ merge_request.cache_merge_request_closes_issues!
service.execute(merge_request)
- end
- end
- context 'when the source branch is the default branch' do
- let(:service) do
- described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
+ expect(issue.reload.closed?).to be_truthy
end
- before do
- allow(project).to receive(:root_ref?).with(merge_request.source_branch).and_return(true)
- end
+ context 'with Jira integration' do
+ include JiraIntegrationHelpers
- it 'does not delete the source branch' do
- expect(::Branches::DeleteService).not_to receive(:new)
- service.execute(merge_request)
- end
- end
+ let(:jira_tracker) { project.create_jira_integration }
+ let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
+ let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
- context 'when the source branch can be removed' do
- context 'when MR author set the source branch to be removed' do
before do
- merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
+ stub_jira_integration_test
+ project.update!(has_external_issue_tracker: true)
+ jira_integration_settings
+ stub_jira_urls(jira_issue.id)
+ allow(merge_request).to receive(:commits).and_return([commit])
end
- # Not a real use case. When a merger merges a MR , merge param 'should_remove_source_branch' is defined
- it 'removes the source branch using the author user' do
- expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
+ it 'closes issues on Jira issue tracker' do
+ jira_issue = ExternalIssue.new('JIRA-123', project)
+ stub_jira_urls(jira_issue)
+ commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
+ allow(merge_request).to receive(:commits).and_return([commit])
+
+ expect_any_instance_of(Integrations::Jira).to receive(:close_issue).with(merge_request, jira_issue, user).once
service.execute(merge_request)
-
- expect(merge_request.reload.should_remove_source_branch?).to be nil
end
- context 'when the merger set the source branch not to be removed' do
- let(:service) { described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => false)) }
+ context 'wrong issue markdown' do
+ it 'does not close issues on Jira issue tracker' do
+ jira_issue = ExternalIssue.new('#JIRA-123', project)
+ stub_jira_urls(jira_issue)
+ commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
+ allow(merge_request).to receive(:commits).and_return([commit])
- it 'does not delete the source branch' do
- expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
+ expect_any_instance_of(Integrations::Jira).not_to receive(:close_issue)
service.execute(merge_request)
-
- expect(merge_request.reload.should_remove_source_branch?).to be false
end
end
end
+ end
- context 'when MR merger set the source branch to be removed' do
- let(:service) do
- described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
- end
+ context 'closes related todos' do
+ let(:merge_request) { create(:merge_request, assignees: [user], author: user) }
+ let(:project) { merge_request.project }
- it 'removes the source branch using the current user' do
- expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
+ let!(:todo) do
+ create(:todo, :assigned,
+ project: project,
+ author: user,
+ user: user,
+ target: merge_request)
+ end
- service.execute(merge_request)
+ before do
+ allow(service).to receive(:execute_hooks)
- expect(merge_request.reload.should_remove_source_branch?).to be true
+ perform_enqueued_jobs do
+ service.execute(merge_request)
+ todo.reload
end
end
- end
- end
- context 'error handling' do
- before do
- allow(Gitlab::AppLogger).to receive(:error)
+ it { expect(todo).to be_done }
end
- context 'when source is missing' do
- it 'logs and saves error' do
- allow(merge_request).to receive(:diff_head_sha) { nil }
+ context 'source branch removal' do
+ context 'when the source branch is protected' do
+ let(:service) do
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
+ end
- error_message = 'No source for merge'
+ before do
+ create(:protected_branch, project: project, name: merge_request.source_branch)
+ end
- service.execute(merge_request)
+ it 'does not delete the source branch' do
+ expect(::Branches::DeleteService).not_to receive(:new)
- expect(merge_request.merge_error).to eq(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
+ service.execute(merge_request)
+ end
end
- end
-
- it 'logs and saves error if there is an exception' do
- error_message = 'error message'
- allow(service).to receive(:repository).and_raise(error_message)
- allow(service).to receive(:execute_hooks)
+ context 'when the source branch is the default branch' do
+ let(:service) do
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
+ end
- service.execute(merge_request)
+ before do
+ allow(project).to receive(:root_ref?).with(merge_request.source_branch).and_return(true)
+ end
- expect(merge_request.merge_error).to eq(described_class::GENERIC_ERROR_MESSAGE)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
+ it 'does not delete the source branch' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+ service.execute(merge_request)
+ end
+ end
- it 'logs and saves error if user is not authorized' do
- stub_exclusive_lease
+ context 'when the source branch can be removed' do
+ context 'when MR author set the source branch to be removed' do
+ before do
+ merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
+ end
- unauthorized_user = create(:user)
- project.add_reporter(unauthorized_user)
+ # Not a real use case. When a merger merges a MR , merge param 'should_remove_source_branch' is defined
+ it 'removes the source branch using the author user' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
- service = described_class.new(project: project, current_user: unauthorized_user)
+ service.execute(merge_request)
- service.execute(merge_request)
+ expect(merge_request.reload.should_remove_source_branch?).to be nil
+ end
- expect(merge_request.merge_error)
- .to eq('You are not allowed to merge this merge request')
- end
+ context 'when the merger set the source branch not to be removed' do
+ let(:service) { described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => false)) }
- it 'logs and saves error if there is an PreReceiveError exception' do
- error_message = 'error message'
+ it 'does not delete the source branch' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
- allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- allow(service).to receive(:execute_hooks)
+ service.execute(merge_request)
- service.execute(merge_request)
+ expect(merge_request.reload.should_remove_source_branch?).to be false
+ end
+ end
+ end
- expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
+ context 'when MR merger set the source branch to be removed' do
+ let(:service) do
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
+ end
- it 'logs and saves error if commit is not created' do
- allow_any_instance_of(Repository).to receive(:merge).and_return(false)
- allow(service).to receive(:execute_hooks)
+ it 'removes the source branch using the current user' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
- service.execute(merge_request)
+ service.execute(merge_request)
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(described_class::GENERIC_ERROR_MESSAGE)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(described_class::GENERIC_ERROR_MESSAGE)
- )
- )
+ expect(merge_request.reload.should_remove_source_branch?).to be true
+ end
+ end
+ end
end
- context 'when squashing is required' do
+ context 'error handling' do
before do
- merge_request.update!(source_branch: 'master', target_branch: 'feature')
- merge_request.target_project.project_setting.squash_always!
+ allow(Gitlab::AppLogger).to receive(:error)
end
- it 'raises an error if squashing is not done' do
- error_message = 'requires squashing commits'
+ context 'when source is missing' do
+ it 'logs and saves error' do
+ allow(merge_request).to receive(:diff_head_sha) { nil }
- service.execute(merge_request)
+ error_message = 'No source for merge'
- expect(merge_request).to be_open
+ service.execute(merge_request)
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
+ expect(merge_request.merge_error).to eq(error_message)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
+ end
+
+ it 'logs and saves error if there is an exception' do
+ error_message = 'error message'
+
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(error_message)
+ end
+ # we can remove these allows upon refactor_merge_service cleanup
+ allow(service).to receive(:repository).and_raise(error_message)
+ allow(service).to receive(:execute_hooks)
+
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error).to eq(described_class::GENERIC_ERROR_MESSAGE)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -454,25 +446,34 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
)
end
- end
- context 'when squashing' do
- before do
- merge_request.update!(source_branch: 'master', target_branch: 'feature')
+ it 'logs and saves error if user is not authorized' do
+ stub_exclusive_lease
+
+ unauthorized_user = create(:user)
+ project.add_reporter(unauthorized_user)
+
+ service = described_class.new(project: project, current_user: unauthorized_user)
+
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error)
+ .to eq('You are not allowed to merge this merge request')
end
- it 'logs and saves error if there is an error when squashing' do
- error_message = 'Squashing failed: Squash the commits locally, resolve any conflicts, then push the branch.'
+ it 'logs and saves error if there is an PreReceiveError exception' do
+ error_message = 'error message'
- allow_any_instance_of(MergeRequests::SquashService).to receive(:squash!).and_return(nil)
- merge_request.update!(squash: true)
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ end
+ # we can remove these allows upon refactor_merge_service cleanup
+ allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ allow(service).to receive(:execute_hooks)
service.execute(merge_request)
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
+ expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -481,68 +482,65 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
end
- it 'logs and saves error if there is an PreReceiveError exception' do
- error_message = 'error message'
-
- allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ it 'logs and saves error if commit is not created' do
+ allow_any_instance_of(Repository).to receive(:merge).and_return(false)
allow(service).to receive(:execute_hooks)
- merge_request.update!(squash: true)
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
+ expect(merge_request.merge_error).to include(described_class::GENERIC_ERROR_MESSAGE)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
+ message: a_string_matching(described_class::GENERIC_ERROR_MESSAGE)
)
)
end
- context 'when fast-forward merge is not allowed' do
+ context 'when squashing is required' do
before do
- allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
+ merge_request.update!(source_branch: 'master', target_branch: 'feature')
+ merge_request.target_project.project_setting.squash_always!
end
- %w(semi-linear ff).each do |merge_method|
- it "logs and saves error if merge is #{merge_method} only" do
- merge_method = 'rebase_merge' if merge_method == 'semi-linear'
- merge_request.project.update!(merge_method: merge_method)
- error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
- allow(service).to receive(:execute_hooks)
- expect(lease).to receive(:cancel)
+ it 'raises an error if squashing is not done' do
+ error_message = 'requires squashing commits'
- service.execute(merge_request)
+ service.execute(merge_request)
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
+ expect(merge_request).to be_open
+
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
)
- end
+ )
end
end
- end
-
- context 'when not mergeable' do
- let!(:error_message) { 'Merge request is not mergeable' }
- context 'with failing CI' do
+ context 'when squashing' do
before do
- allow(merge_request).to receive(:mergeable_ci_state?) { false }
+ merge_request.update!(source_branch: 'master', target_branch: 'feature')
end
- it 'logs and saves error' do
+ it 'logs and saves error if there is an error when squashing' do
+ error_message = 'Squashing failed: Squash the commits locally, resolve any conflicts, then push the branch.'
+
+ allow_any_instance_of(MergeRequests::SquashService).to receive(:squash!).and_return(nil)
+ merge_request.update!(squash: true)
+
service.execute(merge_request)
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -550,16 +548,24 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
)
end
- end
- context 'with unresolved discussions' do
- before do
- allow(merge_request).to receive(:mergeable_discussions_state?) { false }
- end
+ it 'logs and saves error if there is an PreReceiveError exception' do
+ error_message = 'error message'
+
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ end
+ # we can remove these allows upon refactor_merge_service cleanup
+ allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ allow(service).to receive(:execute_hooks)
+ merge_request.update!(squash: true)
- it 'logs and saves error' do
service.execute(merge_request)
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -568,27 +574,102 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
end
- context 'when passing `skip_discussions_check: true` as `options` parameter' do
- it 'merges the merge request' do
- service.execute(merge_request, skip_discussions_check: true)
+ context 'when fast-forward merge is not allowed' do
+ before do
+ allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
+ end
- expect(merge_request).to be_valid
- expect(merge_request).to be_merged
+ %w(semi-linear ff).each do |merge_method|
+ it "logs and saves error if merge is #{merge_method} only" do
+ merge_method = 'rebase_merge' if merge_method == 'semi-linear'
+ merge_request.project.update!(merge_method: merge_method)
+ error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
+ allow(service).to receive(:execute_hooks)
+ expect(lease).to receive(:cancel)
+
+ service.execute(merge_request)
+
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
end
end
end
- end
- end
- context 'when the other sidekiq worker has already been running' do
- before do
- stub_exclusive_lease_taken(lease_key)
+ context 'when not mergeable' do
+ let!(:error_message) { 'Merge request is not mergeable' }
+
+ context 'with failing CI' do
+ before do
+ allow(merge_request).to receive(:mergeable_ci_state?) { false }
+ end
+
+ it 'logs and saves error' do
+ service.execute(merge_request)
+
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
+ end
+
+ context 'with unresolved discussions' do
+ before do
+ allow(merge_request).to receive(:mergeable_discussions_state?) { false }
+ end
+
+ it 'logs and saves error' do
+ service.execute(merge_request)
+
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
+
+ context 'when passing `skip_discussions_check: true` as `options` parameter' do
+ it 'merges the merge request' do
+ service.execute(merge_request, skip_discussions_check: true)
+
+ expect(merge_request).to be_valid
+ expect(merge_request).to be_merged
+ end
+ end
+ end
+ end
+
+ context 'when passing `check_mergeability_retry_lease: true` as `options` parameter' do
+ it 'call mergeable? with check_mergeability_retry_lease' do
+ expect(merge_request).to receive(:mergeable?).with(hash_including(check_mergeability_retry_lease: true)).and_call_original
+
+ service.execute(merge_request, check_mergeability_retry_lease: true)
+ end
+ end
end
- it 'does not execute service' do
- expect(service).not_to receive(:commit)
+ context 'when the other sidekiq worker has already been running' do
+ before do
+ stub_exclusive_lease_taken(lease_key)
+ end
+
+ it 'does not execute service' do
+ expect(service).not_to receive(:commit)
- service.execute(merge_request)
+ service.execute(merge_request)
+ end
end
end
end
diff --git a/spec/services/merge_requests/merge_strategies/from_source_branch_spec.rb b/spec/services/merge_requests/merge_strategies/from_source_branch_spec.rb
new file mode 100644
index 00000000000..20277fdeec7
--- /dev/null
+++ b/spec/services/merge_requests/merge_strategies/from_source_branch_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::MergeStrategies::FromSourceBranch, feature_category: :code_review_workflow do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
+ let(:project) { merge_request.project }
+
+ subject(:strategy) { described_class.new(merge_request, user) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '#validate!' do
+ context 'when source is missing' do
+ before do
+ allow(merge_request).to receive(:diff_head_sha).and_return(nil)
+ end
+
+ it 'raises source error when source is missing' do
+ error_message = 'No source for merge'
+
+ expect { strategy.validate! }
+ .to raise_exception(MergeRequests::MergeStrategies::StrategyError, error_message)
+ end
+ end
+
+ context 'when merge request should be rebased' do
+ before do
+ allow(merge_request).to receive(:should_be_rebased?).and_return(true)
+ end
+
+ it 'raises needs rebase error' do
+ error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
+
+ expect { strategy.validate! }
+ .to raise_exception(MergeRequests::MergeStrategies::StrategyError, error_message)
+ end
+ end
+
+ context 'when merge request should be squashed but is not' do
+ before do
+ merge_request.target_project.project_setting.squash_always!
+ merge_request.update!(squash: false)
+ end
+
+ it 'raises squashing error' do
+ error_message = 'This project requires squashing commits when merge requests are accepted.'
+
+ expect { strategy.validate! }
+ .to raise_exception(MergeRequests::MergeStrategies::StrategyError, error_message)
+ end
+ end
+ end
+
+ describe '#execute_git_merge!' do
+ context 'when fast-forward is required' do
+ before do
+ project.merge_method = :ff
+ project.save!
+ end
+
+ it 'performs a fast-forward merge' do
+ expect(merge_request.target_project.repository).to receive(:ff_merge).and_return('1234')
+
+ strategy.execute_git_merge!
+ end
+ end
+
+ context 'when a merge commit is required' do
+ before do
+ project.merge_method = :merge
+ project.save!
+ end
+
+ it 'performs standard merge' do
+ expect(merge_request.target_project.repository).to receive(:merge).and_return('1234')
+
+ strategy.execute_git_merge!
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 4e951f1bc85..5105a275fba 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -36,29 +36,6 @@ RSpec.describe MergeRequests::MergeToRefService, feature_category: :code_review_
expect(repository.ref_exists?(target_ref)).to be(true)
expect(ref_head.id).to eq(result[:commit_id])
end
-
- context 'cache_merge_to_ref_calls parameter', :use_clean_rails_memory_store_caching do
- before do
- # warm the cache
- #
- service.execute(merge_request, true)
- end
-
- context 'when true' do
- it 'caches the response', :request_store do
- expect { 3.times { service.execute(merge_request, true) } }
- .not_to change(Gitlab::GitalyClient, :get_request_count)
- end
- end
-
- context 'when false' do
- it 'does not cache the response', :request_store do
- expect(Gitlab::GitalyClient).to receive(:call).at_least(3).times.and_call_original
-
- 3.times { service.execute(merge_request, false) }
- end
- end
- end
end
shared_examples_for 'successfully evaluates pre-condition checks' do
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index 1afca466fb5..ecbe2d7e097 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -3,16 +3,19 @@
require 'spec_helper'
RSpec.describe MergeRequests::SquashService, feature_category: :source_code_management do
- let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request }) }
- let(:user) { project.first_owner }
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.first_owner }
+
+ let(:service) { described_class.new(merge_request: merge_request, current_user: user, commit_message: commit_message) }
+ let(:commit_message) { nil }
let(:repository) { project.repository.raw }
let(:log_error) { "Failed to squash merge request #{merge_request.to_reference(full: true)}:" }
+
let(:squash_dir_path) do
File.join(Gitlab.config.shared.path, 'tmp/squash', repository.gl_repository, merge_request.id.to_s)
end
- let(:merge_request_with_one_commit) do
+ let_it_be(:merge_request_with_one_commit) do
create(
:merge_request,
source_branch: 'feature', source_project: project,
@@ -20,7 +23,7 @@ RSpec.describe MergeRequests::SquashService, feature_category: :source_code_mana
)
end
- let(:merge_request_with_only_new_files) do
+ let_it_be(:merge_request_with_only_new_files) do
create(
:merge_request,
source_branch: 'video', source_project: project,
@@ -28,7 +31,7 @@ RSpec.describe MergeRequests::SquashService, feature_category: :source_code_mana
)
end
- let(:merge_request_with_large_files) do
+ let_it_be(:merge_request_with_large_files) do
create(
:merge_request,
source_branch: 'squash-large-files', source_project: project,
@@ -66,7 +69,7 @@ RSpec.describe MergeRequests::SquashService, feature_category: :source_code_mana
end
context 'when squash message matches commit message' do
- let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: merge_request.first_commit.safe_message }) }
+ let(:commit_message) { merge_request.first_commit.safe_message }
it 'returns that commit SHA' do
result = service.execute
@@ -82,7 +85,7 @@ RSpec.describe MergeRequests::SquashService, feature_category: :source_code_mana
end
context 'when squash message matches commit message but without trailing new line' do
- let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: merge_request.first_commit.safe_message.strip }) }
+ let(:commit_message) { merge_request.first_commit.safe_message.strip }
it 'returns that commit SHA' do
result = service.execute
@@ -98,7 +101,7 @@ RSpec.describe MergeRequests::SquashService, feature_category: :source_code_mana
end
end
- context 'the squashed commit' do
+ describe 'the squashed commit' do
let(:squash_sha) { service.execute[:squash_sha] }
let(:squash_commit) { project.repository.commit(squash_sha) }
@@ -125,7 +128,7 @@ RSpec.describe MergeRequests::SquashService, feature_category: :source_code_mana
end
context 'if a message was provided' do
- let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: message }) }
+ let(:commit_message) { message }
let(:message) { 'My custom message' }
let(:squash_sha) { service.execute[:squash_sha] }
@@ -191,7 +194,7 @@ RSpec.describe MergeRequests::SquashService, feature_category: :source_code_mana
include_examples 'the squash succeeds'
end
- context 'git errors' do
+ describe 'git errors' do
let(:merge_request) { merge_request_with_only_new_files }
let(:error) { 'A test error' }
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 79f608a4614..2f6db13a041 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -1300,14 +1300,45 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
let(:issuable) { described_class.new(project: project, current_user: user, params: params).execute(existing_merge_request) }
end
- context 'labels are updated' do
+ context 'updating labels' do
let(:label_a) { label }
let(:label_b) { create(:label, title: 'b', project: project) }
+ let(:label_c) { create(:label, title: 'c', project: project) }
+ let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) }
let(:issuable) { merge_request }
+ it_behaves_like 'updating issuable labels'
it_behaves_like 'keeps issuable labels sorted after update'
it_behaves_like 'broadcasting issuable labels updates'
+ context 'when merge request has been merged' do
+ context 'when remove_label_ids contains a locked label' do
+ let(:params) { { remove_label_ids: [label_locked.id] } }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
+ end
+
+ it 'removes locked labels' do
+ merge_request.update!(state: 'merged', labels: [label_a, label_locked])
+ update_issuable(params)
+
+ expect(merge_request.label_ids).to contain_exactly(label_a.id)
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ it 'does not remove locked labels' do
+ merge_request.update!(state: 'merged', labels: [label_a, label_locked])
+ update_issuable(params)
+
+ expect(merge_request.label_ids).to contain_exactly(label_a.id, label_locked.id)
+ end
+ end
+ end
+ end
+
def update_issuable(update_params)
update_merge_request(update_params)
end
diff --git a/spec/services/metrics/dashboard/annotations/create_service_spec.rb b/spec/services/metrics/dashboard/annotations/create_service_spec.rb
deleted file mode 100644
index 2bcfa54ead7..00000000000
--- a/spec/services/metrics/dashboard/annotations/create_service_spec.rb
+++ /dev/null
@@ -1,161 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::Annotations::CreateService, feature_category: :metrics do
- let_it_be(:user) { create(:user) }
-
- let(:description) { 'test annotation' }
- let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
- let(:starting_at) { 15.minutes.ago }
- let(:ending_at) { nil }
- let(:service_instance) { described_class.new(user, annotation_params) }
- let(:annotation_params) do
- {
- environment: environment,
- cluster: cluster,
- description: description,
- dashboard_path: dashboard_path,
- starting_at: starting_at,
- ending_at: ending_at
- }
- end
-
- shared_examples 'executed annotation creation' do
- it 'returns success response', :aggregate_failures do
- annotation = instance_double(::Metrics::Dashboard::Annotation)
- allow(::Metrics::Dashboard::Annotation).to receive(:new).and_return(annotation)
- allow(annotation).to receive(:save).and_return(true)
-
- response = service_instance.execute
-
- expect(response[:status]).to be :success
- expect(response[:annotation]).to be annotation
- end
-
- it 'creates annotation', :aggregate_failures do
- annotation = instance_double(::Metrics::Dashboard::Annotation)
-
- expect(::Metrics::Dashboard::Annotation)
- .to receive(:new).with(annotation_params).and_return(annotation)
- expect(annotation).to receive(:save).and_return(true)
-
- service_instance.execute
- end
- end
-
- shared_examples 'prevented annotation creation' do |message|
- it 'returns error response', :aggregate_failures do
- response = service_instance.execute
-
- expect(response[:status]).to be :error
- expect(response[:message]).to eql message
- end
-
- it 'does not change db state' do
- expect(::Metrics::Dashboard::Annotation).not_to receive(:new)
-
- service_instance.execute
- end
- end
-
- shared_examples 'annotation creation failure' do
- it 'returns error response', :aggregate_failures do
- annotation = instance_double(::Metrics::Dashboard::Annotation)
-
- expect(annotation).to receive(:errors).and_return('Model validation error')
- expect(::Metrics::Dashboard::Annotation)
- .to receive(:new).with(annotation_params).and_return(annotation)
- expect(annotation).to receive(:save).and_return(false)
-
- response = service_instance.execute
-
- expect(response[:status]).to be :error
- expect(response[:message]).to eql 'Model validation error'
- end
- end
-
- describe '.execute' do
- context 'with environment' do
- let(:environment) { create(:environment) }
- let(:cluster) { nil }
-
- context 'with anonymous user' do
- it_behaves_like 'prevented annotation creation', 'You are not authorized to create annotation for selected environment'
- end
-
- context 'with maintainer user' do
- before do
- environment.project.add_maintainer(user)
- end
-
- it_behaves_like 'executed annotation creation'
- end
- end
-
- context 'with cluster' do
- let(:environment) { nil }
-
- context 'with anonymous user' do
- let(:cluster) { create(:cluster, :project) }
-
- it_behaves_like 'prevented annotation creation', 'You are not authorized to create annotation for selected cluster'
- end
-
- context 'with maintainer user' do
- let(:cluster) { create(:cluster, :project) }
-
- before do
- cluster.project.add_maintainer(user)
- end
-
- it_behaves_like 'executed annotation creation'
- end
-
- context 'with owner user' do
- let(:cluster) { create(:cluster, :group) }
-
- before do
- cluster.group.add_owner(user)
- end
-
- it_behaves_like 'executed annotation creation'
- end
- end
-
- context 'non cluster nor environment is supplied' do
- let(:environment) { nil }
- let(:cluster) { nil }
-
- it_behaves_like 'annotation creation failure'
- end
-
- context 'missing dashboard_path' do
- let(:cluster) { create(:cluster, :project) }
- let(:environment) { nil }
- let(:dashboard_path) { nil }
-
- context 'with maintainer user' do
- before do
- cluster.project.add_maintainer(user)
- end
-
- it_behaves_like 'annotation creation failure'
- end
- end
-
- context 'incorrect dashboard_path' do
- let(:cluster) { create(:cluster, :project) }
- let(:environment) { nil }
- let(:dashboard_path) { 'something_incorrect.yml' }
-
- context 'with maintainer user' do
- before do
- cluster.project.add_maintainer(user)
- end
-
- it_behaves_like 'prevented annotation creation', 'Dashboard with requested path can not be found'
- end
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/annotations/delete_service_spec.rb b/spec/services/metrics/dashboard/annotations/delete_service_spec.rb
deleted file mode 100644
index 557d6d95767..00000000000
--- a/spec/services/metrics/dashboard/annotations/delete_service_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::Annotations::DeleteService, feature_category: :metrics do
- let(:user) { create(:user) }
- let(:service_instance) { described_class.new(user, annotation) }
-
- shared_examples 'executed annotation deletion' do
- it 'returns success response', :aggregate_failures do
- expect(annotation).to receive(:destroy).and_return(true)
-
- response = service_instance.execute
-
- expect(response[:status]).to be :success
- end
- end
-
- shared_examples 'prevented annotation deletion' do |message|
- it 'returns error response', :aggregate_failures do
- response = service_instance.execute
-
- expect(response[:status]).to be :error
- expect(response[:message]).to eql message
- end
-
- it 'does not change db state' do
- expect(annotation).not_to receive(:destroy)
-
- service_instance.execute
- end
- end
-
- describe '.execute' do
- context 'with specific environment' do
- let(:annotation) { create(:metrics_dashboard_annotation, environment: environment) }
- let(:environment) { create(:environment) }
-
- context 'with anonymous user' do
- it_behaves_like 'prevented annotation deletion', 'You are not authorized to delete this annotation'
- end
-
- context 'with maintainer user' do
- before do
- environment.project.add_maintainer(user)
- end
-
- it_behaves_like 'executed annotation deletion'
-
- context 'annotation failed to delete' do
- it 'returns error response', :aggregate_failures do
- allow(annotation).to receive(:destroy).and_return(false)
-
- response = service_instance.execute
-
- expect(response[:status]).to be :error
- expect(response[:message]).to eql 'Annotation has not been deleted'
- end
- end
- end
- end
-
- context 'with specific cluster' do
- let(:annotation) { create(:metrics_dashboard_annotation, cluster: cluster, environment: nil) }
-
- context 'with anonymous user' do
- let(:cluster) { create(:cluster, :project) }
-
- it_behaves_like 'prevented annotation deletion', 'You are not authorized to delete this annotation'
- end
-
- context 'with maintainer user' do
- let(:cluster) { create(:cluster, :project) }
-
- before do
- cluster.project.add_maintainer(user)
- end
-
- it_behaves_like 'executed annotation deletion'
- end
-
- context 'with owner user' do
- let(:cluster) { create(:cluster, :group) }
-
- before do
- cluster.group.add_owner(user)
- end
-
- it_behaves_like 'executed annotation deletion'
- end
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
deleted file mode 100644
index bb11b905a7c..00000000000
--- a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
+++ /dev/null
@@ -1,195 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_store_caching, feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- describe '#execute' do
- subject(:service_call) { described_class.new(project, user, params).execute }
-
- let(:commit_message) { 'test' }
- let(:branch) { "dashboard_new_branch" }
- let(:dashboard) { 'config/prometheus/common_metrics.yml' }
- let(:file_name) { 'custom_dashboard.yml' }
- let(:file_content_hash) { YAML.safe_load(File.read(dashboard)) }
- let(:params) do
- {
- dashboard: dashboard,
- file_name: file_name,
- commit_message: commit_message,
- branch: branch
- }
- end
-
- context 'user does not have push right to repository' do
- it_behaves_like 'misconfigured dashboard service response with stepable', :forbidden, 'You are not allowed to push into this branch. Create another branch or open a merge request.'
- end
-
- context 'with rights to push to the repository' do
- before do
- project.add_maintainer(user)
- end
-
- context 'wrong target file extension' do
- let(:file_name) { 'custom_dashboard.txt' }
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :bad_request, 'The file name should have a .yml extension'
- end
-
- context 'wrong source dashboard file' do
- let(:dashboard) { 'config/prometheus/common_metrics_123.yml' }
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :not_found, 'Not found.'
- end
-
- context 'path traversal attack attempt' do
- let(:dashboard) { 'config/prometheus/../database.yml' }
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :not_found, 'Not found.'
- end
-
- context 'path traversal attack attempt on target file' do
- let(:file_name) { '../../custom_dashboard.yml' }
- let(:dashboard_attrs) do
- {
- commit_message: commit_message,
- branch_name: branch,
- start_branch: project.default_branch,
- encoding: 'text',
- file_path: ".gitlab/dashboards/custom_dashboard.yml",
- file_content: file_content_hash.to_yaml
- }
- end
-
- it 'strips target file name to safe value', :aggregate_failures do
- allow(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).and_return(double(process: file_content_hash))
- service_instance = instance_double(::Files::CreateService)
- expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
- expect(service_instance).to receive(:execute).and_return(status: :success)
-
- service_call
- end
- end
-
- context 'valid parameters' do
- before do
- allow(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).and_return(double(process: file_content_hash))
- end
-
- it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH,
- [
- ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter
- ]
-
- it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH,
- [
- ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter
- ]
-
- context 'selected branch already exists' do
- let(:branch) { 'existing_branch' }
-
- before do
- project.repository.add_branch(user, branch, 'master')
- end
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :bad_request, 'There was an error creating the dashboard, branch named: existing_branch already exists.'
-
- # temporary not available function for first iteration
- # follow up issue https://gitlab.com/gitlab-org/gitlab/issues/196237 which
- # require this feature
- # it 'pass correct params to Files::CreateService', :aggregate_failures do
- # project.repository.add_branch(user, branch, 'master')
- #
- # service_instance = instance_double(::Files::CreateService)
- # expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
- # expect(service_instance).to receive(:execute).and_return(status: :success)
- #
- # service_call
- # end
- end
-
- context 'blank branch name' do
- let(:branch) { '' }
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :bad_request, 'There was an error creating the dashboard, branch name is invalid.'
- end
-
- context 'dashboard file already exists' do
- let(:branch) { 'custom_dashboard' }
-
- before do
- Files::CreateService.new(
- project,
- user,
- commit_message: 'Create custom dashboard custom_dashboard.yml',
- branch_name: 'master',
- start_branch: 'master',
- file_path: ".gitlab/dashboards/custom_dashboard.yml",
- file_content: File.read('config/prometheus/common_metrics.yml')
- ).execute
- end
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :bad_request, "A file with 'custom_dashboard.yml' already exists in custom_dashboard branch"
- end
-
- it 'extends dashboard template path to absolute url' do
- allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :success }))
-
- expect_file_read(Rails.root.join('config/prometheus/common_metrics.yml'), content: '')
-
- service_call
- end
-
- context 'Files::CreateService success' do
- before do
- allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :success }))
- end
-
- it 'clears dashboards cache' do
- expect(project.repository).to receive(:refresh_method_caches).with([:metrics_dashboard])
-
- service_call
- end
-
- it 'returns success', :aggregate_failures do
- result = service_call
- dashboard_details = {
- path: '.gitlab/dashboards/custom_dashboard.yml',
- display_name: 'custom_dashboard.yml',
- default: false,
- system_dashboard: false
- }
-
- expect(result[:status]).to be :success
- expect(result[:http_status]).to be :created
- expect(result[:dashboard]).to match dashboard_details
- end
- end
-
- context 'Files::CreateService fails' do
- before do
- allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :error }))
- end
-
- it 'does NOT clear dashboards cache' do
- expect(project.repository).not_to receive(:refresh_method_caches)
-
- service_call
- end
-
- it 'returns error' do
- result = service_call
- expect(result[:status]).to be :error
- end
- end
- end
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/cluster_dashboard_service_spec.rb b/spec/services/metrics/dashboard/cluster_dashboard_service_spec.rb
deleted file mode 100644
index 53def716de3..00000000000
--- a/spec/services/metrics/dashboard/cluster_dashboard_service_spec.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::ClusterDashboardService, :use_clean_rails_memory_store_caching,
- feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:cluster_project) { create(:cluster_project) }
- let_it_be(:cluster) { cluster_project.cluster }
- let_it_be(:project) { cluster_project.project }
-
- before do
- project.add_maintainer(user)
- end
-
- describe '.valid_params?' do
- let(:params) { { cluster: cluster, embedded: 'false' } }
-
- subject { described_class.valid_params?(params) }
-
- it { is_expected.to be_truthy }
-
- context 'with matching dashboard_path' do
- let(:params) { { dashboard_path: ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH } }
-
- it { is_expected.to be_truthy }
- end
-
- context 'missing cluster without dashboard_path' do
- let(:params) { {} }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#get_dashboard' do
- let(:service_params) { [project, user, { cluster: cluster, cluster_type: :admin }] }
- let(:service_call) { subject.get_dashboard }
-
- subject { described_class.new(*service_params) }
-
- it_behaves_like 'valid dashboard service response'
- it_behaves_like 'caches the unprocessed dashboard for subsequent calls'
- it_behaves_like 'refreshes cache when dashboard_version is changed'
-
- it_behaves_like 'dashboard_version contains SHA256 hash of dashboard file content' do
- let(:dashboard_path) { described_class::DASHBOARD_PATH }
- let(:dashboard_version) { subject.send(:dashboard_version) }
- end
-
- context 'when called with a non-system dashboard' do
- let(:dashboard_path) { 'garbage/dashboard/path' }
-
- # We want to always return the cluster dashboard.
- it_behaves_like 'valid dashboard service response'
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb b/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb
deleted file mode 100644
index 5d63505e5cc..00000000000
--- a/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::ClusterMetricsEmbedService, :use_clean_rails_memory_store_caching,
- feature_category: :metrics do
- include MetricsDashboardHelpers
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:user) { create(:user) }
- let_it_be(:cluster_project) { create(:cluster_project) }
- let_it_be(:cluster) { cluster_project.cluster }
- let_it_be(:project) { cluster_project.project }
-
- before do
- project.add_maintainer(user)
- end
-
- describe '.valid_params?' do
- let(:valid_params) { { cluster: 1, embedded: 'true', group: 'hello', title: 'world', y_label: 'countries' } }
-
- subject { described_class }
-
- it { expect(subject.valid_params?(valid_params)).to be_truthy }
-
- context 'missing all params' do
- let(:params) { {} }
-
- it { expect(subject.valid_params?(params)).to be_falsy }
- end
-
- [:cluster, :embedded, :group, :title, :y_label].each do |param_key|
- it 'returns false with missing param' do
- params = valid_params.except(param_key)
-
- expect(subject.valid_params?(params)).to be_falsy
- end
- end
- end
-
- describe '#get_dashboard' do
- let(:service_params) do
- [
- project,
- user,
- {
- cluster: cluster,
- cluster_type: :project,
- embedded: 'true',
- group: 'Cluster Health',
- title: 'CPU Usage',
- y_label: 'CPU (cores)'
- }
- ]
- end
-
- let(:service_call) { described_class.new(*service_params).get_dashboard }
- let(:panel_groups) { service_call[:dashboard][:panel_groups] }
- let(:panel) { panel_groups.first[:panels].first }
-
- it_behaves_like 'valid embedded dashboard service response'
- it_behaves_like 'caches the unprocessed dashboard for subsequent calls'
-
- it 'returns one panel' do
- expect(panel_groups.size).to eq 1
- expect(panel_groups.first[:panels].size).to eq 1
- end
-
- it 'returns panel by title and y_label' do
- expect(panel[:title]).to eq(service_params.last[:title])
- expect(panel[:y_label]).to eq(service_params.last[:y_label])
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb b/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
deleted file mode 100644
index 940daa38ae7..00000000000
--- a/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
+++ /dev/null
@@ -1,167 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::CustomDashboardService, :use_clean_rails_memory_store_caching,
- feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let(:service_params) { [project, user, { environment: environment, dashboard_path: dashboard_path }] }
-
- subject { described_class.new(*service_params) }
-
- before do
- project.add_maintainer(user) if user
- end
-
- describe '#raw_dashboard' do
- let(:project) { project_with_dashboard(dashboard_path) }
-
- it_behaves_like '#raw_dashboard raises error if dashboard loading fails'
- end
-
- describe '#get_dashboard' do
- let(:service_call) { subject.get_dashboard }
-
- context 'when the dashboard does not exist' do
- it_behaves_like 'misconfigured dashboard service response', :not_found
-
- it 'does not update gitlab_metrics_dashboard_processing_time_ms metric', :prometheus do
- service_call
- metric = subject.send(:processing_time_metric)
- labels = subject.send(:processing_time_metric_labels)
-
- expect(metric.get(labels)).to eq(0)
- end
- end
-
- it_behaves_like 'raises error for users with insufficient permissions'
-
- context 'when the dashboard exists' do
- let(:project) { project_with_dashboard(dashboard_path) }
-
- it_behaves_like 'valid dashboard service response'
- it_behaves_like 'updates gitlab_metrics_dashboard_processing_time_ms metric'
-
- it 'caches the unprocessed dashboard for subsequent calls' do
- expect_any_instance_of(described_class)
- .to receive(:get_raw_dashboard)
- .once
- .and_call_original
-
- described_class.new(*service_params).get_dashboard
- described_class.new(*service_params).get_dashboard
- end
-
- it 'tracks panel type' do
- allow(::Gitlab::Tracking).to receive(:event).and_call_original
-
- described_class.new(*service_params).get_dashboard
-
- expect(::Gitlab::Tracking).to have_received(:event)
- .with('MetricsDashboard::Chart', 'chart_rendered', { label: 'area-chart' })
- .at_least(:once)
- end
-
- context 'with metric in database' do
- let!(:prometheus_metric) do
- create(:prometheus_metric, project: project, identifier: 'metric_a1', group: 'custom')
- end
-
- it 'includes metric_id' do
- dashboard = described_class.new(*service_params).get_dashboard
-
- metric_id = dashboard[:dashboard][:panel_groups].find { |panel_group| panel_group[:group] == 'Group A' }
- .fetch(:panels).find { |panel| panel[:title] == 'Super Chart A1' }
- .fetch(:metrics).find { |metric| metric[:id] == 'metric_a1' }
- .fetch(:metric_id)
-
- expect(metric_id).to eq(prometheus_metric.id)
- end
- end
-
- context 'and the dashboard is then deleted' do
- it 'does not return the previously cached dashboard' do
- described_class.new(*service_params).get_dashboard
-
- delete_project_dashboard(project, user, dashboard_path)
-
- expect_any_instance_of(described_class)
- .to receive(:get_raw_dashboard)
- .once
- .and_call_original
-
- described_class.new(*service_params).get_dashboard
- end
- end
- end
-
- context 'when the dashboard is configured incorrectly' do
- let(:project) { project_with_dashboard(dashboard_path, {}) }
-
- it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
- end
- end
-
- describe '.all_dashboard_paths' do
- let(:all_dashboards) { described_class.all_dashboard_paths(project) }
-
- context 'when there are no project dashboards' do
- it 'returns an empty array' do
- expect(all_dashboards).to be_empty
- end
- end
-
- context 'when there are project dashboards available' do
- let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let(:project) { project_with_dashboard(dashboard_path) }
-
- it 'returns the dashboard attributes' do
- expect(all_dashboards).to eq(
- [{
- path: dashboard_path,
- display_name: 'test.yml',
- default: false,
- system_dashboard: false,
- out_of_the_box_dashboard: false
- }]
- )
- end
-
- it 'caches repo file list' do
- expect(Gitlab::Metrics::Dashboard::RepoDashboardFinder).to receive(:list_dashboards)
- .with(project)
- .once
- .and_call_original
-
- described_class.all_dashboard_paths(project)
- described_class.all_dashboard_paths(project)
- end
- end
- end
-
- describe '.valid_params?' do
- let(:params) { { dashboard_path: '.gitlab/dashboard/test.yml' } }
-
- subject { described_class.valid_params?(params) }
-
- it { is_expected.to be_truthy }
-
- context 'missing dashboard_path' do
- let(:params) { {} }
-
- it { is_expected.to be_falsey }
- end
-
- context 'empty dashboard_path' do
- let(:params) { { dashboard_path: '' } }
-
- it { is_expected.to be_falsey }
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
deleted file mode 100644
index 8117296b048..00000000000
--- a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
+++ /dev/null
@@ -1,147 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::CustomMetricEmbedService, feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:project, reload: true) { build(:project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- before do
- project.add_maintainer(user) if user
- end
-
- let(:dashboard_path) { system_dashboard_path }
- let(:group) { business_metric_title }
- let(:title) { 'title' }
- let(:y_label) { 'y_label' }
-
- describe '.valid_params?' do
- let(:valid_params) do
- {
- embedded: true,
- dashboard_path: dashboard_path,
- group: group,
- title: title,
- y_label: y_label
- }
- end
-
- subject { described_class.valid_params?(params) }
-
- let(:params) { valid_params }
-
- it { is_expected.to be_truthy }
-
- context 'missing embedded' do
- let(:params) { valid_params.except(:embedded) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'not embedded' do
- let(:params) { valid_params.merge(embedded: 'false') }
-
- it { is_expected.to be_falsey }
- end
-
- context 'non-system dashboard' do
- let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
-
- it { is_expected.to be_falsey }
- end
-
- context 'undefined dashboard' do
- let(:params) { valid_params.except(:dashboard_path) }
-
- it { is_expected.to be_truthy }
- end
-
- context 'non-custom metric group' do
- let(:group) { 'Different Group' }
-
- it { is_expected.to be_falsey }
- end
-
- context 'missing group' do
- let(:group) { nil }
-
- it { is_expected.to be_falsey }
- end
-
- context 'missing title' do
- let(:title) { nil }
-
- it { is_expected.to be_falsey }
- end
-
- context 'undefined y-axis label' do
- let(:params) { valid_params.except(:y_label) }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#get_dashboard' do
- let(:service_params) do
- [
- project,
- user,
- {
- embedded: true,
- environment: environment,
- dashboard_path: dashboard_path,
- group: group,
- title: title,
- y_label: y_label
- }
- ]
- end
-
- let(:service_call) { described_class.new(*service_params).get_dashboard }
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
- it_behaves_like 'raises error for users with insufficient permissions'
-
- context 'the custom metric exists' do
- let!(:metric) { create(:prometheus_metric, project: project) }
-
- it_behaves_like 'valid embedded dashboard service response'
-
- it 'does not cache the unprocessed dashboard' do
- # Fail spec if any method of Cache class is called.
- stub_const('Gitlab::Metrics::Dashboard::Cache', double)
-
- described_class.new(*service_params).get_dashboard
- end
-
- context 'multiple metrics meet criteria' do
- let!(:metric_2) { create(:prometheus_metric, project: project, query: 'avg(metric_2)') }
-
- it_behaves_like 'valid embedded dashboard service response'
-
- it 'includes both metrics in a single panel' do
- result = service_call
-
- panel_groups = result[:dashboard][:panel_groups]
- panels = panel_groups[0][:panels]
- metrics = panels[0][:metrics]
- queries = metrics.map { |metric| metric[:query_range] }
-
- expect(panel_groups.length).to eq(1)
- expect(panels.length).to eq(1)
- expect(metrics.length).to eq(2)
- expect(queries).to include('avg(metric_2)', 'avg(metric)')
- end
- end
- end
-
- context 'when the metric exists in another project' do
- let!(:metric) { create(:prometheus_metric, project: create(:project)) }
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/default_embed_service_spec.rb b/spec/services/metrics/dashboard/default_embed_service_spec.rb
deleted file mode 100644
index 6ef248f6b09..00000000000
--- a/spec/services/metrics/dashboard/default_embed_service_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::DefaultEmbedService, :use_clean_rails_memory_store_caching,
- feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:project) { build(:project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- before do
- project.add_maintainer(user) if user
- end
-
- describe '.valid_params?' do
- let(:params) { { embedded: true } }
-
- subject { described_class.valid_params?(params) }
-
- it { is_expected.to be_truthy }
-
- context 'missing embedded' do
- let(:params) { {} }
-
- it { is_expected.to be_falsey }
- end
-
- context 'not embedded' do
- let(:params) { { embedded: 'false' } }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#get_dashboard' do
- let(:service_params) { [project, user, { environment: environment }] }
- let(:service_call) { described_class.new(*service_params).get_dashboard }
-
- it_behaves_like 'valid embedded dashboard service response'
- it_behaves_like 'raises error for users with insufficient permissions'
-
- it 'caches the unprocessed dashboard for subsequent calls' do
- system_service = Metrics::Dashboard::SystemDashboardService
-
- expect(system_service).to receive(:new).once.and_call_original
-
- described_class.new(*service_params).get_dashboard
- described_class.new(*service_params).get_dashboard
- end
-
- context 'when called with a non-system dashboard' do
- let(:dashboard_path) { 'garbage/dashboard/path' }
-
- it_behaves_like 'valid embedded dashboard service response'
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
deleted file mode 100644
index 1643f552a70..00000000000
--- a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
+++ /dev/null
@@ -1,158 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::DynamicEmbedService, :use_clean_rails_memory_store_caching,
- feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:project) { build(:project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- before do
- project.add_maintainer(user) if user
- end
-
- let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let(:group) { 'Group A' }
- let(:title) { 'Super Chart A1' }
- let(:y_label) { 'y_label' }
-
- describe '.valid_params?' do
- let(:valid_params) do
- {
- embedded: true,
- dashboard_path: dashboard_path,
- group: group,
- title: title,
- y_label: y_label
- }
- end
-
- subject { described_class.valid_params?(params) }
-
- let(:params) { valid_params }
-
- it { is_expected.to be_truthy }
-
- context 'missing embedded' do
- let(:params) { valid_params.except(:embedded) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'not embedded' do
- let(:params) { valid_params.merge(embedded: 'false') }
-
- it { is_expected.to be_falsey }
- end
-
- context 'undefined dashboard' do
- let(:params) { valid_params.except(:dashboard_path) }
-
- it { is_expected.to be_truthy }
- end
-
- context 'missing dashboard' do
- let(:dashboard) { '' }
-
- it { is_expected.to be_truthy }
- end
-
- context 'missing group' do
- let(:group) { '' }
-
- it { is_expected.to be_falsey }
- end
-
- context 'missing title' do
- let(:title) { '' }
-
- it { is_expected.to be_falsey }
- end
-
- context 'undefined y-axis label' do
- let(:params) { valid_params.except(:y_label) }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#get_dashboard' do
- let(:service_params) do
- [
- project,
- user,
- {
- environment: environment,
- dashboard_path: dashboard_path,
- group: group,
- title: title,
- y_label: y_label
- }
- ]
- end
-
- let(:service_call) { described_class.new(*service_params).get_dashboard }
-
- context 'when the dashboard does not exist' do
- it_behaves_like 'misconfigured dashboard service response', :not_found
- end
-
- context 'when the dashboard is exists' do
- let(:project) { project_with_dashboard(dashboard_path) }
-
- it_behaves_like 'valid embedded dashboard service response'
- it_behaves_like 'raises error for users with insufficient permissions'
-
- it 'caches the unprocessed dashboard for subsequent calls' do
- expect(YAML).to receive(:safe_load).once.and_call_original
-
- described_class.new(*service_params).get_dashboard
- described_class.new(*service_params).get_dashboard
- end
-
- context 'when the specified group is not present on the dashboard' do
- let(:group) { 'Group Not Found' }
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
- end
-
- context 'when the specified title is not present on the dashboard' do
- let(:title) { 'Title Not Found' }
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
- end
-
- context 'when the specified y-axis label is not present on the dashboard' do
- let(:y_label) { 'Y-Axis Not Found' }
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
- end
- end
-
- shared_examples 'uses system dashboard' do
- it 'uses the overview dashboard' do
- expect(Gitlab::Metrics::Dashboard::Finder)
- .to receive(:find_raw)
- .with(project, dashboard_path: system_dashboard_path)
- .once
-
- service_call
- end
- end
-
- context 'when the dashboard is nil' do
- let(:dashboard_path) { nil }
-
- it_behaves_like 'uses system dashboard'
- end
-
- context 'when the dashboard is not present' do
- let(:dashboard_path) { '' }
-
- it_behaves_like 'uses system dashboard'
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb b/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb
deleted file mode 100644
index 25812a492b2..00000000000
--- a/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb
+++ /dev/null
@@ -1,103 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::GitlabAlertEmbedService, feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:alert) { create(:prometheus_alert) }
- let_it_be(:project) { alert.project }
- let_it_be(:user) { create(:user) }
-
- let(:alert_id) { alert.id }
-
- before_all do
- project.add_maintainer(user)
- end
-
- describe '.valid_params?' do
- let(:valid_params) do
- {
- embedded: true,
- prometheus_alert_id: alert_id
- }
- end
-
- subject { described_class.valid_params?(params) }
-
- let(:params) { valid_params }
-
- it { is_expected.to be_truthy }
-
- context 'missing embedded' do
- let(:params) { valid_params.except(:embedded) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'not embedded' do
- let(:params) { valid_params.merge(embedded: 'false') }
-
- it { is_expected.to be_falsey }
- end
-
- context 'missing alert id' do
- let(:params) { valid_params.except(:prometheus_alert_id) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'missing alert id' do
- let(:params) { valid_params.merge(prometheus_alert_id: 'none') }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#get_dashboard' do
- let(:service_params) do
- [
- project,
- user,
- {
- embedded: true,
- prometheus_alert_id: alert_id
- }
- ]
- end
-
- let(:service_call) { described_class.new(*service_params).get_dashboard }
-
- context 'when alerting is available' do
- it_behaves_like 'valid embedded dashboard service response'
- it_behaves_like 'raises error for users with insufficient permissions'
-
- it 'generates an panel based on the alert' do
- result = service_call
- panel = result[:dashboard][:panel_groups][0][:panels][0]
- metric = panel[:metrics].first
-
- expect(panel[:metrics].length).to eq 1
- expect(panel).to include(
- title: alert.prometheus_metric.title,
- y_label: alert.prometheus_metric.y_label,
- type: 'area-chart'
- )
- expect(metric[:metric_id]).to eq alert.prometheus_metric_id
- end
-
- context 'when the metric does not exist' do
- let(:alert_id) { -4 }
-
- it_behaves_like 'misconfigured dashboard service response', :not_found
- end
-
- it 'does not cache the unprocessed dashboard' do
- # Fail spec if any method of Cache class is called.
- stub_const('Gitlab::Metrics::Dashboard::Cache', double)
-
- described_class.new(*service_params).get_dashboard
- end
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
deleted file mode 100644
index 877a455ea44..00000000000
--- a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
+++ /dev/null
@@ -1,279 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::GrafanaMetricEmbedService, feature_category: :metrics do
- include MetricsDashboardHelpers
- include ReactiveCachingHelpers
- include GrafanaApiHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
-
- let(:grafana_url) do
- valid_grafana_dashboard_link(grafana_integration.grafana_url)
- end
-
- before_all do
- project.add_maintainer(user)
- end
-
- describe '.valid_params?' do
- let(:valid_params) { { embedded: true, grafana_url: grafana_url } }
-
- subject { described_class.valid_params?(params) }
-
- let(:params) { valid_params }
-
- it { is_expected.to be_truthy }
-
- context 'missing embedded' do
- let(:params) { valid_params.except(:embedded) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'not embedded' do
- let(:params) { valid_params.merge(embedded: 'false') }
-
- it { is_expected.to be_falsey }
- end
-
- context 'undefined grafana_url' do
- let(:params) { valid_params.except(:grafana_url) }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '.from_cache' do
- let(:params) { [project.id, user.id, grafana_url] }
-
- subject { described_class.from_cache(*params) }
-
- it 'initializes an instance of GrafanaMetricEmbedService' do
- expect(subject).to be_an_instance_of(described_class)
- expect(subject.project).to eq(project)
- expect(subject.current_user).to eq(user)
- expect(subject.params[:grafana_url]).to eq(grafana_url)
- end
-
- context 'with unknown users' do
- let(:params) { [project.id, current_user_id, grafana_url] }
-
- context 'when anonymous' do
- where(:current_user_id) do
- [nil, '']
- end
-
- with_them do
- it 'sets current_user as nil' do
- expect(subject.current_user).to be_nil
- end
- end
- end
-
- context 'when invalid' do
- let(:current_user_id) { non_existing_record_id }
-
- it 'raise record not found error' do
- expect { subject }
- .to raise_error(ActiveRecord::RecordNotFound, /Couldn't find User/)
- end
- end
- end
- end
-
- describe '#get_dashboard', :use_clean_rails_memory_store_caching do
- let(:service_params) do
- [
- project,
- user,
- {
- embedded: true,
- grafana_url: grafana_url
- }
- ]
- end
-
- let(:service) { described_class.new(*service_params) }
- let(:service_call) { service.get_dashboard }
-
- context 'without caching' do
- before do
- synchronous_reactive_cache(service)
- end
-
- it_behaves_like 'raises error for users with insufficient permissions'
-
- context 'without a grafana integration' do
- before do
- allow(project).to receive(:grafana_integration).and_return(nil)
- end
-
- it_behaves_like 'misconfigured dashboard service response', :bad_request
- end
-
- context 'when grafana cannot be reached' do
- before do
- allow(grafana_integration.client).to receive(:get_dashboard).and_raise(::Grafana::Client::Error)
- end
-
- it_behaves_like 'misconfigured dashboard service response', :service_unavailable
- end
-
- context 'when panelId is missing' do
- let(:grafana_url) do
- grafana_integration.grafana_url +
- '/d/XDaNK6amz/gitlab-omnibus-redis' \
- '?from=1570397739557&to=1570484139557'
- end
-
- before do
- stub_dashboard_request(grafana_integration.grafana_url)
- end
-
- it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
- end
-
- context 'when uid is missing' do
- let(:grafana_url) { grafana_integration.grafana_url + '/d/' }
-
- before do
- stub_dashboard_request(grafana_integration.grafana_url)
- end
-
- it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
- end
-
- context 'when the dashboard response contains misconfigured json' do
- before do
- stub_dashboard_request(grafana_integration.grafana_url, body: '')
- end
-
- it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
- end
-
- context 'when the datasource response contains misconfigured json' do
- before do
- stub_dashboard_request(grafana_integration.grafana_url)
- stub_datasource_request(grafana_integration.grafana_url, body: '')
- end
-
- it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
- end
-
- context 'when the embed was created successfully' do
- before do
- stub_dashboard_request(grafana_integration.grafana_url)
- stub_datasource_request(grafana_integration.grafana_url)
- end
-
- context 'when project is private and user is member' do
- it_behaves_like 'valid embedded dashboard service response'
- end
-
- context 'when project is public and user is anonymous' do
- let(:project) { create(:project, :public) }
- let(:user) { nil }
- let(:grafana_integration) { create(:grafana_integration, project: project) }
-
- it_behaves_like 'valid embedded dashboard service response'
- end
- end
- end
-
- context 'with caching', :use_clean_rails_memory_store_caching do
- let(:cache_params) { [project.id, user.id, grafana_url] }
-
- context 'when value not present in cache' do
- it 'returns nil' do
- expect(ExternalServiceReactiveCachingWorker)
- .to receive(:perform_async)
- .with(service.class, service.id, *cache_params)
-
- expect(service_call).to eq(nil)
- end
- end
-
- context 'when value present in cache' do
- let(:return_value) { { 'http_status' => :ok, 'dashboard' => '{}' } }
-
- before do
- stub_reactive_cache(service, return_value, cache_params)
- end
-
- it 'returns cached value' do
- expect(ReactiveCachingWorker)
- .not_to receive(:perform_async)
- .with(service.class, service.id, *cache_params)
-
- expect(service_call[:http_status]).to eq(return_value[:http_status])
- expect(service_call[:dashboard]).to eq(return_value[:dashboard])
- end
- end
- end
- end
-end
-
-RSpec.describe Metrics::Dashboard::GrafanaUidParser do
- let_it_be(:grafana_integration) { create(:grafana_integration) }
- let_it_be(:project) { grafana_integration.project }
-
- subject { described_class.new(grafana_url, project).parse }
-
- context 'with a Grafana-defined uid' do
- let(:grafana_url) { grafana_integration.grafana_url + '/d/XDaNK6amz/?panelId=1' }
-
- it { is_expected.to eq 'XDaNK6amz' }
- end
-
- context 'with a user-defined uid' do
- let(:grafana_url) { grafana_integration.grafana_url + '/d/pgbouncer-main/pgbouncer-overview?panelId=1' }
-
- it { is_expected.to eq 'pgbouncer-main' }
- end
-
- context 'when a uid is not present' do
- let(:grafana_url) { grafana_integration.grafana_url }
-
- it { is_expected.to be nil }
- end
-
- context 'when the url starts with unrelated content' do
- let(:grafana_url) { 'js:' + grafana_integration.grafana_url }
-
- it { is_expected.to be nil }
- end
-end
-
-RSpec.describe Metrics::Dashboard::DatasourceNameParser do
- include GrafanaApiHelpers
-
- let(:grafana_url) { valid_grafana_dashboard_link('https://gitlab.grafana.net') }
- let(:grafana_dashboard) { Gitlab::Json.parse(fixture_file('grafana/dashboard_response.json'), symbolize_names: true) }
-
- subject { described_class.new(grafana_url, grafana_dashboard).parse }
-
- it { is_expected.to eq 'GitLab Omnibus' }
-
- context 'when the panelId is missing from the url' do
- let(:grafana_url) { 'https:/gitlab.grafana.net/d/jbdbks/' }
-
- it { is_expected.to be nil }
- end
-
- context 'when the panel is not present' do
- # We're looking for panelId of 8, but only 6 is present
- let(:grafana_dashboard) { { dashboard: { panels: [{ id: 6 }] } } }
-
- it { is_expected.to be nil }
- end
-
- context 'when the dashboard panel does not have a datasource' do
- let(:grafana_dashboard) { { dashboard: { panels: [{ id: 8 }] } } }
-
- it { is_expected.to be nil }
- end
-end
diff --git a/spec/services/metrics/dashboard/panel_preview_service_spec.rb b/spec/services/metrics/dashboard/panel_preview_service_spec.rb
deleted file mode 100644
index 584be717d7c..00000000000
--- a/spec/services/metrics/dashboard/panel_preview_service_spec.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::PanelPreviewService, feature_category: :metrics do
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
- let_it_be(:panel_yml) do
- <<~YML
- ---
- title: test panel
- YML
- end
-
- let_it_be(:dashboard) do
- {
- panel_groups: [
- {
- panels: [{ 'title' => 'test panel' }]
- }
- ]
- }
- end
-
- describe '#execute' do
- subject(:service_response) { described_class.new(project, panel_yml, environment).execute }
-
- context "valid panel's yaml" do
- before do
- allow_next_instance_of(::Gitlab::Metrics::Dashboard::Processor) do |processor|
- allow(processor).to receive(:process).and_return(dashboard)
- end
- end
-
- it 'returns success service response' do
- expect(service_response.success?).to be_truthy
- end
-
- it 'returns processed panel' do
- expect(service_response.payload).to eq('title' => 'test panel')
- end
-
- it 'uses dashboard processor' do
- sequence = [
- ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
- ::Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::UrlValidator
- ]
- processor_params = [project, dashboard, sequence, environment: environment]
-
- expect_next_instance_of(::Gitlab::Metrics::Dashboard::Processor, *processor_params) do |processor|
- expect(processor).to receive(:process).and_return(dashboard)
- end
-
- service_response
- end
- end
-
- context "invalid panel's yaml" do
- [
- Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError,
- Gitlab::Config::Loader::Yaml::NotHashError,
- Gitlab::Config::Loader::Yaml::DataTooLargeError,
- Gitlab::Config::Loader::FormatError
- ].each do |error_class|
- context "with #{error_class}" do
- before do
- allow_next_instance_of(::Gitlab::Metrics::Dashboard::Processor) do |processor|
- allow(processor).to receive(:process).and_raise(error_class.new('error'))
- end
- end
-
- it 'returns error service response' do
- expect(service_response.error?).to be_truthy
- end
-
- it 'returns error message' do
- expect(service_response.message).to eq('error')
- end
- end
- end
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb b/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb
deleted file mode 100644
index a6fcb6b4842..00000000000
--- a/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::PodDashboardService, :use_clean_rails_memory_store_caching,
- feature_category: :cell do
- include MetricsDashboardHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- let(:dashboard_path) { described_class::DASHBOARD_PATH }
- let(:service_params) { [project, user, { environment: environment, dashboard_path: dashboard_path }] }
-
- before do
- project.add_maintainer(user)
- end
-
- subject { described_class.new(*service_params) }
-
- describe '#raw_dashboard' do
- it_behaves_like '#raw_dashboard raises error if dashboard loading fails'
- end
-
- describe '.valid_params?' do
- let(:params) { { dashboard_path: described_class::DASHBOARD_PATH } }
-
- subject { described_class.valid_params?(params) }
-
- it { is_expected.to be_truthy }
-
- context 'missing dashboard_path' do
- let(:params) { {} }
-
- it { is_expected.to be_falsey }
- end
-
- context 'non-matching dashboard_path' do
- let(:params) { { dashboard_path: 'path/to/bunk.yml' } }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#get_dashboard' do
- let(:service_call) { subject.get_dashboard }
-
- it_behaves_like 'valid dashboard service response'
- it_behaves_like 'caches the unprocessed dashboard for subsequent calls'
- it_behaves_like 'refreshes cache when dashboard_version is changed'
- it_behaves_like 'updates gitlab_metrics_dashboard_processing_time_ms metric'
-
- it_behaves_like 'dashboard_version contains SHA256 hash of dashboard file content' do
- let(:dashboard_version) { subject.send(:dashboard_version) }
- end
- end
-
- describe '.all_dashboard_paths' do
- it 'returns the dashboard attributes' do
- all_dashboards = described_class.all_dashboard_paths(project)
-
- expect(all_dashboards).to eq(
- [{
- path: described_class::DASHBOARD_PATH,
- display_name: described_class::DASHBOARD_NAME,
- default: false,
- system_dashboard: false,
- out_of_the_box_dashboard: true
- }]
- )
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
deleted file mode 100644
index b08b980e50e..00000000000
--- a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_store_caching,
- feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- let(:dashboard_path) { described_class::DASHBOARD_PATH }
- let(:service_params) { [project, user, { environment: environment, dashboard_path: dashboard_path }] }
-
- subject { described_class.new(*service_params) }
-
- before do
- project.add_maintainer(user) if user
- end
-
- describe '#raw_dashboard' do
- it_behaves_like '#raw_dashboard raises error if dashboard loading fails'
- end
-
- describe '#get_dashboard' do
- let(:service_call) { subject.get_dashboard }
-
- it_behaves_like 'valid dashboard service response'
- it_behaves_like 'raises error for users with insufficient permissions'
- it_behaves_like 'caches the unprocessed dashboard for subsequent calls'
- it_behaves_like 'refreshes cache when dashboard_version is changed'
- it_behaves_like 'updates gitlab_metrics_dashboard_processing_time_ms metric'
-
- it_behaves_like 'dashboard_version contains SHA256 hash of dashboard file content' do
- let(:dashboard_version) { subject.send(:dashboard_version) }
- end
-
- context 'when called with a non-system dashboard' do
- let(:dashboard_path) { 'garbage/dashboard/path' }
-
- # We want to always return the system dashboard.
- it_behaves_like 'valid dashboard service response'
- end
- end
-
- describe '.all_dashboard_paths' do
- it 'returns the dashboard attributes' do
- all_dashboards = described_class.all_dashboard_paths(project)
-
- expect(all_dashboards).to eq(
- [{
- path: described_class::DASHBOARD_PATH,
- display_name: described_class::DASHBOARD_NAME,
- default: true,
- system_dashboard: true,
- out_of_the_box_dashboard: true
- }]
- )
- end
- end
-
- describe '.valid_params?' do
- let(:params) { { dashboard_path: described_class::DASHBOARD_PATH } }
-
- subject { described_class.valid_params?(params) }
-
- it { is_expected.to be_truthy }
-
- context 'missing dashboard_path' do
- let(:params) { {} }
-
- it { is_expected.to be_falsey }
- end
-
- context 'non-matching dashboard_path' do
- let(:params) { { dashboard_path: 'path/to/bunk.yml' } }
-
- it { is_expected.to be_falsey }
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/transient_embed_service_spec.rb b/spec/services/metrics/dashboard/transient_embed_service_spec.rb
deleted file mode 100644
index 1e3ccde6ae3..00000000000
--- a/spec/services/metrics/dashboard/transient_embed_service_spec.rb
+++ /dev/null
@@ -1,99 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::TransientEmbedService, :use_clean_rails_memory_store_caching,
- feature_category: :metrics do
- let_it_be(:project) { build(:project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- before do
- project.add_maintainer(user) if user
- end
-
- describe '.valid_params?' do
- let(:params) { { embedded: 'true', embed_json: '{}' } }
-
- subject { described_class.valid_params?(params) }
-
- it { is_expected.to be_truthy }
-
- context 'missing embedded' do
- let(:params) { { embed_json: '{}' } }
-
- it { is_expected.to be_falsey }
- end
-
- context 'not embedded' do
- let(:params) { { embedded: 'false', embed_json: '{}' } }
-
- it { is_expected.to be_falsey }
- end
-
- context 'missing embed_json' do
- let(:params) { { embedded: 'true' } }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#get_dashboard' do
- let(:embed_json) { get_embed_json }
- let(:service_params) { [project, user, { environment: environment, embedded: 'true', embed_json: embed_json }] }
- let(:service_call) { described_class.new(*service_params).get_dashboard }
-
- it_behaves_like 'valid embedded dashboard service response'
- it_behaves_like 'raises error for users with insufficient permissions'
-
- it 'caches the unprocessed dashboard for subsequent calls' do
- expect_any_instance_of(described_class)
- .to receive(:get_raw_dashboard)
- .once
- .and_call_original
-
- described_class.new(*service_params).get_dashboard
- described_class.new(*service_params).get_dashboard
- end
-
- it 'caches unique requests separately' do
- alt_embed_json = get_embed_json('area-chart')
- alt_service_params = [project, user, { environment: environment, embedded: 'true', embed_json: alt_embed_json }]
-
- embed = described_class.new(*service_params).get_dashboard
- alt_embed = described_class.new(*alt_service_params).get_dashboard
-
- expect(embed).not_to eq(alt_embed)
- expect(get_type_for_embed(embed)).to eq('line-graph')
- expect(get_type_for_embed(alt_embed)).to eq('area-chart')
- end
-
- context 'when embed_json cannot be parsed as json' do
- let(:embed_json) { '' }
-
- it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
- end
-
- private
-
- def get_embed_json(type = 'line-graph')
- {
- panel_groups: [{
- panels: [{
- type: type,
- title: 'title',
- y_label: 'y_label',
- metrics: [{
- query_range: 'up',
- label: 'y_label'
- }]
- }]
- }]
- }.to_json
- end
-
- def get_type_for_embed(embed)
- embed[:dashboard][:panel_groups][0][:panels][0][:type]
- end
- end
-end
diff --git a/spec/services/metrics/dashboard/update_dashboard_service_spec.rb b/spec/services/metrics/dashboard/update_dashboard_service_spec.rb
deleted file mode 100644
index 15bbe9f9364..00000000000
--- a/spec/services/metrics/dashboard/update_dashboard_service_spec.rb
+++ /dev/null
@@ -1,159 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_store_caching, feature_category: :metrics do
- include MetricsDashboardHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- describe '#execute' do
- subject(:service_call) { described_class.new(project, user, params).execute }
-
- let(:commit_message) { 'test' }
- let(:branch) { 'dashboard_new_branch' }
- let(:dashboard) { 'config/prometheus/common_metrics.yml' }
- let(:file_name) { 'custom_dashboard.yml' }
- let(:file_content_hash) { YAML.safe_load(File.read(dashboard)) }
- let(:params) do
- {
- file_name: file_name,
- file_content: file_content_hash,
- commit_message: commit_message,
- branch: branch
- }
- end
-
- context 'user does not have push right to repository' do
- it_behaves_like 'misconfigured dashboard service response with stepable', :forbidden, 'You are not allowed to push into this branch. Create another branch or open a merge request.'
- end
-
- context 'with rights to push to the repository' do
- before do
- project.add_maintainer(user)
- end
-
- context 'path traversal attack attempt' do
- context 'with a yml extension' do
- let(:file_name) { 'config/prometheus/../database.yml' }
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :bad_request, "A file with this name doesn't exist"
- end
-
- context 'without a yml extension' do
- let(:file_name) { '../../..../etc/passwd' }
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :bad_request, 'The file name should have a .yml extension'
- end
- end
-
- context 'valid parameters' do
- it_behaves_like 'valid dashboard update process'
- end
-
- context 'selected branch already exists' do
- let(:branch) { 'existing_branch' }
-
- before do
- project.repository.add_branch(user, branch, 'master')
- end
-
- it_behaves_like 'misconfigured dashboard service response with stepable', :bad_request, 'There was an error updating the dashboard, branch named: existing_branch already exists.'
- end
-
- context 'Files::UpdateService success' do
- let(:merge_request) { project.merge_requests.last }
-
- before do
- allow(::Files::UpdateService).to receive(:new).and_return(double(execute: { status: :success }))
- end
-
- it 'returns success', :aggregate_failures do
- dashboard_details = {
- path: '.gitlab/dashboards/custom_dashboard.yml',
- display_name: 'custom_dashboard.yml',
- default: false,
- system_dashboard: false
- }
-
- expect(service_call[:status]).to be :success
- expect(service_call[:http_status]).to be :created
- expect(service_call[:dashboard]).to match dashboard_details
- expect(service_call[:merge_request]).to eq(Gitlab::UrlBuilder.build(merge_request))
- end
-
- context 'when the merge request does not succeed' do
- let(:error_message) { 'There was an error' }
-
- let(:merge_request) do
- build(:merge_request, target_project: project, source_project: project, author: user)
- end
-
- before do
- merge_request.errors.add(:base, error_message)
- allow_next_instance_of(::MergeRequests::CreateService) do |mr|
- allow(mr).to receive(:execute).and_return(merge_request)
- end
- end
-
- it 'returns an appropriate message and status code', :aggregate_failures do
- result = service_call
-
- expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
- expect(result[:status]).to eq(:error)
- expect(result[:http_status]).to eq(:bad_request)
- expect(result[:message]).to eq(error_message)
- end
- end
-
- context 'with escaped characters in file name' do
- let(:file_name) { "custom_dashboard%26copy.yml" }
-
- it 'escapes the special characters', :aggregate_failures do
- dashboard_details = {
- path: '.gitlab/dashboards/custom_dashboard&copy.yml',
- display_name: 'custom_dashboard&copy.yml',
- default: false,
- system_dashboard: false
- }
-
- expect(service_call[:status]).to be :success
- expect(service_call[:http_status]).to be :created
- expect(service_call[:dashboard]).to match dashboard_details
- end
- end
-
- context 'when pushing to the default branch' do
- let(:branch) { 'master' }
-
- it 'does not create a merge request', :aggregate_failures do
- dashboard_details = {
- path: '.gitlab/dashboards/custom_dashboard.yml',
- display_name: 'custom_dashboard.yml',
- default: false,
- system_dashboard: false
- }
-
- expect(::MergeRequests::CreateService).not_to receive(:new)
- expect(service_call.keys).to contain_exactly(:dashboard, :http_status, :status)
- expect(service_call[:status]).to be :success
- expect(service_call[:http_status]).to be :created
- expect(service_call[:dashboard]).to match dashboard_details
- end
- end
- end
-
- context 'Files::UpdateService fails' do
- before do
- allow(::Files::UpdateService).to receive(:new).and_return(double(execute: { status: :error }))
- end
-
- it 'returns error' do
- expect(service_call[:status]).to be :error
- end
- end
- end
- end
-end
diff --git a/spec/services/metrics/users_starred_dashboards/create_service_spec.rb b/spec/services/metrics/users_starred_dashboards/create_service_spec.rb
deleted file mode 100644
index e08bdca8410..00000000000
--- a/spec/services/metrics/users_starred_dashboards/create_service_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::UsersStarredDashboards::CreateService, feature_category: :metrics do
- let_it_be(:user) { create(:user) }
-
- let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
- let(:service_instance) { described_class.new(user, project, dashboard_path) }
- let(:project) { create(:project) }
- let(:starred_dashboard_params) do
- {
- user: user,
- project: project,
- dashboard_path: dashboard_path
- }
- end
-
- shared_examples 'prevented starred dashboard creation' do |message|
- it 'returns error response', :aggregate_failures do
- expect(Metrics::UsersStarredDashboard).not_to receive(:new)
-
- response = service_instance.execute
-
- expect(response.status).to be :error
- expect(response.message).to eql message
- end
- end
-
- describe '.execute' do
- context 'with anonymous user' do
- it_behaves_like 'prevented starred dashboard creation', 'You are not authorized to add star to this dashboard'
- end
-
- context 'with reporter user' do
- before do
- project.add_reporter(user)
- end
-
- context 'incorrect dashboard_path' do
- let(:dashboard_path) { 'something_incorrect.yml' }
-
- it_behaves_like 'prevented starred dashboard creation', 'Dashboard with requested path can not be found'
- end
-
- context 'with valid dashboard path' do
- it 'creates starred dashboard and returns success response', :aggregate_failures do
- expect_next_instance_of(Metrics::UsersStarredDashboard, starred_dashboard_params) do |starred_dashboard|
- expect(starred_dashboard).to receive(:save).and_return true
- end
-
- response = service_instance.execute
-
- expect(response.status).to be :success
- end
-
- context 'Metrics::UsersStarredDashboard has validation errors' do
- it 'returns error response', :aggregate_failures do
- expect_next_instance_of(Metrics::UsersStarredDashboard, starred_dashboard_params) do |starred_dashboard|
- expect(starred_dashboard).to receive(:save).and_return(false)
- expect(starred_dashboard).to receive(:errors).and_return(double(messages: { base: ['Model validation error'] }))
- end
-
- response = service_instance.execute
-
- expect(response.status).to be :error
- expect(response.message).to eql(base: ['Model validation error'])
- end
- end
- end
- end
- end
-end
diff --git a/spec/services/metrics/users_starred_dashboards/delete_service_spec.rb b/spec/services/metrics/users_starred_dashboards/delete_service_spec.rb
deleted file mode 100644
index 8c4bcecc239..00000000000
--- a/spec/services/metrics/users_starred_dashboards/delete_service_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::UsersStarredDashboards::DeleteService, feature_category: :metrics do
- subject(:service_instance) { described_class.new(user, project, dashboard_path) }
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
-
- describe '#execute' do
- let_it_be(:user_starred_dashboard_1) { create(:metrics_users_starred_dashboard, user: user, project: project, dashboard_path: 'dashboard_1') }
- let_it_be(:user_starred_dashboard_2) { create(:metrics_users_starred_dashboard, user: user, project: project) }
- let_it_be(:other_user_starred_dashboard) { create(:metrics_users_starred_dashboard, project: project) }
- let_it_be(:other_project_starred_dashboard) { create(:metrics_users_starred_dashboard, user: user) }
-
- context 'without dashboard_path' do
- let(:dashboard_path) { nil }
-
- it 'does not scope user starred dashboards by dashboard path' do
- result = service_instance.execute
-
- expect(result.success?).to be_truthy
- expect(result.payload[:deleted_rows]).to be(2)
- expect(Metrics::UsersStarredDashboard.all).to contain_exactly(other_user_starred_dashboard, other_project_starred_dashboard)
- end
- end
-
- context 'with dashboard_path' do
- let(:dashboard_path) { 'dashboard_1' }
-
- it 'does scope user starred dashboards by dashboard path' do
- result = service_instance.execute
-
- expect(result.success?).to be_truthy
- expect(result.payload[:deleted_rows]).to be(1)
- expect(Metrics::UsersStarredDashboard.all).to contain_exactly(user_starred_dashboard_2, other_user_starred_dashboard, other_project_starred_dashboard)
- end
- end
- end
-end
diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
index 079c36c9613..9b46675a08e 100644
--- a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
@@ -61,6 +61,15 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository, feature_category:
expect(subject.name).to eq('blah')
end
end
+
+ context 'when name is nil and no mlflow.runName is not present' do
+ let(:tags) { nil }
+ let(:name) { nil }
+
+ it 'gives the candidate a random name' do
+ expect(subject.name).to match(/[a-z]+-[a-z]+-[a-z]+-\d+/)
+ end
+ end
end
end
diff --git a/spec/services/ml/find_or_create_experiment_service_spec.rb b/spec/services/ml/find_or_create_experiment_service_spec.rb
new file mode 100644
index 00000000000..a8c533d1320
--- /dev/null
+++ b/spec/services/ml/find_or_create_experiment_service_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ml::FindOrCreateExperimentService, feature_category: :mlops do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.first_owner }
+ let_it_be(:existing_experiment) { create(:ml_experiments, project: project, user: user) }
+
+ let(:name) { 'new_experiment' }
+
+ subject(:new_experiment) { described_class.new(project, name, user).execute }
+
+ describe '#execute' do
+ it 'creates an experiment using Ml::Experiment.find_or_create', :aggregate_failures do
+ expect(Ml::Experiment).to receive(:find_or_create).and_call_original
+
+ expect(new_experiment.name).to eq('new_experiment')
+ expect(new_experiment.project).to eq(project)
+ expect(new_experiment.user).to eq(user)
+ end
+
+ context 'when experiment already exists' do
+ let(:name) { existing_experiment.name }
+
+ it 'fetches existing experiment', :aggregate_failures do
+ expect { new_experiment }.not_to change { Ml::Experiment.count }
+
+ expect(new_experiment).to eq(existing_experiment)
+ end
+ end
+ end
+end
diff --git a/spec/services/ml/find_or_create_model_service_spec.rb b/spec/services/ml/find_or_create_model_service_spec.rb
new file mode 100644
index 00000000000..6ddae20f8d6
--- /dev/null
+++ b/spec/services/ml/find_or_create_model_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ml::FindOrCreateModelService, feature_category: :mlops do
+ let_it_be(:existing_model) { create(:ml_models) }
+ let_it_be(:another_project) { create(:project) }
+
+ subject(:create_model) { described_class.new(project, name).execute }
+
+ describe '#execute' do
+ context 'when model name does not exist in the project' do
+ let(:name) { 'new_model' }
+ let(:project) { existing_model.project }
+
+ it 'creates a model', :aggregate_failures do
+ expect { create_model }.to change { Ml::Model.count }.by(1)
+
+ expect(create_model.name).to eq(name)
+ end
+ end
+
+ context 'when model name exists but project is different' do
+ let(:name) { existing_model.name }
+ let(:project) { another_project }
+
+ it 'creates a model', :aggregate_failures do
+ expect { create_model }.to change { Ml::Model.count }.by(1)
+
+ expect(create_model.name).to eq(name)
+ end
+ end
+
+ context 'when model with name exists' do
+ let(:name) { existing_model.name }
+ let(:project) { existing_model.project }
+
+ it 'fetches existing model', :aggregate_failures do
+ expect { create_model }.to change { Ml::Model.count }.by(0)
+
+ expect(create_model).to eq(existing_model)
+ end
+ end
+ end
+end
diff --git a/spec/services/ml/find_or_create_model_version_service_spec.rb b/spec/services/ml/find_or_create_model_version_service_spec.rb
new file mode 100644
index 00000000000..1211a9b1165
--- /dev/null
+++ b/spec/services/ml/find_or_create_model_version_service_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ml::FindOrCreateModelVersionService, feature_category: :mlops do
+ let_it_be(:existing_version) { create(:ml_model_versions) }
+ let_it_be(:another_project) { create(:project) }
+
+ let(:package) { nil }
+
+ let(:params) do
+ {
+ model_name: name,
+ version: version,
+ package: package
+ }
+ end
+
+ subject(:model_version) { described_class.new(project, params).execute }
+
+ describe '#execute' do
+ context 'when model version exists' do
+ let(:name) { existing_version.name }
+ let(:version) { existing_version.version }
+ let(:project) { existing_version.project }
+
+ it 'returns existing model version', :aggregate_failures do
+ expect { model_version }.to change { Ml::ModelVersion.count }.by(0)
+ expect(model_version).to eq(existing_version)
+ end
+ end
+
+ context 'when model version does not exist' do
+ let(:project) { existing_version.project }
+ let(:name) { 'a_new_model' }
+ let(:version) { '2.0.0' }
+
+ let(:package) { create(:ml_model_package, project: project, name: name, version: version) }
+
+ it 'creates a new model version', :aggregate_failures do
+ expect { model_version }.to change { Ml::ModelVersion.count }
+
+ expect(model_version.name).to eq(name)
+ expect(model_version.version).to eq(version)
+ expect(model_version.package).to eq(package)
+ end
+ end
+ end
+end
diff --git a/spec/services/namespace_settings/update_service_spec.rb b/spec/services/namespace_settings/update_service_spec.rb
index 37cbaf19a6e..413a551ca0c 100644
--- a/spec/services/namespace_settings/update_service_spec.rb
+++ b/spec/services/namespace_settings/update_service_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe NamespaceSettings::UpdateService, feature_category: :groups_and_p
it "changes settings" do
expect { service.execute }
.to change { group.namespace_settings.default_branch_name }
- .from(nil).to(example_branch_name)
+ .from(nil).to(example_branch_name)
end
end
@@ -57,7 +57,54 @@ RSpec.describe NamespaceSettings::UpdateService, feature_category: :groups_and_p
it "updates default_branch_protection_defaults from the default_branch_protection param" do
expect { service.execute }
.to change { namespace_settings.default_branch_protection_defaults }
- .from({}).to(expected)
+ .from({}).to(expected)
+ end
+ end
+
+ context 'when default_branch_protection_defaults is updated' do
+ let(:namespace_settings) { group.namespace_settings }
+ let(:branch_protection) { ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys }
+ let(:expected) { branch_protection }
+ let(:settings) { { default_branch_protection_defaults: branch_protection } }
+
+ context 'when the user has the ability to update' do
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :update_default_branch_protection, group).and_return(true)
+ end
+
+ context 'when group is root' do
+ before do
+ allow(group).to receive(:root?).and_return(true)
+ end
+
+ it "updates default_branch_protection_defaults from the default_branch_protection param" do
+ expect { service.execute }
+ .to change { namespace_settings.default_branch_protection_defaults }
+ .from({}).to(expected)
+ end
+ end
+
+ context 'when group is not root' do
+ before do
+ allow(group).to receive(:root?).and_return(false)
+ end
+
+ it "does not update default_branch_protection_defaults and adds an error to the namespace_settings", :aggregate_failures do
+ expect { service.execute }.not_to change { namespace_settings.default_branch_protection_defaults }
+ expect(group.namespace_settings.errors[:default_branch_protection_defaults]).to include('only available on top-level groups.')
+ end
+ end
+ end
+
+ context 'when the user does not have the ability to update' do
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :update_default_branch_protection, group).and_return(false)
+ end
+
+ it "does not update default_branch_protection_defaults and adds an error to the namespace_settings", :aggregate_failures do
+ expect { service.execute }.not_to change { namespace_settings.default_branch_protection_defaults }
+ expect(group.namespace_settings.errors[:default_branch_protection_defaults]).to include('can only be changed by a group admin.')
+ end
end
end
@@ -72,7 +119,7 @@ RSpec.describe NamespaceSettings::UpdateService, feature_category: :groups_and_p
it "changes settings" do
expect { service.execute }
.to change { group.namespace_settings.resource_access_token_creation_allowed }
- .from(true).to(false)
+ .from(true).to(false)
end
end
@@ -96,8 +143,8 @@ RSpec.describe NamespaceSettings::UpdateService, feature_category: :groups_and_p
using RSpec::Parameterized::TableSyntax
where(:setting_key, :setting_changes_from, :setting_changes_to) do
- :prevent_sharing_groups_outside_hierarchy | false | true
- :new_user_signups_cap | nil | 100
+ :prevent_sharing_groups_outside_hierarchy | false | true
+ :new_user_signups_cap | nil | 100
end
with_them do
@@ -145,7 +192,7 @@ RSpec.describe NamespaceSettings::UpdateService, feature_category: :groups_and_p
it 'changes settings' do
expect { service.execute }
.to change { group.namespace_settings.public_send(setting_key) }
- .from(setting_changes_from).to(setting_changes_to)
+ .from(setting_changes_from).to(setting_changes_to)
end
end
end
diff --git a/spec/services/namespaces/package_settings/update_service_spec.rb b/spec/services/namespaces/package_settings/update_service_spec.rb
index 385fd7c130e..8a4a51e409c 100644
--- a/spec/services/namespaces/package_settings/update_service_spec.rb
+++ b/spec/services/namespaces/package_settings/update_service_spec.rb
@@ -38,6 +38,8 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
maven_duplicate_exception_regex: 'SNAPSHOT',
generic_duplicates_allowed: true,
generic_duplicate_exception_regex: 'foo',
+ nuget_duplicates_allowed: true,
+ nuget_duplicate_exception_regex: 'foo',
maven_package_requests_forwarding: true,
lock_maven_package_requests_forwarding: false,
npm_package_requests_forwarding: nil,
@@ -49,6 +51,8 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
maven_duplicate_exception_regex: 'RELEASE',
generic_duplicates_allowed: false,
generic_duplicate_exception_regex: 'bar',
+ nuget_duplicates_allowed: false,
+ nuget_duplicate_exception_regex: 'bar',
maven_package_requests_forwarding: true,
lock_maven_package_requests_forwarding: true,
npm_package_requests_forwarding: true,
@@ -98,6 +102,8 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
maven_duplicate_exception_regex: 'RELEASE',
generic_duplicates_allowed: false,
generic_duplicate_exception_regex: 'bar',
+ nuget_duplicates_allowed: false,
+ nuget_duplicate_exception_regex: 'bar',
maven_package_requests_forwarding: true,
lock_maven_package_requests_forwarding: true,
npm_package_requests_forwarding: true,
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index c51e381014d..0065fd639b8 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -132,13 +132,59 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do
end
describe '/estimate' do
- let(:note_text) { '/estimate 1h' }
+ before do
+ # reset to 10 minutes before each test
+ note.noteable.update!(time_estimate: 600)
+ end
+
+ shared_examples 'does not update time_estimate and displays the correct error message' do
+ it 'shows validation error message' do
+ content = execute(note)
+
+ expect(content).to be_empty
+ expect(note.noteable.errors[:time_estimate]).to include('must have a valid format and be greater than or equal to zero.')
+ expect(note.noteable.reload.time_estimate).to eq(600)
+ end
+ end
+
+ context 'when the time estimate is valid' do
+ let(:note_text) { '/estimate 1h' }
+
+ it 'adds time estimate to noteable' do
+ content = execute(note)
+
+ expect(content).to be_empty
+ expect(note.noteable.reload.time_estimate).to eq(3600)
+ end
+ end
+
+ context 'when the time estimate is 0' do
+ let(:note_text) { '/estimate 0' }
+
+ it 'adds time estimate to noteable' do
+ content = execute(note)
+
+ expect(content).to be_empty
+ expect(note.noteable.reload.time_estimate).to eq(0)
+ end
+ end
+
+ context 'when the time estimate is invalid' do
+ let(:note_text) { '/estimate a' }
+
+ include_examples "does not update time_estimate and displays the correct error message"
+ end
+
+ context 'when the time estimate is partially invalid' do
+ let(:note_text) { '/estimate 1d 3id' }
+
+ include_examples "does not update time_estimate and displays the correct error message"
+ end
- it 'adds time estimate to noteable' do
- content = execute(note)
+ context 'when the time estimate is negative' do
+ let(:note_text) { '/estimate -1h' }
- expect(content).to be_empty
- expect(note.noteable.time_estimate).to eq(3600)
+ include_examples "does not update time_estimate and displays the correct error message"
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 1d1dd045a09..028c3ea6610 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2895,6 +2895,7 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
describe '#review_requested_of_merge_request' do
let(:merge_request) { create(:merge_request, author: author, source_project: project, reviewers: [reviewer]) }
+ let(:mailer) { double }
let_it_be(:current_user) { create(:user) }
let_it_be(:reviewer) { create(:user) }
@@ -2917,8 +2918,16 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
should_not_email(@u_lazy_participant)
end
+ it 'deliver email immediately' do
+ allow(Notify).to receive(:request_review_merge_request_email)
+ .with(Integer, Integer, Integer, String).and_return(mailer)
+ expect(mailer).to receive(:deliver_later).with({})
+
+ notification.review_requested_of_merge_request(merge_request, current_user, reviewer)
+ end
+
it 'adds "review requested" reason for new reviewer' do
- notification.review_requested_of_merge_request(merge_request, current_user, [reviewer])
+ notification.review_requested_of_merge_request(merge_request, current_user, reviewer)
merge_request.reviewers.each do |reviewer|
email = find_email_for(reviewer)
@@ -3380,6 +3389,27 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
+ describe '#member_about_to_expire' do
+ let_it_be(:group_member) { create(:group_member, expires_at: 7.days.from_now.to_date) }
+ let_it_be(:project_member) { create(:project_member, expires_at: 7.days.from_now.to_date) }
+
+ context "with group member" do
+ it 'emails the user that their group membership will be expired' do
+ notification.member_about_to_expire(group_member)
+
+ should_email(group_member.user)
+ end
+ end
+
+ context "with project member" do
+ it 'emails the user that their project membership will be expired' do
+ notification.member_about_to_expire(project_member)
+
+ should_email(project_member.user)
+ end
+ end
+ end
+
def create_member!
create(:project_member, user: added_user, project: project)
end
diff --git a/spec/services/packages/composer/create_package_service_spec.rb b/spec/services/packages/composer/create_package_service_spec.rb
index 78d5d76fe4f..a2f5b973fd9 100644
--- a/spec/services/packages/composer/create_package_service_spec.rb
+++ b/spec/services/packages/composer/create_package_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Packages::Composer::CreatePackageService, feature_category: :pack
context 'with a tag' do
let(:tag) { project.repository.find_tag('v1.2.3') }
- before(:all) do
+ before_all do
project.repository.add_tag(user, 'v1.2.3', 'master')
end
diff --git a/spec/services/packages/go/create_package_service_spec.rb b/spec/services/packages/go/create_package_service_spec.rb
index f552af81077..fcef06bfa03 100644
--- a/spec/services/packages/go/create_package_service_spec.rb
+++ b/spec/services/packages/go/create_package_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Packages::Go::CreatePackageService, feature_category: :package_re
let_it_be(:project) { create :project_empty_repo, path: 'my-go-lib' }
let_it_be(:mod) { create :go_module, project: project }
- before :all do
+ before_all do
create :go_module_commit, :module, project: project, tag: 'v1.0.0'
end
diff --git a/spec/services/packages/ml_model/create_package_file_service_spec.rb b/spec/services/packages/ml_model/create_package_file_service_spec.rb
index d749aee227a..32754279e17 100644
--- a/spec/services/packages/ml_model/create_package_file_service_spec.rb
+++ b/spec/services/packages/ml_model/create_package_file_service_spec.rb
@@ -41,19 +41,22 @@ RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :m
.to change { project.packages.ml_model.count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
.and change { Packages::PackageFileBuildInfo.count }.by(0)
+ .and change { Ml::ModelVersion.count }.by(1)
new_model = project.packages.ml_model.last
package_file = new_model.package_files.last
+ new_model_version = Ml::ModelVersion.last
- aggregate_failures do
- expect(new_model.name).to eq('new_model')
- expect(new_model.version).to eq('1.0.0')
- expect(new_model.status).to eq('default')
- expect(package_file.package).to eq(new_model)
- expect(package_file.file_name).to eq(file_name)
- expect(package_file.size).to eq(file.size)
- expect(package_file.file_sha256).to eq(sha256)
- end
+ expect(new_model.name).to eq('new_model')
+ expect(new_model.version).to eq('1.0.0')
+ expect(new_model.status).to eq('default')
+ expect(package_file.package).to eq(new_model)
+ expect(package_file.file_name).to eq(file_name)
+ expect(package_file.size).to eq(file.size)
+ expect(package_file.file_sha256).to eq(sha256)
+ expect(new_model_version.name).to eq('new_model')
+ expect(new_model_version.version).to eq('1.0.0')
+ expect(new_model_version.package).to eq(new_model)
end
end
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index caa4e42d002..0459588bf8d 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -304,5 +304,15 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
it_behaves_like 'raising an', described_class::InvalidMetadataError, with_message: params[:error_message]
end
end
+
+ context 'with an invalid zip file' do
+ before do
+ allow_next_instance_of(::Packages::Nuget::MetadataExtractionService) do |instance|
+ allow(instance).to receive(:execute).and_raise(Zip::Error)
+ end
+ end
+
+ it_behaves_like 'raising an', described_class::ZipError, with_message: 'Could not open the .nupkg file'
+ end
end
end
diff --git a/spec/services/packages/rubygems/process_gem_service_spec.rb b/spec/services/packages/rubygems/process_gem_service_spec.rb
index a1b4eae9655..3304702b348 100644
--- a/spec/services/packages/rubygems/process_gem_service_spec.rb
+++ b/spec/services/packages/rubygems/process_gem_service_spec.rb
@@ -151,5 +151,14 @@ RSpec.describe Packages::Rubygems::ProcessGemService, feature_category: :package
expect(package_file.reload.file_name).to eq('package.gem')
end
end
+
+ context 'with invalid metadata' do
+ include_context 'with invalid Rubygems metadata'
+
+ it 'raises the correct error' do
+ expect { subject }
+ .to raise_error(::Packages::Rubygems::ProcessGemService::InvalidMetadataError, 'Invalid metadata')
+ end
+ end
end
end
diff --git a/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb b/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb
index 3e32200cc77..8a66efb1585 100644
--- a/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb
+++ b/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb
@@ -14,5 +14,12 @@ RSpec.describe PersonalAccessTokens::RevokeTokenFamilyService, feature_category:
expect(response).to be_success
expect(token_1.reload).to be_revoked
end
+
+ it 'does not revoke any active token not in the pat family' do
+ unrelated_token = create(:personal_access_token)
+
+ expect(response).to be_success
+ expect(unrelated_token.reload).to be_active
+ end
end
end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 20d86f74f86..167baed06e7 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -243,7 +243,7 @@ RSpec.describe PostReceiveService, feature_category: :team_planning do
context 'nil broadcast message' do
it 'does not output a broadcast message' do
- allow(BroadcastMessage).to receive(:current).and_return(nil)
+ allow(System::BroadcastMessage).to receive(:current).and_return(nil)
expect(has_alert_messages?(subject)).to be_falsey
end
diff --git a/spec/services/product_analytics/build_activity_graph_service_spec.rb b/spec/services/product_analytics/build_activity_graph_service_spec.rb
index cd1bc42e156..2eb35523da7 100644
--- a/spec/services/product_analytics/build_activity_graph_service_spec.rb
+++ b/spec/services/product_analytics/build_activity_graph_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProductAnalytics::BuildActivityGraphService, feature_category: :product_analytics do
+RSpec.describe ProductAnalytics::BuildActivityGraphService, feature_category: :product_analytics_data_management do
let_it_be(:project) { create(:project) }
let_it_be(:time_now) { Time.zone.now }
let_it_be(:time_ago) { Time.zone.now - 5.days }
diff --git a/spec/services/product_analytics/build_graph_service_spec.rb b/spec/services/product_analytics/build_graph_service_spec.rb
index ee0e2190501..13c7206241c 100644
--- a/spec/services/product_analytics/build_graph_service_spec.rb
+++ b/spec/services/product_analytics/build_graph_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProductAnalytics::BuildGraphService, feature_category: :product_analytics do
+RSpec.describe ProductAnalytics::BuildGraphService, feature_category: :product_analytics_data_management do
let_it_be(:project) { create(:project) }
let_it_be(:events) do
diff --git a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
index 78343490e3a..295dc94400a 100644
--- a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
@@ -325,11 +325,7 @@ RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :c
Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:pipelined).and_call_original
- times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
-
- # Set 2 instances as redis is a MultiStore.
- # Redis Cluster uses only 1 pipeline as the keys have hash-tags
- expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
selected_tags.each do |tag_name, created_at, ex|
expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex).and_call_original
end
@@ -376,11 +372,7 @@ RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :c
expect(redis).to receive(:mget).and_call_original
expect(redis).to receive(:pipelined).and_call_original
- times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
-
- # Set 2 instances as redis is a MultiStore
- # Redis Cluster uses only 1 pipeline as the keys have hash-tags
- expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
expect(pipeline).to receive(:set).and_call_original
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 8a737e4df56..683e438eb08 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -628,11 +628,13 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an
context 'repository creation' do
it 'synchronously creates the repository' do
expect_next_instance_of(Project) do |instance|
- expect(instance).to receive(:create_repository)
+ expect(instance).to receive(:create_repository).and_return(true)
end
project = create_project(user, opts)
+
expect(project).to be_valid
+ expect(project).to be_persisted
expect(project.owner).to eq(user)
expect(project.namespace).to eq(user.namespace)
expect(project.project_namespace).to be_in_sync_with_project(project)
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index ccf58964c71..0210e101e5f 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -115,6 +115,23 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect(project.reload.delete_error).to be_present
expect(project.delete_error).to match(error_message)
end
+
+ context 'when parent group visibility was made more restrictive while project was marked "pending deletion"' do
+ let!(:group) { create(:group, :public) }
+ let!(:project) { create(:project, :repository, :public, namespace: group) }
+
+ it 'sets the project visibility level to that of the parent group' do
+ group.add_owner(user)
+ project.group.update_attribute(:visibility_level, Gitlab::VisibilityLevel::INTERNAL)
+
+ expect(project.reload.visibility_level).to be(Gitlab::VisibilityLevel::PUBLIC)
+ expect(project.group.visibility_level).to be(Gitlab::VisibilityLevel::INTERNAL)
+
+ destroy_project(project, user, {})
+
+ expect(project.reload.visibility_level).to be(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
end
context "deleting a project with merge requests" do
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index 4ba72b5870d..4d55f310974 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -380,7 +380,7 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
end
context 'when a project is already forked' do
- it 'creates a new poolresository after the project is moved to a new shard' do
+ it 'creates a new pool repository after the project is moved to a new shard' do
project = create(:project, :public, :repository)
fork_before_move = fork_project(project, nil, using_service: true)
@@ -393,6 +393,9 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
.and_return(::Gitlab::Git::BLANK_SHA)
+ allow_next_instance_of(Gitlab::Git::ObjectPool) do |object_pool|
+ allow(object_pool).to receive(:link)
+ end
storage_move = create(
:project_repository_storage_move,
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index 04c43dff2dc..b01e64439ec 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -120,120 +120,88 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
describe '#project_members' do
subject(:usernames) { service.project_members.map { |member| member[:username] } }
- shared_examples 'return project members' do
- context 'when there is a project in group namespace' do
- let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group) }
+ context 'when there is a project in group namespace' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:public_project, reload: true) { create(:project, :public, namespace: public_group) }
- let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
- let(:service) { described_class.new(public_project, create(:user)) }
-
- before do
- public_group.add_owner(public_group_owner)
- end
+ let(:service) { described_class.new(public_project, create(:user)) }
- it 'returns members of a group' do
- expect(usernames).to include(public_group_owner.username)
- end
+ before do
+ public_group.add_owner(public_group_owner)
end
- context 'when there is a private group and a public project' do
- let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:private_group) { create(:group, :private, :nested) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group) }
-
- let_it_be(:project_issue) { create(:issue, project: public_project) }
-
- let_it_be(:public_group_owner) { create(:user) }
- let_it_be(:private_group_member) { create(:user) }
- let_it_be(:public_project_maintainer) { create(:user) }
- let_it_be(:private_group_owner) { create(:user) }
-
- let_it_be(:group_ancestor_owner) { create(:user) }
-
- before_all do
- public_group.add_owner public_group_owner
- private_group.add_developer private_group_member
- public_project.add_maintainer public_project_maintainer
-
- private_group.add_owner private_group_owner
- private_group.parent.add_owner group_ancestor_owner
- end
+ it 'returns members of a group' do
+ expect(usernames).to include(public_group_owner.username)
+ end
+ end
- context 'when the private group is invited to the public project' do
- before_all do
- create(:project_group_link, group: private_group, project: public_project)
- end
+ context 'when there is a private group and a public project' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:private_group) { create(:group, :private, :nested) }
+ let_it_be(:public_project, reload: true) { create(:project, :public, namespace: public_group) }
- context 'when a user who is outside the public project and the private group is signed in' do
- let(:service) { described_class.new(public_project, create(:user)) }
+ let_it_be(:project_issue) { create(:issue, project: public_project) }
- it 'does not return the private group' do
- expect(usernames).not_to include(private_group.name)
- end
+ let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:private_group_member) { create(:user) }
+ let_it_be(:public_project_maintainer) { create(:user) }
+ let_it_be(:private_group_owner) { create(:user) }
- it 'does not return private group members' do
- expect(usernames).not_to include(private_group_member.username)
- end
+ let_it_be(:group_ancestor_owner) { create(:user) }
- it 'returns the project maintainer' do
- expect(usernames).to include(public_project_maintainer.username)
- end
+ before_all do
+ public_group.add_owner public_group_owner
+ private_group.add_developer private_group_member
+ public_project.add_maintainer public_project_maintainer
- it 'returns project members from an invited public group' do
- invited_public_group = create(:group, :public)
- invited_public_group.add_owner create(:user)
+ private_group.add_owner private_group_owner
+ private_group.parent.add_owner group_ancestor_owner
+ end
- create(:project_group_link, group: invited_public_group, project: public_project)
+ context 'when the private group is invited to the public project' do
+ before_all do
+ create(:project_group_link, group: private_group, project: public_project)
+ end
- expect(usernames).to include(invited_public_group.users.first.username)
- end
+ let(:service) { described_class.new(public_project, create(:user)) }
- it 'does not return ancestors of the private group' do
- expect(usernames).not_to include(group_ancestor_owner.username)
- end
- end
+ it 'does not return the private group' do
+ expect(usernames).not_to include(private_group.name)
+ end
- context 'when public project maintainer is signed in' do
- let(:service) { described_class.new(public_project, public_project_maintainer) }
+ it 'returns private group members' do
+ expect(usernames).to include(private_group_member.username)
+ end
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
- end
+ it 'returns the project maintainer' do
+ expect(usernames).to include(public_project_maintainer.username)
+ end
- it 'returns members of the ancestral groups of the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
- end
- end
+ it 'returns project members from an invited public group' do
+ invited_public_group = create(:group, :public)
+ invited_public_group.add_owner create(:user)
- context 'when private group owner is signed in' do
- let(:service) { described_class.new(public_project, private_group_owner) }
+ create(:project_group_link, group: invited_public_group, project: public_project)
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
- end
+ expect(usernames).to include(invited_public_group.users.first.username)
+ end
- it 'returns ancestors of the the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
- end
- end
+ it 'returns members of the ancestral groups of the private group' do
+ expect(usernames).to include(group_ancestor_owner.username)
+ end
- context 'when the namespace owner of the public project is signed in' do
- let(:service) { described_class.new(public_project, public_group_owner) }
+ it 'returns invited group members of the private group' do
+ invited_group = create(:group, :public)
+ create(:group_group_link, shared_group: private_group, shared_with_group: invited_group)
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
- end
+ other_user = create(:user)
+ invited_group.add_guest(other_user)
- it 'does not return members of the ancestral groups of the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
- end
- end
+ expect(usernames).to include(other_user.username)
end
end
end
-
- it_behaves_like 'return project members'
end
end
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index cc1f83ddc2b..73932887cd9 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -17,91 +17,12 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :i
subject { service.execute(token_input) }
context 'with valid payload' do
- let_it_be(:alert_firing) { create(:prometheus_alert, project: project) }
- let_it_be(:alert_resolved) { create(:prometheus_alert, project: project) }
- let_it_be(:cluster, reload: true) { create(:cluster, :provided_by_user, projects: [project]) }
-
- let(:payload_raw) { prometheus_alert_payload(firing: [alert_firing], resolved: [alert_resolved]) }
+ let(:payload_raw) { prometheus_alert_payload(firing: ['Alert A'], resolved: ['Alert B']) }
let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
let(:payload_alert_firing) { payload_raw['alerts'].first }
let(:token) { 'token' }
let(:source) { 'Prometheus' }
- context 'with environment specific clusters' do
- let(:prd_cluster) do
- cluster
- end
-
- let(:stg_cluster) do
- create(:cluster, :provided_by_user, projects: [project], enabled: true, environment_scope: 'stg/*')
- end
-
- let(:stg_environment) do
- create(:environment, project: project, name: 'stg/1')
- end
-
- let(:alert_firing) do
- create(:prometheus_alert, project: project, environment: stg_environment)
- end
-
- before do
- create(:clusters_integrations_prometheus, cluster: prd_cluster, alert_manager_token: token)
- create(:clusters_integrations_prometheus, cluster: stg_cluster, alert_manager_token: nil)
- end
-
- context 'without token' do
- let(:token_input) { nil }
-
- include_examples 'processes one firing and one resolved prometheus alerts'
- end
-
- context 'with token' do
- it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
- end
- end
-
- context 'with project specific cluster using prometheus integration' do
- where(:cluster_enabled, :integration_enabled, :configured_token, :token_input, :result) do
- true | true | token | token | :success
- true | true | nil | nil | :success
- true | true | token | 'x' | :failure
- true | true | token | nil | :failure
- true | false | token | token | :failure
- false | true | token | token | :failure
- false | nil | nil | token | :failure
- end
-
- with_them do
- before do
- cluster.update!(enabled: cluster_enabled)
-
- unless integration_enabled.nil?
- create(
- :clusters_integrations_prometheus,
- cluster: cluster,
- enabled: integration_enabled,
- alert_manager_token: configured_token
- )
- end
- end
-
- case result = params[:result]
- when :success
- include_examples 'processes one firing and one resolved prometheus alerts'
- when :failure
- it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
- else
- raise "invalid result: #{result.inspect}"
- end
- end
- end
-
- context 'without project specific cluster' do
- let_it_be(:cluster) { create(:cluster, enabled: true) }
-
- it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
- end
-
context 'with manual prometheus installation' do
where(:alerting_setting, :configured_token, :token_input, :result) do
true | token | token | :success
@@ -230,7 +151,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :i
context 'with multiple firing alerts and resolving alerts' do
let(:payload_raw) do
- prometheus_alert_payload(firing: [alert_firing, alert_firing], resolved: [alert_resolved])
+ prometheus_alert_payload(firing: ['Alert A', 'Alert A'], resolved: ['Alert B'])
end
it 'processes Prometheus alerts' do
@@ -248,7 +169,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :i
context 'when payload exceeds max amount of processable alerts' do
# We are defining 2 alerts in payload_raw above
let(:max_alerts) { 1 }
- let(:fingerprint) { prometheus_alert_payload_fingerprint(alert_resolved) }
+ let(:fingerprint) { prometheus_alert_payload_fingerprint('Alert A') }
before do
stub_const("#{described_class}::PROCESS_MAX_ALERTS", max_alerts)
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 46fe7d7bbbe..1ddf6168c07 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -570,12 +570,12 @@ RSpec.describe Projects::TransferService, feature_category: :groups_and_projects
using RSpec::Parameterized::TableSyntax
where(:project_shared_runners_enabled, :shared_runners_setting, :expected_shared_runners_enabled) do
- true | :disabled_and_unoverridable | false
- false | :disabled_and_unoverridable | false
- true | :disabled_and_overridable | true
- false | :disabled_and_overridable | false
- true | :shared_runners_enabled | true
- false | :shared_runners_enabled | false
+ true | :shared_runners_disabled_and_unoverridable | false
+ false | :shared_runners_disabled_and_unoverridable | false
+ true | :shared_runners_disabled_and_overridable | true
+ false | :shared_runners_disabled_and_overridable | false
+ true | :shared_runners_enabled | true
+ false | :shared_runners_enabled | false
end
with_them do
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index af920d51776..d3972009d38 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -12,10 +12,19 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
before do
allow(Time).to receive(:now).and_return(time)
- allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%w[default test_second_storage])
+
+ stub_storage_settings(
+ 'test_second_storage' => {
+ 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
+ 'path' => TestEnv::SECOND_STORAGE_PATH
+ }
+ )
end
context 'without wiki and design repository' do
+ let!(:shard_default) { create(:shard, name: 'default') }
+ let!(:shard_second_storage) { create(:shard, name: 'test_second_storage') }
+
let(:project) { create(:project, :repository, wiki_enabled: false) }
let(:destination) { 'test_second_storage' }
let(:repository_storage_move) { create(:project_repository_storage_move, :scheduled, container: project, destination_storage_name: destination) }
@@ -23,6 +32,12 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
let(:project_repository_double) { double(:repository) }
let(:original_project_repository_double) { double(:repository) }
+ let(:object_pool_double) { double(:object_pool, repository: object_pool_repository_double) }
+ let(:object_pool_repository_double) { double(:repository) }
+
+ let(:original_object_pool_double) { double(:object_pool, repository: original_object_pool_repository_double) }
+ let(:original_object_pool_repository_double) { double(:repository) }
+
before do
allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
@@ -33,6 +48,17 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
allow(Gitlab::Git::Repository).to receive(:new)
.with('default', project.repository.raw.relative_path, nil, nil)
.and_return(original_project_repository_double)
+
+ allow(Gitlab::Git::ObjectPool).to receive(:new).and_call_original
+ allow(Gitlab::Git::ObjectPool).to receive(:new)
+ .with('test_second_storage', anything, anything, anything)
+ .and_return(object_pool_double)
+ allow(Gitlab::Git::ObjectPool).to receive(:new)
+ .with('default', anything, anything, anything)
+ .and_return(original_object_pool_double)
+
+ allow(original_object_pool_double).to receive(:create)
+ allow(object_pool_double).to receive(:create)
end
context 'when the move succeeds' do
@@ -124,25 +150,138 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
end
end
- context 'when a object pool was joined' do
- let!(:pool) { create(:pool_repository, :ready, source_project: project) }
+ context 'with repository pool' do
+ let(:shard_from) { shard_default }
+ let(:shard_to) { shard_second_storage }
+ let(:old_object_pool_checksum) { 'abcd' }
+ let(:new_object_pool_checksum) { old_object_pool_checksum }
- it 'leaves the pool' do
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
+ before do
+ allow(project_repository_double).to receive(:replicate).with(project.repository.raw)
+ allow(project_repository_double).to receive(:checksum).and_return(checksum)
+ allow(original_project_repository_double).to receive(:remove)
- expect(project_repository_double).to receive(:replicate)
- .with(project.repository.raw)
- expect(project_repository_double).to receive(:checksum)
- .and_return(checksum)
- expect(original_project_repository_double).to receive(:remove)
+ allow(object_pool_repository_double).to receive(:replicate).with(original_object_pool_repository_double)
+ allow(object_pool_repository_double).to receive(:checksum).and_return(new_object_pool_checksum)
+ allow(original_object_pool_repository_double).to receive(:checksum).and_return(old_object_pool_checksum)
- result = subject.execute
- project.reload
+ allow(object_pool_double).to receive(:link) do |repository|
+ expect(repository.storage).to eq 'test_second_storage'
+ end
+ end
- expect(result).to be_success
- expect(project.repository_storage).to eq('test_second_storage')
- expect(project.reload_pool_repository).to be_nil
+ context 'when project had a repository pool' do
+ let!(:pool_repository) { create(:pool_repository, :ready, shard: shard_from, source_project: project) }
+
+ it 'creates a new repository pool and connects project to it' do
+ result = subject.execute
+ expect(result).to be_success
+
+ project.reload.cleanup
+
+ new_pool_repository = project.pool_repository
+
+ expect(new_pool_repository).not_to eq(pool_repository)
+ expect(new_pool_repository.shard).to eq(shard_second_storage)
+ expect(new_pool_repository.state).to eq('ready')
+ expect(new_pool_repository.disk_path).to eq(pool_repository.disk_path)
+ expect(new_pool_repository.source_project).to eq(project)
+
+ expect(object_pool_double).to have_received(:link).with(project.repository.raw)
+ end
+
+ context 'when feature flag replicate_object_pool_on_move is disabled' do
+ before do
+ stub_feature_flags(replicate_object_pool_on_move: false)
+ end
+
+ it 'just moves the repository without the object pool' do
+ result = subject.execute
+ expect(result).to be_success
+
+ project.reload.cleanup
+
+ new_pool_repository = project.pool_repository
+
+ expect(new_pool_repository).to eq(pool_repository)
+ expect(new_pool_repository.shard).to eq(shard_default)
+ expect(new_pool_repository.state).to eq('ready')
+ expect(new_pool_repository.source_project).to eq(project)
+
+ expect(object_pool_repository_double).not_to have_received(:replicate)
+ expect(object_pool_double).not_to have_received(:link)
+ end
+ end
+
+ context 'when new shard has a repository pool' do
+ let!(:new_pool_repository) { create(:pool_repository, :ready, shard: shard_to, source_project: project) }
+
+ it 'connects project to it' do
+ result = subject.execute
+ expect(result).to be_success
+
+ project.reload.cleanup
+
+ project_pool_repository = project.pool_repository
+
+ expect(project_pool_repository).to eq(new_pool_repository)
+ expect(object_pool_double).to have_received(:link).with(project.repository.raw)
+ end
+ end
+
+ context 'when repository does not exist' do
+ let(:project) { create(:project) }
+ let(:checksum) { nil }
+
+ it 'does not mirror object pool' do
+ result = subject.execute
+ expect(result).to be_success
+
+ expect(object_pool_repository_double).not_to have_received(:replicate)
+ end
+ end
+
+ context 'when project belongs to repository pool, but not as a root project' do
+ let!(:another_project) { create(:project, :repository) }
+ let!(:pool_repository) { create(:pool_repository, :ready, shard: shard_from, source_project: another_project) }
+
+ before do
+ project.update!(pool_repository: pool_repository)
+ end
+
+ it 'creates a new repository pool and connects project to it' do
+ result = subject.execute
+ expect(result).to be_success
+
+ project.reload.cleanup
+
+ new_pool_repository = project.pool_repository
+
+ expect(new_pool_repository).not_to eq(pool_repository)
+ expect(new_pool_repository.shard).to eq(shard_second_storage)
+ expect(new_pool_repository.state).to eq('ready')
+ expect(new_pool_repository.source_project).to eq(another_project)
+
+ expect(object_pool_double).to have_received(:link).with(project.repository.raw)
+ end
+ end
+
+ context 'when object pool checksum does not match' do
+ let(:new_object_pool_checksum) { 'not_match' }
+
+ it 'raises an error and does not change state' do
+ original_count = PoolRepository.count
+
+ expect { subject.execute }.to raise_error(UpdateRepositoryStorageMethods::Error)
+
+ project.reload
+
+ expect(PoolRepository.count).to eq(original_count)
+
+ expect(project.pool_repository).to eq(pool_repository)
+ expect(project.repository.shard).to eq('default')
+ end
+ end
end
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index bfcd2be6ce4..d9090b87514 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -499,24 +499,24 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
expect(result).to eq({
status: :error,
- message: "Name can contain only letters, digits, emojis, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'."
+ message: "Name can contain only letters, digits, emoji, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'."
})
end
end
- context 'when updating #emails_disabled' do
+ context 'when updating #emails_enabled' do
it 'updates the attribute for the project owner' do
- expect { update_project(project, user, emails_disabled: true) }
- .to change { project.emails_disabled }
- .to(true)
+ expect { update_project(project, user, emails_enabled: false) }
+ .to change { project.emails_enabled }
+ .to(false)
end
it 'does not update when not project owner' do
maintainer = create(:user)
project.add_member(maintainer, :maintainer)
- expect { update_project(project, maintainer, emails_disabled: true) }
- .not_to change { project.emails_disabled }
+ expect { update_project(project, maintainer, emails_enabled: false) }
+ .not_to change { project.emails_enabled }
end
end
@@ -794,104 +794,69 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
let(:group) { create(:group, path: 'group') }
let(:project) { create(:project, path: 'project', group: group) }
- context 'with pages_unique_domain feature flag disabled' do
- before do
- stub_feature_flags(pages_unique_domain: false)
- end
-
- it 'does not change pages unique domain' do
- expect(project)
- .to receive(:update)
- .with({ project_setting_attributes: { has_confluence: true } })
- .and_call_original
-
- expect do
- update_project(project, user, project_setting_attributes: {
- has_confluence: true,
- pages_unique_domain_enabled: true
- })
- end.not_to change { project.project_setting.pages_unique_domain_enabled }
- end
-
- it 'does not remove other attributes' do
- expect(project)
- .to receive(:update)
- .with({ name: 'True' })
- .and_call_original
+ it 'updates project pages unique domain' do
+ expect do
+ update_project(project, user, project_setting_attributes: {
+ pages_unique_domain_enabled: true
+ })
+ end.to change { project.project_setting.pages_unique_domain_enabled }
- update_project(project, user, name: 'True')
- end
+ expect(project.project_setting.pages_unique_domain_enabled).to eq true
+ expect(project.project_setting.pages_unique_domain).to match %r{project-group-\w+}
end
- context 'with pages_unique_domain feature flag enabled' do
- before do
- stub_feature_flags(pages_unique_domain: true)
- end
+ it 'does not changes unique domain when it already exists' do
+ project.project_setting.update!(
+ pages_unique_domain_enabled: false,
+ pages_unique_domain: 'unique-domain'
+ )
- it 'updates project pages unique domain' do
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
- end.to change { project.project_setting.pages_unique_domain_enabled }
+ expect do
+ update_project(project, user, project_setting_attributes: {
+ pages_unique_domain_enabled: true
+ })
+ end.to change { project.project_setting.pages_unique_domain_enabled }
- expect(project.project_setting.pages_unique_domain_enabled).to eq true
- expect(project.project_setting.pages_unique_domain).to match %r{project-group-\w+}
- end
+ expect(project.project_setting.pages_unique_domain_enabled).to eq true
+ expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
+ end
- it 'does not changes unique domain when it already exists' do
- project.project_setting.update!(
- pages_unique_domain_enabled: false,
- pages_unique_domain: 'unique-domain'
- )
+ it 'does not changes unique domain when it disabling unique domain' do
+ project.project_setting.update!(
+ pages_unique_domain_enabled: true,
+ pages_unique_domain: 'unique-domain'
+ )
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
- end.to change { project.project_setting.pages_unique_domain_enabled }
+ expect do
+ update_project(project, user, project_setting_attributes: {
+ pages_unique_domain_enabled: false
+ })
+ end.not_to change { project.project_setting.pages_unique_domain }
- expect(project.project_setting.pages_unique_domain_enabled).to eq true
- expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
- end
+ expect(project.project_setting.pages_unique_domain_enabled).to eq false
+ expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
+ end
- it 'does not changes unique domain when it disabling unique domain' do
- project.project_setting.update!(
+ context 'when there is another project with the unique domain' do
+ it 'fails pages unique domain already exists' do
+ create(
+ :project_setting,
pages_unique_domain_enabled: true,
pages_unique_domain: 'unique-domain'
)
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: false
- })
- end.not_to change { project.project_setting.pages_unique_domain }
+ allow(Gitlab::Pages::RandomDomain)
+ .to receive(:generate)
+ .and_return('unique-domain')
- expect(project.project_setting.pages_unique_domain_enabled).to eq false
- expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
- end
+ result = update_project(project, user, project_setting_attributes: {
+ pages_unique_domain_enabled: true
+ })
- context 'when there is another project with the unique domain' do
- it 'fails pages unique domain already exists' do
- create(
- :project_setting,
- pages_unique_domain_enabled: true,
- pages_unique_domain: 'unique-domain'
- )
-
- allow(Gitlab::Pages::RandomDomain)
- .to receive(:generate)
- .and_return('unique-domain')
-
- result = update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
-
- expect(result).to eq(
- status: :error,
- message: 'Project setting pages unique domain has already been taken'
- )
- end
+ expect(result).to eq(
+ status: :error,
+ message: 'Project setting pages unique domain has already been taken'
+ )
end
end
end
diff --git a/spec/services/projects/update_statistics_service_spec.rb b/spec/services/projects/update_statistics_service_spec.rb
index 762378c93ec..f6565853460 100644
--- a/spec/services/projects/update_statistics_service_spec.rb
+++ b/spec/services/projects/update_statistics_service_spec.rb
@@ -23,13 +23,13 @@ RSpec.describe Projects::UpdateStatisticsService, feature_category: :groups_and_
let_it_be(:project) { create(:project) }
where(:statistics, :method_caches) do
- [] | %i(size commit_count)
- ['repository_size'] | [:size]
- [:repository_size] | [:size]
+ [] | %i(size recent_objects_size commit_count)
+ ['repository_size'] | %i(size recent_objects_size)
+ [:repository_size] | %i(size recent_objects_size)
[:lfs_objects_size] | nil
[:commit_count] | [:commit_count]
- [:repository_size, :commit_count] | %i(size commit_count)
- [:repository_size, :commit_count, :lfs_objects_size] | %i(size commit_count)
+ [:repository_size, :commit_count] | %i(size recent_objects_size commit_count)
+ [:repository_size, :commit_count, :lfs_objects_size] | %i(size recent_objects_size commit_count)
end
with_them do
diff --git a/spec/services/prometheus/proxy_service_spec.rb b/spec/services/prometheus/proxy_service_spec.rb
deleted file mode 100644
index f71662f62ad..00000000000
--- a/spec/services/prometheus/proxy_service_spec.rb
+++ /dev/null
@@ -1,240 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Prometheus::ProxyService, feature_category: :metrics do
- include ReactiveCachingHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- describe 'configuration' do
- it 'ReactiveCaching refresh is not needed' do
- expect(described_class.reactive_cache_refresh_interval).to be > described_class.reactive_cache_lifetime
- end
- end
-
- describe '#initialize' do
- let(:params) { ActionController::Parameters.new(query: '1').permit! }
-
- it 'initializes attributes' do
- result = described_class.new(environment, 'GET', 'query', params)
-
- expect(result.proxyable).to eq(environment)
- expect(result.method).to eq('GET')
- expect(result.path).to eq('query')
- expect(result.params).to eq('query' => '1')
- end
-
- it 'converts ActionController::Parameters into hash' do
- result = described_class.new(environment, 'GET', 'query', params)
-
- expect(result.params).to be_an_instance_of(Hash)
- end
-
- context 'with unknown params' do
- let(:params) { ActionController::Parameters.new(query: '1', other_param: 'val').permit! }
-
- it 'filters unknown params' do
- result = described_class.new(environment, 'GET', 'query', params)
-
- expect(result.params).to eq('query' => '1')
- end
- end
-
- context 'with series method' do
- let(:params) do
- ActionController::Parameters.new(
- match: ['1'],
- start: "2020-06-11T10:15:51Z",
- end: "2020-06-11T11:16:06Z",
- unknown_param: 'val'
- ).permit!
- end
-
- it 'allows match, start and end parameters' do
- result = described_class.new(environment, 'GET', 'series', params)
-
- expect(result.params).to eq(
- 'match' => ['1'],
- 'start' => "2020-06-11T10:15:51Z",
- 'end' => "2020-06-11T11:16:06Z"
- )
- end
- end
- end
-
- describe '#execute' do
- let(:prometheus_adapter) { instance_double(::Integrations::Prometheus) }
- let(:params) { ActionController::Parameters.new(query: '1').permit! }
-
- subject { described_class.new(environment, 'GET', 'query', params) }
-
- context 'when prometheus_adapter is nil' do
- before do
- allow(environment).to receive(:prometheus_adapter).and_return(nil)
- end
-
- it 'returns error' do
- expect(subject.execute).to eq(
- status: :error,
- message: 'No prometheus server found',
- http_status: :service_unavailable
- )
- end
- end
-
- context 'when prometheus_adapter cannot query' do
- before do
- allow(environment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:can_query?).and_return(false)
- end
-
- it 'returns error' do
- expect(subject.execute).to eq(
- status: :error,
- message: 'No prometheus server found',
- http_status: :service_unavailable
- )
- end
- end
-
- context 'cannot proxy' do
- subject { described_class.new(environment, 'POST', 'garbage', params) }
-
- it 'returns error' do
- expect(subject.execute).to eq(
- message: 'Proxy support for this API is not available currently',
- status: :error
- )
- end
- end
-
- context 'with caching', :use_clean_rails_memory_store_caching do
- let(:return_value) { { 'http_status' => 200, 'body' => 'body' } }
-
- let(:opts) do
- [environment.class.name, environment.id, 'GET', 'query', { 'query' => '1' }]
- end
-
- before do
- allow(environment).to receive(:prometheus_adapter)
- .and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:can_query?).and_return(true)
- end
-
- context 'when value present in cache' do
- before do
- stub_reactive_cache(subject, return_value, opts)
- end
-
- it 'returns cached value' do
- result = subject.execute
-
- expect(result[:http_status]).to eq(return_value[:http_status])
- expect(result[:body]).to eq(return_value[:body])
- end
- end
-
- context 'when value not present in cache' do
- it 'returns nil' do
- expect(ExternalServiceReactiveCachingWorker)
- .to receive(:perform_async)
- .with(subject.class, subject.id, *opts)
-
- result = subject.execute
-
- expect(result).to eq(nil)
- end
- end
- end
-
- context 'call prometheus api' do
- let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) }
-
- before do
- synchronous_reactive_cache(subject)
-
- allow(environment).to receive(:prometheus_adapter)
- .and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:can_query?).and_return(true)
- allow(prometheus_adapter).to receive(:prometheus_client)
- .and_return(prometheus_client)
- end
-
- context 'connection to prometheus server succeeds' do
- let(:rest_client_response) { instance_double(RestClient::Response) }
- let(:prometheus_http_status_code) { 400 }
-
- let(:response_body) do
- '{"status":"error","errorType":"bad_data","error":"parse error at char 1: no expression found in input"}'
- end
-
- before do
- allow(prometheus_client).to receive(:proxy).and_return(rest_client_response)
-
- allow(rest_client_response).to receive(:code)
- .and_return(prometheus_http_status_code)
- allow(rest_client_response).to receive(:body).and_return(response_body)
- end
-
- it 'returns the http status code and body from prometheus' do
- expect(subject.execute).to eq(
- http_status: prometheus_http_status_code,
- body: response_body,
- status: :success
- )
- end
- end
-
- context 'connection to prometheus server fails' do
- context 'prometheus client raises Gitlab::PrometheusClient::Error' do
- before do
- allow(prometheus_client).to receive(:proxy)
- .and_raise(Gitlab::PrometheusClient::Error, 'Network connection error')
- end
-
- it 'returns error' do
- expect(subject.execute).to eq(
- status: :error,
- message: 'Network connection error',
- http_status: :service_unavailable
- )
- end
- end
- end
-
- context 'with series API' do
- let(:rest_client_response) { instance_double(RestClient::Response, code: 200, body: '') }
-
- let(:params) do
- ActionController::Parameters.new(match: ['1'], start: 1.hour.ago.rfc3339, end: Time.current.rfc3339).permit!
- end
-
- subject { described_class.new(environment, 'GET', 'series', params) }
-
- it 'calls PrometheusClient with given parameters' do
- expect(prometheus_client).to receive(:proxy)
- .with('series', params.to_h)
- .and_return(rest_client_response)
-
- subject.execute
- end
- end
- end
- end
-
- describe '.from_cache' do
- it 'initializes an instance of ProxyService class' do
- result = described_class.from_cache(
- environment.class.name, environment.id, 'GET', 'query', { 'query' => '1' }
- )
-
- expect(result).to be_an_instance_of(described_class)
- expect(result.proxyable).to eq(environment)
- expect(result.method).to eq('GET')
- expect(result.path).to eq('query')
- expect(result.params).to eq('query' => '1')
- end
- end
-end
diff --git a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb b/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
deleted file mode 100644
index a5395eed1b4..00000000000
--- a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
+++ /dev/null
@@ -1,204 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Prometheus::ProxyVariableSubstitutionService, feature_category: :metrics do
- describe '#execute' do
- let_it_be(:environment) { create(:environment) }
-
- let(:params_keys) { { query: 'up{environment="{{ci_environment_slug}}"}' } }
- let(:params) { ActionController::Parameters.new(params_keys).permit! }
- let(:result) { subject.execute }
-
- subject { described_class.new(environment, params) }
-
- shared_examples 'success' do
- it 'replaces variables with values' do
- expect(result[:status]).to eq(:success)
- expect(result[:params][:query]).to eq(expected_query)
- end
- end
-
- shared_examples 'error' do |message|
- it 'returns error' do
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq(message)
- end
- end
-
- context 'does not alter params passed to the service' do
- it do
- subject.execute
-
- expect(params).to eq(
- ActionController::Parameters.new(
- query: 'up{environment="{{ci_environment_slug}}"}'
- ).permit!
- )
- end
- end
-
- context 'with predefined variables' do
- context 'with nil query' do
- let(:params_keys) { {} }
-
- it_behaves_like 'success' do
- let(:expected_query) { nil }
- end
- end
-
- context 'with liquid format' do
- let(:params_keys) do
- { query: 'up{environment="{{ci_environment_slug}}"}' }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { %[up{environment="#{environment.slug}"}] }
- end
- end
- end
-
- context 'with custom variables' do
- let(:pod_name) { "pod1" }
-
- let(:params_keys) do
- {
- query: 'up{pod_name="{{pod_name}}"}',
- variables: { 'pod_name' => pod_name }
- }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { %q[up{pod_name="pod1"}] }
- end
-
- context 'with predefined variables in variables parameter' do
- let(:params_keys) do
- {
- query: 'up{pod_name="{{pod_name}}",env="{{ci_environment_slug}}"}',
- variables: { 'pod_name' => pod_name, 'ci_environment_slug' => 'custom_value' }
- }
- end
-
- it_behaves_like 'success' do
- # Predefined variable values should not be overwritten by custom variable
- # values.
- let(:expected_query) { "up{pod_name=\"#{pod_name}\",env=\"#{environment.slug}\"}" }
- end
- end
-
- context 'with invalid variables parameter' do
- let(:params_keys) do
- {
- query: 'up{pod_name="{{pod_name}}"}',
- variables: ['a']
- }
- end
-
- it_behaves_like 'error', 'Optional parameter "variables" must be a Hash. Ex: variables[key1]=value1'
- end
-
- context 'with nil variables' do
- let(:params_keys) do
- {
- query: 'up{pod_name="{{pod_name}}"}',
- variables: nil
- }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { 'up{pod_name="{{pod_name}}"}' }
- end
- end
- end
-
- context 'gsub variable substitution tolerance for weirdness' do
- context 'with whitespace around variable' do
- let(:params_keys) do
- {
- query: 'up{' \
- "env1={{ ci_environment_slug}}," \
- "env2={{ci_environment_slug }}," \
- "{{ environment_filter }}" \
- '}'
- }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) do
- 'up{' \
- "env1=#{environment.slug}," \
- "env2=#{environment.slug}," \
- "container_name!=\"POD\",environment=\"#{environment.slug}\"" \
- '}'
- end
- end
- end
-
- context 'with empty variables' do
- let(:params_keys) do
- { query: "up{env1={{}},env2={{ }}}" }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { "up{env1={{}},env2={{ }}}" }
- end
- end
-
- context 'with multiple occurrences of variable in string' do
- let(:params_keys) do
- { query: "up{env1={{ci_environment_slug}},env2={{ci_environment_slug}}}" }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { "up{env1=#{environment.slug},env2=#{environment.slug}}" }
- end
- end
-
- context 'with multiple variables in string' do
- let(:params_keys) do
- { query: "up{env={{ci_environment_slug}},{{environment_filter}}}" }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) do
- "up{env=#{environment.slug}," \
- "container_name!=\"POD\",environment=\"#{environment.slug}\"}"
- end
- end
- end
-
- context 'with unknown variables in string' do
- let(:params_keys) { { query: "up{env={{env_slug}}}" } }
-
- it_behaves_like 'success' do
- let(:expected_query) { "up{env={{env_slug}}}" }
- end
- end
-
- context 'with unknown and known variables in string' do
- let(:params_keys) do
- { query: "up{env={{ci_environment_slug}},other_env={{env_slug}}}" }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { "up{env=#{environment.slug},other_env={{env_slug}}}" }
- end
- end
- end
-
- context '__range' do
- let(:params_keys) do
- {
- query: 'topk(5, sum by (method) (rate(rest_client_requests_total[{{__range}}])))',
- start_time: '2020-05-29T08:19:07.142Z',
- end_time: '2020-05-29T16:19:07.142Z'
- }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { "topk(5, sum by (method) (rate(rest_client_requests_total[#{8.hours.to_i}s])))" }
- end
- end
- end
-end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 186b532233e..30a3c212ba5 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -535,6 +535,10 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
shared_examples 'merge automatically command' do
let(:project) { repository_project }
+ before do
+ stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
+ end
+
it 'runs merge command if content contains /merge and returns merge message' do
_, updates, message = service.execute(content, issuable)
@@ -1454,9 +1458,21 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:issuable) { issue }
end
- it_behaves_like 'failed command' do
+ context 'when provided an invalid estimate' do
let(:content) { '/estimate abc' }
let(:issuable) { issue }
+
+ it 'populates {} if content contains an unsupported command' do
+ _, updates, _ = service.execute(content, issuable)
+
+ expect(updates[:time_estimate]).to be_nil
+ end
+
+ it "returns empty message" do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to be_empty
+ end
end
it_behaves_like 'spend command' do
@@ -2678,12 +2694,44 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
describe 'estimate command' do
- let(:content) { '/estimate 79d' }
+ context 'positive estimation' do
+ let(:content) { '/estimate 79d' }
- it 'includes the formatted duration' do
- _, explanations = service.explain(content, merge_request)
+ it 'includes the formatted duration' do
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to eq(['Sets time estimate to 3mo 3w 4d.'])
+ end
+ end
- expect(explanations).to eq(['Sets time estimate to 3mo 3w 4d.'])
+ context 'zero estimation' do
+ let(:content) { '/estimate 0' }
+
+ it 'includes the formatted duration' do
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to eq(['Removes time estimate.'])
+ end
+ end
+
+ context 'negative estimation' do
+ let(:content) { '/estimate -79d' }
+
+ it 'does not explain' do
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to be_empty
+ end
+ end
+
+ context 'invalid estimation' do
+ let(:content) { '/estimate a' }
+
+ it 'does not explain' do
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to be_empty
+ end
end
end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index c937a93c6ef..4ba704532e6 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -89,6 +89,12 @@ RSpec.describe SearchService, feature_category: :global_search do
end
end
+ describe '#search_type' do
+ subject { described_class.new(user, search: valid_search).search_type }
+
+ it { is_expected.to eq('basic') }
+ end
+
describe '#show_snippets?' do
context 'when :snippets is \'true\'' do
it 'returns true' do
diff --git a/spec/services/security/ci_configuration/sast_create_service_spec.rb b/spec/services/security/ci_configuration/sast_create_service_spec.rb
index e80fe1a42fa..555902fd77c 100644
--- a/spec/services/security/ci_configuration/sast_create_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_create_service_spec.rb
@@ -45,8 +45,13 @@ RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow,
let(:params) { { initialize_with_sast: false } }
- it 'raises an error' do
- expect { result }.to raise_error(Gitlab::Graphql::Errors::MutationError)
+ it 'returns a ServiceResponse error' do
+ expect(result).to be_kind_of(ServiceResponse)
+ expect(result.status).to eq(:error)
+ expect(result.message).to eq('You must <a target="_blank" rel="noopener noreferrer" ' \
+ 'href="http://localhost/help/user/project/repository/index.md#' \
+ 'add-files-to-a-repository">add at least one file to the ' \
+ 'repository</a> before using Security features.')
end
end
diff --git a/spec/services/service_response_spec.rb b/spec/services/service_response_spec.rb
index 03fcc11b6bd..18c27fb6979 100644
--- a/spec/services/service_response_spec.rb
+++ b/spec/services/service_response_spec.rb
@@ -59,8 +59,7 @@ RSpec.describe ServiceResponse, feature_category: :shared do
end
it 'creates an error response with payload' do
- response = described_class.error(message: 'Bad apple',
- payload: { bad: 'apple' })
+ response = described_class.error(message: 'Bad apple', payload: { bad: 'apple' })
expect(response).to be_error
expect(response.message).to eq('Bad apple')
@@ -68,8 +67,7 @@ RSpec.describe ServiceResponse, feature_category: :shared do
end
it 'creates an error response with a reason' do
- response = described_class.error(message: 'Bad apple',
- reason: :permission_denied)
+ response = described_class.error(message: 'Bad apple', reason: :permission_denied)
expect(response).to be_error
expect(response.message).to eq('Bad apple')
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index bc73a5cbfaf..fc86ecfe7f2 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -2,8 +2,7 @@
require 'spec_helper'
-RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/418757' do
+RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency do
include_context 'includes Spam constants'
let(:issue) { create(:issue, project: project, author: author) }
@@ -43,20 +42,6 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency,
described_service.execute
end
- context 'when spam_params is nil' do
- let(:spam_params) { nil }
- let(:expected_service_params_not_present_message) do
- /Skipped spam check because spam_params was not present/
- end
-
- it "returns success with a messaage" do
- response = subject
-
- expect(response.message).to match(expected_service_params_not_present_message)
- expect(issue).not_to be_spam
- end
- end
-
context 'when user is nil' do
let(:spam_params) { true }
let(:user) { nil }
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index 6e2c623035e..c49c1fb54a7 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -22,10 +22,12 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
position = build_position(**position_args)
- diff_note = create(:diff_note_on_merge_request,
- noteable: merge_request,
- position: position,
- project: project)
+ diff_note = create(
+ :diff_note_on_merge_request,
+ noteable: merge_request,
+ position: position,
+ project: project
+ )
suggestion_args = { note: diff_note }.merge(content_args)
@@ -46,8 +48,7 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
suggestions.each do |suggestion|
path = suggestion.diff_file.file_path
- blob = project.repository.blob_at_branch(merge_request.source_branch,
- path)
+ blob = project.repository.blob_at_branch(merge_request.source_branch, path)
expect(blob.data).to eq(expected_content_by_path[path.to_sym])
end
@@ -398,9 +399,11 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
expect(result[:status]).to eq(:success)
refresh = MergeRequests::RefreshService.new(project: project, current_user: user)
- refresh.execute(merge_request.diff_head_sha,
- suggestion.commit_id,
- merge_request.source_branch_ref)
+ refresh.execute(
+ merge_request.diff_head_sha,
+ suggestion.commit_id,
+ merge_request.source_branch_ref
+ )
result
end
@@ -576,18 +579,22 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
end
let(:merge_request) do
- create(:merge_request,
- source_branch: 'conflict-resolvable-fork',
- source_project: forked_project,
- target_branch: 'conflict-start',
- target_project: project)
+ create(
+ :merge_request,
+ source_branch: 'conflict-resolvable-fork',
+ source_project: forked_project,
+ target_branch: 'conflict-start',
+ target_project: project
+ )
end
let!(:diff_note) do
- create(:diff_note_on_merge_request,
- noteable: merge_request,
- position: position,
- project: project)
+ create(
+ :diff_note_on_merge_request,
+ noteable: merge_request,
+ position: position,
+ project: project
+ )
end
before do
@@ -596,9 +603,8 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
it 'updates file in the source project' do
expect(Files::MultiService).to receive(:new)
- .with(merge_request.source_project,
- user,
- anything).and_call_original
+ .with(merge_request.source_project, user, anything)
+ .and_call_original
apply_service.new(user, suggestion).execute
end
@@ -623,8 +629,10 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
it 'returns error' do
result = apply_service.new(user, suggestion).execute
- expect(result).to eq(message: "You are not allowed to push into this branch",
- status: :error)
+ expect(result).to eq(
+ message: "You are not allowed to push into this branch",
+ status: :error
+ )
end
end
end
@@ -660,8 +668,7 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
end
it 'returns error message' do
- expect(result).to eq(message: 'A file was not found.',
- status: :error)
+ expect(result).to eq(message: 'A file was not found.', status: :error)
end
it_behaves_like 'service not tracking apply suggestion event'
@@ -700,8 +707,10 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
let(:result) { apply_service.new(user, suggestion, other_branch_suggestion).execute }
it 'renders error message' do
- expect(result).to eq(message: 'Suggestions must all be on the same branch.',
- status: :error)
+ expect(result).to eq(
+ message: 'Suggestions must all be on the same branch.',
+ status: :error
+ )
end
it_behaves_like 'service not tracking apply suggestion event'
@@ -736,8 +745,10 @@ RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do
let(:result) { apply_service.new(user, suggestion, overlapping_suggestion).execute }
it 'returns error message' do
- expect(result).to eq(message: 'Suggestions are not applicable as their lines cannot overlap.',
- status: :error)
+ expect(result).to eq(
+ message: 'Suggestions are not applicable as their lines cannot overlap.',
+ status: :error
+ )
end
it_behaves_like 'service not tracking apply suggestion event'
diff --git a/spec/services/suggestions/create_service_spec.rb b/spec/services/suggestions/create_service_spec.rb
index a8bc3cba697..483022ea46e 100644
--- a/spec/services/suggestions/create_service_spec.rb
+++ b/spec/services/suggestions/create_service_spec.rb
@@ -5,8 +5,11 @@ require 'spec_helper'
RSpec.describe Suggestions::CreateService, feature_category: :code_suggestions do
let(:project_with_repo) { create(:project, :repository) }
let(:merge_request) do
- create(:merge_request, source_project: project_with_repo,
- target_project: project_with_repo)
+ create(
+ :merge_request,
+ source_project: project_with_repo,
+ target_project: project_with_repo
+ )
end
def build_position(args = {})
@@ -66,8 +69,7 @@ RSpec.describe Suggestions::CreateService, feature_category: :code_suggestions d
context 'should not try to parse suggestions' do
context 'when not a diff note for merge requests' do
let(:note) do
- create(:diff_note_on_commit, project: project_with_repo,
- note: markdown)
+ create(:diff_note_on_commit, project: project_with_repo, note: markdown)
end
it 'does not try to parse suggestions' do
@@ -81,10 +83,13 @@ RSpec.describe Suggestions::CreateService, feature_category: :code_suggestions d
context 'when diff note is not for text' do
let(:note) do
- create(:diff_note_on_merge_request, project: project_with_repo,
- noteable: merge_request,
- position: position,
- note: markdown)
+ create(
+ :diff_note_on_merge_request,
+ project: project_with_repo,
+ noteable: merge_request,
+ position: position,
+ note: markdown
+ )
end
before do
@@ -103,10 +108,13 @@ RSpec.describe Suggestions::CreateService, feature_category: :code_suggestions d
context 'when diff file is not found' do
let(:note) do
- create(:diff_note_on_merge_request, project: project_with_repo,
- noteable: merge_request,
- position: position,
- note: markdown)
+ create(
+ :diff_note_on_merge_request,
+ project: project_with_repo,
+ noteable: merge_request,
+ position: position,
+ note: markdown
+ )
end
before do
@@ -124,16 +132,21 @@ RSpec.describe Suggestions::CreateService, feature_category: :code_suggestions d
context 'should create suggestions' do
let(:note) do
- create(:diff_note_on_merge_request, project: project_with_repo,
- noteable: merge_request,
- position: position,
- note: markdown)
+ create(
+ :diff_note_on_merge_request,
+ project: project_with_repo,
+ noteable: merge_request,
+ position: position,
+ note: markdown
+ )
end
let(:expected_suggestions) do
- Gitlab::Diff::SuggestionsParser.parse(markdown,
- project: note.project,
- position: note.position)
+ Gitlab::Diff::SuggestionsParser.parse(
+ markdown,
+ project: note.project,
+ position: note.position
+ )
end
it 'persists suggestion records' do
diff --git a/spec/services/suggestions/outdate_service_spec.rb b/spec/services/suggestions/outdate_service_spec.rb
index 7bd70866bf7..20209097394 100644
--- a/spec/services/suggestions/outdate_service_spec.rb
+++ b/spec/services/suggestions/outdate_service_spec.rb
@@ -12,17 +12,22 @@ RSpec.describe Suggestions::OutdateService, feature_category: :code_suggestions
let(:diff_file) { suggestion.diff_file }
let(:position) { build_position(file_path, comment_line) }
let(:note) do
- create(:diff_note_on_merge_request, noteable: merge_request,
- position: position,
- project: project)
+ create(
+ :diff_note_on_merge_request,
+ noteable: merge_request,
+ position: position,
+ project: project
+ )
end
def build_position(path, line)
- Gitlab::Diff::Position.new(old_path: path,
- new_path: path,
- old_line: nil,
- new_line: line,
- diff_refs: merge_request.diff_refs)
+ Gitlab::Diff::Position.new(
+ old_path: path,
+ new_path: path,
+ old_line: nil,
+ new_line: line,
+ diff_refs: merge_request.diff_refs
+ )
end
def commit_changes(file_path, new_content)
diff --git a/spec/services/task_list_toggle_service_spec.rb b/spec/services/task_list_toggle_service_spec.rb
index 5d55c1ca8de..0d21aeef7c0 100644
--- a/spec/services/task_list_toggle_service_spec.rb
+++ b/spec/services/task_list_toggle_service_spec.rb
@@ -64,9 +64,12 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
end
it 'checks Task 1' do
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '* [ ] Task 1', line_number: 1)
+ toggler = described_class.new(
+ markdown, markdown_html,
+ toggle_as_checked: true,
+ line_source: '* [ ] Task 1',
+ line_number: 1
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "* [x] Task 1\n"
@@ -74,9 +77,12 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
end
it 'unchecks Item 1' do
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: false,
- line_source: '1. [X] Item 1', line_number: 6)
+ toggler = described_class.new(
+ markdown, markdown_html,
+ toggle_as_checked: false,
+ line_source: '1. [X] Item 1',
+ line_number: 6
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[5]).to eq "1. [ ] Item 1\n"
@@ -84,9 +90,12 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
end
it 'checks task in loose list' do
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '- [ ] loose list', line_number: 9)
+ toggler = described_class.new(
+ markdown, markdown_html,
+ toggle_as_checked: true,
+ line_source: '- [ ] loose list',
+ line_number: 9
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[8]).to eq "- [x] loose list\n"
@@ -94,9 +103,12 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
end
it 'checks task with no-break space' do
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '+ [ ] No-break space (U+00A0)', line_number: 13)
+ toggler = described_class.new(
+ markdown, markdown_html,
+ toggle_as_checked: true,
+ line_source: '+ [ ] No-break space (U+00A0)',
+ line_number: 13
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[12]).to eq "+ [x] No-break space (U+00A0)\n"
@@ -104,9 +116,12 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
end
it 'checks Another item' do
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '2) [ ] Another item', line_number: 15)
+ toggler = described_class.new(
+ markdown, markdown_html,
+ toggle_as_checked: true,
+ line_source: '2) [ ] Another item',
+ line_number: 15
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[14]).to eq "2) [x] Another item"
@@ -114,18 +129,25 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
end
it 'returns false if line_source does not match the text' do
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: false,
- line_source: '* [x] Task Added', line_number: 2)
+ toggler = described_class.new(
+ markdown, markdown_html,
+ toggle_as_checked: false,
+ line_source: '* [x] Task Added',
+ line_number: 2
+ )
expect(toggler.execute).to be_falsey
end
it 'tolerates \r\n line endings' do
rn_markdown = markdown.gsub("\n", "\r\n")
- toggler = described_class.new(rn_markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '* [ ] Task 1', line_number: 1)
+ toggler = described_class.new(
+ rn_markdown,
+ markdown_html,
+ toggle_as_checked: true,
+ line_source: '* [ ] Task 1',
+ line_number: 1
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "* [x] Task 1\r\n"
@@ -133,17 +155,25 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
end
it 'returns false if markdown is nil' do
- toggler = described_class.new(nil, markdown_html,
- toggle_as_checked: false,
- line_source: '* [x] Task Added', line_number: 2)
+ toggler = described_class.new(
+ nil,
+ markdown_html,
+ toggle_as_checked: false,
+ line_source: '* [x] Task Added',
+ line_number: 2
+ )
expect(toggler.execute).to be_falsey
end
it 'returns false if markdown_html is nil' do
- toggler = described_class.new(markdown, nil,
- toggle_as_checked: false,
- line_source: '* [x] Task Added', line_number: 2)
+ toggler = described_class.new(
+ markdown,
+ nil,
+ toggle_as_checked: false,
+ line_source: '* [x] Task Added',
+ line_number: 2
+ )
expect(toggler.execute).to be_falsey
end
@@ -156,9 +186,13 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
EOT
markdown_html = parse_markdown(markdown)
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '> > * [ ] Task 1', line_number: 1)
+ toggler = described_class.new(
+ markdown,
+ markdown_html,
+ toggle_as_checked: true,
+ line_source: '> > * [ ] Task 1',
+ line_number: 1
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "> > * [x] Task 1\n"
@@ -177,9 +211,13 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
EOT
markdown_html = parse_markdown(markdown)
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '* [ ] Task 1', line_number: 5)
+ toggler = described_class.new(
+ markdown,
+ markdown_html,
+ toggle_as_checked: true,
+ line_source: '* [ ] Task 1',
+ line_number: 5
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[4]).to eq "* [x] Task 1\n"
@@ -195,9 +233,13 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
EOT
markdown_html = parse_markdown(markdown)
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '- - [ ] Task 1', line_number: 1)
+ toggler = described_class.new(
+ markdown,
+ markdown_html,
+ toggle_as_checked: true,
+ line_source: '- - [ ] Task 1',
+ line_number: 1
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "- - [x] Task 1\n"
@@ -212,9 +254,13 @@ RSpec.describe TaskListToggleService, feature_category: :team_planning do
EOT
markdown_html = parse_markdown(markdown)
- toggler = described_class.new(markdown, markdown_html,
- toggle_as_checked: true,
- line_source: '1. - [ ] Task 1', line_number: 1)
+ toggler = described_class.new(
+ markdown,
+ markdown_html,
+ toggle_as_checked: true,
+ line_source: '1. - [ ] Task 1',
+ line_number: 1
+ )
expect(toggler.execute).to be_truthy
expect(toggler.updated_markdown.lines[0]).to eq "1. - [x] Task 1\n"
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 32e17df4d69..0888c27aab2 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -1109,19 +1109,23 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
let(:note) do
- build(:diff_note_on_design,
- noteable: design,
- author: author,
- note: "Hey #{john_doe.to_reference}")
+ build(
+ :diff_note_on_design,
+ noteable: design,
+ author: author,
+ note: "Hey #{john_doe.to_reference}"
+ )
end
it 'creates a todo for mentioned user on new diff note' do
service.new_note(note, author)
- should_create_todo(user: john_doe,
- target: design,
- action: Todo::MENTIONED,
- note: note)
+ should_create_todo(
+ user: john_doe,
+ target: design,
+ action: Todo::MENTIONED,
+ note: note
+ )
end
end
diff --git a/spec/services/todos/destroy/group_private_service_spec.rb b/spec/services/todos/destroy/group_private_service_spec.rb
index be470688084..b3185bc72ff 100644
--- a/spec/services/todos/destroy/group_private_service_spec.rb
+++ b/spec/services/todos/destroy/group_private_service_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Todos::Destroy::GroupPrivateService, feature_category: :team_plan
let!(:todo_group_member) { create(:todo, user: group_member, group: group) }
let!(:todo_project_member) { create(:todo, user: project_member, group: group) }
- describe '#execute' do
+ describe '#execute', :aggregate_failures do
before do
group.add_developer(group_member)
project.add_developer(project_member)
@@ -57,7 +57,37 @@ RSpec.describe Todos::Destroy::GroupPrivateService, feature_category: :team_plan
end
it 'removes todos only for users who are not group users' do
- expect { subject }.to change { Todo.count }.from(7).to(5)
+ expect { subject }.to change { Todo.count }.from(7).to(4)
+
+ expect(parent_member.todos).to contain_exactly(todo_parent_member)
+ expect(subgroup_member.todos).to be_empty
+ expect(subgproject_member.todos).to contain_exactly(todo_subproject_member)
+ end
+ end
+
+ context 'with member via group share' do
+ let(:invited_group) { create(:group) }
+ let(:invited_group_member) { create(:user).tap { |u| invited_group.add_guest(u) } }
+
+ let!(:todo_invited_group_member) { create(:todo, user: invited_group_member, group: group) }
+
+ it 'does not remove todos for users invited to the group' do
+ create(:group_group_link, shared_group: group, shared_with_group: invited_group)
+
+ expect { subject }.to change { Todo.count }.from(5).to(3)
+
+ expect(invited_group_member.todos).to contain_exactly(todo_invited_group_member)
+ end
+
+ it 'does not remove todos for users invited to an ancestor group' do
+ parent_group = create(:group)
+ group.update!(parent: parent_group)
+
+ create(:group_group_link, shared_group: parent_group, shared_with_group: invited_group)
+
+ expect { subject }.to change { Todo.count }.from(5).to(3)
+
+ expect(invited_group_member.todos).to contain_exactly(todo_invited_group_member)
end
end
end
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index e2141f9bf6f..3e0f9b829ce 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -48,8 +48,9 @@ RSpec.describe Users::ActivityService, feature_category: :user_profile do
end
it 'tracks RedisHLL event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
- .with('unique_active_user', values: user.id)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with('unique_active_user', values: user.id)
subject.execute
end
@@ -63,8 +64,11 @@ RSpec.describe Users::ActivityService, feature_category: :user_profile do
let(:namespace) { build(:group) }
let(:project) { build(:project) }
let(:context) do
- payload = Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll,
- event: 'unique_active_user').to_context
+ payload = Gitlab::Tracking::ServicePingContext.new(
+ data_source: :redis_hll,
+ event: 'unique_active_user'
+ ).to_context
+
[Gitlab::Json.dump(payload)]
end
end
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index 5cd11efe942..102983368e6 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -16,9 +16,7 @@ RSpec.describe Users::DestroyService, feature_category: :user_management do
expect { service.execute(user) }
.to(
change do
- Users::GhostUserMigration.where(user: user,
- initiator_user: admin)
- .exists?
+ Users::GhostUserMigration.where(user: user, initiator_user: admin).exists?
end.from(false).to(true))
end
@@ -112,8 +110,7 @@ RSpec.describe Users::DestroyService, feature_category: :user_management do
it 'removes repository' do
expect(
- gitlab_shell.repository_exists?(project.repository_storage,
- "#{project.disk_path}.git")
+ gitlab_shell.repository_exists?(project.repository_storage, "#{project.disk_path}.git")
).to be_falsey
end
end
@@ -123,8 +120,7 @@ RSpec.describe Users::DestroyService, feature_category: :user_management do
it 'removes repository' do
expect(
- gitlab_shell.repository_exists?(project.repository_storage,
- "#{project.disk_path}.git")
+ gitlab_shell.repository_exists?(project.repository_storage, "#{project.disk_path}.git")
).to be_falsey
end
end
@@ -137,8 +133,7 @@ RSpec.describe Users::DestroyService, feature_category: :user_management do
end
expect { service.execute(user) }
- .to raise_error(Users::DestroyService::DestroyError,
- "Project #{project.id} can't be deleted")
+ .to raise_error(Users::DestroyService::DestroyError, "Project #{project.id} can't be deleted")
end
end
end
@@ -217,9 +212,7 @@ RSpec.describe Users::DestroyService, feature_category: :user_management do
expect { described_class.new(admin).execute(user) }
.to(
change do
- Users::GhostUserMigration.where(user: user,
- initiator_user: admin)
- .exists?
+ Users::GhostUserMigration.where(user: user, initiator_user: admin).exists?
end.from(false).to(true))
end
end
@@ -236,9 +229,7 @@ RSpec.describe Users::DestroyService, feature_category: :user_management do
expect { described_class.new(user).execute(user) }
.to(
change do
- Users::GhostUserMigration.where(user: user,
- initiator_user: user)
- .exists?
+ Users::GhostUserMigration.where(user: user, initiator_user: user).exists?
end.from(false).to(true))
end
@@ -250,9 +241,7 @@ RSpec.describe Users::DestroyService, feature_category: :user_management do
.execute(other_user, skip_authorization: true)
end.to(
change do
- Users::GhostUserMigration.where(user: other_user,
- initiator_user: user )
- .exists?
+ Users::GhostUserMigration.where(user: other_user, initiator_user: user).exists?
end.from(false).to(true))
end
end
diff --git a/spec/services/users/email_verification/update_email_service_spec.rb b/spec/services/users/email_verification/update_email_service_spec.rb
new file mode 100644
index 00000000000..8b4e5b8d7b5
--- /dev/null
+++ b/spec/services/users/email_verification/update_email_service_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::EmailVerification::UpdateEmailService, feature_category: :instance_resiliency do
+ let_it_be_with_reload(:user) { create(:user) }
+ let(:email) { build_stubbed(:user).email }
+
+ describe '#execute' do
+ subject(:execute_service) { described_class.new(user: user).execute(email: email) }
+
+ context 'when successful' do
+ it { is_expected.to eq(status: :success) }
+
+ it 'does not send a confirmation instructions email' do
+ expect { execute_service }.not_to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ end
+
+ it 'sets the unconfirmed_email and confirmation_sent_at fields', :freeze_time do
+ expect { execute_service }
+ .to change { user.unconfirmed_email }.from(nil).to(email)
+ .and change { user.confirmation_sent_at }.from(nil).to(Time.current)
+ end
+ end
+
+ context 'when rate limited' do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?)
+ .with(:email_verification_code_send, scope: user).and_return(true)
+ end
+
+ it 'returns a failure status' do
+ expect(execute_service).to eq(
+ {
+ status: :failure,
+ reason: :rate_limited,
+ message: format(s_("IdentityVerification|You've reached the maximum amount of tries. " \
+ 'Wait %{interval} and try again.'), interval: 'about 1 hour')
+ }
+ )
+ end
+ end
+
+ context 'when email reset has already been offered' do
+ before do
+ user.email_reset_offered_at = 1.minute.ago
+ end
+
+ it 'returns a failure status' do
+ expect(execute_service).to eq(
+ {
+ status: :failure,
+ reason: :already_offered,
+ message: s_('IdentityVerification|Email update is only offered once.')
+ }
+ )
+ end
+ end
+
+ context 'when email is unchanged' do
+ let(:email) { user.email }
+
+ it 'returns a failure status' do
+ expect(execute_service).to eq(
+ {
+ status: :failure,
+ reason: :no_change,
+ message: s_('IdentityVerification|A code has already been sent to this email address. ' \
+ 'Check your spam folder or enter another email address.')
+ }
+ )
+ end
+ end
+
+ context 'when email is missing' do
+ let(:email) { '' }
+
+ it 'returns a failure status' do
+ expect(execute_service).to eq(
+ {
+ status: :failure,
+ reason: :validation_error,
+ message: "Email can't be blank"
+ }
+ )
+ end
+ end
+
+ context 'when email is not valid' do
+ let(:email) { 'xxx' }
+
+ it 'returns a failure status' do
+ expect(execute_service).to eq(
+ {
+ status: :failure,
+ reason: :validation_error,
+ message: 'Email is invalid'
+ }
+ )
+ end
+ end
+
+ context 'when email is already taken' do
+ before do
+ create(:user, email: email)
+ end
+
+ it 'returns a failure status' do
+ expect(execute_service).to eq(
+ {
+ status: :failure,
+ reason: :validation_error,
+ message: 'Email has already been taken'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb
index 0b9f92a868e..6dc0ebbf86b 100644
--- a/spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb
@@ -29,8 +29,11 @@ RSpec.describe Users::MigrateRecordsToGhostUserInBatchesService, feature_categor
end
it 'process jobs ordered by the consume_after timestamp' do
- older_ghost_user_migration = create(:ghost_user_migration, user: create(:user),
- consume_after: 5.minutes.ago)
+ older_ghost_user_migration = create(
+ :ghost_user_migration,
+ user: create(:user),
+ consume_after: 5.minutes.ago
+ )
# setup execution tracker to only allow a single job to be processed
allow_next_instance_of(::Gitlab::Utils::ExecutionTracker) do |tracker|
@@ -38,9 +41,11 @@ RSpec.describe Users::MigrateRecordsToGhostUserInBatchesService, feature_categor
end
expect(Users::MigrateRecordsToGhostUserService).to(
- receive(:new).with(older_ghost_user_migration.user,
- older_ghost_user_migration.initiator_user,
- any_args)
+ receive(:new).with(
+ older_ghost_user_migration.user,
+ older_ghost_user_migration.initiator_user,
+ any_args
+ )
).and_call_original
service.execute
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
index cfa0ddff04d..36b2730a2de 100644
--- a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -45,10 +45,13 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
context 'when deleted user is present as both author and merge_user' do
include_examples 'migrating records to the ghost user', MergeRequest, [:author, :merge_user] do
let(:created_record) do
- create(:merge_request, source_project: project,
- author: user,
- merge_user: user,
- target_branch: "first")
+ create(
+ :merge_request,
+ source_project: project,
+ author: user,
+ merge_user: user,
+ target_branch: "first"
+ )
end
end
end
@@ -56,9 +59,12 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
context 'when deleted user is present only as both merge_user' do
include_examples 'migrating records to the ghost user', MergeRequest, [:merge_user] do
let(:created_record) do
- create(:merge_request, source_project: project,
- merge_user: user,
- target_branch: "first")
+ create(
+ :merge_request,
+ source_project: project,
+ merge_user: user,
+ target_branch: "first"
+ )
end
end
end
@@ -212,11 +218,14 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
end
it 'nullifies merge request associations', :aggregate_failures do
- merge_request = create(:merge_request, source_project: project,
- target_project: project,
- assignee: user,
- updated_by: user,
- merge_user: user)
+ merge_request = create(
+ :merge_request,
+ source_project: project,
+ target_project: project,
+ assignee: user,
+ updated_by: user,
+ merge_user: user
+ )
merge_request.metrics.update!(merged_by: user, latest_closed_by: user)
merge_request.reviewers = [user]
merge_request.assignees = [user]
@@ -242,10 +251,18 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
nullify_in_batches_regexp(:merge_request_metrics, :latest_closed_by_id, user)
]
- expected_queries += delete_in_batches_regexps(:merge_request_assignees, :user_id, user,
- merge_request.assignees)
- expected_queries += delete_in_batches_regexps(:merge_request_reviewers, :user_id, user,
- merge_request.reviewers)
+ expected_queries += delete_in_batches_regexps(
+ :merge_request_assignees,
+ :user_id,
+ user,
+ merge_request.assignees
+ )
+ expected_queries += delete_in_batches_regexps(
+ :merge_request_reviewers,
+ :user_id,
+ user,
+ merge_request.reviewers
+ )
expect(query_recorder.log).to include(*expected_queries)
end
@@ -322,8 +339,7 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
raise_error(Users::MigrateRecordsToGhostUserService::DestroyError, 'foo'))
expect(snippet.reload).not_to be_nil
expect(
- gitlab_shell.repository_exists?(snippet.repository_storage,
- "#{snippet.disk_path}.git")
+ gitlab_shell.repository_exists?(snippet.repository_storage, "#{snippet.disk_path}.git")
).to be(true)
end
end
diff --git a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb
index 55b27954a74..b36152f81c3 100644
--- a/spec/services/users/refresh_authorized_projects_service_spec.rb
+++ b/spec/services/users/refresh_authorized_projects_service_spec.rb
@@ -29,8 +29,7 @@ RSpec.describe Users::RefreshAuthorizedProjectsService, feature_category: :user_
context 'incorrect_auth_found_callback callback' do
let(:user) { create(:user) }
let(:service) do
- described_class.new(user,
- incorrect_auth_found_callback: callback)
+ described_class.new(user, incorrect_auth_found_callback: callback)
end
it 'is called' do
@@ -45,8 +44,7 @@ RSpec.describe Users::RefreshAuthorizedProjectsService, feature_category: :user_
context 'missing_auth_found_callback callback' do
let(:service) do
- described_class.new(user,
- missing_auth_found_callback: callback)
+ described_class.new(user, missing_auth_found_callback: callback)
end
it 'is called' do
@@ -108,10 +106,7 @@ RSpec.describe Users::RefreshAuthorizedProjectsService, feature_category: :user_
describe '#update_authorizations' do
context 'when there are no rows to add and remove' do
it 'does not change authorizations' do
- expect(ProjectAuthorization).not_to receive(:delete_all_in_batches_for_user)
- expect(ProjectAuthorization).not_to receive(:insert_all_in_batches)
-
- service.update_authorizations([], [])
+ expect { service.update_authorizations([], []) }.to not_change { user.project_authorizations.count }
end
end
@@ -146,14 +141,15 @@ RSpec.describe Users::RefreshAuthorizedProjectsService, feature_category: :user_
user.project_authorizations.delete_all
expect(Gitlab::AppJsonLogger).to(
- receive(:info)
- .with(event: 'authorized_projects_refresh',
- user_id: user.id,
- 'authorized_projects_refresh.source': source,
- 'authorized_projects_refresh.rows_deleted_count': 0,
- 'authorized_projects_refresh.rows_added_count': 1,
- 'authorized_projects_refresh.rows_deleted_slice': [],
- 'authorized_projects_refresh.rows_added_slice': [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ receive(:info).with(
+ event: 'authorized_projects_refresh',
+ user_id: user.id,
+ 'authorized_projects_refresh.source': source,
+ 'authorized_projects_refresh.rows_deleted_count': 0,
+ 'authorized_projects_refresh.rows_added_count': 1,
+ 'authorized_projects_refresh.rows_deleted_slice': [],
+ 'authorized_projects_refresh.rows_added_slice': [[user.id, project.id, Gitlab::Access::MAINTAINER]]
+ )
)
to_be_added = [
diff --git a/spec/services/users/reject_service_spec.rb b/spec/services/users/reject_service_spec.rb
index f72666d8a63..8c8351b81a8 100644
--- a/spec/services/users/reject_service_spec.rb
+++ b/spec/services/users/reject_service_spec.rb
@@ -40,8 +40,7 @@ RSpec.describe Users::RejectService, feature_category: :user_management do
expect(subject[:status]).to eq(:success)
expect(
- Users::GhostUserMigration.where(user: user,
- initiator_user: current_user)
+ Users::GhostUserMigration.where(user: user, initiator_user: current_user)
).to be_exists
end
@@ -58,7 +57,13 @@ RSpec.describe Users::RejectService, feature_category: :user_management do
subject
- expect(Gitlab::AppLogger).to have_received(:info).with(message: "User instance access request rejected", user: user.username.to_s, email: user.email.to_s, rejected_by: current_user.username.to_s, ip_address: current_user.current_sign_in_ip.to_s)
+ expect(Gitlab::AppLogger).to have_received(:info).with(
+ message: "User instance access request rejected",
+ user: user.username.to_s,
+ email: user.email.to_s,
+ rejected_by: current_user.username.to_s,
+ ip_address: current_user.current_sign_in_ip.to_s
+ )
end
end
end
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 9ff3d9208fa..4cd78bc3b9c 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -203,15 +203,6 @@ RSpec.describe Users::UpdateService, feature_category: :user_profile do
expect(user.enabled_following).to eq(false)
end
- it 'does not remove followers/followees if feature flag is off' do
- stub_feature_flags(disable_follow_users: false)
-
- expect do
- update_user(user, enabled_following: false)
- end.to not_change { user.followed_users.count }
- .and not_change { user.following_users.count }
- end
-
context 'when there is more followers/followees then batch limit' do
before do
stub_env('BATCH_SIZE', 1)
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index fb7d487b29b..259f5156d42 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -295,6 +295,20 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state,
expect(WebMock).not_to have_requested(:post, stubbed_hostname(project_hook.url))
end
+ context 'when silent mode is enabled' do
+ before do
+ stub_application_setting(silent_mode_enabled: true)
+ end
+
+ it 'blocks and logs an error' do
+ stub_full_request(project_hook.url, method: :post)
+
+ expect(Gitlab::AuthLogger).to receive(:error).with(include(message: 'GitLab is in silent mode'))
+ expect(service_instance.execute).to be_error
+ expect(WebMock).not_to have_requested(:post, stubbed_hostname(project_hook.url))
+ end
+ end
+
it 'handles exceptions' do
exceptions = Gitlab::HTTP::HTTP_ERRORS + [
Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError
@@ -733,6 +747,19 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state,
end
end
+ context 'when silent mode is enabled' do
+ before do
+ stub_application_setting(silent_mode_enabled: true)
+ end
+
+ it 'does not queue a worker and logs an error' do
+ expect(WebHookWorker).not_to receive(:perform_async)
+ expect(Gitlab::AuthLogger).to receive(:error).with(include(message: 'GitLab is in silent mode'))
+
+ service_instance.async_execute
+ end
+ end
+
context 'when hook has custom context attributes' do
it 'includes the attributes in the worker context' do
expect(WebHookWorker).to receive(:perform_async) do
diff --git a/spec/services/webauthn/authenticate_service_spec.rb b/spec/services/webauthn/authenticate_service_spec.rb
index 99b8c7b0b36..e629da663d9 100644
--- a/spec/services/webauthn/authenticate_service_spec.rb
+++ b/spec/services/webauthn/authenticate_service_spec.rb
@@ -15,11 +15,13 @@ RSpec.describe Webauthn::AuthenticateService, feature_category: :system_access d
webauthn_credential = WebAuthn::Credential.from_create(create_result)
- registration = WebauthnRegistration.new(credential_xid: Base64.strict_encode64(webauthn_credential.raw_id),
- public_key: webauthn_credential.public_key,
- counter: 0,
- name: 'name',
- user_id: user.id)
+ registration = WebauthnRegistration.new(
+ credential_xid: Base64.strict_encode64(webauthn_credential.raw_id),
+ public_key: webauthn_credential.public_key,
+ counter: 0,
+ name: 'name',
+ user_id: user.id
+ )
registration.save!
end
diff --git a/spec/services/work_items/related_work_item_links/create_service_spec.rb b/spec/services/work_items/related_work_item_links/create_service_spec.rb
new file mode 100644
index 00000000000..992beb705aa
--- /dev/null
+++ b/spec/services/work_items/related_work_item_links/create_service_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::RelatedWorkItemLinks::CreateService, feature_category: :portfolio_management do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issuable) { create(:work_item, project: project) }
+ let_it_be(:issuable2) { create(:work_item, project: project) }
+ let_it_be(:restricted_issuable) { create(:work_item) }
+ let_it_be(:another_project) { create(:project, group: group) }
+ let_it_be(:issuable3) { create(:work_item, project: another_project) }
+ let_it_be(:issuable_a) { create(:work_item, project: project) }
+ let_it_be(:issuable_b) { create(:work_item, project: project) }
+ let_it_be(:issuable_link) { create(:work_item_link, source: issuable, target: issuable_b) }
+
+ let(:issuable_parent) { issuable.project }
+ let(:issuable_type) { 'work item' }
+ let(:issuable_link_class) { WorkItems::RelatedWorkItemLink }
+ let(:params) { {} }
+
+ before_all do
+ project.add_guest(user)
+ another_project.add_guest(user)
+ end
+
+ it_behaves_like 'issuable link creation', use_references: false do
+ let(:response_keys) { [:status, :created_references, :message] }
+ let(:already_assigned_error_msg) { "Work items are already linked" }
+ let(:no_found_error_msg) do
+ 'No matching work item found. Make sure you are adding a valid ID and you have access to the item.'
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/task_list_reference_removal_service_spec.rb b/spec/services/work_items/task_list_reference_removal_service_spec.rb
index 4e87ce66c21..0d34aaa3c1c 100644
--- a/spec/services/work_items/task_list_reference_removal_service_spec.rb
+++ b/spec/services/work_items/task_list_reference_removal_service_spec.rb
@@ -132,7 +132,7 @@ RSpec.describe WorkItems::TaskListReferenceRemovalService, feature_category: :te
let(:line_number_end) { line_number_start - 1 }
it_behaves_like 'failing work item task reference removal service',
- 'line_number_end must be greater or equal to line_number_start'
+ 'line_number_end must be greater or equal to line_number_start'
end
context 'when lock_version is older than current' do
diff --git a/spec/services/work_items/widgets/description_service/update_service_spec.rb b/spec/services/work_items/widgets/description_service/update_service_spec.rb
index 7da5b24a3b7..84704d3e002 100644
--- a/spec/services/work_items/widgets/description_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/description_service/update_service_spec.rb
@@ -12,8 +12,13 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
let(:params) { { description: 'updated description' } }
let(:current_user) { author }
let(:work_item) do
- create(:work_item, author: author, project: project, description: 'old description',
- last_edited_at: Date.yesterday, last_edited_by: random_user
+ create(
+ :work_item,
+ author: author,
+ project: project,
+ description: 'old description',
+ last_edited_at: Date.yesterday,
+ last_edited_by: random_user
)
end
diff --git a/spec/services/work_items/widgets/hierarchy_service/create_service_spec.rb b/spec/services/work_items/widgets/hierarchy_service/create_service_spec.rb
index 8d834c9a4f8..a4adb72c616 100644
--- a/spec/services/work_items/widgets/hierarchy_service/create_service_spec.rb
+++ b/spec/services/work_items/widgets/hierarchy_service/create_service_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::CreateService, feature_cate
it { expect { subject }.to raise_error(described_class::WidgetError, message) }
end
- before(:all) do
+ before_all do
project.add_developer(user)
end
diff --git a/spec/simplecov_env.rb b/spec/simplecov_env.rb
index 47a2f43e30c..d48acc77afb 100644
--- a/spec/simplecov_env.rb
+++ b/spec/simplecov_env.rb
@@ -1,12 +1,11 @@
# frozen_string_literal: true
-require 'simplecov'
-require 'simplecov-cobertura'
-require 'simplecov-lcov'
+require_relative 'simplecov_env_core'
require 'gitlab/utils/all'
module SimpleCovEnv
extend self
+ extend SimpleCovEnvCore
def start!
return if !ENV.key?('SIMPLECOV') || ENV['SIMPLECOV'] == '0'
@@ -19,19 +18,6 @@ module SimpleCovEnv
SimpleCov.start
end
- def configure_formatter
- SimpleCov::Formatter::LcovFormatter.config.report_with_single_file = true
-
- SimpleCov.formatters = SimpleCov::Formatter::MultiFormatter.new(
- [
- SimpleCov::Formatter::SimpleFormatter,
- SimpleCov::Formatter::HTMLFormatter,
- SimpleCov::Formatter::CoberturaFormatter,
- SimpleCov::Formatter::LcovFormatter
- ]
- )
- end
-
def configure_job
SimpleCov.configure do
if ENV['CI_JOB_NAME']
@@ -49,46 +35,4 @@ module SimpleCovEnv
end
end
end
-
- def configure_profile
- SimpleCov.configure do
- load_profile 'test_frameworks'
-
- add_filter %r{^/(ee/)?(bin|gems|vendor)}
- add_filter %r{^/(ee/)?db/fixtures/development}
- add_filter %r{^/(ee/)?db/migrate/\d{14}_init_schema\.rb\z}
-
- add_group 'Channels', %r{^/(ee/)?app/channels}
- add_group 'Components', %r{^/(ee/)?app/components}
- add_group 'Config', %r{^/(ee/)?config}
- add_group 'Controllers', %r{^/(ee/)?app/controllers}
- add_group 'Elastic migrations', %r{^/(ee/)?elastic}
- add_group 'Enums', %r{^/(ee/)?app/enums}
- add_group 'Events', %r{^/(ee/)?app/events}
- add_group 'Experiments', %r{^/(ee/)?app/experiments}
- add_group 'Finders', %r{^/(ee/)?app/finders}
- add_group 'Fixtures', %r{^/(ee/)?db/fixtures}
- add_group 'GraphQL', %r{^/(ee/)?app/graphql}
- add_group 'Helpers', %r{^/(ee/)?app/helpers}
- add_group 'Libraries', %r{^/(ee/)?lib}
- add_group 'Mailers', %r{^/(ee/)?app/mailers}
- add_group 'Metrics server', %r{^/(ee/)?metrics_server}
- add_group 'Migrations', %r{^/(ee/)?db/(geo/)?(migrate|optional_migrations|post_migrate)}
- add_group 'Models', %r{^/(ee/)?app/models}
- add_group 'Policies', %r{^/(ee/)?app/policies}
- add_group 'Presenters', %r{^/(ee/)?app/presenters}
- add_group 'Replicators', %r{^/(ee/)?app/replicators}
- add_group 'Seeds', %r{^/(ee/)?db/seeds}
- add_group 'Serializers', %r{^/(ee/)?app/serializers}
- add_group 'Services', %r{^/(ee/)?app/services}
- add_group 'Sidekiq cluster', %r{^/(ee/)?sidekiq_cluster}
- add_group 'Tooling', %r{^/(ee/)?(danger|haml_lint|rubocop|scripts|tooling)}
- add_group 'Uploaders', %r{^/(ee/)?app/uploaders}
- add_group 'Validators', %r{^/(ee/)?app/validators}
- add_group 'Views', %r{^/(ee/)?app/views}
- add_group 'Workers', %r{^/(ee/)?app/workers}
-
- merge_timeout 365 * 24 * 3600
- end
- end
end
diff --git a/spec/simplecov_env_core.rb b/spec/simplecov_env_core.rb
new file mode 100644
index 00000000000..68711549213
--- /dev/null
+++ b/spec/simplecov_env_core.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'simplecov'
+require 'simplecov-cobertura'
+require 'simplecov-lcov'
+
+module SimpleCovEnvCore
+ extend self
+
+ def configure_formatter
+ SimpleCov::Formatter::LcovFormatter.config.report_with_single_file = true
+
+ SimpleCov.formatters = SimpleCov::Formatter::MultiFormatter.new(
+ [
+ SimpleCov::Formatter::SimpleFormatter,
+ SimpleCov::Formatter::HTMLFormatter,
+ SimpleCov::Formatter::CoberturaFormatter,
+ SimpleCov::Formatter::LcovFormatter
+ ]
+ )
+ end
+
+ def configure_profile
+ SimpleCov.configure do
+ load_profile 'test_frameworks'
+
+ add_filter %r{^/(ee/)?(bin|gems|vendor)}
+ add_filter %r{^/(ee/)?db/fixtures/development}
+ add_filter %r{^/(ee/)?db/migrate/\d{14}_init_schema\.rb\z}
+
+ add_group 'Channels', %r{^/(ee/)?app/channels}
+ add_group 'Components', %r{^/(ee/)?app/components}
+ add_group 'Config', %r{^/(ee/)?config}
+ add_group 'Controllers', %r{^/(ee/)?app/controllers}
+ add_group 'Elastic migrations', %r{^/(ee/)?elastic}
+ add_group 'Enums', %r{^/(ee/)?app/enums}
+ add_group 'Events', %r{^/(ee/)?app/events}
+ add_group 'Experiments', %r{^/(ee/)?app/experiments}
+ add_group 'Finders', %r{^/(ee/)?app/finders}
+ add_group 'Fixtures', %r{^/(ee/)?db/fixtures}
+ add_group 'GraphQL', %r{^/(ee/)?app/graphql}
+ add_group 'Helpers', %r{^/(ee/)?app/helpers}
+ add_group 'Libraries', %r{^/(ee/)?lib}
+ add_group 'Mailers', %r{^/(ee/)?app/mailers}
+ add_group 'Metrics server', %r{^/(ee/)?metrics_server}
+ add_group 'Migrations', %r{^/(ee/)?db/(geo/)?(migrate|optional_migrations|post_migrate)}
+ add_group 'Models', %r{^/(ee/)?app/models}
+ add_group 'Policies', %r{^/(ee/)?app/policies}
+ add_group 'Presenters', %r{^/(ee/)?app/presenters}
+ add_group 'Replicators', %r{^/(ee/)?app/replicators}
+ add_group 'Seeds', %r{^/(ee/)?db/seeds}
+ add_group 'Serializers', %r{^/(ee/)?app/serializers}
+ add_group 'Services', %r{^/(ee/)?app/services}
+ add_group 'Sidekiq cluster', %r{^/(ee/)?sidekiq_cluster}
+ add_group 'Tooling', %r{^/(ee/)?(danger|haml_lint|rubocop|scripts|tooling)}
+ add_group 'Uploaders', %r{^/(ee/)?app/uploaders}
+ add_group 'Validators', %r{^/(ee/)?app/validators}
+ add_group 'Views', %r{^/(ee/)?app/views}
+ add_group 'Workers', %r{^/(ee/)?app/workers}
+
+ merge_timeout 365 * 24 * 3600
+ end
+ end
+end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 4d66784d943..d7ceab1289e 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -181,6 +181,7 @@ RSpec.configure do |config|
config.include SearchHelpers, type: :feature
config.include WaitHelpers, type: :feature
config.include WaitForRequests, type: :feature
+ config.include Features::DomHelpers, type: :feature
config.include EmailHelpers, :mailer, type: :mailer
config.include Warden::Test::Helpers, type: :request
config.include Gitlab::Routing, type: :routing
@@ -295,8 +296,14 @@ RSpec.configure do |config|
# Only a few percent of users will be "enrolled" into the new nav with this flag.
# Having it enabled globally would make it impossible to test the current nav.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/420121
stub_feature_flags(super_sidebar_nav_enrolled: false)
+ # The anonymous super-sidebar is under heavy development and enabling the flag
+ # globally leads to a lot of errors. This issue is for fixing all test to work with the
+ # new nav: https://gitlab.com/gitlab-org/gitlab/-/issues/420119
+ stub_feature_flags(super_sidebar_logged_out: false)
+
# It's disabled in specs because we don't support certain features which
# cause spec failures.
stub_feature_flags(gitlab_error_tracking: false)
@@ -339,6 +346,9 @@ RSpec.configure do |config|
# Keep-around refs should only be turned off for specific projects/repositories.
stub_feature_flags(disable_keep_around_refs: false)
+ # Postgres is the primary data source, and ClickHouse only when enabled in certain cases.
+ stub_feature_flags(clickhouse_data_collection: false)
+
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags
@@ -393,7 +403,7 @@ RSpec.configure do |config|
end
config.around(:example, :request_store) do |example|
- Gitlab::WithRequestStore.with_request_store { example.run }
+ ::Gitlab::SafeRequestStore.ensure_request_store { example.run }
end
config.around(:example, :enable_rugged) do |example|
diff --git a/spec/support/database/auto_explain.rb b/spec/support/database/auto_explain.rb
new file mode 100644
index 00000000000..108d88e37b9
--- /dev/null
+++ b/spec/support/database/auto_explain.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+module AutoExplain
+ class << self
+ def setup
+ Gitlab::Database::EachDatabase.each_connection do |connection|
+ next unless record_auto_explain?(connection)
+
+ connection.execute("LOAD 'auto_explain'")
+
+ # This param can only be set on pg14+ so we can't set it when starting postgres.
+ connection.execute('ALTER SYSTEM SET compute_query_id TO on')
+ connection.execute('SELECT pg_reload_conf()')
+ end
+ end
+
+ def record
+ Gitlab::Database::EachDatabase.each_connection do |connection, connection_name|
+ next unless record_auto_explain?(connection)
+
+ connection.execute(<<~SQL.squish)
+ CREATE EXTENSION IF NOT EXISTS file_fdw;
+ CREATE SERVER IF NOT EXISTS pglog FOREIGN DATA WRAPPER file_fdw;
+ SQL
+
+ csvlog_columns = [
+ 'log_time timestamp(3) with time zone',
+ 'user_name text',
+ 'database_name text',
+ 'process_id integer',
+ 'connection_from text',
+ 'session_id text',
+ 'session_line_num bigint',
+ 'command_tag text',
+ 'session_start_time timestamp with time zone',
+ 'virtual_transaction_id text',
+ 'transaction_id bigint',
+ 'error_severity text',
+ 'sql_state_code text',
+ 'message text',
+ 'detail text',
+ 'hint text',
+ 'internal_query text',
+ 'internal_query_pos integer',
+ 'context text',
+ 'query text',
+ 'query_pos integer',
+ 'location text',
+ 'application_name text',
+ 'backend_type text',
+ 'leader_pid integer',
+ 'query_id bigint'
+ ]
+
+ connection.transaction do
+ connection.execute(<<~SQL.squish)
+ CREATE FOREIGN TABLE IF NOT EXISTS pglog (#{csvlog_columns.join(', ')})
+ SERVER pglog
+ OPTIONS ( filename 'log/pglog.csv', format 'csv' );
+ SQL
+
+ log_file = Rails.root.join(
+ File.dirname(ENV.fetch('RSPEC_AUTO_EXPLAIN_LOG_PATH', 'auto_explain/auto_explain.ndjson.gz')),
+ "#{ENV.fetch('CI_JOB_NAME_SLUG', 'rspec')}.#{Process.pid}.#{connection_name}.ndjson.gz"
+ )
+
+ FileUtils.mkdir_p(File.dirname(log_file))
+
+ fingerprints = Set.new
+ recording_start = Time.now
+
+ Zlib::GzipWriter.open(log_file) do |gz|
+ pg = connection.raw_connection
+
+ pg.exec('SET statement_timeout TO 0;')
+
+ pg.send_query(<<~SQL.squish)
+ SELECT DISTINCT ON (m.query_id)
+ m.message->>'Query Text' as query, m.message->'Plan' as plan
+ FROM (
+ SELECT substring(message from '\{.*$')::jsonb AS message, query_id
+ FROM pglog
+ WHERE message LIKE '%{%'
+ ) m
+ ORDER BY m.query_id;
+ SQL
+
+ pg.set_single_row_mode
+ pg.get_result.stream_each do |row|
+ query = row['query']
+ fingerprint = PgQuery.fingerprint(query)
+ next unless fingerprints.add?(fingerprint)
+
+ plan = Gitlab::Json.parse(row['plan'])
+
+ output = {
+ query: query,
+ plan: plan,
+ fingerprint: fingerprint,
+ normalized: PgQuery.normalize(query)
+ }
+
+ gz.puts Gitlab::Json.generate(output)
+ end
+
+ puts "auto_explain log contains #{fingerprints.size} entries for #{connection_name}, writing to #{log_file}"
+ puts "took #{Time.now - recording_start}"
+ end
+
+ raise ActiveRecord::Rollback
+ end
+ end
+ end
+
+ private
+
+ def record_auto_explain?(connection)
+ ENV['CI'] \
+ && ENV['CI_MERGE_REQUEST_LABELS']&.include?('pipeline:record-queries') \
+ && ENV['CI_JOB_NAME_SLUG'] != 'db-migrate-non-superuser' \
+ && connection.database_version.to_s[0..1].to_i >= 14 \
+ && connection.select_one('SHOW is_superuser')['is_superuser'] == 'on'
+ end
+ end
+end
+
+RSpec.configure do |config|
+ config.before(:suite) do
+ AutoExplain.setup
+ end
+
+ config.after(:suite) do
+ AutoExplain.record
+ end
+end
diff --git a/spec/support/database/prevent_cross_database_modification.rb b/spec/support/database/prevent_cross_database_modification.rb
index cd0cbe733d1..77fa7feacd4 100644
--- a/spec/support/database/prevent_cross_database_modification.rb
+++ b/spec/support/database/prevent_cross_database_modification.rb
@@ -25,7 +25,7 @@ RSpec.configure do |config|
end
# Reset after execution to preferred state
- config.after do |example_file|
+ config.after do |_example_file|
::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.suppress_in_rspec = true
::ApplicationRecord.gitlab_transactions_stack.clear
diff --git a/spec/support/database/query_recorder.rb b/spec/support/database/query_recorder.rb
deleted file mode 100644
index c0736221af3..00000000000
--- a/spec/support/database/query_recorder.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.configure do |config|
- # Truncate the query_recorder log file before starting the suite
- config.before(:suite) do
- log_file = Rails.root.join(Gitlab::Database::QueryAnalyzers::QueryRecorder.log_file)
- File.write(log_file, '') if File.exist?(log_file)
- File.delete("#{log_file}.gz") if File.exist?("#{log_file}.gz")
- end
-
- config.after(:suite) do
- if ENV['CI']
- log_file = Rails.root.join(Gitlab::Database::QueryAnalyzers::QueryRecorder.log_file)
- system("gzip #{log_file}") if File.exist?(log_file)
- end
- end
-end
diff --git a/spec/support/fast_quarantine.rb b/spec/support/fast_quarantine.rb
index b5ed1a2aa96..9732a287cb2 100644
--- a/spec/support/fast_quarantine.rb
+++ b/spec/support/fast_quarantine.rb
@@ -7,10 +7,9 @@ return if ENV['CI_MERGE_REQUEST_LABELS'].to_s.include?('pipeline:run-flaky-tests
require_relative '../../tooling/lib/tooling/fast_quarantine'
RSpec.configure do |config|
- fast_quarantine_local_path = ENV.fetch('RSPEC_FAST_QUARANTINE_LOCAL_PATH', 'rspec/fast_quarantine-gitlab.txt')
fast_quarantine_path = ENV.fetch(
'RSPEC_FAST_QUARANTINE_PATH',
- File.expand_path("../../#{fast_quarantine_local_path}", __dir__)
+ File.expand_path("../../rspec/fast_quarantine-gitlab.txt", __dir__)
)
fast_quarantine = Tooling::FastQuarantine.new(fast_quarantine_path: fast_quarantine_path)
skipped_examples = []
@@ -28,10 +27,12 @@ RSpec.configure do |config|
next if skipped_examples.empty?
skipped_tests_report_path = ENV.fetch(
- 'SKIPPED_TESTS_REPORT_PATH',
+ 'RSPEC_SKIPPED_TESTS_REPORT_PATH',
File.expand_path("../../rspec/flaky/skipped_tests.txt", __dir__)
)
+ next warn("#{skipped_tests_report_path} doesn't exist!") unless File.exist?(skipped_tests_report_path.to_s)
+
File.write(skipped_tests_report_path, "#{ENV.fetch('CI_JOB_URL', 'local-run')}\n#{skipped_examples.join("\n")}\n\n")
end
end
diff --git a/spec/support/formatters/json_formatter.rb b/spec/support/formatters/json_formatter.rb
index 1fb0c7c91ec..a54004b3024 100644
--- a/spec/support/formatters/json_formatter.rb
+++ b/spec/support/formatters/json_formatter.rb
@@ -79,7 +79,8 @@ module Support
feature_category: example.metadata[:feature_category],
ci_job_url: ENV['CI_JOB_URL'],
retry_attempts: example.metadata[:retry_attempts],
- level: example.metadata[:level]
+ level: example.metadata[:level],
+ allowed_to_be_slow: example.metadata[:allowed_to_be_slow]
}
end
diff --git a/spec/support/helpers/content_editor_helpers.rb b/spec/support/helpers/content_editor_helpers.rb
index a6cc2560d0b..7597a13e475 100644
--- a/spec/support/helpers/content_editor_helpers.rb
+++ b/spec/support/helpers/content_editor_helpers.rb
@@ -9,6 +9,10 @@ module ContentEditorHelpers
end
end
+ def switch_to_markdown_editor
+ click_button("Switch to plain text editing")
+ end
+
def switch_to_content_editor
click_button("Switch to rich text editing")
end
diff --git a/spec/support/helpers/features/dom_helpers.rb b/spec/support/helpers/features/dom_helpers.rb
new file mode 100644
index 00000000000..ac6523f3360
--- /dev/null
+++ b/spec/support/helpers/features/dom_helpers.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Features
+ module DomHelpers
+ def find_by_testid(testid)
+ page.find("[data-testid='#{testid}']")
+ end
+
+ def within_testid(testid, &block)
+ page.within("[data-testid='#{testid}']", &block)
+ end
+ end
+end
diff --git a/spec/support/helpers/features/runners_helpers.rb b/spec/support/helpers/features/runners_helpers.rb
index 0504e883b82..dbd1edade8c 100644
--- a/spec/support/helpers/features/runners_helpers.rb
+++ b/spec/support/helpers/features/runners_helpers.rb
@@ -23,11 +23,11 @@ module Features
def input_filtered_search_keys(search_term)
focus_filtered_search
- page.within(search_bar_selector) do
- page.find('input').send_keys(search_term)
- click_on 'Search'
- end
+ page.find(search_bar_selector).find('input').send_keys(search_term)
+ # blur input
+ find('body').click
+ page.click_on 'Search'
wait_for_requests
end
@@ -49,9 +49,8 @@ module Features
# For OPERATORS_IS, clicking the filter
# immediately preselects "=" operator
-
- page.find('input').send_keys(value)
- page.find('input').send_keys(:enter)
+ send_keys(value)
+ send_keys(:enter)
click_on 'Search'
end
diff --git a/spec/support/helpers/filter_spec_helper.rb b/spec/support/helpers/filter_spec_helper.rb
index 7beed9c7755..dc282bf0a68 100644
--- a/spec/support/helpers/filter_spec_helper.rb
+++ b/spec/support/helpers/filter_spec_helper.rb
@@ -94,9 +94,9 @@ module FilterSpecHelper
when /\A(.+)?[^\d]\d+\z/
# Integer-based reference with optional project prefix
reference.gsub(/\d+\z/) { |i| i.to_i + 10_000 }
- when /\A(.+@)?(\h{7,40}\z)/
+ when /\A(.+@)?(#{Gitlab::Git::Commit::RAW_SHA_PATTERN}\z)/o
# SHA-based reference with optional prefix
- reference.gsub(/\h{7,40}\z/) { |v| v.reverse }
+ reference.gsub(/#{Gitlab::Git::Commit::RAW_SHA_PATTERN}\z/o) { |v| v.reverse }
else
reference.gsub(/\w+\z/) { |v| v.reverse }
end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 62e05129fb2..19a637d4893 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -151,7 +151,7 @@ module GraphqlHelpers
raise UnauthorizedObject unless parent
# we enable the request store so we can track gitaly calls.
- ::Gitlab::WithRequestStore.with_request_store do
+ ::Gitlab::SafeRequestStore.ensure_request_store do
prepared_args = case arg_style
when :internal_prepared
args_internal_prepared(field, args: args, query_ctx: query_ctx, parent: parent, extras: extras, query: query)
@@ -267,7 +267,7 @@ module GraphqlHelpers
# authentication (token set-up, license checks)
# It clears the request store, rails cache, and BatchLoader Executor between runs.
def run_with_clean_state(query, **args)
- ::Gitlab::WithRequestStore.with_request_store do
+ ::Gitlab::SafeRequestStore.ensure_request_store do
with_clean_rails_cache do
with_clean_batchloader_executor do
::GitlabSchema.execute(query, **args)
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index c3076a2c359..92a49d6a196 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -676,7 +676,6 @@ module KubernetesHelpers
}
end
- # noinspection RubyStringKeysInHashInspection
def knative_06_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production', cluster_id: 9)
{ "apiVersion" => "serving.knative.dev/v1alpha1",
"kind" => "Service",
@@ -736,7 +735,6 @@ module KubernetesHelpers
"podcount" => 0 }
end
- # noinspection RubyStringKeysInHashInspection
def knative_07_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production', cluster_id: 5)
{ "apiVersion" => "serving.knative.dev/v1alpha1",
"kind" => "Service",
@@ -788,7 +786,6 @@ module KubernetesHelpers
"podcount" => 0 }
end
- # noinspection RubyStringKeysInHashInspection
def knative_09_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production', cluster_id: 5)
{ "apiVersion" => "serving.knative.dev/v1alpha1",
"kind" => "Service",
@@ -840,7 +837,6 @@ module KubernetesHelpers
"podcount" => 0 }
end
- # noinspection RubyStringKeysInHashInspection
def knative_05_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production', cluster_id: 8)
{ "apiVersion" => "serving.knative.dev/v1alpha1",
"kind" => "Service",
diff --git a/spec/support/helpers/metrics_dashboard_helpers.rb b/spec/support/helpers/metrics_dashboard_helpers.rb
index 417baeda33a..1aae3964669 100644
--- a/spec/support/helpers/metrics_dashboard_helpers.rb
+++ b/spec/support/helpers/metrics_dashboard_helpers.rb
@@ -38,14 +38,6 @@ module MetricsDashboardHelpers
::Gitlab::Config::Loader::Yaml.new(data).load_raw!
end
- def system_dashboard_path
- Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH
- end
-
- def pod_dashboard_path
- Metrics::Dashboard::PodDashboardService::DASHBOARD_PATH
- end
-
def business_metric_title
Enums::PrometheusMetric.group_details[:business][:group_title]
end
diff --git a/spec/support/helpers/migrations_helpers/project_statistics_helper.rb b/spec/support/helpers/migrations_helpers/project_statistics_helper.rb
new file mode 100644
index 00000000000..4e7d83a38ac
--- /dev/null
+++ b/spec/support/helpers/migrations_helpers/project_statistics_helper.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module MigrationHelpers
+ module ProjectStatisticsHelper
+ def generate_records(projects, table, values = {})
+ projects.map do |proj|
+ table.create!(
+ values.merge({
+ project_id: proj.id,
+ namespace_id: proj.namespace_id
+ })
+ )
+ end
+ end
+
+ def create_migration(end_id:)
+ described_class.new(start_id: 1, end_id: end_id,
+ batch_table: 'project_statistics', batch_column: 'project_id',
+ sub_batch_size: 1_000, pause_ms: 0,
+ connection: ApplicationRecord.connection)
+ end
+
+ def create_project_stats(project_table, namespace, default_stats, override_stats = {})
+ stats = default_stats.merge(override_stats)
+
+ group = namespace.create!(name: 'group_a', path: 'group-a', type: 'Group')
+ project_namespace = namespace.create!(name: 'project_a', path: 'project_a', type: 'Project', parent_id: group.id)
+ proj = project_table.create!(name: 'project_a', path: 'project-a', namespace_id: group.id,
+ project_namespace_id: project_namespace.id)
+ project_statistics_table.create!(
+ project_id: proj.id,
+ namespace_id: group.id,
+ **stats
+ )
+ end
+ end
+end
diff --git a/spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb b/spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb
index 3f0a2bb7f3b..a764e751bf5 100644
--- a/spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb
+++ b/spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb
@@ -1,23 +1,19 @@
# frozen_string_literal: true
RSpec.configure do |config|
- config.include Ci::PartitioningTesting::PartitionIdentifiers
+ config.include ::Ci::PartitioningTesting::PartitionIdentifiers
config.around(:each, :ci_partitionable) do |example|
- unless Ci::Build.table_name.to_s.starts_with?('p_')
- skip 'Skipping partitioning tests until `ci_builds` is partitioned'
- end
-
- Ci::PartitioningTesting::SchemaHelpers.with_routing_tables do
+ ::Ci::PartitioningTesting::SchemaHelpers.with_routing_tables do
example.run
end
end
config.before(:all) do
- Ci::PartitioningTesting::SchemaHelpers.setup
+ ::Ci::PartitioningTesting::SchemaHelpers.setup
end
config.after(:all) do
- Ci::PartitioningTesting::SchemaHelpers.teardown
+ ::Ci::PartitioningTesting::SchemaHelpers.teardown
end
end
diff --git a/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb b/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
index 849d9ea117e..a47aaffdb43 100644
--- a/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
+++ b/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
@@ -3,36 +3,27 @@
module Ci
module PartitioningTesting
module SchemaHelpers
- DEFAULT_PARTITION = 100
-
module_function
def with_routing_tables
- # model.table_name = :routing_table
+ # previous_table_name = Model.table_name
+ # Model.table_name = routing_table_name
+
yield
# ensure
- # model.table_name = :regular_table
+ # Model.table_name = previous_table_name
end
- # We're dropping the default values here to ensure that the application code
- # populates the `partition_id` value and it's not falling back on the
- # database default one. We should be able to clean this up after
- # partitioning the tables and substituting the routing table in the model:
- # https://gitlab.com/gitlab-org/gitlab/-/issues/377822
- #
def setup(connection: Ci::ApplicationRecord.connection)
each_partitionable_table do |table_name|
- change_column_default(table_name, from: DEFAULT_PARTITION, to: nil, connection: connection)
- change_column_default("p_#{table_name}", from: DEFAULT_PARTITION, to: nil, connection: connection)
create_test_partition("p_#{table_name}", connection: connection)
end
+ ensure_builds_id_uniquness(connection: connection)
end
def teardown(connection: Ci::ApplicationRecord.connection)
each_partitionable_table do |table_name|
drop_test_partition("p_#{table_name}", connection: connection)
- change_column_default(table_name, from: nil, to: DEFAULT_PARTITION, connection: connection)
- change_column_default("p_#{table_name}", from: nil, to: DEFAULT_PARTITION, connection: connection)
end
end
@@ -47,12 +38,6 @@ module Ci
end
end
- def change_column_default(table_name, from:, to:, connection:)
- return unless table_available?(table_name, connection: connection)
-
- connection.change_column_default(table_name, :partition_id, from: from, to: to)
- end
-
def create_test_partition(table_name, connection:)
return unless table_available?(table_name, connection: connection)
@@ -75,6 +60,16 @@ module Ci
SQL
end
+ # This can be removed after https://gitlab.com/gitlab-org/gitlab/-/issues/421173
+ # is implemented
+ def ensure_builds_id_uniquness(connection:)
+ connection.execute(<<~SQL.squish)
+ CREATE TRIGGER assign_p_ci_builds_id_trigger
+ BEFORE INSERT ON #{full_partition_name('ci_builds')}
+ FOR EACH ROW EXECUTE FUNCTION assign_p_ci_builds_id_value();
+ SQL
+ end
+
def table_available?(table_name, connection:)
connection.table_exists?(table_name) &&
connection.column_exists?(table_name, :partition_id)
diff --git a/spec/support/helpers/prometheus_helpers.rb b/spec/support/helpers/prometheus_helpers.rb
index e1f5e6dee14..da80f6f08c2 100644
--- a/spec/support/helpers/prometheus_helpers.rb
+++ b/spec/support/helpers/prometheus_helpers.rb
@@ -240,12 +240,11 @@ module PrometheusHelpers
def prometheus_alert_payload(firing: [], resolved: [])
status = firing.any? ? 'firing' : 'resolved'
alerts = firing + resolved
- alert_name = alerts.first&.title || ''
- prometheus_metric_id = alerts.first&.prometheus_metric_id&.to_s
+ alert_name = alerts.first || ''
alerts_map = \
- firing.map { |alert| prometheus_map_alert_payload('firing', alert) } +
- resolved.map { |alert| prometheus_map_alert_payload('resolved', alert) }
+ firing.map { |title| prometheus_map_alert_payload('firing', title) } +
+ resolved.map { |title| prometheus_map_alert_payload('resolved', title) }
# See https://prometheus.io/docs/alerting/configuration/#%3Cwebhook_config%3E
{
@@ -257,9 +256,7 @@ module PrometheusHelpers
'alertname' => alert_name
},
'commonLabels' => {
- 'alertname' => alert_name,
- 'gitlab' => 'hook',
- 'gitlab_alert_id' => prometheus_metric_id
+ 'alertname' => alert_name
},
'commonAnnotations' => {},
'externalURL' => '',
@@ -267,22 +264,21 @@ module PrometheusHelpers
}
end
- def prometheus_alert_payload_fingerprint(prometheus_alert)
+ def prometheus_alert_payload_fingerprint(title)
# timestamp is hard-coded in #prometheus_map_alert_payload
- fingerprint = "#{prometheus_alert.prometheus_metric_id}/2018-09-24T08:57:31.095725221Z"
+ # sample fingerprint format comes from AlertManagement::Payload::Prometheus
+ fingerprint = ["2018-09-24T08:57:31.095725221Z", title].join('/')
Gitlab::AlertManagement::Fingerprint.generate(fingerprint)
end
private
- def prometheus_map_alert_payload(status, alert)
+ def prometheus_map_alert_payload(status, title)
{
'status' => status,
'labels' => {
- 'alertname' => alert.title,
- 'gitlab' => 'hook',
- 'gitlab_alert_id' => alert.prometheus_metric_id.to_s
+ 'alertname' => title
},
'annotations' => {},
'startsAt' => '2018-09-24T08:57:31.095725221Z',
diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb
index 748ea525e40..6d0e97b0a75 100644
--- a/spec/support/helpers/stub_gitlab_calls.rb
+++ b/spec/support/helpers/stub_gitlab_calls.rb
@@ -23,6 +23,10 @@ module StubGitlabCalls
end
def stub_ci_pipeline_yaml_file(ci_yaml_content)
+ allow_any_instance_of(Gitlab::Ci::ProjectConfig::Repository)
+ .to receive(:file_in_repository?)
+ .and_return(ci_yaml_content.present?)
+
allow_any_instance_of(Repository)
.to receive(:gitlab_ci_yml_for)
.and_return(ci_yaml_content)
diff --git a/spec/support/matchers/exceed_query_limit.rb b/spec/support/matchers/exceed_query_limit.rb
index 29ebe5a3918..cc912d8de66 100644
--- a/spec/support/matchers/exceed_query_limit.rb
+++ b/spec/support/matchers/exceed_query_limit.rb
@@ -271,15 +271,11 @@ RSpec::Matchers.define :issue_fewer_queries_than do
end
end
-RSpec::Matchers.define :issue_same_number_of_queries_as do
+RSpec::Matchers.define :issue_same_number_of_queries_as do |expected|
supports_block_expectations
include ExceedQueryLimitHelpers
- def control
- block_arg
- end
-
chain :or_fewer do
@or_fewer = true
end
@@ -288,12 +284,15 @@ RSpec::Matchers.define :issue_same_number_of_queries_as do
@skip_cached = true
end
- def control_recorder
- @control_recorder ||= ActiveRecord::QueryRecorder.new(&control)
- end
-
def expected_count
- control_recorder.count
+ # Some tests pass a query recorder, others pass a block that executes an action.
+ # Maybe, we need to clear the block usage and only accept query recorders.
+
+ @expected_count ||= if expected.is_a?(ActiveRecord::QueryRecorder)
+ query_recorder_count(expected)
+ else
+ ActiveRecord::QueryRecorder.new(&block_arg).count
+ end
end
def verify_count(&block)
diff --git a/spec/support/protected_branch_helpers.rb b/spec/support/protected_branch_helpers.rb
index d983d03fd2e..576275e9d1d 100644
--- a/spec/support/protected_branch_helpers.rb
+++ b/spec/support/protected_branch_helpers.rb
@@ -9,6 +9,10 @@ module ProtectedBranchHelpers
end
end
+ def show_add_form
+ click_button 'Add protected branch'
+ end
+
def set_protected_branch_name(branch_name)
find('.js-protected-branch-select').click
find('.dropdown-input-field').set(branch_name)
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index 3cce22c00e6..f52f843e56a 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -4149,7 +4149,7 @@
- './spec/features/projects/settings/user_changes_default_branch_spec.rb'
- './spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb'
- './spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb'
-- './spec/features/projects/settings/user_manages_project_members_spec.rb'
+- './spec/features/projects/members/user_manages_project_members_spec.rb'
- './spec/features/projects/settings/user_renames_a_project_spec.rb'
- './spec/features/projects/settings/user_searches_in_settings_spec.rb'
- './spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb'
@@ -4368,7 +4368,6 @@
- './spec/finders/merge_requests_finder_spec.rb'
- './spec/finders/merge_requests/oldest_per_commit_finder_spec.rb'
- './spec/finders/merge_request_target_project_finder_spec.rb'
-- './spec/finders/metrics/dashboards/annotations_finder_spec.rb'
- './spec/finders/metrics/users_starred_dashboards_finder_spec.rb'
- './spec/finders/milestones_finder_spec.rb'
- './spec/finders/namespaces/projects_finder_spec.rb'
@@ -5026,7 +5025,6 @@
- './spec/helpers/enable_search_settings_helper_spec.rb'
- './spec/helpers/environment_helper_spec.rb'
- './spec/helpers/environments_helper_spec.rb'
-- './spec/helpers/events_helper_spec.rb'
- './spec/helpers/explore_helper_spec.rb'
- './spec/helpers/export_helper_spec.rb'
- './spec/helpers/external_link_helper_spec.rb'
@@ -5453,8 +5451,6 @@
- './spec/lib/container_registry/path_spec.rb'
- './spec/lib/container_registry/registry_spec.rb'
- './spec/lib/container_registry/tag_spec.rb'
-- './spec/lib/csv_builder_spec.rb'
-- './spec/lib/csv_builders/stream_spec.rb'
- './spec/lib/declarative_enum_spec.rb'
- './spec/lib/error_tracking/stacktrace_builder_spec.rb'
- './spec/lib/event_filter_spec.rb'
@@ -6808,7 +6804,6 @@
- './spec/lib/gitlab/net_http_adapter_spec.rb'
- './spec/lib/gitlab/no_cache_headers_spec.rb'
- './spec/lib/gitlab/noteable_metadata_spec.rb'
-- './spec/lib/gitlab/null_request_store_spec.rb'
- './spec/lib/gitlab/object_hierarchy_spec.rb'
- './spec/lib/gitlab/octokit/middleware_spec.rb'
- './spec/lib/gitlab/omniauth_initializer_spec.rb'
@@ -6933,7 +6928,6 @@
- './spec/lib/gitlab/saas_spec.rb'
- './spec/lib/gitlab/safe_request_loader_spec.rb'
- './spec/lib/gitlab/safe_request_purger_spec.rb'
-- './spec/lib/gitlab/safe_request_store_spec.rb'
- './spec/lib/gitlab/sample_data_template_spec.rb'
- './spec/lib/gitlab/sanitizers/exception_message_spec.rb'
- './spec/lib/gitlab/sanitizers/exif_spec.rb'
@@ -7141,9 +7135,6 @@
- './spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb'
- './spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb'
- './spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb'
-- './spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb'
-- './spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb'
-- './spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb'
- './spec/lib/gitlab/usage/metric_spec.rb'
- './spec/lib/gitlab/usage/metrics/query_spec.rb'
- './spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb'
@@ -7173,7 +7164,6 @@
- './spec/lib/gitlab/webpack/manifest_spec.rb'
- './spec/lib/gitlab/wiki_file_finder_spec.rb'
- './spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb'
-- './spec/lib/gitlab/with_request_store_spec.rb'
- './spec/lib/gitlab/word_diff/chunk_collection_spec.rb'
- './spec/lib/gitlab/word_diff/line_processor_spec.rb'
- './spec/lib/gitlab/word_diff/parser_spec.rb'
@@ -8160,6 +8150,7 @@
- './spec/requests/admin/version_check_controller_spec.rb'
- './spec/requests/api/access_requests_spec.rb'
- './spec/requests/api/admin/batched_background_migrations_spec.rb'
+- './spec/requests/api/admin/broadcast_messages_spec.rb'
- './spec/requests/api/admin/ci/variables_spec.rb'
- './spec/requests/api/admin/instance_clusters_spec.rb'
- './spec/requests/api/admin/plan_limits_spec.rb'
@@ -8175,7 +8166,6 @@
- './spec/requests/api/badges_spec.rb'
- './spec/requests/api/boards_spec.rb'
- './spec/requests/api/branches_spec.rb'
-- './spec/requests/api/broadcast_messages_spec.rb'
- './spec/requests/api/bulk_imports_spec.rb'
- './spec/requests/api/ci/job_artifacts_spec.rb'
- './spec/requests/api/ci/jobs_spec.rb'
diff --git a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
index 8c17136b1e2..848e333d88b 100644
--- a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
@@ -92,6 +92,8 @@ RSpec.shared_context 'with integration' do
hash.merge!(k => File.read('spec/fixtures/service_account.json'))
elsif integration == 'google_play' && k == :service_account_key_file_name
hash.merge!(k => 'service_account.json')
+ elsif integration == 'google_play' && k == :google_play_protected_refs # rubocop:disable Lint/DuplicateBranch
+ hash.merge!(k => true)
else
hash.merge!(k => "someword")
end
diff --git a/spec/support/shared_contexts/glfm/api_markdown_snapshot_shared_context.rb b/spec/support/shared_contexts/glfm/api_markdown_snapshot_shared_context.rb
index 3623fa0850d..a0d91d813ae 100644
--- a/spec/support/shared_contexts/glfm/api_markdown_snapshot_shared_context.rb
+++ b/spec/support/shared_contexts/glfm/api_markdown_snapshot_shared_context.rb
@@ -29,7 +29,6 @@ RSpec.shared_context 'with API::Markdown Snapshot shared context' do |ee_only: f
let(:normalizations) { normalizations_by_example_name.dig(name, :html, :static, :snapshot) }
it "verifies conversion of GLFM to HTML", :unlimited_max_formatted_output_length do
- # noinspection RubyResolve
normalized_html = normalize_html(html, normalizations)
api_url = metadata_by_example_name&.dig(name, :api_request_override_path) || (api "/markdown")
diff --git a/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb b/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
index 26f550b9b40..434592ccd38 100644
--- a/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
+++ b/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
@@ -41,7 +41,9 @@ RSpec.shared_context 'with FOSS query type fields' do
:user,
:users,
:work_item,
- :audit_event_definitions
+ :audit_event_definitions,
+ :abuse_report,
+ :abuse_report_labels
]
end
end
diff --git a/spec/support/shared_contexts/lib/gitlab/background_migration/backfill_project_statistics.rb b/spec/support/shared_contexts/lib/gitlab/background_migration/backfill_project_statistics.rb
new file mode 100644
index 00000000000..1b835e1392d
--- /dev/null
+++ b/spec/support/shared_contexts/lib/gitlab/background_migration/backfill_project_statistics.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'when backfilling project statistics' do
+ let!(:namespaces) { table(:namespaces) }
+ let!(:project_statistics_table) { table(:project_statistics) }
+ let!(:projects) { table(:projects) }
+ let!(:count_of_columns) { ProjectStatistics::STORAGE_SIZE_COMPONENTS.count }
+ let(:default_storage_size) { 12 }
+
+ let!(:root_group) do
+ namespaces.create!(name: 'root-group', path: 'root-group', type: 'Group') do |new_group|
+ new_group.update!(traversal_ids: [new_group.id])
+ end
+ end
+
+ let!(:group) do
+ namespaces.create!(name: 'group', path: 'group', parent_id: root_group.id, type: 'Group') do |new_group|
+ new_group.update!(traversal_ids: [root_group.id, new_group.id])
+ end
+ end
+
+ let!(:sub_group) do
+ namespaces.create!(name: 'subgroup', path: 'subgroup', parent_id: group.id, type: 'Group') do |new_group|
+ new_group.update!(traversal_ids: [root_group.id, group.id, new_group.id])
+ end
+ end
+
+ let!(:namespace1) do
+ namespaces.create!(
+ name: 'namespace1', type: 'Group', path: 'space1'
+ )
+ end
+
+ let!(:proj_namespace1) do
+ namespaces.create!(
+ name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id
+ )
+ end
+
+ let!(:proj_namespace2) do
+ namespaces.create!(
+ name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace1.id
+ )
+ end
+
+ let!(:proj_namespace3) do
+ namespaces.create!(
+ name: 'proj3', path: 'proj3', type: 'Project', parent_id: sub_group.id
+ )
+ end
+
+ let!(:proj_namespace4) do
+ namespaces.create!(
+ name: 'proj4', path: 'proj4', type: 'Project', parent_id: sub_group.id
+ )
+ end
+
+ let!(:proj_namespace5) do
+ namespaces.create!(
+ name: 'proj5', path: 'proj5', type: 'Project', parent_id: sub_group.id
+ )
+ end
+
+ let!(:proj1) do
+ projects.create!(
+ name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id
+ )
+ end
+
+ let!(:proj2) do
+ projects.create!(
+ name: 'proj2', path: 'proj2', namespace_id: namespace1.id, project_namespace_id: proj_namespace2.id
+ )
+ end
+
+ let!(:proj3) do
+ projects.create!(
+ name: 'proj3', path: 'proj3', namespace_id: sub_group.id, project_namespace_id: proj_namespace3.id
+ )
+ end
+
+ let!(:proj4) do
+ projects.create!(
+ name: 'proj4', path: 'proj4', namespace_id: sub_group.id, project_namespace_id: proj_namespace4.id
+ )
+ end
+
+ let!(:proj5) do
+ projects.create!(
+ name: 'proj5', path: 'proj5', namespace_id: sub_group.id, project_namespace_id: proj_namespace5.id
+ )
+ end
+
+ let(:migration) do
+ described_class.new(start_id: 1, end_id: proj4.id,
+ batch_table: 'project_statistics', batch_column: 'project_id',
+ sub_batch_size: 1_000, pause_ms: 0,
+ connection: ApplicationRecord.connection)
+ end
+
+ let(:default_projects) do
+ [
+ proj1, proj2, proj3, proj4
+ ]
+ end
+end
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
index b6c54e902a2..d9b2b44980c 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
@@ -18,6 +18,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
let(:elasticsearch_requests_total) { double('elasticsearch calls total metric') }
let(:load_balancing_metric) { double('load balancing metric') }
let(:sidekiq_mem_total_bytes) { double('sidekiq mem total bytes') }
+ let(:completion_seconds_sum_metric) { double('sidekiq completion seconds sum metric') }
before do
allow(Gitlab::Metrics).to receive(:histogram).and_call_original
@@ -36,6 +37,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_redis_requests_total, anything).and_return(redis_requests_total)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_elasticsearch_requests_total, anything).and_return(elasticsearch_requests_total)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_completion_seconds_sum, anything).and_return(completion_seconds_sum_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_concurrency, anything, {}, :all).and_return(concurrency_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_mem_total_bytes, anything, {}, :all).and_return(sidekiq_mem_total_bytes)
@@ -76,8 +78,13 @@ RSpec.shared_context 'server metrics call' do
}
end
+ let(:stub_subject) { true }
+
before do
- allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after)
+ if stub_subject
+ allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after)
+ end
+
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
allow(Gitlab::InstrumentationHelper).to receive(:queue_duration_for_job).with(job).and_return(queue_duration_for_job)
allow(ActiveRecord::LogSubscriber).to receive(:runtime).and_return(db_duration * 1000)
@@ -93,6 +100,7 @@ RSpec.shared_context 'server metrics call' do
allow(running_jobs_metric).to receive(:increment)
allow(redis_requests_total).to receive(:increment)
allow(elasticsearch_requests_total).to receive(:increment)
+ allow(completion_seconds_sum_metric).to receive(:increment)
allow(queue_duration_seconds).to receive(:observe)
allow(user_execution_seconds_metric).to receive(:observe)
allow(db_seconds_metric).to receive(:observe)
diff --git a/spec/support/shared_contexts/lib/sbom/package_url_shared_contexts.rb b/spec/support/shared_contexts/lib/sbom/package_url_shared_contexts.rb
index 263cf9f5e19..a4c454ea264 100644
--- a/spec/support/shared_contexts/lib/sbom/package_url_shared_contexts.rb
+++ b/spec/support/shared_contexts/lib/sbom/package_url_shared_contexts.rb
@@ -4,10 +4,12 @@ require 'oj'
def parameterized_test_matrix(invalid: false)
test_cases_path = File.join(
- File.expand_path(__dir__), '..', '..', '..', '..', 'fixtures', 'lib', 'sbom', 'package-url-test-cases.json')
+ File.expand_path(__dir__), '../../../../fixtures/lib/sbom/package-url-test-cases.json')
test_cases = Gitlab::Json.parse(File.read(test_cases_path))
- test_cases.filter { _1.delete('is_invalid') == invalid }.each_with_object({}) do |test_case, memo|
+ test_cases
+ .filter { |test_case| test_case.delete('is_invalid') == invalid }
+ .each_with_object({}) do |test_case, memo|
description = test_case.delete('description')
memo[description] = test_case.symbolize_keys
end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 0abf688566a..112b90029b8 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -112,6 +112,7 @@ RSpec.shared_context 'project navbar structure' do
_('CI/CD'),
_('Packages and registries'),
_('Monitor'),
+ (_('Analytics') if Gitlab.ee?),
s_('UsageQuota|Usage Quotas')
]
}
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 22caf2b3530..07a4cbdb534 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -12,7 +12,7 @@ RSpec.shared_context 'GroupPolicy context' do
let(:public_permissions) do
%i[
- read_group read_counts
+ read_group read_counts read_issue
read_label read_issue_board_list read_milestone read_issue_board
]
end
@@ -74,7 +74,6 @@ RSpec.shared_context 'GroupPolicy context' do
read_statistics
update_default_branch_protection
read_group_runners
- admin_group_runners
register_group_runners
read_billing
edit_billing
diff --git a/spec/support/shared_contexts/services/packages/rubygems/invalid_metadata.rb b/spec/support/shared_contexts/services/packages/rubygems/invalid_metadata.rb
new file mode 100644
index 00000000000..b6b962e5c08
--- /dev/null
+++ b/spec/support/shared_contexts/services/packages/rubygems/invalid_metadata.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'with invalid Rubygems metadata' do
+ before do
+ allow_next_instance_of(::Packages::Rubygems::MetadataExtractionService) do |instance|
+ allow(instance).to receive(:execute).and_raise(ActiveRecord::StatementInvalid)
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/user_contribution_events_shared_context.rb b/spec/support/shared_contexts/user_contribution_events_shared_context.rb
index 48f0ac1e4ac..b6f9b6bed44 100644
--- a/spec/support/shared_contexts/user_contribution_events_shared_context.rb
+++ b/spec/support/shared_contexts/user_contribution_events_shared_context.rb
@@ -14,16 +14,20 @@ RSpec.shared_context 'with user contribution events' do
# milestone
let_it_be(:milestone) { create(:milestone, project: project) }
- # note
- let_it_be(:note_on_issue) { create(:note_on_issue, noteable: issue, project: project) }
-
# design
let_it_be(:design) { create(:design, project: project, issue: issue, author: user) }
+ # note
+ let_it_be(:note_on_issue) { create(:note_on_issue, noteable: issue, project: project) }
+ let_it_be(:note_on_merge_request) { create(:note_on_merge_request, noteable: merge_request, project: project) }
+ let_it_be(:note_on_project_snippet) { create(:note_on_project_snippet, project: project) }
+ let_it_be(:note_on_design) { create(:note_on_design, noteable: design) }
+ let_it_be(:note_on_personal_snippet) do
+ create(:note, project: nil, noteable: create(:personal_snippet, author: user))
+ end
+
# work item
let_it_be(:incident) { create(:work_item, :incident, author: user, project: project) }
- let_it_be(:test_case) { create(:work_item, :test_case, author: user, project: project) }
- let_it_be(:requirement) { create(:work_item, :requirement, author: user, project: project) }
let_it_be(:task) { create(:work_item, :task, author: user, project: project) }
# events
@@ -36,17 +40,39 @@ RSpec.shared_context 'with user contribution events' do
# closed
let_it_be(:closed_issue_event) { create(:event, :closed, author: user, project: project, target: issue) }
let_it_be(:closed_milestone_event) { create(:event, :closed, author: user, project: project, target: milestone) }
- let_it_be(:closed_incident_event) { create(:event, :closed, author: user, project: project, target: incident) }
- let_it_be(:closed_test_case_event) { create(:event, :closed, author: user, project: project, target: test_case) }
let_it_be(:closed_merge_request_event) do
create(:event, :closed, author: user, project: project, target: merge_request)
end
+ let_it_be(:closed_task_event) do
+ create(:event, :closed, :for_work_item, author: user, project: project, target: task)
+ end
+
+ let_it_be(:closed_incident_event) do
+ create(:event, :closed, :for_work_item, author: user, project: project, target: incident)
+ end
+
# commented
- let_it_be(:commented_event) do
+ let_it_be(:commented_issue_event) do
create(:event, :commented, author: user, project: project, target: note_on_issue)
end
+ let_it_be(:commented_merge_request_event) do
+ create(:event, :commented, author: user, project: project, target: note_on_merge_request)
+ end
+
+ let_it_be(:commented_project_snippet_event) do
+ create(:event, :commented, author: user, target: note_on_project_snippet)
+ end
+
+ let_it_be(:commented_personal_snippet_event) do
+ create(:event, :commented, project: nil, author: user, target: note_on_personal_snippet)
+ end
+
+ let_it_be(:commented_design_event) do
+ create(:event, :commented, author: user, target: note_on_design)
+ end
+
# created
let_it_be(:created_issue_event) { create(:event, :created, author: user, project: project, target: issue) }
let_it_be(:created_milestone_event) { create(:event, :created, author: user, project: project, target: milestone) }
@@ -57,14 +83,6 @@ RSpec.shared_context 'with user contribution events' do
create(:event, :created, :for_work_item, author: user, project: project, target: incident)
end
- let_it_be(:created_test_case_event) do
- create(:event, :created, :for_work_item, author: user, project: project, target: test_case)
- end
-
- let_it_be(:created_requirement_event) do
- create(:event, :created, :for_work_item, author: user, project: project, target: requirement)
- end
-
let_it_be(:created_task_event) do
create(:event, :created, :for_work_item, author: user, project: project, target: task)
end
@@ -147,8 +165,8 @@ RSpec.shared_context 'with user contribution events' do
# reopened
let_it_be(:reopened_issue_event) { create(:event, :reopened, author: user, project: project, target: issue) }
let_it_be(:reopened_milestone_event) { create(:event, :reopened, author: user, project: project, target: milestone) }
+ let_it_be(:reopened_task_event) { create(:event, :reopened, author: user, project: project, target: task) }
let_it_be(:reopened_incident_event) { create(:event, :reopened, author: user, project: project, target: incident) }
- let_it_be(:reopened_test_case_event) { create(:event, :reopened, author: user, project: project, target: test_case) }
let_it_be(:reopened_merge_request_event) do
create(:event, :reopened, author: user, project: project, target: merge_request)
end
diff --git a/spec/support/shared_examples/bulk_imports/visibility_level_examples.rb b/spec/support/shared_examples/bulk_imports/visibility_level_examples.rb
index 02eae250e6a..23601134537 100644
--- a/spec/support/shared_examples/bulk_imports/visibility_level_examples.rb
+++ b/spec/support/shared_examples/bulk_imports/visibility_level_examples.rb
@@ -1,87 +1,80 @@
# frozen_string_literal: true
-RSpec.shared_examples 'visibility level settings' do
- context 'when public' do
- let(:data) { { 'visibility' => 'public' } }
-
- context 'when destination is a public group' do
- let(:destination_group) { create(:group, :public) }
-
- it 'sets visibility level to public' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PUBLIC)
- end
- end
-
- context 'when destination is a internal group' do
- let(:destination_group) { create(:group, :internal) }
-
- it 'sets visibility level to internal' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
-
- context 'when destination is a private group' do
- let(:destination_group) { create(:group, :private) }
-
- it 'sets visibility level to private' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
+RSpec.shared_examples 'visibility level settings' do |skip_nil_destination_tests|
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:internal_group) { create(:group, :internal) }
+ let_it_be(:private_group) { create(:group, :private) }
+ let(:data) { { 'visibility' => visibility_level } }
+
+ subject(:transformed_data) { described_class.new.transform(context, data) }
+
+ where(
+ :visibility_level,
+ :destination_group,
+ :restricted_level,
+ :expected
+ ) do
+ 'public' | ref(:public_group) | nil | 20
+ 'public' | ref(:public_group) | 20 | 10
+ 'public' | ref(:public_group) | 10 | 20
+ 'public' | ref(:public_group) | 0 | 20
+ 'public' | ref(:internal_group) | nil | 10
+ 'public' | ref(:internal_group) | 20 | 10
+ 'public' | ref(:internal_group) | 10 | 0
+ 'public' | ref(:internal_group) | 0 | 10
+ 'public' | ref(:private_group) | nil | 0
+ 'public' | ref(:private_group) | 20 | 0
+ 'public' | ref(:private_group) | 10 | 0
+ 'public' | ref(:private_group) | 0 | 0
+ 'public' | nil | nil | 20
+ 'public' | nil | 20 | 10
+ 'public' | nil | 10 | 20
+ 'public' | nil | 0 | 20
+ 'internal' | ref(:public_group) | nil | 10
+ 'internal' | ref(:public_group) | 20 | 10
+ 'internal' | ref(:public_group) | 10 | 0
+ 'internal' | ref(:public_group) | 0 | 10
+ 'internal' | ref(:internal_group) | nil | 10
+ 'internal' | ref(:internal_group) | 20 | 10
+ 'internal' | ref(:internal_group) | 10 | 0
+ 'internal' | ref(:internal_group) | 0 | 10
+ 'internal' | ref(:private_group) | nil | 0
+ 'internal' | ref(:private_group) | 20 | 0
+ 'internal' | ref(:private_group) | 10 | 0
+ 'internal' | ref(:private_group) | 0 | 0
+ 'internal' | nil | nil | 10
+ 'internal' | nil | 20 | 10
+ 'internal' | nil | 10 | 0
+ 'internal' | nil | 0 | 10
+ 'private' | ref(:public_group) | nil | 0
+ 'private' | ref(:public_group) | 20 | 0
+ 'private' | ref(:public_group) | 10 | 0
+ 'private' | ref(:public_group) | 0 | 0
+ 'private' | ref(:internal_group) | nil | 0
+ 'private' | ref(:internal_group) | 20 | 0
+ 'private' | ref(:internal_group) | 10 | 0
+ 'private' | ref(:internal_group) | 0 | 0
+ 'private' | ref(:private_group) | nil | 0
+ 'private' | ref(:private_group) | 20 | 0
+ 'private' | ref(:private_group) | 10 | 0
+ 'private' | ref(:private_group) | 0 | 0
+ 'private' | nil | nil | 0
+ 'private' | nil | 20 | 0
+ 'private' | nil | 10 | 0
+ 'private' | nil | 0 | 0
end
- context 'when internal' do
- let(:data) { { 'visibility' => 'internal' } }
-
- context 'when destination is a public group' do
- let(:destination_group) { create(:group, :public) }
-
- it 'sets visibility level to internal' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
-
- context 'when destination is a internal group' do
- let(:destination_group) { create(:group, :internal) }
-
- it 'sets visibility level to internal' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
-
- context 'when destination is a private group' do
- let(:destination_group) { create(:group, :private) }
-
- it 'sets visibility level to private' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
- end
-
- context 'when private' do
- let(:data) { { 'visibility' => 'private' } }
-
- context 'when destination is a public group' do
- let(:destination_group) { create(:group, :public) }
-
- it 'sets visibility level to private' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
-
- context 'when destination is a internal group' do
- let(:destination_group) { create(:group, :internal) }
-
- it 'sets visibility level to private' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
+ with_them do
+ before do
+ stub_application_setting(restricted_visibility_levels: [restricted_level])
end
- context 'when destination is a private group' do
- let(:destination_group) { create(:group, :private) }
+ it 'has the correct visibility level' do
+ next if destination_group.nil? && skip_nil_destination_tests
- it 'sets visibility level to private' do
- expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
+ expect(transformed_data[:visibility_level]).to eq(expected)
end
end
end
diff --git a/spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb b/spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb
new file mode 100644
index 00000000000..cb7001a9faf
--- /dev/null
+++ b/spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'handle subscription based on user access' do
+ it 'subscribes to the noteable stream when user has access' do
+ subscribe(subscribe_params)
+
+ expect(subscription).to be_confirmed
+ expect(subscription).to have_stream_for(noteable)
+ end
+
+ it 'rejects the subscription when the user does not have access' do
+ stub_action_cable_connection current_user: nil
+
+ subscribe(subscribe_params)
+
+ expect(subscription).to be_rejected
+ end
+
+ context 'when action_cable_notes is disabled' do
+ before do
+ stub_feature_flags(action_cable_notes: false)
+ end
+
+ it 'rejects the subscription' do
+ subscribe(subscribe_params)
+
+ expect(subscription).to be_rejected
+ end
+ end
+end
diff --git a/spec/support/shared_examples/ci/deployable_policy_shared_examples.rb b/spec/support/shared_examples/ci/deployable_policy_shared_examples.rb
new file mode 100644
index 00000000000..73bdc094237
--- /dev/null
+++ b/spec/support/shared_examples/ci/deployable_policy_shared_examples.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a deployable job policy' do |factory_type|
+ let_it_be_with_refind(:project) { create(:project, :private) }
+ let_it_be_with_refind(:user) { create(:user) }
+
+ let(:job) { create(factory_type, project: project, user: user, environment: 'production', ref: 'development') }
+ let(:policy) { described_class.new(user, job) }
+
+ context 'when the job triggerer is a project maintainer' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ it { expect(policy).to be_allowed :update_build }
+
+ context 'when job is oudated deployment job' do
+ before do
+ allow(job).to receive(:outdated_deployment?).and_return(true)
+ end
+
+ it { expect(policy).not_to be_allowed :update_build }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/ci/deployable_policy_shared_examples_ee.rb b/spec/support/shared_examples/ci/deployable_policy_shared_examples_ee.rb
new file mode 100644
index 00000000000..b1057b3f67a
--- /dev/null
+++ b/spec/support/shared_examples/ci/deployable_policy_shared_examples_ee.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a deployable job policy in EE' do |factory_type|
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+
+ let(:user) { create(:user) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+ let(:environment) { create(:environment, project: project, name: 'production') }
+
+ let(:job) do
+ create(factory_type, pipeline: pipeline, project: project, environment: 'production', ref: 'development')
+ end
+
+ describe '#update_build?' do
+ subject { user.can?(:update_build, job) }
+
+ it_behaves_like 'protected environments access', direct_access: true
+ end
+
+ describe '#update_commit_status?' do
+ subject { user.can?(:update_commit_status, job) }
+
+ it_behaves_like 'protected environments access', direct_access: true
+ end
+
+ describe '#erase_build?' do
+ subject { user.can?(:erase_build, job) }
+
+ context 'when the job triggerer is a project maintainer' do
+ let_it_be_with_refind(:user) { create(:user).tap { |u| project.add_maintainer(u) } }
+
+ before do
+ stub_licensed_features(protected_environments: true)
+ end
+
+ it 'returns true for ci_build' do
+ # Currently, we allow users to delete normal jobs only.
+ if factory_type == :ci_build
+ is_expected.to eq(true)
+ else
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'when environment is protected' do
+ before do
+ create(:protected_environment, name: environment.name, project: project)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/ci/deployable_shared_examples.rb b/spec/support/shared_examples/ci/deployable_shared_examples.rb
new file mode 100644
index 00000000000..b51a8fa20e2
--- /dev/null
+++ b/spec/support/shared_examples/ci/deployable_shared_examples.rb
@@ -0,0 +1,582 @@
+# frozen_string_literal: true
+
+# rubocop:disable Layout/LineLength
+# rubocop:disable RSpec/ContextWording
+RSpec.shared_examples 'a deployable job' do
+ it { is_expected.to have_one(:deployment) }
+
+ shared_examples 'calling proper BuildFinishedWorker' do
+ it 'calls Ci::BuildFinishedWorker' do
+ skip unless described_class == ::Ci::Build
+
+ expect(Ci::BuildFinishedWorker).to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ describe '#outdated_deployment?' do
+ subject { job.outdated_deployment? }
+
+ let(:job) { create(factory_type, :created, :with_deployment, project: project, pipeline: pipeline, environment: 'production') }
+
+ context 'when job has no environment' do
+ let(:job) { create(factory_type, :created, pipeline: pipeline, environment: nil) }
+
+ it { expect(subject).to be_falsey }
+ end
+
+ context 'when project has forward deployment disabled' do
+ before do
+ project.ci_cd_settings.update!(forward_deployment_enabled: false)
+ end
+
+ it { expect(subject).to be_falsey }
+ end
+
+ context 'when job is not an outdated deployment' do
+ before do
+ allow(job.deployment).to receive(:older_than_last_successful_deployment?).and_return(false)
+ end
+
+ it { expect(subject).to be_falsey }
+ end
+
+ context 'when job is older than the latest deployment and still pending status' do
+ before do
+ allow(job.deployment).to receive(:older_than_last_successful_deployment?).and_return(true)
+ end
+
+ it { expect(subject).to be_truthy} # rubocop: disable Layout/SpaceInsideBlockBraces
+ end
+
+ context 'when job is older than the latest deployment but succeeded once' do
+ let(:job) { create(factory_type, :success, :with_deployment, project: project, pipeline: pipeline, environment: 'production') }
+
+ before do
+ allow(job.deployment).to receive(:older_than_last_successful_deployment?).and_return(true)
+ end
+
+ it 'returns false for allowing rollback' do
+ expect(subject).to be_falsey
+ end
+
+ context 'when forward_deployment_rollback_allowed option is disabled' do
+ before do
+ project.ci_cd_settings.update!(forward_deployment_rollback_allowed: false)
+ end
+
+ it 'returns true for disallowing rollback' do
+ expect(subject).to eq(true)
+ end
+ end
+ end
+ end
+
+ describe 'state transition as a deployable' do
+ subject { job.send(event) }
+
+ let!(:job) { create(factory_type, :with_deployment, :start_review_app, status: :pending, pipeline: pipeline) }
+ let(:deployment) { job.deployment }
+ let(:environment) { deployment.environment }
+
+ before do
+ allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
+ allow(Deployments::HooksWorker).to receive(:perform_async)
+ end
+
+ it 'has deployments record with created status' do
+ expect(deployment).to be_created
+ expect(environment.name).to eq('review/master')
+ end
+
+ shared_examples_for 'avoid deadlock' do
+ it 'executes UPDATE in the right order' do
+ recorded = with_cross_database_modification_prevented do
+ ActiveRecord::QueryRecorder.new { subject }
+ end
+
+ index_for_build = recorded.log.index { |l| l.include?("UPDATE #{Ci::Build.quoted_table_name}") }
+ index_for_deployment = recorded.log.index { |l| l.include?("UPDATE \"deployments\"") }
+
+ expect(index_for_build).to be < index_for_deployment
+ end
+ end
+
+ context 'when transits to running' do
+ let(:event) { :run! }
+
+ it_behaves_like 'avoid deadlock'
+
+ it 'transits deployment status to running' do
+ with_cross_database_modification_prevented do
+ subject
+ end
+
+ expect(deployment).to be_running
+ end
+
+ context 'when deployment is already running state' do
+ before do
+ job.deployment.success!
+ end
+
+ it 'does not change deployment status and tracks an error' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception).with(
+ instance_of(Deployment::StatusSyncError), deployment_id: deployment.id, job_id: job.id)
+
+ with_cross_database_modification_prevented do
+ expect { subject }.not_to change { deployment.reload.status }
+ end
+ end
+ end
+ end
+
+ context 'when transits to success' do
+ let(:event) { :success! }
+
+ before do
+ allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
+ allow(Deployments::HooksWorker).to receive(:perform_async)
+ end
+
+ it_behaves_like 'avoid deadlock'
+ it_behaves_like 'calling proper BuildFinishedWorker'
+
+ it 'transits deployment status to success' do
+ with_cross_database_modification_prevented do
+ subject
+ end
+
+ expect(deployment).to be_success
+ end
+ end
+
+ context 'when transits to failed' do
+ let(:event) { :drop! }
+
+ it_behaves_like 'avoid deadlock'
+ it_behaves_like 'calling proper BuildFinishedWorker'
+
+ it 'transits deployment status to failed' do
+ with_cross_database_modification_prevented do
+ subject
+ end
+
+ expect(deployment).to be_failed
+ end
+ end
+
+ context 'when transits to skipped' do
+ let(:event) { :skip! }
+
+ it_behaves_like 'avoid deadlock'
+
+ it 'transits deployment status to skipped' do
+ with_cross_database_modification_prevented do
+ subject
+ end
+
+ expect(deployment).to be_skipped
+ end
+ end
+
+ context 'when transits to canceled' do
+ let(:event) { :cancel! }
+
+ it_behaves_like 'avoid deadlock'
+ it_behaves_like 'calling proper BuildFinishedWorker'
+
+ it 'transits deployment status to canceled' do
+ with_cross_database_modification_prevented do
+ subject
+ end
+
+ expect(deployment).to be_canceled
+ end
+ end
+
+ # Mimic playing a manual job that needs another job.
+ # `needs + when:manual` scenario, see: https://gitlab.com/gitlab-org/gitlab/-/issues/347502
+ context 'when transits from skipped to created to running' do
+ before do
+ job.skip!
+ end
+
+ context 'during skipped to created' do
+ let(:event) { :process! }
+
+ it 'transitions to created' do
+ subject
+
+ expect(deployment).to be_created
+ end
+ end
+
+ context 'during created to running' do
+ let(:event) { :run! }
+
+ before do
+ job.process!
+ job.enqueue!
+ end
+
+ it 'transitions to running and calls webhook' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(hash_including({ 'deployment_id' => deployment.id, 'status' => 'running', 'status_changed_at' => Time.current.to_s }))
+
+ subject
+ end
+
+ expect(deployment).to be_running
+ end
+ end
+ end
+ end
+
+ describe '#on_stop' do
+ subject { job.on_stop }
+
+ context 'when a job has a specification that it can be stopped from the other job' do
+ let(:job) { create(factory_type, :start_review_app, pipeline: pipeline) }
+
+ it 'returns the other job name' do
+ is_expected.to eq('stop_review_app')
+ end
+ end
+
+ context 'when a job does not have environment information' do
+ let(:job) { create(factory_type, pipeline: pipeline) }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#environment_tier_from_options' do
+ subject { job.environment_tier_from_options }
+
+ let(:job) { Ci::Build.new(options: options) }
+ let(:options) { { environment: { deployment_tier: 'production' } } }
+
+ it { is_expected.to eq('production') }
+
+ context 'when options does not include deployment_tier' do
+ let(:options) { { environment: { name: 'production' } } }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#environment_tier' do
+ subject { job.environment_tier }
+
+ let(:options) { { environment: { deployment_tier: 'production' } } }
+ let!(:environment) { create(:environment, name: 'production', tier: 'development', project: project) }
+ let(:job) { Ci::Build.new(options: options, environment: 'production', project: project) }
+
+ it { is_expected.to eq('production') }
+
+ context 'when options does not include deployment_tier' do
+ let(:options) { { environment: { name: 'production' } } }
+
+ it 'uses tier from environment' do
+ is_expected.to eq('development')
+ end
+
+ context 'when persisted environment is absent' do
+ let(:environment) { nil }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
+ describe 'environment' do
+ describe '#has_environment_keyword?' do
+ subject { job.has_environment_keyword? }
+
+ context 'when environment is defined' do
+ before do
+ job.update!(environment: 'review')
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when environment is not defined' do
+ before do
+ job.update!(environment: nil)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#expanded_environment_name' do
+ subject { job.expanded_environment_name }
+
+ context 'when environment uses $CI_COMMIT_REF_NAME' do
+ let(:job) do
+ create(
+ factory_type,
+ ref: 'master',
+ environment: 'review/$CI_COMMIT_REF_NAME',
+ pipeline: pipeline
+ )
+ end
+
+ it { is_expected.to eq('review/master') }
+ end
+
+ context 'when environment uses yaml_variables containing symbol keys' do
+ let(:job) do
+ create(
+ factory_type,
+ yaml_variables: [{ key: :APP_HOST, value: 'host' }],
+ environment: 'review/$APP_HOST',
+ pipeline: pipeline
+ )
+ end
+
+ it 'returns an expanded environment name with a list of variables' do
+ is_expected.to eq('review/host')
+ end
+
+ context 'when job metadata has already persisted the expanded environment name' do
+ before do
+ job.metadata.expanded_environment_name = 'review/foo'
+ end
+
+ it 'returns a persisted expanded environment name without a list of variables' do
+ expect(job).not_to receive(:simple_variables)
+
+ is_expected.to eq('review/foo')
+ end
+ end
+ end
+
+ context 'when using persisted variables' do
+ let(:job) do
+ create(factory_type, environment: 'review/x$CI_JOB_ID', pipeline: pipeline)
+ end
+
+ it { is_expected.to eq('review/x') }
+ end
+
+ context 'when environment name uses a nested variable' do
+ let(:yaml_variables) do
+ [
+ { key: 'ENVIRONMENT_NAME', value: '${CI_COMMIT_REF_NAME}' }
+ ]
+ end
+
+ let(:job) do
+ create(
+ factory_type,
+ ref: 'master',
+ yaml_variables: yaml_variables,
+ environment: 'review/$ENVIRONMENT_NAME',
+ pipeline: pipeline
+ )
+ end
+
+ it { is_expected.to eq('review/master') }
+ end
+ end
+
+ describe '#expanded_kubernetes_namespace' do
+ let(:job) { create(factory_type, environment: environment, options: options, pipeline: pipeline) }
+
+ subject { job.expanded_kubernetes_namespace }
+
+ context 'environment and namespace are not set' do
+ let(:environment) { nil }
+ let(:options) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'environment is specified' do
+ let(:environment) { 'production' }
+
+ context 'namespace is not set' do
+ let(:options) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'namespace is provided' do
+ let(:options) do
+ {
+ environment: {
+ name: environment,
+ kubernetes: {
+ namespace: namespace
+ }
+ }
+ }
+ end
+
+ context 'with a static value' do
+ let(:namespace) { 'production' }
+
+ it { is_expected.to eq namespace }
+ end
+
+ context 'with a dynamic value' do
+ let(:namespace) { 'deploy-$CI_COMMIT_REF_NAME' }
+
+ it { is_expected.to eq 'deploy-master' }
+ end
+ end
+ end
+ end
+
+ describe '#deployment_job?' do
+ subject { job.deployment_job? }
+
+ context 'when environment is defined' do
+ before do
+ job.update!(environment: 'review')
+ end
+
+ context 'no action is defined' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'and start action is defined' do
+ before do
+ job.update!(options: { environment: { action: 'start' } })
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'when environment is not defined' do
+ before do
+ job.update!(environment: nil)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#stops_environment?' do
+ subject { job.stops_environment? }
+
+ context 'when environment is defined' do
+ before do
+ job.update!(environment: 'review')
+ end
+
+ context 'no action is defined' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'and stop action is defined' do
+ before do
+ job.update!(options: { environment: { action: 'stop' } })
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'when environment is not defined' do
+ before do
+ job.update!(environment: nil)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ describe '#persisted_environment' do
+ let!(:environment) do
+ create(:environment, project: project, name: "foo-#{project.default_branch}")
+ end
+
+ subject { job.persisted_environment }
+
+ context 'when referenced literally' do
+ let(:job) do
+ create(factory_type, pipeline: pipeline, environment: "foo-#{project.default_branch}")
+ end
+
+ it { is_expected.to eq(environment) }
+ end
+
+ context 'when referenced with a variable' do
+ let(:job) do
+ create(factory_type, pipeline: pipeline, environment: "foo-$CI_COMMIT_REF_NAME")
+ end
+
+ it { is_expected.to eq(environment) }
+ end
+
+ context 'when there is no environment' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when job has a stop environment' do
+ let(:job) { create(factory_type, :stop_review_app, pipeline: pipeline, environment: "foo-#{project.default_branch}") }
+
+ it 'expands environment name' do
+ expect(job).to receive(:expanded_environment_name).and_call_original
+
+ is_expected.to eq(environment)
+ end
+ end
+ end
+
+ describe '#deployment_status' do
+ before do
+ allow_any_instance_of(Ci::Build).to receive(:create_deployment) # rubocop:disable RSpec/AnyInstanceOf
+ end
+
+ context 'when job is a last deployment' do
+ let(:job) { create(factory_type, :success, environment: 'production', pipeline: pipeline) }
+ let(:environment) { create(:environment, name: 'production', project: job.project) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: job) }
+
+ it { expect(job.deployment_status).to eq(:last) }
+ end
+
+ context 'when there is a newer job with deployment' do
+ let(:job) { create(factory_type, :success, environment: 'production', pipeline: pipeline) }
+ let(:environment) { create(:environment, name: 'production', project: job.project) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: job) }
+ let!(:last_deployment) { create(:deployment, :success, environment: environment, project: environment.project) }
+
+ it { expect(job.deployment_status).to eq(:out_of_date) }
+ end
+
+ context 'when job with deployment has failed' do
+ let(:job) { create(factory_type, :failed, environment: 'production', pipeline: pipeline) }
+ let(:environment) { create(:environment, name: 'production', project: job.project) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: job) }
+
+ it { expect(job.deployment_status).to eq(:failed) }
+ end
+
+ context 'when job with deployment is running' do
+ let(:job) { create(factory_type, environment: 'production', pipeline: pipeline) }
+ let(:environment) { create(:environment, name: 'production', project: job.project) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: job) }
+
+ it { expect(job.deployment_status).to eq(:creating) }
+ end
+ end
+
+ def factory_type
+ described_class.name.underscore.tr('/', '_')
+ end
+end
+# rubocop:enable Layout/LineLength
+# rubocop:enable RSpec/ContextWording
diff --git a/spec/support/shared_examples/ci/deployable_shared_examples_ee.rb b/spec/support/shared_examples/ci/deployable_shared_examples_ee.rb
new file mode 100644
index 00000000000..803af36eabb
--- /dev/null
+++ b/spec/support/shared_examples/ci/deployable_shared_examples_ee.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a deployable job in EE' do
+ describe 'when the job is waiting for deployment approval' do
+ let(:job) { create(factory_type, :manual, environment: 'production', pipeline: pipeline) }
+ let!(:deployment) { create(:deployment, :blocked, deployable: job) }
+
+ before do
+ allow(deployment).to receive(:waiting_for_approval?).and_return(true)
+ end
+
+ it 'does not allow the job to be enqueued' do
+ expect { job.enqueue! }.to raise_error(StateMachines::InvalidTransition)
+ end
+ end
+
+ describe '#playable?' do
+ context 'when job is waiting for deployment approval' do
+ subject { build_stubbed(factory_type, :manual, environment: 'production', pipeline: pipeline) }
+
+ let!(:deployment) { create(:deployment, :blocked, deployable: subject) }
+
+ before do
+ allow(deployment).to receive(:waiting_for_approval?).and_return(true)
+ end
+
+ it { is_expected.not_to be_playable }
+ end
+ end
+
+ def factory_type
+ described_class.name.underscore.tr('/', '_')
+ end
+end
diff --git a/spec/support/shared_examples/ci/pipeline_schedules_create_or_update_shared_examples.rb b/spec/support/shared_examples/ci/pipeline_schedules_create_or_update_shared_examples.rb
new file mode 100644
index 00000000000..399225c13b2
--- /dev/null
+++ b/spec/support/shared_examples/ci/pipeline_schedules_create_or_update_shared_examples.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'pipeline schedules checking variables permission' do
+ let(:params) do
+ {
+ description: 'desc',
+ ref: 'patch-x',
+ active: false,
+ cron: '*/1 * * * *',
+ cron_timezone: 'UTC',
+ variables_attributes: variables_attributes
+ }
+ end
+
+ shared_examples 'success response' do
+ it 'saves values with passed params' do
+ result = service.execute
+
+ expect(result.status).to eq(:success)
+ expect(result.payload).to have_attributes(
+ description: 'desc',
+ ref: 'patch-x',
+ active: false,
+ cron: '*/1 * * * *',
+ cron_timezone: 'UTC'
+ )
+ end
+ end
+
+ shared_examples 'failure response' do
+ it 'does not save' do
+ result = service.execute
+
+ expect(result.status).to eq(:error)
+ expect(result.reason).to eq(:forbidden)
+ expect(result.message).to match_array(
+ ['The current user is not authorized to set pipeline schedule variables']
+ )
+ end
+ end
+
+ context 'when sending variables' do
+ let(:variables_attributes) do
+ [{ key: 'VAR2', secret_value: 'secret 2' }]
+ end
+
+ shared_examples 'success response with variables' do
+ it_behaves_like 'success response'
+
+ it 'saves variables' do
+ result = service.execute
+
+ variables = result.payload.variables.map { |v| [v.key, v.value] }
+
+ expect(variables).to include(
+ ['VAR2', 'secret 2']
+ )
+ end
+ end
+
+ context 'when user is maintainer' do
+ it_behaves_like 'success response with variables'
+ end
+
+ context 'when user is developer' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'success response with variables'
+ end
+
+ context 'when restrict_user_defined_variables is true' do
+ before_all do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ it_behaves_like 'success response with variables'
+
+ context 'when user is developer' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'failure response'
+ end
+ end
+ end
+
+ context 'when not sending variables' do
+ let(:variables_attributes) { [] }
+
+ context 'when user is maintainer' do
+ it_behaves_like 'success response'
+ end
+
+ context 'when user is developer' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'success response'
+ end
+
+ context 'when restrict_user_defined_variables is true' do
+ before_all do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ it_behaves_like 'success response'
+
+ context 'when user is developer' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'success response'
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/ci/stage_shared_examples.rb b/spec/support/shared_examples/ci/stage_shared_examples.rb
index cdb1058e584..a2849e00d27 100644
--- a/spec/support/shared_examples/ci/stage_shared_examples.rb
+++ b/spec/support/shared_examples/ci/stage_shared_examples.rb
@@ -21,7 +21,7 @@ RSpec.shared_examples 'manual playable stage' do |stage_type|
context 'when is skipped' do
let(:status) { 'skipped' }
- it { is_expected.to be_falsy }
+ it { is_expected.to be_truthy }
end
end
end
diff --git a/spec/support/shared_examples/ci/waiting_for_approval_status_shared_examples.rb b/spec/support/shared_examples/ci/waiting_for_approval_status_shared_examples.rb
new file mode 100644
index 00000000000..44226345fd2
--- /dev/null
+++ b/spec/support/shared_examples/ci/waiting_for_approval_status_shared_examples.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a deployment job waiting for approval' do |factory_type|
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:job) { create(factory_type, :manual, environment: 'production', project: project) }
+
+ subject { described_class.new(Gitlab::Ci::Status::Core.new(job, user)) }
+
+ describe '.matches?' do
+ subject { described_class.matches?(job, user) }
+
+ let(:job) { create(factory_type, :manual, environment: 'production', project: project) }
+ let!(:deployment) { create(:deployment, deployment_status, deployable: job, project: project) }
+
+ context 'when job is waiting for approval' do
+ let(:deployment_status) { :blocked }
+
+ before do
+ allow(deployment).to receive(:waiting_for_approval?).and_return(true)
+ end
+
+ it 'is a correct match' do
+ expect(subject).to be_truthy
+ end
+ end
+
+ context 'when job is not waiting for approval' do
+ let(:deployment_status) { :created }
+
+ it 'does not match' do
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ describe '#illustration' do
+ before do
+ environment = create(:environment, name: 'production', project: project)
+ create(:deployment, :blocked, project: project, environment: environment, deployable: job)
+ end
+
+ it { expect(subject.illustration).to include(:image, :size) }
+ it { expect(subject.illustration[:title]).to eq('Waiting for approvals') }
+
+ it do
+ expect(subject.illustration[:content]).to include('This job deploys to the protected environment "production"')
+ end
+ end
+
+ describe '#has_action?' do
+ it { expect(subject.has_action?).to be_truthy }
+ end
+
+ describe '#action_icon' do
+ it { expect(subject.action_icon).to be_nil }
+ end
+
+ describe '#action_title' do
+ it { expect(subject.action_title).to be_nil }
+ end
+
+ describe '#action_button_title' do
+ it { expect(subject.action_button_title).to eq('View environment details page') }
+ end
+
+ describe '#action_path' do
+ before do
+ environment = create(:environment, name: 'production', project: project)
+ create(:deployment, :blocked, project: project, environment: environment, deployable: job)
+ end
+
+ it { expect(subject.action_path).to include('environments') }
+ end
+
+ describe '#action_method' do
+ it { expect(subject.action_method).to eq(:get) }
+ end
+end
diff --git a/spec/support/shared_examples/controllers/internal_event_tracking_examples.rb b/spec/support/shared_examples/controllers/internal_event_tracking_examples.rb
index e2a4fb31361..05068cd60af 100644
--- a/spec/support/shared_examples/controllers/internal_event_tracking_examples.rb
+++ b/spec/support/shared_examples/controllers/internal_event_tracking_examples.rb
@@ -10,8 +10,6 @@
RSpec.shared_examples 'internal event tracking' do
let(:fake_tracker) { instance_spy(Gitlab::Tracking::Destinations::Snowplow) }
- let(:namespace) { nil }
- let(:proejct) { nil }
before do
allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_tracker)
@@ -23,18 +21,23 @@ RSpec.shared_examples 'internal event tracking' do
it 'logs to Snowplow', :aggregate_failures do
subject
+ project = try(:project)
+ user = try(:user)
+ namespace = try(:namespace)
+
expect(Gitlab::Tracking::StandardContext)
.to have_received(:new)
.with(
project_id: project&.id,
- user_id: user.id,
+ user_id: user&.id,
namespace_id: namespace&.id,
plan_name: namespace&.actual_plan_name
- )
+ ).at_least(:once)
expect(Gitlab::Tracking::ServicePingContext)
.to have_received(:new)
.with(data_source: :redis_hll, event: action)
+ .at_least(:once)
expect(fake_tracker).to have_received(:event)
.with(
@@ -45,6 +48,5 @@ RSpec.shared_examples 'internal event tracking' do
an_instance_of(SnowplowTracker::SelfDescribingJson)
]
)
- .exactly(:once)
end
end
diff --git a/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb b/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
index 3f147f942ba..77dd67c77a4 100644
--- a/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
@@ -9,10 +9,8 @@ RSpec.shared_examples 'known sign in' do
user.update!(current_sign_in_ip: ip)
end
- def stub_cookie(value = user.id)
- cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE] = {
- value: value, expires: KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY
- }
+ def stub_cookie(value = user.id, expires = KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY)
+ cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE] = { value: value, expires: expires }
end
context 'when the remote IP and the last sign in IP match' do
@@ -57,15 +55,13 @@ RSpec.shared_examples 'known sign in' do
end
it 'notifies the user when the cookie is expired' do
- stub_cookie
-
- travel_to((KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY + 1.day).from_now) do
- expect_next_instance_of(NotificationService) do |instance|
- expect(instance).to receive(:unknown_sign_in)
- end
+ stub_cookie(user.id, 1.day.ago)
- post_action
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:unknown_sign_in)
end
+
+ post_action
end
context 'when notify_on_unknown_sign_in global setting is false' do
diff --git a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
index ac7680f7ddb..7f33ece854b 100644
--- a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
@@ -10,7 +10,7 @@ RSpec.shared_examples 'tracking unique visits' do |method|
ids.each do |id|
expect(Gitlab::UsageDataCounters::HLLRedisCounter)
- .to receive(:track_event).with(id, values: kind_of(String))
+ .to receive(:track_event).with(id, values: anything)
end
get method, params: request_params, format: :html
@@ -21,7 +21,7 @@ RSpec.shared_examples 'tracking unique visits' do |method|
ids.each do |id|
expect(Gitlab::UsageDataCounters::HLLRedisCounter)
- .to receive(:track_event).with(id, values: kind_of(String))
+ .to receive(:track_event).with(id, values: anything)
end
stub_do_not_track('0')
diff --git a/spec/support/shared_examples/database_health_status_indicators/prometheus_alert_based_shared_examples.rb b/spec/support/shared_examples/database_health_status_indicators/prometheus_alert_based_shared_examples.rb
new file mode 100644
index 00000000000..109a349a652
--- /dev/null
+++ b/spec/support/shared_examples/database_health_status_indicators/prometheus_alert_based_shared_examples.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'Prometheus Alert based health indicator' do
+ let(:schema) { :main }
+ let(:connection) { Gitlab::Database.database_base_models[schema].connection }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ describe '#evaluate' do
+ let(:prometheus_url) { 'http://thanos:9090' }
+ let(:prometheus_config) { [prometheus_url, { allow_local_requests: true, verify: true }] }
+
+ let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) }
+
+ let(:context) do
+ Gitlab::Database::HealthStatus::Context.new(
+ described_class,
+ connection,
+ ['users'],
+ gitlab_schema
+ )
+ end
+
+ let(:gitlab_schema) { "gitlab_#{schema}" }
+ let(:client_ready) { true }
+ let(:indicator_name) { described_class.name.demodulize }
+ let(:indicator) { described_class.new(context) }
+
+ subject(:evaluate) { indicator.evaluate }
+
+ before do
+ stub_application_setting(prometheus_alert_db_indicators_settings: prometheus_alert_db_indicators_settings)
+
+ allow(Gitlab::PrometheusClient).to receive(:new).with(*prometheus_config).and_return(prometheus_client)
+ allow(prometheus_client).to receive(:ready?).and_return(client_ready)
+ end
+
+ shared_examples 'Patroni Apdex Evaluator' do |schema|
+ context "with #{schema} schema" do
+ let(:schema) { schema }
+
+ it 'returns NoSignal signal in case the feature flag is disabled' do
+ stub_feature_flags(feature_flag => false)
+
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
+ expect(evaluate.reason).to include('indicator disabled')
+ end
+
+ context 'without prometheus_alert_db_indicators_settings' do
+ let(:prometheus_alert_db_indicators_settings) { nil }
+
+ it 'returns Unknown signal' do
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
+ expect(evaluate.reason).to include('Prometheus Settings not configured')
+ end
+ end
+
+ context 'when Prometheus client is not ready' do
+ let(:client_ready) { false }
+
+ it 'returns Unknown signal' do
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
+ expect(evaluate.reason).to include('Prometheus client is not ready')
+ end
+ end
+
+ context 'when apdex SLI query is not configured' do
+ let(:"sli_query_#{schema}") { nil }
+
+ it 'returns Unknown signal' do
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
+ expect(evaluate.reason).to include("#{indicator_name} SLI query is not configured")
+ end
+ end
+
+ context 'when slo is not configured' do
+ let(:"slo_#{schema}") { nil }
+
+ it 'returns Unknown signal' do
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
+ expect(evaluate.reason).to include("#{indicator_name} SLO is not configured")
+ end
+ end
+
+ it 'returns Normal signal when SLI condition is met' do
+ expect(prometheus_client).to receive(:query)
+ .with(send("sli_query_#{schema}"))
+ .and_return([{ "value" => [1662423310.878, sli_with_good_condition[schema]] }])
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Normal)
+ expect(evaluate.reason).to include("#{indicator_name} SLI condition met")
+ end
+
+ it 'returns Stop signal when SLI condition is not met' do
+ expect(prometheus_client).to receive(:query)
+ .with(send("sli_query_#{schema}"))
+ .and_return([{ "value" => [1662423310.878, sli_with_bad_condition[schema]] }])
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
+ expect(evaluate.reason).to include("#{indicator_name} SLI condition not met")
+ end
+
+ context 'when SLI can not be calculated' do
+ where(:result) do
+ [
+ nil,
+ [],
+ [{}],
+ [{ 'value' => 1 }],
+ [{ 'value' => [1] }]
+ ]
+ end
+
+ with_them do
+ it 'returns Unknown signal' do
+ expect(prometheus_client).to receive(:query).and_return(result)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
+ expect(evaluate.reason).to include("#{indicator_name} can not be calculated")
+ end
+ end
+ end
+ end
+ end
+
+ Gitlab::Database.database_base_models.each do |database_base_model, connection|
+ next unless connection.present?
+
+ it_behaves_like 'Patroni Apdex Evaluator', database_base_model.to_sym
+ end
+ end
+end
diff --git a/spec/services/deployments/create_for_build_service_spec.rb b/spec/support/shared_examples/deployments/create_for_job_shared_examples.rb
index c07fc07cfbf..4f6f5b9a91a 100644
--- a/spec/services/deployments/create_for_build_service_spec.rb
+++ b/spec/support/shared_examples/deployments/create_for_job_shared_examples.rb
@@ -1,31 +1,24 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Deployments::CreateForBuildService, feature_category: :continuous_delivery do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
-
- let(:service) { described_class.new }
-
+RSpec.shared_examples 'create deployment for job' do
describe '#execute' do
- subject { service.execute(build) }
+ subject { service.execute(job) }
context 'with a deployment job' do
- let!(:build) { create(:ci_build, :start_review_app, project: project) }
- let!(:environment) { create(:environment, project: project, name: build.expanded_environment_name) }
+ let!(:job) { create(factory_type, :start_review_app, project: project) }
+ let!(:environment) { create(:environment, project: project, name: job.expanded_environment_name) }
it 'creates a deployment record' do
expect { subject }.to change { Deployment.count }.by(1)
- build.reset
- expect(build.deployment.project).to eq(build.project)
- expect(build.deployment.ref).to eq(build.ref)
- expect(build.deployment.sha).to eq(build.sha)
- expect(build.deployment.deployable).to eq(build)
- expect(build.deployment.deployable_type).to eq('CommitStatus')
- expect(build.deployment.environment).to eq(build.persisted_environment)
- expect(build.deployment.valid?).to be_truthy
+ job.reset
+ expect(job.deployment.project).to eq(job.project)
+ expect(job.deployment.ref).to eq(job.ref)
+ expect(job.deployment.sha).to eq(job.sha)
+ expect(job.deployment.deployable).to eq(job)
+ expect(job.deployment.deployable_type).to eq('CommitStatus')
+ expect(job.deployment.environment).to eq(job.persisted_environment)
+ expect(job.deployment.valid?).to be_truthy
end
context 'when creation failure occures' do
@@ -43,52 +36,44 @@ RSpec.describe Deployments::CreateForBuildService, feature_category: :continuous
end
context 'when the corresponding environment does not exist' do
- let!(:environment) {}
+ let!(:environment) {} # rubocop:disable Lint/EmptyBlock
it 'does not create a deployment record' do
expect { subject }.not_to change { Deployment.count }
- expect(build.deployment).to be_nil
+ expect(job.deployment).to be_nil
end
end
end
context 'with a teardown job' do
- let!(:build) { create(:ci_build, :stop_review_app, project: project) }
- let!(:environment) { create(:environment, name: build.expanded_environment_name) }
+ let!(:job) { create(factory_type, :stop_review_app, project: project) }
+ let!(:environment) { create(:environment, name: job.expanded_environment_name) }
it 'does not create a deployment record' do
expect { subject }.not_to change { Deployment.count }
- expect(build.deployment).to be_nil
+ expect(job.deployment).to be_nil
end
end
context 'with a normal job' do
- let!(:build) { create(:ci_build, project: project) }
+ let!(:job) { create(factory_type, project: project) }
it 'does not create a deployment record' do
expect { subject }.not_to change { Deployment.count }
- expect(build.deployment).to be_nil
- end
- end
-
- context 'with a bridge' do
- let!(:build) { create(:ci_bridge, project: project) }
-
- it 'does not create a deployment record' do
- expect { subject }.not_to change { Deployment.count }
+ expect(job.deployment).to be_nil
end
end
- context 'when build has environment attribute' do
- let!(:build) do
- create(:ci_build, environment: 'production', project: project,
- options: { environment: { name: 'production', **kubernetes_options } })
+ context 'when job has environment attribute' do
+ let!(:job) do
+ create(factory_type, environment: 'production', project: project,
+ options: { environment: { name: 'production', **kubernetes_options } }) # rubocop:disable Layout/ArgumentAlignment
end
- let!(:environment) { create(:environment, project: project, name: build.expanded_environment_name) }
+ let!(:environment) { create(:environment, project: project, name: job.expanded_environment_name) }
let(:kubernetes_options) { {} }
@@ -128,27 +113,27 @@ RSpec.describe Deployments::CreateForBuildService, feature_category: :continuous
end
end
- context 'when build already has deployment' do
- let!(:build) { create(:ci_build, :with_deployment, project: project, environment: 'production') }
- let!(:environment) {}
+ context 'when job already has deployment' do
+ let!(:job) { create(factory_type, :with_deployment, project: project, environment: 'production') }
+ let!(:environment) {} # rubocop:disable Lint/EmptyBlock
it 'returns the persisted deployment' do
expect { subject }.not_to change { Deployment.count }
- is_expected.to eq(build.deployment)
+ is_expected.to eq(job.deployment)
end
end
end
- context 'when build does not start environment' do
+ context 'when job does not start environment' do
where(:action) do
%w[stop prepare verify access]
end
with_them do
- let!(:build) do
- create(:ci_build, environment: 'production', project: project,
- options: { environment: { name: 'production', action: action } })
+ let!(:job) do
+ create(factory_type, environment: 'production', project: project,
+ options: { environment: { name: 'production', action: action } }) # rubocop:disable Layout/ArgumentAlignment
end
it 'returns nothing' do
@@ -157,8 +142,8 @@ RSpec.describe Deployments::CreateForBuildService, feature_category: :continuous
end
end
- context 'when build does not have environment attribute' do
- let!(:build) { create(:ci_build, project: project) }
+ context 'when job does not have environment attribute' do
+ let!(:job) { create(factory_type, project: project) }
it 'returns nothing' do
is_expected.to be_nil
diff --git a/spec/services/environments/create_for_build_service_spec.rb b/spec/support/shared_examples/environments/create_for_job_shared_examples.rb
index 223401a243d..3acdc8c142f 100644
--- a/spec/services/environments/create_for_build_service_spec.rb
+++ b/spec/support/shared_examples/environments/create_for_job_shared_examples.rb
@@ -1,15 +1,8 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Environments::CreateForBuildService, feature_category: :continuous_delivery do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
-
- let!(:job) { build(:ci_build, project: project, pipeline: pipeline, **attributes) }
- let(:service) { described_class.new }
- let(:merge_request) {}
+RSpec.shared_examples 'create environment for job' do
+ let!(:job) { build(factory_type, project: project, pipeline: pipeline, **attributes) }
+ let(:merge_request) {} # rubocop:disable Lint/EmptyBlock
describe '#execute' do
subject { service.execute(job) }
@@ -218,7 +211,7 @@ RSpec.describe Environments::CreateForBuildService, feature_category: :continuou
context 'when a pipeline contains a deployment job' do
let(:pipeline) { create(:ci_pipeline, project: project, merge_request: merge_request) }
- let!(:job) { build(:ci_build, :start_review_app, project: project, pipeline: pipeline) }
+ let!(:job) { build(factory_type, :start_review_app, project: project, pipeline: pipeline) }
context 'and the environment does not exist' do
it 'creates the environment specified by the job' do
@@ -280,7 +273,7 @@ RSpec.describe Environments::CreateForBuildService, feature_category: :continuou
end
context 'when a pipeline contains a teardown job' do
- let!(:job) { build(:ci_build, :stop_review_app, project: project) }
+ let!(:job) { build(factory_type, :stop_review_app, project: project) }
it 'ensures environment existence for the job' do
expect { subject }.to change { Environment.count }.by(1)
@@ -292,7 +285,7 @@ RSpec.describe Environments::CreateForBuildService, feature_category: :continuou
end
context 'when a pipeline does not contain a deployment job' do
- let!(:job) { build(:ci_build, project: project) }
+ let!(:job) { build(factory_type, project: project) }
it 'does not create any environments' do
expect { subject }.not_to change { Environment.count }
diff --git a/spec/support/shared_examples/features/access_tokens_shared_examples.rb b/spec/support/shared_examples/features/access_tokens_shared_examples.rb
index 3c78869ffaa..34e3ba95b0d 100644
--- a/spec/support/shared_examples/features/access_tokens_shared_examples.rb
+++ b/spec/support/shared_examples/features/access_tokens_shared_examples.rb
@@ -15,6 +15,8 @@ RSpec.shared_examples 'resource access tokens creation' do |resource_type|
name = 'My access token'
visit resource_settings_access_tokens_path
+
+ click_button 'Add new token'
fill_in 'Token name', with: name
# Set date to 1st of next month
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index 254bc3c83ac..fff8ef915eb 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -27,6 +27,19 @@ RSpec.shared_examples 'edits content using the content editor' do |params = { wi
expect(page).to have_text('Typing text in the content editor')
end
+ it 'autofocuses the rich text editor when switching to rich text' do
+ switch_to_content_editor
+
+ expect(page).to have_css("#{content_editor_testid}:focus")
+ end
+
+ it 'autofocuses the plain text editor when switching back to markdown' do
+ switch_to_content_editor
+ switch_to_markdown_editor
+
+ expect(page).to have_css("textarea:focus")
+ end
+
describe 'creating and editing links' do
before do
switch_to_content_editor
diff --git a/spec/support/shared_examples/features/deploy_token_shared_examples.rb b/spec/support/shared_examples/features/deploy_token_shared_examples.rb
index 80f5f1d805c..b621a0c8cca 100644
--- a/spec/support/shared_examples/features/deploy_token_shared_examples.rb
+++ b/spec/support/shared_examples/features/deploy_token_shared_examples.rb
@@ -6,7 +6,7 @@ RSpec.shared_examples 'a deploy token in settings' do
visit page_path
- within('.deploy-tokens') do
+ within('#js-deploy-tokens') do
expect(page).to have_content(deploy_token.name)
expect(page).to have_content('read_repository')
expect(page).to have_content('read_registry')
@@ -16,6 +16,7 @@ RSpec.shared_examples 'a deploy token in settings' do
it 'add a new deploy token', :js do
visit page_path
+ click_button "Add token"
within('#js-deploy-tokens') do
fill_in _('Name'), with: 'new_deploy_key'
@@ -28,7 +29,7 @@ RSpec.shared_examples 'a deploy token in settings' do
expect(page).to have_content("Your new #{entity_type} deploy token has been created")
- within('.created-deploy-token-container') do
+ within('#new-deploy-token-alert') do
expect(find("input[name='deploy-token-user']").value).to eq("deployer")
expect(find("input[name='deploy-token'][readonly='readonly']")).to be_visible
end
@@ -40,6 +41,7 @@ RSpec.shared_examples 'a deploy token in settings' do
context "with form errors", :js do
before do
visit page_path
+ click_button "Add token"
fill_in _('Name'), with: "new_deploy_key"
fill_in _('Username (optional)'), with: "deployer"
click_button "Create deploy token"
@@ -63,7 +65,7 @@ RSpec.shared_examples 'a deploy token in settings' do
it 'shows absolute times for expires_at' do
visit page_path
- within('.deploy-tokens') do
+ within('#js-deploy-tokens') do
expect(page).to have_content(deploy_token.expires_at.strftime('%b %-d'))
end
end
diff --git a/spec/support/shared_examples/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index 430a8ac39d7..82bddb9f5a4 100644
--- a/spec/support/shared_examples/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -278,9 +278,7 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
expect(page).to have_css('.discussion-notes .note', count: 1)
expect(page).to have_content '1 reply'
end
- end
- if resource_name == 'merge request'
let(:note_id) { find("#{comments_selector} .note:first-child", match: :first)['data-note-id'] }
let(:reply_id) { all("#{comments_selector} [data-note-id]")[1]['data-note-id'] }
diff --git a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
index f802404518b..9f884683f47 100644
--- a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
@@ -125,11 +125,6 @@ RSpec.shared_examples 'an editable merge request' do
it 'allows to unselect "Remove source branch"', :js do
expect(merge_request.merge_params['force_remove_source_branch']).to be_truthy
- begin
- visit edit_project_merge_request_path(target_project, merge_request)
- rescue Selenium::WebDriver::Error::UnexpectedAlertOpenError
- end
-
uncheck 'Delete source branch when merge request is accepted'
click_button 'Save changes'
diff --git a/spec/support/shared_examples/features/manage_applications_shared_examples.rb b/spec/support/shared_examples/features/manage_applications_shared_examples.rb
index b8fd58e7efa..05b1c991cdb 100644
--- a/spec/support/shared_examples/features/manage_applications_shared_examples.rb
+++ b/spec/support/shared_examples/features/manage_applications_shared_examples.rb
@@ -10,6 +10,8 @@ RSpec.shared_examples 'manage applications' do
expect(page).to have_content 'Add new application'
+ click_button 'Add new application' if page.has_css?('.gl-new-card-header')
+
fill_in :doorkeeper_application_name, with: application_name
fill_in :doorkeeper_application_redirect_uri, with: application_redirect_uri
check :doorkeeper_application_scopes_read_user
diff --git a/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
index 2d3f1949716..fb882ef8a23 100644
--- a/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
+++ b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
@@ -7,6 +7,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
it "allows creating protected branches that #{access_type_name} can push to" do
visit project_protected_branches_path(project)
+ show_add_form
set_protected_branch_name('master')
set_allowed_to('merge', no_one)
set_allowed_to('push', access_type_name)
@@ -19,6 +20,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
it "allows creating protected branches that #{access_type_name} can merge to" do
visit project_protected_branches_path(project)
+ show_add_form
set_protected_branch_name('master')
set_allowed_to('merge', access_type_name)
set_allowed_to('push', no_one)
@@ -31,6 +33,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
it "allows updating protected branches so that #{access_type_name} can push to them" do
visit project_protected_branches_path(project)
+ show_add_form
set_protected_branch_name('master')
set_allowed_to('merge', no_one)
set_allowed_to('push', no_one)
@@ -52,6 +55,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
it "allows updating protected branches so that #{access_type_name} can merge to them" do
visit project_protected_branches_path(project)
+ show_add_form
set_protected_branch_name('master')
set_allowed_to('merge', no_one)
set_allowed_to('push', no_one)
diff --git a/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb b/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
index 90b0e600228..a15ee47de34 100644
--- a/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
+++ b/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
@@ -20,6 +20,7 @@ RSpec.shared_examples 'Deploy keys with protected branches' do
it "shows all dropdown sections in the 'Allowed to push' main dropdown, with only one deploy key" do
visit project_protected_branches_path(project)
+ click_button 'Add protected branch'
find(".js-allowed-to-push").click
wait_for_requests
@@ -35,6 +36,7 @@ RSpec.shared_examples 'Deploy keys with protected branches' do
it "shows all sections but not deploy keys in the 'Allowed to merge' main dropdown" do
visit project_protected_branches_path(project)
+ click_button 'Add protected branch'
find(".js-allowed-to-merge").click
wait_for_requests
@@ -65,6 +67,7 @@ RSpec.shared_examples 'Deploy keys with protected branches' do
it "just shows all sections but not deploy keys in the 'Allowed to push' dropdown" do
visit project_protected_branches_path(project)
+ click_button 'Add protected branch'
find(".js-allowed-to-push").click
wait_for_requests
diff --git a/spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb b/spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb
index cc0984b6226..703ba5b018a 100644
--- a/spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb
+++ b/spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb
@@ -14,6 +14,7 @@ RSpec.shared_examples 'Deploy keys with protected tags' do
it "shows all dropdown sections in the 'Allowed to create' main dropdown, with only one deploy key" do
visit project_protected_tags_path(project)
+ click_button('Add tag')
find(".js-allowed-to-create").click
wait_for_requests
@@ -31,6 +32,7 @@ RSpec.shared_examples 'Deploy keys with protected tags' do
create(:protected_tag, :no_one_can_create, project: project, name: 'v1.0.0')
visit project_protected_tags_path(project)
+ click_button('Add tag')
within(".js-protected-tag-edit-form") do
find(".js-allowed-to-create").click
@@ -46,6 +48,7 @@ RSpec.shared_examples 'Deploy keys with protected tags' do
context 'when no deploy key can push' do
it "just shows all sections but not deploy keys in the 'Allowed to create' dropdown" do
visit project_protected_tags_path(project)
+ click_button('Add tag')
find(".js-allowed-to-create").click
wait_for_requests
diff --git a/spec/support/shared_examples/features/runners_shared_examples.rb b/spec/support/shared_examples/features/runners_shared_examples.rb
index 54a4db0e81d..0c043f48c5f 100644
--- a/spec/support/shared_examples/features/runners_shared_examples.rb
+++ b/spec/support/shared_examples/features/runners_shared_examples.rb
@@ -57,7 +57,7 @@ RSpec.shared_examples 'shows and resets runner registration token' do
click_on dropdown_text
click_on 'Click to reveal'
- expect(old_registration_token).not_to eq registration_token
+ expect(find_field('token-value').value).not_to eq old_registration_token
end
end
end
diff --git a/spec/support/shared_examples/features/sidebar_shared_examples.rb b/spec/support/shared_examples/features/sidebar_shared_examples.rb
index c2c50e8762f..f402a1bc91a 100644
--- a/spec/support/shared_examples/features/sidebar_shared_examples.rb
+++ b/spec/support/shared_examples/features/sidebar_shared_examples.rb
@@ -100,7 +100,7 @@ RSpec.shared_examples 'issue boards sidebar' do
context 'when notifications have been disabled' do
before do
- project.update_attribute(:emails_disabled, true)
+ project.update_attribute(:emails_enabled, false)
refresh_and_click_first_card
end
diff --git a/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb b/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
new file mode 100644
index 00000000000..9f01c69608d
--- /dev/null
+++ b/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'variable list drawer' do
+ it 'adds a new CI variable' do
+ click_button('Add variable')
+
+ # For now, we just check that the drawer is displayed
+ expect(page).to have_selector('[data-testid="ci-variable-drawer"]')
+
+ # TODO: Add tests for ADDING a variable via drawer when feature is available
+ end
+
+ it 'edits a variable' do
+ page.within('[data-testid="ci-variable-table"]') do
+ click_button('Edit')
+ end
+
+ # For now, we just check that the drawer is displayed
+ expect(page).to have_selector('[data-testid="ci-variable-drawer"]')
+
+ # TODO: Add tests for EDITING a variable via drawer when feature is available
+ end
+end
diff --git a/spec/support/shared_examples/features/work_items_shared_examples.rb b/spec/support/shared_examples/features/work_items_shared_examples.rb
index 4c15b682458..d3863c9a675 100644
--- a/spec/support/shared_examples/features/work_items_shared_examples.rb
+++ b/spec/support/shared_examples/features/work_items_shared_examples.rb
@@ -15,17 +15,17 @@ RSpec.shared_examples 'work items title' do
end
end
-RSpec.shared_examples 'work items status' do
- let(:state_selector) { '[data-testid="work-item-state-select"]' }
+RSpec.shared_examples 'work items toggle status button' do
+ let(:state_button) { '[data-testid="work-item-state-toggle"]' }
it 'successfully shows and changes the status of the work item' do
- expect(find(state_selector)).to have_content 'Open'
+ expect(find(state_button, match: :first)).to have_content 'Close'
- find(state_selector).select("Closed")
+ find(state_button, match: :first).click
wait_for_requests
- expect(find(state_selector)).to have_content 'Closed'
+ expect(find(state_button, match: :first)).to have_content 'Reopen'
expect(work_item.reload.state).to eq('closed')
end
end
@@ -316,7 +316,7 @@ end
RSpec.shared_examples 'work items notifications' do
let(:actions_dropdown_selector) { '[data-testid="work-item-actions-dropdown"]' }
- let(:notifications_toggle_selector) { '[data-testid="notifications-toggle-action"] > button' }
+ let(:notifications_toggle_selector) { '[data-testid="notifications-toggle-action"] button[role="switch"]' }
it 'displays toast when notification is toggled' do
find(actions_dropdown_selector).click
diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
index 30041456d00..19001abcbe2 100644
--- a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
+++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
@@ -22,6 +22,14 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
it 'returns no items' do
expect(items).to be_empty
end
+
+ context 'when there are group-level work items' do
+ let!(:group_work_item) { create(:work_item, namespace: create(:group)) }
+
+ it 'returns no items' do
+ expect(items).to be_empty
+ end
+ end
end
context 'when filtering by group id' do
diff --git a/spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb
new file mode 100644
index 00000000000..d6d360bb413
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'updating time estimate' do
+ context 'when setting time estimate', :aggregate_failures do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:input_params) { input.merge(extra_params).merge({ timeEstimate: time_estimate }) }
+
+ context 'when time estimate is not a valid numerical value' do
+ let(:time_estimate) { '-3.5d' }
+
+ it 'does not update' do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }.not_to change { resource.time_estimate }
+ end
+
+ it 'returns error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).to include(a_hash_including('message' => /must be greater than or equal to zero/))
+ end
+ end
+
+ context 'when time estimate is not a number' do
+ let(:time_estimate) { 'nonsense' }
+
+ it 'does not update' do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }.not_to change { resource.time_estimate }
+ end
+
+ it 'returns error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).to include(a_hash_including('message' => /must be formatted correctly/))
+ end
+ end
+
+ context 'when time estimate is valid' do
+ let(:time_estimate) { "1h" }
+
+ before do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ it_behaves_like 'a working GraphQL mutation'
+
+ where(:time_estimate, :value) do
+ '1h' | 3600
+ '0h' | 0
+ '-0h' | 0
+ end
+
+ with_them do
+ specify do
+ expect(graphql_data_at(mutation_name, resource.class.to_s.underscore, 'timeEstimate')).to eq(value)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb b/spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb
index c666b72d492..0577ac329e6 100644
--- a/spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb
@@ -163,63 +163,77 @@ RSpec.shared_examples 'work item supports type change via quick actions' do
noteable.update!(work_item_type: task_type)
end
- it 'updates type' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- noteable.reload
- end.to change { noteable.work_item_type.base_type }.from('task').to('issue')
-
- expect(response).to have_gitlab_http_status(:success)
- end
-
- context 'when update service returns errors' do
- let_it_be(:issue) { create(:work_item, :issue, project: project) }
-
- before do
- create(:parent_link, work_item: noteable, work_item_parent: issue)
- end
-
- it 'mutation response include the errors' do
+ shared_examples 'a quick command that changes type' do
+ it 'updates type' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
noteable.reload
- end.not_to change { noteable.work_item_type.base_type }
+ end.to change { noteable.work_item_type.base_type }.from('task').to('issue')
expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['errors'])
- .to include('Validation Work item type cannot be changed to issue when linked to a parent issue.')
end
- end
- context 'when quick command for unsupported widget is present' do
- let(:body) { "\n/type Issue\n/assign @#{assignee.username}" }
+ context 'when update service returns errors' do
+ let_it_be(:issue) { create(:work_item, :issue, project: project) }
- before do
- WorkItems::Type.default_by_type(:issue).widget_definitions
- .find_by_widget_type(:assignees).update!(disabled: true)
+ before do
+ create(:parent_link, work_item: noteable, work_item_parent: issue)
+ end
+
+ it 'mutation response include the errors' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ noteable.reload
+ end.not_to change { noteable.work_item_type.base_type }
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors'])
+ .to include('Validation Work item type cannot be changed to issue when linked to a parent issue.')
+ end
end
- it 'updates only type' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- noteable.reload
- end.to change { noteable.work_item_type.base_type }.from('task').to('issue')
- .and change { noteable.assignees }.to([])
+ context 'when quick command for unsupported widget is present' do
+ let(:body) { "\n/type Issue\n/assign @#{assignee.username}" }
+
+ before do
+ WorkItems::Type.default_by_type(:issue).widget_definitions
+ .find_by_widget_type(:assignees).update!(disabled: true)
+ end
+
+ it 'updates only type' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ noteable.reload
+ end.to change { noteable.work_item_type.base_type }.from('task').to('issue')
+ .and change { noteable.assignees }.to([])
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors'])
+ .to include("Commands only Type changed successfully. Assigned @#{assignee.username}.")
+ end
+ end
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['errors'])
- .to include("Commands only Type changed successfully. Assigned @#{assignee.username}.")
+ context 'when the type name is upper case' do
+ let(:body) { "Updating type.\n/type Issue" }
+
+ it 'changes type to issue' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ noteable.reload
+ end.to change { noteable.work_item_type.base_type }.from('task').to('issue')
+ end
end
end
- context 'when the type name is upper case' do
- let(:body) { "Updating type.\n/type Issue" }
+ context 'with /type quick command' do
+ let(:body) { "Updating type.\n/type issue" }
- it 'changes type to issue' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- noteable.reload
- end.to change { noteable.work_item_type.base_type }.from('task').to('issue')
- end
+ it_behaves_like 'a quick command that changes type'
+ end
+
+ context 'with /promote_to quick command' do
+ let(:body) { "Updating type.\n/promote_to issue" }
+
+ it_behaves_like 'a quick command that changes type'
end
end
diff --git a/spec/support/shared_examples/helpers/runners_shared_examples.rb b/spec/support/shared_examples/helpers/runners_shared_examples.rb
new file mode 100644
index 00000000000..e509f7a65a5
--- /dev/null
+++ b/spec/support/shared_examples/helpers/runners_shared_examples.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'admin_runners_data_attributes contains data' do
+ it 'returns data' do
+ expect(subject).to include(
+ runner_install_help_page: 'https://docs.gitlab.com/runner/install/',
+ registration_token: Gitlab::CurrentSettings.runners_registration_token,
+ online_contact_timeout_secs: 7200,
+ stale_timeout_secs: 7889238
+ )
+ end
+end
diff --git a/spec/support/shared_examples/helpers/super_sidebar_shared_examples.rb b/spec/support/shared_examples/helpers/super_sidebar_shared_examples.rb
new file mode 100644
index 00000000000..9da804b3140
--- /dev/null
+++ b/spec/support/shared_examples/helpers/super_sidebar_shared_examples.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'shared super sidebar context' do
+ it 'returns sidebar values for logged-in users and logged-out users', :use_clean_rails_memory_store_caching do
+ expect(subject).to include({
+ current_menu_items: nil,
+ current_context_header: nil,
+ support_path: helper.support_url,
+ display_whats_new: helper.display_whats_new?,
+ whats_new_most_recent_release_items_count: helper.whats_new_most_recent_release_items_count,
+ whats_new_version_digest: helper.whats_new_version_digest,
+ show_version_check: helper.show_version_check?,
+ gitlab_version: Gitlab.version_info,
+ gitlab_version_check: helper.gitlab_version_check,
+ search: {
+ search_path: search_path,
+ issues_path: issues_dashboard_path,
+ mr_path: merge_requests_dashboard_path,
+ autocomplete_path: search_autocomplete_path,
+ search_context: helper.header_search_context
+ },
+ panel_type: panel_type
+ })
+ end
+end
+
+RSpec.shared_examples 'logged-out super-sidebar context' do
+ subject do
+ helper.super_sidebar_context(nil, group: nil, project: nil, panel: panel, panel_type: panel_type)
+ end
+
+ it_behaves_like 'shared super sidebar context'
+
+ it { is_expected.to include({ is_logged_in: false }) }
+
+ it { expect(subject[:context_switcher_links]).to be_an(Array) }
+end
diff --git a/spec/support/shared_examples/lib/gitlab/cache/json_cache_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/cache/json_cache_shared_examples.rb
index 0472bb87e62..f60974beaf8 100644
--- a/spec/support/shared_examples/lib/gitlab/cache/json_cache_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/cache/json_cache_shared_examples.rb
@@ -18,7 +18,7 @@ RSpec.shared_examples 'Json Cache class' do
it 'parses the cached value' do
allow(backend).to receive(:read).with(expanded_key).and_return(json_value(broadcast_message))
- expect(cache.read(key, BroadcastMessage)).to eq(broadcast_message)
+ expect(cache.read(key, System::BroadcastMessage)).to eq(broadcast_message)
end
it 'returns nil when klass is nil' do
@@ -30,14 +30,14 @@ RSpec.shared_examples 'Json Cache class' do
it 'gracefully handles an empty hash' do
allow(backend).to receive(:read).with(expanded_key).and_return(json_value({}))
- expect(cache.read(key, BroadcastMessage)).to be_a(BroadcastMessage)
+ expect(cache.read(key, System::BroadcastMessage)).to be_a(System::BroadcastMessage)
end
context 'when the cached value is a JSON true value' do
it 'parses the cached value' do
allow(backend).to receive(:read).with(expanded_key).and_return(json_value(true))
- expect(cache.read(key, BroadcastMessage)).to eq(true)
+ expect(cache.read(key, System::BroadcastMessage)).to eq(true)
end
end
@@ -45,7 +45,7 @@ RSpec.shared_examples 'Json Cache class' do
it 'parses the cached value' do
allow(backend).to receive(:read).with(expanded_key).and_return(json_value(false))
- expect(cache.read(key, BroadcastMessage)).to eq(false)
+ expect(cache.read(key, System::BroadcastMessage)).to eq(false)
end
end
@@ -53,23 +53,23 @@ RSpec.shared_examples 'Json Cache class' do
it 'gracefully handles bad cached entry' do
allow(backend).to receive(:read).with(expanded_key).and_return('{')
- expect(cache.read(key, BroadcastMessage)).to be_nil
+ expect(cache.read(key, System::BroadcastMessage)).to be_nil
end
it 'gracefully handles unknown attributes' do
read_value = json_value(broadcast_message.attributes.merge(unknown_attribute: 1))
allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
- expect(cache.read(key, BroadcastMessage)).to be_nil
+ expect(cache.read(key, System::BroadcastMessage)).to be_nil
end
it 'gracefully handles excluded fields from attributes during serialization' do
read_value = json_value(broadcast_message.attributes.except("message_html"))
allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
- result = cache.read(key, BroadcastMessage)
+ result = cache.read(key, System::BroadcastMessage)
- BroadcastMessage.cached_markdown_fields.html_fields.each do |field|
+ System::BroadcastMessage.cached_markdown_fields.html_fields.each do |field|
expect(result.public_send(field)).to be_nil
end
end
@@ -79,7 +79,7 @@ RSpec.shared_examples 'Json Cache class' do
it 'parses the cached value' do
allow(backend).to receive(:read).with(expanded_key).and_return(json_value([broadcast_message]))
- expect(cache.read(key, BroadcastMessage)).to eq([broadcast_message])
+ expect(cache.read(key, System::BroadcastMessage)).to eq([broadcast_message])
end
it 'returns an empty array when klass is nil' do
@@ -91,20 +91,20 @@ RSpec.shared_examples 'Json Cache class' do
it 'gracefully handles bad cached entry' do
allow(backend).to receive(:read).with(expanded_key).and_return('[')
- expect(cache.read(key, BroadcastMessage)).to be_nil
+ expect(cache.read(key, System::BroadcastMessage)).to be_nil
end
it 'gracefully handles an empty array' do
allow(backend).to receive(:read).with(expanded_key).and_return(json_value([]))
- expect(cache.read(key, BroadcastMessage)).to eq([])
+ expect(cache.read(key, System::BroadcastMessage)).to eq([])
end
it 'gracefully handles items with unknown attributes' do
read_value = json_value([{ unknown_attribute: 1 }, broadcast_message.attributes])
allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
- expect(cache.read(key, BroadcastMessage)).to eq([broadcast_message])
+ expect(cache.read(key, System::BroadcastMessage)).to eq([broadcast_message])
end
end
end
@@ -206,20 +206,20 @@ RSpec.shared_examples 'Json Cache class' do
end
it 'parses the cached value' do
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
expect(result).to eq(broadcast_message)
end
it 'decodes enums correctly' do
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
expect(result.broadcast_type).to eq(broadcast_message.broadcast_type)
end
context 'when the cached value is an instance of ActiveRecord::Base' do
it 'returns a persisted record when id is set' do
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
expect(result).to be_persisted
end
@@ -227,7 +227,7 @@ RSpec.shared_examples 'Json Cache class' do
it 'returns a new record when id is nil' do
backend.write(expanded_key, json_value(build(:broadcast_message)))
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
expect(result).to be_new_record
end
@@ -235,7 +235,7 @@ RSpec.shared_examples 'Json Cache class' do
it 'returns a new record when id is missing' do
backend.write(expanded_key, json_value(build(:broadcast_message).attributes.except('id')))
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
expect(result).to be_new_record
end
@@ -243,7 +243,7 @@ RSpec.shared_examples 'Json Cache class' do
it 'gracefully handles bad cached entry' do
allow(backend).to receive(:read).with(expanded_key).and_return('{')
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
expect(result).to eq 'block result'
end
@@ -251,14 +251,14 @@ RSpec.shared_examples 'Json Cache class' do
it 'gracefully handles an empty hash' do
allow(backend).to receive(:read).with(expanded_key).and_return(json_value({}))
- expect(cache.fetch(key, as: BroadcastMessage)).to be_a(BroadcastMessage)
+ expect(cache.fetch(key, as: System::BroadcastMessage)).to be_a(System::BroadcastMessage)
end
it 'gracefully handles unknown attributes' do
read_value = json_value(broadcast_message.attributes.merge(unknown_attribute: 1))
allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
expect(result).to eq 'block result'
end
@@ -267,9 +267,9 @@ RSpec.shared_examples 'Json Cache class' do
read_value = json_value(broadcast_message.attributes.except("message_html"))
allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
- BroadcastMessage.cached_markdown_fields.html_fields.each do |field|
+ System::BroadcastMessage.cached_markdown_fields.html_fields.each do |field|
expect(result.public_send(field)).to be_nil
end
end
@@ -294,7 +294,7 @@ RSpec.shared_examples 'Json Cache class' do
end
it 'parses the cached value' do
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+ result = cache.fetch(key, as: System::BroadcastMessage) { 'block result' }
expect(result).to eq([broadcast_message])
end
diff --git a/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb
index 7bcefd07fc4..6b296d0e78a 100644
--- a/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb
@@ -1,30 +1,43 @@
# frozen_string_literal: true
-RSpec.shared_examples 'search results filtered by archived' do
+RSpec.shared_examples 'search results filtered by archived' do |feature_flag_name|
context 'when filter not provided (all behavior)' do
let(:filters) { {} }
- it 'returns unarchived results only', :aggregate_failures do
- expect(results.objects('projects')).to include unarchived_project
- expect(results.objects('projects')).not_to include archived_project
+ it 'returns unarchived results only' do
+ expect(results.objects(scope)).to include unarchived_result
+ expect(results.objects(scope)).not_to include archived_result
end
end
context 'when include_archived is true' do
let(:filters) { { include_archived: true } }
- it 'returns archived and unarchived results', :aggregate_failures do
- expect(results.objects('projects')).to include unarchived_project
- expect(results.objects('projects')).to include archived_project
+ it 'returns archived and unarchived results' do
+ expect(results.objects(scope)).to include unarchived_result
+ expect(results.objects(scope)).to include archived_result
end
end
context 'when include_archived filter is false' do
let(:filters) { { include_archived: false } }
- it 'returns unarchived results only', :aggregate_failures do
- expect(results.objects('projects')).to include unarchived_project
- expect(results.objects('projects')).not_to include archived_project
+ it 'returns unarchived results only' do
+ expect(results.objects(scope)).to include unarchived_result
+ expect(results.objects(scope)).not_to include archived_result
+ end
+ end
+
+ context "when the #{feature_flag_name} feature flag is disabled" do
+ let(:filters) { {} }
+
+ before do
+ stub_feature_flags("#{feature_flag_name}": false)
+ end
+
+ it 'returns archived and unarchived results' do
+ expect(results.objects(scope)).to include unarchived_result
+ expect(results.objects(scope)).to include archived_result
end
end
end
diff --git a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
index dc92e56d013..74fba9416e2 100644
--- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
+++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
@@ -34,7 +34,7 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
it 'prevents db counters from leaking to the next transaction' do
2.times do
- Gitlab::WithRequestStore.with_request_store do
+ Gitlab::SafeRequestStore.ensure_request_store do
subscriber.sql(event)
expected = case db_role
diff --git a/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb b/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
index 28eac52256f..fdb31fa5d9d 100644
--- a/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
+++ b/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
@@ -3,12 +3,13 @@
RSpec.shared_examples 'migration that adds widget to work items definitions' do |widget_name:|
let(:migration) { described_class.new }
let(:work_item_definitions) { table(:work_item_widget_definitions) }
+ let(:work_item_type_count) { 7 }
describe '#up' do
it "creates widget definition in all types" do
work_item_definitions.where(name: widget_name).delete_all
- expect { migrate! }.to change { work_item_definitions.count }.by(7)
+ expect { migrate! }.to change { work_item_definitions.count }.by(work_item_type_count)
expect(work_item_definitions.all.pluck(:name)).to include(widget_name)
end
@@ -26,7 +27,7 @@ RSpec.shared_examples 'migration that adds widget to work items definitions' do
it "removes definitions for widget" do
migrate!
- expect { migration.down }.to change { work_item_definitions.count }.by(-7)
+ expect { migration.down }.to change { work_item_definitions.count }.by(-work_item_type_count)
expect(work_item_definitions.all.pluck(:name)).not_to include(widget_name)
end
end
diff --git a/spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb b/spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb
index a196b63585c..33b62564e5f 100644
--- a/spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb
@@ -1,6 +1,13 @@
# frozen_string_literal: true
RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
+ let(:logger) { instance_double('Gitlab::WebHooks::Logger') }
+
+ before do
+ allow(hook).to receive(:logger).and_return(logger)
+ allow(logger).to receive(:info)
+ end
+
shared_examples 'is tolerant of invalid records' do
specify do
hook.url = nil
@@ -83,6 +90,20 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
expect(find_hooks.disabled).to be_empty
end
end
+
+ context 'when silent mode is enabled' do
+ before do
+ stub_application_setting(silent_mode_enabled: true)
+ end
+
+ it 'causes no hooks to be considered executable' do
+ expect(find_hooks.executable).to be_empty
+ end
+
+ it 'causes all hooks to be considered disabled' do
+ expect(find_hooks.disabled.count).to eq(16)
+ end
+ end
end
describe '#executable?', :freeze_time do
@@ -157,6 +178,23 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
expect { hook.enable! }.to change { hook.executable? }.from(false).to(true)
end
+ it 'logs relevant information' do
+ hook.recent_failures = 1000
+ hook.disabled_until = 1.hour.from_now
+
+ expect(logger)
+ .to receive(:info)
+ .with(a_hash_including(
+ hook_id: hook.id,
+ action: 'enable',
+ recent_failures: 0,
+ disabled_until: nil,
+ backoff_count: 0
+ ))
+
+ hook.enable!
+ end
+
it 'does not update hooks unless necessary' do
hook
@@ -174,11 +212,25 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
end
end
- describe '#backoff!' do
+ describe '#backoff!', :freeze_time do
context 'when we have not backed off before' do
it 'does not disable the hook' do
expect { hook.backoff! }.not_to change { hook.executable? }.from(true)
end
+
+ it 'increments recent_failures' do
+ expect { hook.backoff! }.to change { hook.recent_failures }.from(0).to(1)
+ end
+
+ it 'logs relevant information' do
+ expect(logger)
+ .to receive(:info)
+ .with(a_hash_including(
+ hook_id: hook.id, action: 'backoff', recent_failures: 1
+ ))
+
+ hook.backoff!
+ end
end
context 'when we have exhausted the grace period' do
@@ -186,6 +238,32 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
hook.update!(recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD)
end
+ it 'disables the hook' do
+ expect { hook.backoff! }.to change { hook.executable? }.from(true).to(false)
+ end
+
+ it 'increments backoff_count' do
+ expect { hook.backoff! }.to change { hook.backoff_count }.from(0).to(1)
+ end
+
+ it 'sets disabled_until' do
+ expect { hook.backoff! }.to change { hook.disabled_until }.from(nil).to(1.minute.from_now)
+ end
+
+ it 'logs relevant information' do
+ expect(logger)
+ .to receive(:info)
+ .with(a_hash_including(
+ hook_id: hook.id,
+ action: 'backoff',
+ recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD + 1,
+ disabled_until: 1.minute.from_now,
+ backoff_count: 1
+ ))
+
+ hook.backoff!
+ end
+
context 'when the hook is permanently disabled' do
before do
allow(hook).to receive(:permanently_disabled?).and_return(true)
@@ -204,15 +282,15 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
def run_expectation
expect { hook.backoff! }.to change { hook.backoff_count }.by(1)
end
+ end
- context 'when the flag is disabled' do
- before do
- stub_feature_flags(auto_disabling_web_hooks: false)
- end
+ context 'when the flag is disabled' do
+ before do
+ stub_feature_flags(auto_disabling_web_hooks: false)
+ end
- it 'does not increment backoff count' do
- expect { hook.failed! }.not_to change { hook.backoff_count }
- end
+ it 'does not increment backoff count' do
+ expect { hook.failed! }.not_to change { hook.backoff_count }
end
end
end
@@ -236,36 +314,6 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
end
end
- describe '#disable!' do
- it 'disables a hook' do
- expect { hook.disable! }.to change { hook.executable? }.from(true).to(false)
- end
-
- context 'when the flag is disabled' do
- before do
- stub_feature_flags(auto_disabling_web_hooks: false)
- end
-
- it 'does not disable the hook' do
- expect { hook.disable! }.not_to change { hook.executable? }
- end
- end
-
- it 'does nothing if the hook is already disabled' do
- allow(hook).to receive(:permanently_disabled?).and_return(true)
-
- sql_count = ActiveRecord::QueryRecorder.new { hook.disable! }.count
-
- expect(sql_count).to eq(0)
- end
-
- include_examples 'is tolerant of invalid records' do
- def run_expectation
- expect { hook.disable! }.to change { hook.executable? }.from(true).to(false)
- end
- end
- end
-
describe '#temporarily_disabled?' do
it 'is false when not temporarily disabled' do
expect(hook).not_to be_temporarily_disabled
@@ -310,7 +358,7 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
context 'when hook has been disabled' do
before do
- hook.disable!
+ hook.update!(recent_failures: WebHooks::AutoDisabling::EXCEEDED_FAILURE_THRESHOLD)
end
it 'is true' do
@@ -336,7 +384,7 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
context 'when hook has been disabled' do
before do
- hook.disable!
+ hook.update!(recent_failures: WebHooks::AutoDisabling::EXCEEDED_FAILURE_THRESHOLD)
end
it { is_expected.to eq :disabled }
@@ -352,7 +400,7 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
context 'when hook has been backed off' do
before do
- hook.update!(recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD + 1)
+ hook.update!(recent_failures: WebHooks::AutoDisabling::EXCEEDED_FAILURE_THRESHOLD)
hook.disabled_until = 1.hour.from_now
end
diff --git a/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb b/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
index 0a07c9d677b..187c0b3ab43 100644
--- a/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
@@ -162,10 +162,24 @@ RSpec.shared_examples 'model with repository' do
end
describe '#after_repository_change_head' do
+ let(:event) { instance_double('Repositories::DefaultBranchChangedEvent') }
+ let(:event_data) { { container_id: stubbed_container.id, container_type: stubbed_container.class.name } }
+
it 'calls #reload_default_branch' do
expect(stubbed_container).to receive(:reload_default_branch)
stubbed_container.after_repository_change_head
end
+
+ it 'publishes an Repositories::DefaultBranchChangedEvent event' do
+ allow(Repositories::DefaultBranchChangedEvent)
+ .to receive(:new)
+ .with(data: event_data)
+ .and_return(event)
+
+ expect(Gitlab::EventStore).to receive(:publish).with(event).once
+
+ stubbed_container.after_repository_change_head
+ end
end
end
diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
index 28d2d4f1597..2985763426f 100644
--- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
@@ -327,7 +327,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
end
context 'on a protected branch' do
- before(:all) do
+ before_all do
create(:protected_branch, :create_branch_on_repository, project: project, name: 'a-protected-branch')
end
@@ -369,7 +369,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
end
context 'on a protected branch with protected branches defined using wildcards' do
- before(:all) do
+ before_all do
create(:protected_branch, :create_branch_on_repository, repository_branch_name: '1-stable', project: project, name: '*-stable')
end
@@ -578,7 +578,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
end
context 'on a protected branch' do
- before(:all) do
+ before_all do
create(:protected_branch, :create_branch_on_repository, project: project, name: 'a-protected-branch')
end
@@ -606,7 +606,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
end
context 'on a protected branch with protected branches defined usin wildcards' do
- before(:all) do
+ before_all do
create(:protected_branch, :create_branch_on_repository, repository_branch_name: '1-stable', project: project, name: '*-stable')
end
@@ -682,7 +682,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
it_behaves_like "triggered #{integration_name} integration", event_type: "deployment"
context 'on a protected branch' do
- before(:all) do
+ before_all do
create(:protected_branch, :create_branch_on_repository, project: project, name: 'a-protected-branch')
end
diff --git a/spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb b/spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb
new file mode 100644
index 00000000000..efd27a051fe
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'includes LinkableItem concern' do
+ describe 'validation' do
+ let_it_be(:task) { create(:work_item, :task, project: project) }
+ let_it_be(:issue) { create(:work_item, :issue, project: project) }
+
+ subject(:link) { build(link_factory, source_id: source.id, target_id: target.id) }
+
+ describe '#check_existing_parent_link' do
+ shared_examples 'invalid due to existing link' do
+ it do
+ is_expected.to be_invalid
+ expect(link.errors.messages[:source]).to include("is a parent or child of this #{item_type}")
+ end
+ end
+
+ context 'without existing link parent' do
+ let(:source) { issue }
+ let(:target) { task }
+
+ it 'is valid' do
+ is_expected.to be_valid
+ expect(link.errors).to be_empty
+ end
+ end
+
+ context 'with existing link parent' do
+ let_it_be(:relationship) { create(:parent_link, work_item_parent: issue, work_item: task) }
+
+ it_behaves_like 'invalid due to existing link' do
+ let(:source) { issue }
+ let(:target) { task }
+ end
+
+ it_behaves_like 'invalid due to existing link' do
+ let(:source) { task }
+ let(:target) { issue }
+ end
+ end
+ end
+ end
+
+ describe 'Scopes' do
+ describe '.for_source' do
+ it 'includes linked items for source' do
+ source = item
+ link_1 = create(link_factory, source: source, target: item1)
+ link_2 = create(link_factory, source: source, target: item2)
+
+ result = described_class.for_source(source)
+
+ expect(result).to contain_exactly(link_1, link_2)
+ end
+ end
+
+ describe '.for_target' do
+ it 'includes linked items for target' do
+ target = item
+ link_1 = create(link_factory, source: item1, target: target)
+ link_2 = create(link_factory, source: item2, target: target)
+
+ result = described_class.for_target(target)
+
+ expect(result).to contain_exactly(link_1, link_2)
+ end
+ end
+
+ describe '.for_items' do
+ let_it_be(:source_link) { create(link_factory, source: item, target: item1) }
+ let_it_be(:target_link) { create(link_factory, source: item2, target: item) }
+
+ it 'includes links when item is source' do
+ expect(described_class.for_items(item, item1)).to contain_exactly(source_link)
+ end
+
+ it 'includes links when item is target' do
+ expect(described_class.for_items(item, item2)).to contain_exactly(target_link)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb b/spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb
index f98528ffedc..32e36c74a73 100644
--- a/spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'a hook that does not get automatically disabled on failure' do
describe '.executable/.disabled', :freeze_time do
- let!(:executables) do
+ let!(:webhooks) do
[
[0, Time.current],
[0, 1.minute.from_now],
@@ -29,9 +29,23 @@ RSpec.shared_examples 'a hook that does not get automatically disabled on failur
it 'finds the correct set of project hooks' do
expect(find_hooks).to all(be_executable)
- expect(find_hooks.executable).to match_array executables
+ expect(find_hooks.executable).to match_array(webhooks)
expect(find_hooks.disabled).to be_empty
end
+
+ context 'when silent mode is enabled' do
+ before do
+ stub_application_setting(silent_mode_enabled: true)
+ end
+
+ it 'causes no hooks to be considered executable' do
+ expect(find_hooks.executable).to be_empty
+ end
+
+ it 'causes all hooks to be considered disabled' do
+ expect(find_hooks.disabled).to match_array(webhooks)
+ end
+ end
end
describe '#executable?', :freeze_time do
@@ -123,12 +137,6 @@ RSpec.shared_examples 'a hook that does not get automatically disabled on failur
end
end
- describe '#disable!' do
- it 'does not disable a group hook' do
- expect { hook.disable! }.not_to change { hook.executable? }.from(true)
- end
- end
-
describe '#temporarily_disabled?' do
it 'is false' do
# Initially
@@ -150,7 +158,7 @@ RSpec.shared_examples 'a hook that does not get automatically disabled on failur
# Initially
expect(hook).not_to be_permanently_disabled
- hook.disable!
+ hook.update!(recent_failures: WebHooks::AutoDisabling::EXCEEDED_FAILURE_THRESHOLD)
expect(hook).not_to be_permanently_disabled
end
@@ -163,7 +171,7 @@ RSpec.shared_examples 'a hook that does not get automatically disabled on failur
context 'when hook has been disabled' do
before do
- hook.disable!
+ hook.update!(recent_failures: WebHooks::AutoDisabling::EXCEEDED_FAILURE_THRESHOLD)
end
it { is_expected.to eq :executable }
@@ -171,7 +179,7 @@ RSpec.shared_examples 'a hook that does not get automatically disabled on failur
context 'when hook has been backed off' do
before do
- hook.update!(recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD + 1)
+ hook.update!(recent_failures: WebHooks::AutoDisabling::EXCEEDED_FAILURE_THRESHOLD)
hook.disabled_until = 1.hour.from_now
end
diff --git a/spec/support/shared_examples/models/issuable_link_shared_examples.rb b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
index 42c7be5ddc3..af96b77edaf 100644
--- a/spec/support/shared_examples/models/issuable_link_shared_examples.rb
+++ b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
@@ -7,8 +7,8 @@
# issuable_link_factory
RSpec.shared_examples 'issuable link' do
describe 'Associations' do
- it { is_expected.to belong_to(:source).class_name(issuable.class.name) }
- it { is_expected.to belong_to(:target).class_name(issuable.class.name) }
+ it { is_expected.to belong_to(:source).class_name(issuable_class) }
+ it { is_expected.to belong_to(:target).class_name(issuable_class) }
end
describe 'Validation' do
@@ -27,7 +27,8 @@ RSpec.shared_examples 'issuable link' do
issuable_link = create_issuable_link(subject.target, subject.source)
expect(issuable_link).to be_invalid
- expect(issuable_link.errors[:source]).to include("is already related to this #{issuable.class.name.downcase}")
+ expect(issuable_link.errors[:source])
+ .to include("is already related to this #{issuable.issuable_type.humanize(capitalize: false)}")
end
context 'when it relates to itself' do
diff --git a/spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb b/spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb
index f308b4ad372..371f33f2b29 100644
--- a/spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb
+++ b/spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb
@@ -4,6 +4,7 @@ RSpec.shared_examples "protected tags > access control > CE" do
ProtectedRef::AccessLevel.human_access_levels.each do |(access_type_id, access_type_name)|
it "allows creating protected tags that #{access_type_name} can create" do
visit project_protected_tags_path(project)
+ click_button('Add tag')
set_protected_tag_name('master')
set_allowed_to('create', access_type_name)
@@ -15,6 +16,7 @@ RSpec.shared_examples "protected tags > access control > CE" do
it "allows updating protected tags so that #{access_type_name} can create them" do
visit project_protected_tags_path(project)
+ click_button('Add tag')
set_protected_tag_name('master')
set_allowed_to('create', 'No one')
diff --git a/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
index 56a1cee44c8..344f827dbb2 100644
--- a/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
@@ -90,6 +90,15 @@ RSpec.shared_examples 'issuable time tracker' do |issuable_type|
end
end
+ it 'shows the set time estimate form when add button is clicked' do
+ click_button _('Set estimate')
+
+ page.within '[data-testid="set-time-estimate-modal"]' do
+ expect(page).to have_content 'Set time estimate'
+ expect(page).to have_content 'Estimate'
+ end
+ end
+
it 'shows the time tracking report when link is clicked' do
submit_time('/estimate 1w')
submit_time('/spend 1d')
diff --git a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
index cf5a67f6096..f9bcfb1f304 100644
--- a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
@@ -36,6 +36,8 @@ RSpec.shared_examples 'merge quick action' do
create(:ci_pipeline, :detached_merge_request_pipeline,
project: project, merge_request: merge_request)
merge_request.update_head_pipeline
+
+ stub_licensed_features(merge_request_approvers: true) if Gitlab.ee?
end
it 'schedules to merge the MR' do
diff --git a/spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb
index 0fc914d71d5..67e0b2c4b65 100644
--- a/spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb
@@ -78,16 +78,6 @@ RSpec.shared_examples 'quick actions that change work item type' do
end
it_behaves_like 'action with validation errors'
-
- context 'when task has a parent' do
- let_it_be(:parent) { create(:work_item, :issue, project: project) }
-
- before do
- create(:parent_link, work_item: task, work_item_parent: parent)
- end
-
- it_behaves_like 'quick command error', 'A task cannot be promoted when a parent issue is present', 'promote'
- end
end
end
end
diff --git a/spec/support/shared_examples/requests/api/draft_notes_shared_examples.rb b/spec/support/shared_examples/requests/api/draft_notes_shared_examples.rb
new file mode 100644
index 00000000000..40825cdd5ed
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/draft_notes_shared_examples.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'diff draft notes API' do |id_name|
+ describe "post /projects/:id/merge_requests/:merge_request_id/draft_notes" do
+ it "creates a new diff draft note" do
+ line_range = {
+ "start" => {
+ "line_code" => Gitlab::Git.diff_line_code(draft_note.position.file_path, 1, 1),
+ "type" => draft_note.position.type
+ },
+ "end" => {
+ "line_code" => Gitlab::Git.diff_line_code(draft_note.position.file_path, 2, 2),
+ "type" => draft_note.position.type
+ }
+ }
+
+ position = draft_note.position.to_h.merge({ line_range: line_range }).except(:ignore_whitespace_change)
+
+ post api("/projects/#{project.id}/merge_requests/#{merge_request[id_name]}/draft_notes", user),
+ params: { note: 'hi!', position: position }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['note']).to eq('hi!')
+ expect(json_response['position']).to eq(position.stringify_keys)
+ end
+
+ context "when position is invalid" do
+ it "returns a 400 bad request error when position is not plausible" do
+ position = draft_note.position.to_h.merge(new_line: '100000')
+
+ post api("/projects/#{project.id}/merge_requests/#{merge_request[id_name]}/draft_notes", user),
+ params: { body: 'hi!', position: position }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it "returns a 400 bad request error when the position is not valid for this discussion" do
+ position = draft_note.position.to_h.merge(new_line: '588440f66559714280628a4f9799f0c4eb880a4a')
+
+ post api("/projects/#{project.id}/merge_requests/#{merge_request[id_name]}/draft_notes", user),
+ params: { body: 'hi!', position: position }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+
+ describe "put /projects/:id/merge_requests/:merge_request_id/draft_notes/:draft_note_id" do
+ it "modifies a draft note" do
+ line_range = {
+ "start" => {
+ "line_code" => Gitlab::Git.diff_line_code(draft_note.position.file_path, 3, 3),
+ "type" => draft_note.position.type
+ },
+ "end" => {
+ "line_code" => Gitlab::Git.diff_line_code(draft_note.position.file_path, 4, 4),
+ "type" => draft_note.position.type
+ }
+ }
+
+ position = draft_note.position.to_h.merge({ line_range: line_range }).except(:ignore_whitespace_change)
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request[id_name]}/draft_notes/#{draft_note.id}", user),
+ params: { note: 'hola!', position: position }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['note']).to eq('hola!')
+ expect(json_response['position']).to eq(position.stringify_keys)
+ end
+
+ it "returns bad request for an empty note" do
+ line_range = {
+ "start" => {
+ "line_code" => Gitlab::Git.diff_line_code(draft_note.position.file_path, 3, 3),
+ "type" => draft_note.position.type
+ },
+ "end" => {
+ "line_code" => Gitlab::Git.diff_line_code(draft_note.position.file_path, 4, 4),
+ "type" => draft_note.position.type
+ }
+ }
+
+ position = draft_note.position.to_h.merge({ line_range: line_range }).except(:ignore_whitespace_change)
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request[id_name]}/draft_notes/#{draft_note.id}", user),
+ params: { note: '', position: position }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
index 5e9dfc826d4..36832113b30 100644
--- a/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
@@ -44,7 +44,7 @@ RSpec.shared_examples 'group and project packages query' do
post_graphql(query, current_user: current_user)
end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query that returns data'
it 'returns packages successfully' do
expect(package_names).to contain_exactly(
@@ -84,11 +84,7 @@ RSpec.shared_examples 'group and project packages query' do
post_graphql(query, current_user: current_user)
end
- it_behaves_like 'a working graphql query'
-
- it 'returns nil' do
- expect(packages).to be_nil
- end
+ it_behaves_like 'a working graphql query that returns no data'
end
context 'when the user is not authenticated' do
@@ -96,11 +92,7 @@ RSpec.shared_examples 'group and project packages query' do
post_graphql(query)
end
- it_behaves_like 'a working graphql query'
-
- it 'returns nil' do
- expect(packages).to be_nil
- end
+ it_behaves_like 'a working graphql query that returns no data'
end
describe 'sorting and pagination' do
diff --git a/spec/support/shared_examples/requests/api/graphql/remote_development_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/remote_development_shared_examples.rb
index 7c32c7bf2a9..83e22945361 100644
--- a/spec/support/shared_examples/requests/api/graphql/remote_development_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/remote_development_shared_examples.rb
@@ -10,9 +10,7 @@ RSpec.shared_examples 'workspaces query in licensed environment and with feature
it_behaves_like 'a working graphql query'
- # noinspection RubyResolve
it { is_expected.to match_array(a_hash_including('name' => workspace.name)) }
- # noinspection RubyResolve
context 'when user is not authorized' do
let(:current_user) { create(:user) }
diff --git a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
index a2c34aa6a54..7489dc7c1d6 100644
--- a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
@@ -121,7 +121,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
context 'the hook is disabled' do
before do
- hook.disable!
+ hook.update!(recent_failures: hook.class::EXCEEDED_FAILURE_THRESHOLD)
end
it "has the correct alert status", :aggregate_failures do
diff --git a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
index 432e67ee21e..150e9a4e004 100644
--- a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
@@ -1,8 +1,10 @@
# frozen_string_literal: true
-RSpec.shared_examples 'handling nuget service requests' do
+RSpec.shared_examples 'handling nuget service requests' do |v2: false|
subject { get api(url) }
+ it { is_expected.to have_request_urgency(v2 ? :low : :default) }
+
context 'with valid target' do
using RSpec::Parameterized::TableSyntax
@@ -20,15 +22,17 @@ RSpec.shared_examples 'handling nuget service requests' do
end
with_them do
- let(:snowplow_gitlab_standard_context) { snowplow_context(user_role: :anonymous) }
-
- subject { get api(url) }
+ let(:snowplow_gitlab_standard_context) do
+ snowplow_context(user_role: :anonymous).tap do |ctx|
+ ctx[:feed] = 'v2' if v2
+ end
+ end
before do
update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member], v2
end
end
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index d6a0055700d..2e66bae26ba 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -18,7 +18,7 @@ RSpec.shared_examples 'rejects nuget packages access' do |user_type, status, add
end
end
-RSpec.shared_examples 'process nuget service index request' do |user_type, status, add_member = true|
+RSpec.shared_examples 'process nuget service index request' do |user_type, status, add_member = true, v2 = false|
context "for user type #{user_type}" do
before do
target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
@@ -28,15 +28,22 @@ RSpec.shared_examples 'process nuget service index request' do |user_type, statu
it_behaves_like 'a package tracking event', 'API::NugetPackages', 'cli_metadata'
- it 'returns a valid json response' do
+ it 'returns a valid json or xml response' do
subject
- expect(response.media_type).to eq('application/json')
- expect(json_response).to match_schema('public_api/v4/packages/nuget/service_index')
- expect(json_response).to be_a(Hash)
+ if v2
+ expect(response.media_type).to eq('application/xml')
+ expect(body).to have_xpath('//service')
+ .and have_xpath('//service/workspace')
+ .and have_xpath('//service/workspace/collection[@href]')
+ else
+ expect(response.media_type).to eq('application/json')
+ expect(json_response).to match_schema('public_api/v4/packages/nuget/service_index')
+ expect(json_response).to be_a(Hash)
+ end
end
- context 'with invalid format' do
+ context 'with invalid format', unless: v2 do
let(:url) { "/#{target_type}/#{target.id}/packages/nuget/index.xls" }
it_behaves_like 'rejects nuget packages access', :anonymous, :not_found
@@ -44,6 +51,34 @@ RSpec.shared_examples 'process nuget service index request' do |user_type, statu
end
end
+RSpec.shared_examples 'process nuget v2 $metadata service request' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'returns a valid xml response' do
+ api_request
+
+ doc = Nokogiri::XML(body)
+
+ expect(response.media_type).to eq('application/xml')
+ expect(doc.at_xpath('//edmx:Edmx')).to be_present
+ expect(doc.at_xpath('//edmx:Edmx/edmx:DataServices')).to be_present
+ expect(doc.css('*').map(&:name)).to include(
+ 'Schema', 'EntityType', 'Key', 'PropertyRef', 'EntityContainer', 'EntitySet', 'FunctionImport', 'Parameter'
+ )
+ expect(doc.css('*').select { |el| el.name == 'Property' }.map { |el| el.attribute_nodes.first.value })
+ .to match_array(%w[Id Version Authors Dependencies Description DownloadCount IconUrl Published ProjectUrl
+ Tags Title LicenseUrl]
+ )
+ expect(doc.css('*').detect { |el| el.name == 'FunctionImport' }.attr('Name')).to eq('FindPackagesById')
+ end
+ end
+end
+
RSpec.shared_examples 'returning nuget metadata json response with json schema' do |json_schema|
it 'returns a valid json response' do
subject
@@ -320,6 +355,33 @@ RSpec.shared_examples 'process nuget download content request' do |user_type, st
expect(response.media_type).to eq('application/octet-stream')
end
end
+
+ context 'with normalized package version' do
+ let(:normalized_version) { '0.1.0' }
+ let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package.name}/#{normalized_version}/#{package.name}.#{package.version}.#{format}" }
+
+ before do
+ package.nuget_metadatum.update_column(:normalized_version, normalized_version)
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'returns a valid package archive' do
+ subject
+
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+
+ it_behaves_like 'bumping the package last downloaded at field'
+
+ context 'when nuget_normalized_version feature flag is disabled' do
+ before do
+ stub_feature_flags(nuget_normalized_version: false)
+ end
+
+ it_behaves_like 'returning response status', :not_found
+ end
+ end
end
end
@@ -439,6 +501,13 @@ end
RSpec.shared_examples 'nuget authorize upload endpoint' do
using RSpec::Parameterized::TableSyntax
+ include_context 'workhorse headers'
+
+ let(:headers) { {} }
+
+ subject { put api(url), headers: headers }
+
+ it { is_expected.to have_request_urgency(:low) }
context 'with valid project' do
where(:visibility_level, :user_role, :member, :user_token, :sent_through, :shared_examples_name, :expected_status) do
@@ -517,6 +586,26 @@ end
RSpec.shared_examples 'nuget upload endpoint' do |symbol_package: false|
using RSpec::Parameterized::TableSyntax
+ include_context 'workhorse headers'
+
+ let(:headers) { {} }
+ let(:file_name) { symbol_package ? 'package.snupkg' : 'package.nupkg' }
+ let(:params) { { package: temp_file(file_name) } }
+ let(:file_key) { :package }
+ let(:send_rewritten_field) { true }
+
+ subject do
+ workhorse_finalize(
+ api(url),
+ method: :put,
+ file_key: file_key,
+ params: params,
+ headers: headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+
+ it { is_expected.to have_request_urgency(:low) }
context 'with valid project' do
where(:visibility_level, :user_role, :member, :user_token, :sent_through, :shared_examples_name, :expected_status) do
@@ -573,7 +662,12 @@ RSpec.shared_examples 'nuget upload endpoint' do |symbol_package: false|
end
let(:headers) { user_headers.merge(workhorse_headers) }
- let(:snowplow_gitlab_standard_context) { { project: project, user: user, namespace: project.namespace, property: 'i_package_nuget_user' } }
+
+ let(:snowplow_gitlab_standard_context) do
+ { project: project, user: user, namespace: project.namespace, property: 'i_package_nuget_user' }.tap do |ctx|
+ ctx[:feed] = 'v2' if url.include?('nuget/v2')
+ end
+ end
before do
update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
@@ -604,4 +698,16 @@ RSpec.shared_examples 'nuget upload endpoint' do |symbol_package: false|
it_behaves_like 'returning response status', :bad_request
end
+
+ context 'when ObjectStorage::RemoteStoreError is raised' do
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_headers) }
+
+ before do
+ allow_next_instance_of(::Packages::CreatePackageFileService) do |instance|
+ allow(instance).to receive(:execute).and_raise(ObjectStorage::RemoteStoreError)
+ end
+ end
+
+ it_behaves_like 'returning response status', :forbidden
+ end
end
diff --git a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
index 398421c7a79..dec15cb68b3 100644
--- a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
@@ -20,40 +20,49 @@ RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
issuable_collection_name = issuable_name.pluralize
describe "POST /projects/:id/#{issuable_collection_name}/:#{issuable_name}_id/time_estimate" do
+ subject(:set_time_estimate) do
+ post(api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: duration })
+ end
+
+ let(:duration) { '2h' }
+
context 'with an unauthorized user' do
- subject { post(api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", non_member), params: { duration: '1w' }) }
+ let(:user) { non_member }
it_behaves_like 'an unauthorized API user'
it_behaves_like 'API user with insufficient permissions'
end
- it "sets the time estimate for #{issuable_name}" do
- post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: '1w' }
+ context 'with an authorized user' do
+ it "sets the time estimate for #{issuable_name}" do
+ set_time_estimate
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['human_time_estimate']).to eq('1w')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['time_estimate']).to eq(7200)
+ end
end
describe 'updating the current estimate' do
before do
- post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: '1w' }
+ post(api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: '2h' })
end
- context 'when duration has a bad format' do
- it 'does not modify the original estimate' do
- post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: 'foo' }
+ using RSpec::Parameterized::TableSyntax
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(issuable.reload.human_time_estimate).to eq('1w')
- end
+ where(:updated_duration, :expected_http_status, :expected_time_estimate) do
+ 'foo' | :bad_request | 7200
+ '-1' | :bad_request | 7200
+ '1h' | :ok | 3600
+ '0' | :ok | 0
end
- context 'with a valid duration' do
- it 'updates the estimate' do
- post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: '3w1h' }
+ with_them do
+ let(:duration) { updated_duration }
+ it 'returns expected HTTP status and time estimate' do
+ set_time_estimate
- expect(response).to have_gitlab_http_status(:ok)
- expect(issuable.reload.human_time_estimate).to eq('3w 1h')
+ expect(response).to have_gitlab_http_status(expected_http_status)
+ expect(issuable.reload.time_estimate).to eq(expected_time_estimate)
end
end
end
diff --git a/spec/support/shared_examples/requests/graphql_shared_examples.rb b/spec/support/shared_examples/requests/graphql_shared_examples.rb
index 2c08f946468..69933bafbea 100644
--- a/spec/support/shared_examples/requests/graphql_shared_examples.rb
+++ b/spec/support/shared_examples/requests/graphql_shared_examples.rb
@@ -5,11 +5,31 @@ RSpec.shared_examples 'a working graphql query' do
it 'returns a successful response', :aggregate_failures do
expect(response).to have_gitlab_http_status(:success)
- expect(graphql_errors).to be_nil
+ expect_graphql_errors_to_be_empty
expect(json_response.keys).to include('data')
end
end
+RSpec.shared_examples 'a working graphql query that returns no data' do
+ include GraphqlHelpers
+
+ it_behaves_like 'a working graphql query'
+
+ it 'contains no data' do
+ expect(graphql_data.compact).to be_empty
+ end
+end
+
+RSpec.shared_examples 'a working graphql query that returns data' do
+ include GraphqlHelpers
+
+ it_behaves_like 'a working graphql query'
+
+ it 'contains data' do
+ expect(graphql_data.compact).not_to be_empty
+ end
+end
+
RSpec.shared_examples 'a working GraphQL mutation' do
include GraphqlHelpers
@@ -20,11 +40,7 @@ RSpec.shared_examples 'a working GraphQL mutation' do
shared_examples 'allows access to the mutation' do
let(:scopes) { ['api'] }
- it_behaves_like 'a working graphql query' do
- it 'returns data' do
- expect(graphql_data.compact).not_to be_empty
- end
- end
+ it_behaves_like 'a working graphql query that returns data'
end
shared_examples 'prevents access to the mutation' do
diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
index 493a96b8dae..34188a8d18a 100644
--- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
@@ -58,6 +58,12 @@ RSpec.shared_examples 'with auth_type' do
let(:current_params) { super().merge(auth_type: :foo) }
it { expect(payload['auth_type']).to eq('foo') }
+
+ it "contains the auth_type as part of the encoded user information in the payload" do
+ user_info = decode_user_info_from_payload(payload)
+
+ expect(user_info["token_type"]).to eq("foo")
+ end
end
RSpec.shared_examples 'a browsable' do
@@ -971,7 +977,16 @@ RSpec.shared_examples 'a container registry auth service' do
let(:authentication_abilities) { [:read_container_image] }
it_behaves_like 'an authenticated'
+
it { expect(payload['auth_type']).to eq('deploy_token') }
+
+ it "has encoded user information in the payload" do
+ user_info = decode_user_info_from_payload(payload)
+
+ expect(user_info["token_type"]).to eq('deploy_token')
+ expect(user_info["username"]).to eq(deploy_token.username)
+ expect(user_info["deploy_token_id"]).to eq(deploy_token.id)
+ end
end
end
@@ -1198,6 +1213,15 @@ RSpec.shared_examples 'a container registry auth service' do
it_behaves_like 'a pushable'
it_behaves_like 'container repository factory'
end
+
+ it "has encoded user information in the payload" do
+ user_info = decode_user_info_from_payload(payload)
+
+ expect(user_info["username"]).to eq(current_user.username)
+ expect(user_info["user_id"]).to eq(current_user.id)
+ end
+
+ it_behaves_like 'with auth_type'
end
end
@@ -1293,4 +1317,8 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
end
+
+ def decode_user_info_from_payload(payload)
+ JWT.decode(payload["user"], nil, false)[0]["user_info"]
+ end
end
diff --git a/spec/support/shared_examples/services/import_csv_service_shared_examples.rb b/spec/support/shared_examples/services/import_csv_service_shared_examples.rb
index 1555497ae48..b09d087f518 100644
--- a/spec/support/shared_examples/services/import_csv_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/import_csv_service_shared_examples.rb
@@ -36,3 +36,15 @@ RSpec.shared_examples 'correctly handles invalid files' do
it_behaves_like 'invalid file'
end
end
+
+RSpec.shared_examples 'performs a spam check' do |perform_check|
+ it 'initializes issue create service with expected spam check parameter' do
+ expect(Issues::CreateService)
+ .to receive(:new)
+ .at_least(:once)
+ .with(hash_including(perform_spam_check: perform_check))
+ .and_call_original
+
+ subject
+ end
+end
diff --git a/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb b/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb
index 85a05bbe56d..3f95d6060ea 100644
--- a/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb
@@ -64,6 +64,87 @@ RSpec.shared_examples 'issuable update service' do
end
end
+RSpec.shared_examples 'updating issuable labels' do
+ context 'when add_label_ids and label_ids are passed' do
+ let(:params) { { label_ids: [label_a.id], add_label_ids: [label_c.id] } }
+
+ it 'replaces the labels with the ones in label_ids and adds those in add_label_ids' do
+ issuable.update!(labels: [label_b])
+ update_issuable(params)
+
+ expect(issuable.label_ids).to contain_exactly(label_a.id, label_c.id)
+ end
+ end
+
+ context 'when remove_label_ids and label_ids are passed' do
+ let(:params) { { label_ids: [label_a.id, label_b.id, label_c.id], remove_label_ids: [label_a.id] } }
+
+ it 'replaces the labels with the ones in label_ids and removes those in remove_label_ids' do
+ issuable.update!(labels: [label_a, label_c])
+ update_issuable(params)
+
+ expect(issuable.label_ids).to contain_exactly(label_b.id, label_c.id)
+ end
+ end
+
+ context 'when add_label_ids and remove_label_ids are passed' do
+ let(:params) { { add_label_ids: [label_c.id], remove_label_ids: [label_a.id] } }
+
+ before do
+ issuable.update!(labels: [label_a])
+ update_issuable(params)
+ end
+
+ it 'adds the passed labels' do
+ expect(issuable.label_ids).to include(label_c.id)
+ end
+
+ it 'removes the passed labels' do
+ expect(issuable.label_ids).not_to include(label_a.id)
+ end
+ end
+
+ context 'when same id is passed as add_label_ids and remove_label_ids' do
+ let(:params) { { add_label_ids: [label_a.id], remove_label_ids: [label_a.id] } }
+
+ context 'for a label assigned to an issue' do
+ it 'removes the label' do
+ issuable.update!(labels: [label_a])
+ update_issuable(params)
+
+ expect(issuable.label_ids).to be_empty
+ end
+ end
+
+ context 'for a label not assigned to an issue' do
+ it 'does not add the label' do
+ expect(issuable.label_ids).to be_empty
+ end
+ end
+ end
+
+ context 'when duplicate label titles are given' do
+ let(:params) { { labels: [label_c.title, label_c.title] } }
+
+ it 'assigns the label once' do
+ update_issuable(params)
+
+ expect(issuable.labels).to contain_exactly(label_c)
+ end
+ end
+
+ context 'when remove_label_ids contains a locked label' do
+ let(:params) { { remove_label_ids: [label_locked.id] } }
+
+ it 'removes locked labels for non-merged issuables' do
+ issuable.update!(labels: [label_a, label_locked])
+ update_issuable(params)
+
+ expect(issuable.label_ids).to contain_exactly(label_a.id)
+ end
+ end
+end
+
RSpec.shared_examples 'keeps issuable labels sorted after update' do
before do
update_issuable(label_ids: [label_b.id])
diff --git a/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
index 0bf8bc4ff04..83a2f3136b4 100644
--- a/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
@@ -1,16 +1,30 @@
# frozen_string_literal: true
-RSpec.shared_examples 'issuable link creation' do
+RSpec.shared_examples 'issuable link creation' do |use_references: true|
+ let(:items_param) { use_references ? :issuable_references : :target_issuable }
+ let(:response_keys) { [:status, :created_references] }
+ let(:already_assigned_error_msg) { "#{issuable_type.capitalize}(s) already assigned" }
+ let(:permission_error_status) { issuable_type == :issue ? 403 : 404 }
+ let(:permission_error_msg) do
+ if issuable_type == :issue
+ "Couldn't link issue. You must have at least the Reporter role in both projects."
+ else
+ no_found_error_msg
+ end
+ end
+
+ let(:no_found_error_msg) do
+ "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL."
+ end
+
describe '#execute' do
subject { described_class.new(issuable, user, params).execute }
- context 'when the reference list is empty' do
- let(:params) do
- { issuable_references: [] }
- end
+ context 'when the items list is empty' do
+ let(:params) { set_params([]) }
it 'returns error' do
- is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
+ is_expected.to eq(message: no_found_error_msg, status: :error, http_status: 404)
end
end
@@ -20,7 +34,7 @@ RSpec.shared_examples 'issuable link creation' do
end
it 'returns error' do
- is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
+ is_expected.to eq(message: no_found_error_msg, status: :error, http_status: 404)
end
it 'no relationship is created' do
@@ -29,16 +43,10 @@ RSpec.shared_examples 'issuable link creation' do
end
context 'when user has no permission to target issuable' do
- let(:params) do
- { issuable_references: [restricted_issuable.to_reference(issuable_parent)] }
- end
+ let(:params) { set_params([restricted_issuable]) }
it 'returns error' do
- if issuable_type == :issue
- is_expected.to eq(message: "Couldn't link #{issuable_type}. You must have at least the Reporter role in both projects.", status: :error, http_status: 403)
- else
- is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
- end
+ is_expected.to eq(message: permission_error_msg, status: :error, http_status: permission_error_status)
end
it 'no relationship is created' do
@@ -47,9 +55,7 @@ RSpec.shared_examples 'issuable link creation' do
end
context 'source and target are the same issuable' do
- let(:params) do
- { issuable_references: [issuable.to_reference] }
- end
+ let(:params) { set_params([issuable]) }
it 'does not create notes' do
expect(SystemNoteService).not_to receive(:relate_issuable)
@@ -63,9 +69,7 @@ RSpec.shared_examples 'issuable link creation' do
end
context 'when there is an issuable to relate' do
- let(:params) do
- { issuable_references: [issuable2.to_reference, issuable3.to_reference(issuable_parent)] }
- end
+ let(:params) { set_params([issuable2, issuable3]) }
it 'creates relationships' do
expect { subject }.to change { issuable_link_class.count }.by(2)
@@ -75,7 +79,7 @@ RSpec.shared_examples 'issuable link creation' do
end
it 'returns success status and created links', :aggregate_failures do
- expect(subject.keys).to match_array([:status, :created_references])
+ expect(subject.keys).to match_array(response_keys)
expect(subject[:status]).to eq(:success)
expect(subject[:created_references].map(&:target_id)).to match_array([issuable2.id, issuable3.id])
end
@@ -98,15 +102,7 @@ RSpec.shared_examples 'issuable link creation' do
end
context 'when reference of any already related issue is present' do
- let(:params) do
- {
- issuable_references: [
- issuable_a.to_reference,
- issuable_b.to_reference
- ],
- link_type: IssueLink::TYPE_RELATES_TO
- }
- end
+ let(:params) { set_params([issuable_a, issuable_b]) }
it 'creates notes only for new relations' do
expect(SystemNoteService).to receive(:relate_issuable).with(issuable, issuable_a, anything)
@@ -118,22 +114,18 @@ RSpec.shared_examples 'issuable link creation' do
end
end
- context 'when there are invalid references' do
- let(:params) do
- { issuable_references: [issuable.to_reference, issuable_a.to_reference] }
- end
-
- it 'creates links only for valid references' do
- expect { subject }.to change { issuable_link_class.count }.by(1)
- end
+ context 'when reference of all related issue are present' do
+ let(:params) { set_params([issuable_b]) }
it 'returns error status' do
- expect(subject).to eq(
- status: :error,
- http_status: 422,
- message: "#{issuable.to_reference} cannot be added: cannot be related to itself"
- )
+ expect(subject).to eq(status: :error, http_status: 409, message: already_assigned_error_msg)
end
end
end
+
+ def set_params(items)
+ items_list = items_param == :issuable_references ? items.map { |item| item.to_reference(issuable_parent) } : items
+
+ { items_param => items_list }
+ end
end
diff --git a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
index bdb01b12607..9b2e038a331 100644
--- a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
+++ b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
@@ -124,27 +124,6 @@ RSpec.shared_examples 'valid dashboard cloning process' do |dashboard_template,
end
end
-RSpec.shared_examples 'valid dashboard update process' do
- let(:dashboard_attrs) do
- {
- commit_message: commit_message,
- branch_name: branch,
- start_branch: project.default_branch,
- encoding: 'text',
- file_path: ".gitlab/dashboards/#{file_name}",
- file_content: ::PerformanceMonitoring::PrometheusDashboard.from_json(file_content_hash).to_yaml
- }
- end
-
- it 'delegates commit creation to Files::UpdateService', :aggregate_failures do
- service_instance = instance_double(::Files::UpdateService)
- expect(::Files::UpdateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
- expect(service_instance).to receive(:execute).and_return(status: :success)
-
- service_call
- end
-end
-
RSpec.shared_examples 'misconfigured dashboard service response with stepable' do |status_code, message = nil|
it 'returns an appropriate message and status code', :aggregate_failures do
result = service_call
diff --git a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
index 11a786fdefb..6f0fd1aa4ed 100644
--- a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
+++ b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
@@ -9,6 +9,8 @@ RSpec.shared_examples 'updating the namespace package setting attributes' do |to
.and change { namespace.package_settings.reload.maven_duplicate_exception_regex }.from(from[:maven_duplicate_exception_regex]).to(to[:maven_duplicate_exception_regex])
.and change { namespace.package_settings.reload.generic_duplicates_allowed }.from(from[:generic_duplicates_allowed]).to(to[:generic_duplicates_allowed])
.and change { namespace.package_settings.reload.generic_duplicate_exception_regex }.from(from[:generic_duplicate_exception_regex]).to(to[:generic_duplicate_exception_regex])
+ .and change { namespace.package_settings.reload.nuget_duplicates_allowed }.from(from[:nuget_duplicates_allowed]).to(to[:nuget_duplicates_allowed])
+ .and change { namespace.package_settings.reload.nuget_duplicate_exception_regex }.from(from[:nuget_duplicate_exception_regex]).to(to[:nuget_duplicate_exception_regex])
end
end
@@ -30,6 +32,8 @@ RSpec.shared_examples 'creating the namespace package setting' do
expect(namespace.package_setting_relation.maven_duplicate_exception_regex).to eq(package_settings[:maven_duplicate_exception_regex])
expect(namespace.package_setting_relation.generic_duplicates_allowed).to eq(package_settings[:generic_duplicates_allowed])
expect(namespace.package_setting_relation.generic_duplicate_exception_regex).to eq(package_settings[:generic_duplicate_exception_regex])
+ expect(namespace.package_setting_relation.nuget_duplicates_allowed).to eq(package_settings[:nuget_duplicates_allowed])
+ expect(namespace.package_setting_relation.nuget_duplicate_exception_regex).to eq(package_settings[:nuget_duplicate_exception_regex])
end
it_behaves_like 'returning a success'
diff --git a/spec/support/shared_examples/services/notification_service_shared_examples.rb b/spec/support/shared_examples/services/notification_service_shared_examples.rb
index cfd674e3c43..df1ae67a590 100644
--- a/spec/support/shared_examples/services/notification_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/notification_service_shared_examples.rb
@@ -8,16 +8,16 @@ RSpec.shared_examples 'project emails are disabled' do |check_delivery_jobs_queu
before do
reset_delivered_emails!
- target_project.clear_memoization(:emails_disabled)
+ target_project.project_setting.clear_memoization(:emails_enabled?)
end
it 'sends no emails with project emails disabled' do
- target_project.update_attribute(:emails_disabled, true)
+ target_project.project_setting.update_attribute(:emails_enabled, false)
notification_trigger
if check_delivery_jobs_queue
- # Only check enqueud jobs, not delivered emails
+ # Only check enqueued jobs, not delivered emails
expect_no_delivery_jobs
else
# Deprecated: Check actual delivered emails
@@ -26,12 +26,12 @@ RSpec.shared_examples 'project emails are disabled' do |check_delivery_jobs_queu
end
it 'sends emails to someone' do
- target_project.update_attribute(:emails_disabled, false)
+ target_project.project_setting.update_attribute(:emails_enabled, true)
notification_trigger
if check_delivery_jobs_queue
- # Only check enqueud jobs, not delivered emails
+ # Only check enqueued jobs, not delivered emails
expect_any_delivery_jobs
else
# Deprecated: Check actual delivered emails
diff --git a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
index 21dc3c2bf70..fd2c7455c5f 100644
--- a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
@@ -111,11 +111,14 @@ RSpec.shared_examples_for 'services security ci configuration create service' do
YAML
end
- it 'fails with error' do
+ it 'returns a ServiceResponse error' do
expect(project).to receive(:ci_config_for).and_return(unsupported_yaml)
- expect { result }.to raise_error(Gitlab::Graphql::Errors::MutationError, Gitlab::Utils::ErrorMessage.to_user_facing(
- _(".gitlab-ci.yml with aliases/anchors is not supported. Please change the CI configuration manually.")))
+ expect(result).to be_kind_of(ServiceResponse)
+ expect(result.status).to eq(:error)
+ expect(result.message).to eq(
+ _(".gitlab-ci.yml with aliases/anchors is not supported. Please change the CI configuration manually.")
+ )
end
end
@@ -133,11 +136,13 @@ RSpec.shared_examples_for 'services security ci configuration create service' do
YAML
end
- it 'fails with error' do
+ it 'returns a ServiceResponse error' do
expect(project).to receive(:ci_config_for).and_return(invalid_yaml)
expect(YAML).to receive(:safe_load).and_raise(Psych::Exception)
- expect { result }.to raise_error(Gitlab::Graphql::Errors::MutationError, /merge request creation mutation failed/)
+ expect(result).to be_kind_of(ServiceResponse)
+ expect(result.status).to eq(:error)
+ expect(result.message).to match(/merge request creation failed/)
end
end
@@ -166,14 +171,13 @@ RSpec.shared_examples_for 'services security ci configuration create service' do
let(:params) { nil }
let_it_be(:project) { create(:project_empty_repo) }
- it 'returns an error' do
- expect { result }.to raise_error { |error|
- expect(error).to be_a(Gitlab::Graphql::Errors::MutationError)
- expect(error.message).to eq('UF You must <a target="_blank" rel="noopener noreferrer" ' \
- 'href="http://localhost/help/user/project/repository/index.md' \
- '#add-files-to-a-repository">add at least one file to the repository' \
- '</a> before using Security features.')
- }
+ it 'returns a ServiceResponse error' do
+ expect(result).to be_kind_of(ServiceResponse)
+ expect(result.status).to eq(:error)
+ expect(result.message).to eq('You must <a target="_blank" rel="noopener noreferrer" ' \
+ 'href="http://localhost/help/user/project/repository/index.md' \
+ '#add-files-to-a-repository">add at least one file to the repository' \
+ '</a> before using Security features.')
end
end
end
diff --git a/spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb b/spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb
index 4655585a092..83119046377 100644
--- a/spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb
+++ b/spec/support/shared_examples/usage_data_counters/work_item_activity_unique_counter_shared_examples.rb
@@ -1,41 +1,27 @@
# frozen_string_literal: true
-RSpec.shared_examples 'counter that does not track the event' do
- it 'does not track the event' do
- expect { 3.times { track_event } }.to not_change {
+RSpec.shared_examples 'work item unique counter' do
+ it 'tracks a unique event only once' do
+ expect { 3.times { track_event } }.to change {
Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
event_names: event_name,
start_date: 2.weeks.ago,
end_date: 2.weeks.from_now
)
- }
+ }.by(1)
end
-end
-RSpec.shared_examples 'work item unique counter' do
- context 'when track_work_items_activity FF is enabled' do
- it 'tracks a unique event only once' do
- expect { 3.times { track_event } }.to change {
+ context 'when author is nil' do
+ let(:user) { nil }
+
+ it 'does not track the event' do
+ expect { 3.times { track_event } }.to not_change {
Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
event_names: event_name,
start_date: 2.weeks.ago,
end_date: 2.weeks.from_now
)
- }.by(1)
+ }
end
-
- context 'when author is nil' do
- let(:user) { nil }
-
- it_behaves_like 'counter that does not track the event'
- end
- end
-
- context 'when track_work_items_activity FF is disabled' do
- before do
- stub_feature_flags(track_work_items_activity: false)
- end
-
- it_behaves_like 'counter that does not track the event'
end
end
diff --git a/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb b/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
index b75aa27b2b7..d61458db3b3 100644
--- a/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
+++ b/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
@@ -3,7 +3,7 @@
RSpec.shared_examples 'work item hierarchy restrictions importer' do
shared_examples_for 'adds restrictions' do
it "adds all restrictions if they don't exist" do
- expect { subject }.to change { WorkItems::HierarchyRestriction.count }.from(0).to(4)
+ expect { subject }.to change { WorkItems::HierarchyRestriction.count }.from(0).to(7)
end
end
@@ -53,7 +53,7 @@ RSpec.shared_examples 'work item hierarchy restrictions importer' do
expect { subject }.to make_queries_matching(/INSERT/, 1).and(
change { WorkItems::HierarchyRestriction.count }.by(1)
)
- expect(WorkItems::HierarchyRestriction.count).to eq(4)
+ expect(WorkItems::HierarchyRestriction.count).to eq(7)
end
end
end
diff --git a/spec/support_specs/helpers/stub_feature_flags_spec.rb b/spec/support_specs/helpers/stub_feature_flags_spec.rb
index a59d8a20a40..f90b4c9f50d 100644
--- a/spec/support_specs/helpers/stub_feature_flags_spec.rb
+++ b/spec/support_specs/helpers/stub_feature_flags_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe StubFeatureFlags do
# We inject dummy feature flag defintion
# to ensure that we strong validate it's usage
# as well
- before(:all) do
+ before_all do
Feature::Definition.definitions[dummy_feature_flag] = dummy_definition
end
@@ -154,6 +154,7 @@ RSpec.describe StubFeatureFlags do
it { expect(let_it_be_var).to eq true }
end
+ # rubocop: disable RSpec/BeforeAll
context 'before_all variable' do
before_all do
@suite_var = Feature.enabled?(dummy_feature_flag)
@@ -169,6 +170,7 @@ RSpec.describe StubFeatureFlags do
it { expect(@suite_var).to eq true }
end
+ # rubocop: enable RSpec/BeforeAll
context 'with stub_feature_flags meta' do
let(:var) { Feature.enabled?(dummy_feature_flag) }
diff --git a/spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb b/spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb
new file mode 100644
index 00000000000..14196ce4c5d
--- /dev/null
+++ b/spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+require_relative '../../../../lib/tasks/gitlab/audit_event_types/check_docs_task'
+require_relative '../../../../lib/tasks/gitlab/audit_event_types/compile_docs_task'
+
+RSpec.describe 'gitlab:audit_event_types rake tasks', :silence_stdout, feature_category: :audit_events do
+ before do
+ Rake.application.rake_require('tasks/gitlab/audit_event_types/audit_event_types')
+ stub_env('VERBOSE' => 'true')
+ Gitlab::Audit::Type::Definition.clear_memoization(:definitions)
+ end
+
+ describe 'compile_docs' do
+ it 'invokes Gitlab::AuditEventTypes::CompileDocsTask with correct arguments' do
+ compile_docs_task = instance_double(Tasks::Gitlab::AuditEventTypes::CompileDocsTask)
+
+ expect(Tasks::Gitlab::AuditEventTypes::CompileDocsTask).to receive(:new).with(
+ Rails.root.join("doc/administration/audit_event_streaming"),
+ Rails.root.join("doc/administration/audit_event_streaming/audit_event_types.md"),
+ Rails.root.join("tooling/audit_events/docs/templates/audit_event_types.md.erb")).and_return(compile_docs_task)
+
+ expect(compile_docs_task).to receive(:run)
+
+ run_rake_task('gitlab:audit_event_types:compile_docs')
+ end
+ end
+
+ describe 'check_docs' do
+ it 'invokes Gitlab::AuditEventTypes::CheckDocsTask with correct arguments' do
+ check_docs_task = instance_double(Tasks::Gitlab::AuditEventTypes::CheckDocsTask)
+
+ expect(Tasks::Gitlab::AuditEventTypes::CheckDocsTask).to receive(:new).with(
+ Rails.root.join("doc/administration/audit_event_streaming"),
+ Rails.root.join("doc/administration/audit_event_streaming/audit_event_types.md"),
+ Rails.root.join("tooling/audit_events/docs/templates/audit_event_types.md.erb")).and_return(check_docs_task)
+
+ expect(check_docs_task).to receive(:run)
+
+ run_rake_task('gitlab:audit_event_types:check_docs')
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb b/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb
new file mode 100644
index 00000000000..5dd7599696b
--- /dev/null
+++ b/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative '../../../../lib/tasks/gitlab/audit_event_types/check_docs_task'
+require_relative '../../../../lib/tasks/gitlab/audit_event_types/compile_docs_task'
+
+RSpec.describe Tasks::Gitlab::AuditEventTypes::CheckDocsTask, feature_category: :audit_events do
+ let_it_be(:docs_dir) { Rails.root.join("tmp/tests/doc/administration/audit_event_streaming") }
+ let_it_be(:docs_path) { Rails.root.join(docs_dir, 'audit_event_types.md') }
+ let_it_be(:template_erb_path) { Rails.root.join("tooling/audit_events/docs/templates/audit_event_types.md.erb") }
+
+ subject(:check_docs_task) { described_class.new(docs_dir, docs_path, template_erb_path) }
+
+ describe '#run' do
+ before do
+ Gitlab::Audit::Type::Definition.clear_memoization(:definitions)
+ Tasks::Gitlab::AuditEventTypes::CompileDocsTask.new(docs_dir, docs_path, template_erb_path).run
+ end
+
+ context 'when audit_event_types.md is up to date' do
+ it 'outputs success message after checking the documentation' do
+ expect { subject.run }.to output("Audit event types documentation is up to date.\n").to_stdout
+ end
+ end
+
+ context 'when audit_event_types.md is updated manually' do
+ before do
+ File.write(docs_path, "Manually adding this line at the end of the audit_event_types.md", mode: 'a+')
+ end
+
+ it 'raises an error' do
+ expected_output = "Audit event types documentation is outdated! Please update it " \
+ "by running `bundle exec rake gitlab:audit_event_types:compile_docs`"
+
+ expect { subject.run }.to raise_error(SystemExit).and output(/#{expected_output}/).to_stdout
+ end
+ end
+
+ context 'when an existing audit event type is removed' do
+ let_it_be(:updated_definition) { Gitlab::Audit::Type::Definition.definitions.except(:feature_flag_created) }
+
+ it 'raises an error' do
+ expect(Gitlab::Audit::Type::Definition).to receive(:definitions).and_return(updated_definition)
+
+ expected_output = "Audit event types documentation is outdated! Please update it " \
+ "by running `bundle exec rake gitlab:audit_event_types:compile_docs`"
+
+ expect { subject.run }.to raise_error(SystemExit).and output(/#{expected_output}/).to_stdout
+ end
+ end
+
+ context 'when an existing audit event type is updated' do
+ let_it_be(:updated_definition) { Gitlab::Audit::Type::Definition.definitions }
+
+ it 'raises an error' do
+ updated_definition[:feature_flag_created].attributes[:streamed] = false
+
+ expect(Gitlab::Audit::Type::Definition).to receive(:definitions).and_return(updated_definition)
+
+ expected_output = "Audit event types documentation is outdated! Please update it " \
+ "by running `bundle exec rake gitlab:audit_event_types:compile_docs`"
+
+ expect { subject.run }.to raise_error(SystemExit).and output(/#{expected_output}/).to_stdout
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb b/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb
new file mode 100644
index 00000000000..a881d17d3b8
--- /dev/null
+++ b/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative '../../../../lib/tasks/gitlab/audit_event_types/compile_docs_task'
+
+RSpec.describe Tasks::Gitlab::AuditEventTypes::CompileDocsTask, feature_category: :audit_events do
+ let_it_be(:docs_dir) { Rails.root.join("tmp/tests/doc/administration/audit_event_streaming") }
+ let_it_be(:docs_path) { Rails.root.join(docs_dir, 'audit_event_types.md') }
+ let_it_be(:template_erb_path) { Rails.root.join("tooling/audit_events/docs/templates/audit_event_types.md.erb") }
+
+ subject(:compile_docs_task) { described_class.new(docs_dir, docs_path, template_erb_path) }
+
+ describe '#run' do
+ it 'outputs message after compiling the documentation' do
+ expect { subject.run }.to output("Documentation compiled.\n").to_stdout
+ end
+
+ it 'creates audit_event_types.md', :aggregate_failures do
+ FileUtils.rm_f(docs_path)
+
+ expect { File.read(docs_path) }.to raise_error(Errno::ENOENT)
+
+ subject.run
+
+ expect(File.read(docs_path).size).not_to eq(0)
+ expect(File.read(docs_path)).to match(/This documentation is auto generated by a Rake task/)
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index d534e59d8a6..04634af12a8 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
%w[db repositories]
end
- before(:all) do
+ before(:all) do # rubocop:disable RSpec/BeforeAll
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/backup'
Rake.application.rake_require 'tasks/gitlab/shell'
@@ -578,7 +578,8 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
anything,
max_parallelism: 5,
storage_parallelism: 2,
- incremental: false
+ incremental: false,
+ server_side: false
).and_call_original
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
diff --git a/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb b/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
index ed6b5914f3e..37ae0d694eb 100644
--- a/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'gitlab:ci_secure_files', feature_category: :mobile_devops do
let(:logger) { instance_double(Logger) }
let(:helper) { double }
- before(:all) do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/ci_secure_files/migrate'
end
diff --git a/spec/tasks/gitlab/container_registry_rake_spec.rb b/spec/tasks/gitlab/container_registry_rake_spec.rb
index f19e93fc6cb..f4bd8560cd0 100644
--- a/spec/tasks/gitlab/container_registry_rake_spec.rb
+++ b/spec/tasks/gitlab/container_registry_rake_spec.rb
@@ -5,7 +5,7 @@ require 'rake_helper'
RSpec.describe 'gitlab:container_registry namespace rake tasks', :silence_stdout do
let_it_be(:api_url) { 'http://registry.gitlab' }
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/container_registry'
end
diff --git a/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb b/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb
new file mode 100644
index 00000000000..a5dd7c0ff09
--- /dev/null
+++ b/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:db:cells:bump_cell_sequences', :silence_stdout,
+ :suppress_gitlab_schemas_validate_connection, feature_category: :cell, query_analyzers: false do
+ before_all do
+ Rake.application.rake_require 'tasks/gitlab/db/cells/bump_cell_sequences'
+
+ # empty task as env is already loaded
+ Rake::Task.define_task :environment
+ end
+
+ let(:main_sequence_name) { 'users_id_seq' }
+ let(:main_cell_sequence_name) { 'namespaces_id_seq' }
+
+ # This is just to make sure that all of the sequences start with `is_called=True`
+ # which means that the next call to nextval() is going to increment the sequence.
+ # To give predictable test results.
+ before do
+ ApplicationRecord.connection.select_value("select nextval($1)", nil, [main_cell_sequence_name])
+ end
+
+ context 'when run in production environment' do
+ let(:expected_error_message) do
+ <<~HEREDOC
+ This rake task cannot be run in production environment
+ HEREDOC
+ end
+
+ it 'will print error message and exit' do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+
+ expect do
+ run_rake_task('gitlab:db:cells:bump_cell_sequences', '10')
+ end.to raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ .and output(expected_error_message).to_stdout
+ end
+ end
+
+ context 'when passing wrong argument' do
+ let(:expected_error_message) do
+ <<~HEREDOC
+ Please specify a positive integer `increase_by` value
+ Example: rake gitlab:db:cells:bump_cell_sequences[100000]
+ HEREDOC
+ end
+
+ it 'will print an error message and exit when passing no argument' do
+ expect do
+ run_rake_task('gitlab:db:cells:bump_cell_sequences')
+ end.to raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ .and output(expected_error_message).to_stdout
+ end
+
+ it 'will print an error message and exit when passing a non positive integer value' do
+ expect do
+ run_rake_task('gitlab:db:cells:bump_cell_sequences', '-5')
+ end.to raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ .and output(expected_error_message).to_stdout
+ end
+ end
+
+ context 'when bumping the sequences' do
+ it 'increments the sequence of the tables in the given schema, but not in other schemas' do
+ expect do
+ run_rake_task('gitlab:db:cells:bump_cell_sequences', '10')
+ end.to change {
+ last_value_of_sequence(ApplicationRecord.connection, main_sequence_name)
+ }.by(0)
+ .and change {
+ last_value_of_sequence(ApplicationRecord.connection, main_cell_sequence_name)
+ }.by(11) # the +1 is because the sequence has is_called = true
+ end
+ end
+end
+
+def last_value_of_sequence(connection, sequence_name)
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/408220') do
+ connection.select_value("select last_value from #{sequence_name}")
+ end
+end
diff --git a/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
index 4c161faf733..5116ee5663e 100644
--- a/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'gitlab:db:decomposition:connection_status', feature_category: :c
subject { run_rake_task('gitlab:db:decomposition:connection_status') }
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/db/decomposition/connection_status'
end
diff --git a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
index 5a9d44221ba..f923d09bdaa 100644
--- a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
@@ -4,7 +4,7 @@ require 'rake_helper'
RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_stdout,
:suppress_gitlab_schemas_validate_connection, feature_category: :cell, query_analyzers: false do
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/db/decomposition/rollback/bump_ci_sequences'
# empty task as env is already loaded
diff --git a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
index 90612bcf9f7..069f5dc7d84 100644
--- a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
+++ b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:db:lock_writes', :reestablished_active_record_base, feature_category: :cell do
- before :all do
+ before_all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
diff --git a/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb b/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
index 3ff07698ad4..41d77d6efc7 100644
--- a/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
+++ b/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'migration_fix_15_11', :reestablished_active_record_base, feature
let(:target_init_schema) { '20220314184009' }
let(:earlier_init_schema) { '20210101010101' }
- before :all do
+ before_all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/db/migration_fix_15_11'
diff --git a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
index 301da891244..78d2bcba8a2 100644
--- a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
+++ b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablish
let(:test_gitlab_main_table) { '_test_gitlab_main_table' }
let(:test_gitlab_ci_table) { '_test_gitlab_ci_table' }
- before :all do
+ before_all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
diff --git a/spec/tasks/gitlab/db/validate_config_rake_spec.rb b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
index 94808232d7e..e2e1cf249f0 100644
--- a/spec/tasks/gitlab/db/validate_config_rake_spec.rb
+++ b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_sc
# which would not be cleaned either by `DbCleaner`
self.use_transactional_tests = false
- before :all do
+ before_all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 11c541ddfed..344429dc6ec 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'rake'
RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_category: :database do
- before :all do
+ before(:all) do # rubocop:disable RSpec/BeforeAll
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db'
diff --git a/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb b/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
index edd56f1667f..e1504a8aaf5 100644
--- a/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:dependency_proxy namespace rake task', :silence_stdout do
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/dependency_proxy/migrate'
end
diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb
index a161f33373d..7eca2773cf2 100644
--- a/spec/tasks/gitlab/gitaly_rake_spec.rb
+++ b/spec/tasks/gitlab/gitaly_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/gitaly'
end
diff --git a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
index bc3113c2926..09c95783867 100644
--- a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:lfs namespace rake task', :silence_stdout do
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/lfs/migrate'
end
diff --git a/spec/tasks/gitlab/packages/migrate_rake_spec.rb b/spec/tasks/gitlab/packages/migrate_rake_spec.rb
index bf34034ee57..be69990a745 100644
--- a/spec/tasks/gitlab/packages/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/packages/migrate_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:packages namespace rake task', :silence_stdout do
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/packages/migrate'
end
diff --git a/spec/tasks/gitlab/snippets_rake_spec.rb b/spec/tasks/gitlab/snippets_rake_spec.rb
index c50b04b4600..f0ba5ac2d92 100644
--- a/spec/tasks/gitlab/snippets_rake_spec.rb
+++ b/spec/tasks/gitlab/snippets_rake_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'gitlab:snippets namespace rake task', :silence_stdout do
let(:non_migrated) { create_list(:personal_snippet, 3, author: user) }
let(:non_migrated_ids) { non_migrated.pluck(:id) }
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/snippets'
end
diff --git a/spec/tasks/gitlab/terraform/migrate_rake_spec.rb b/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
index 8d911010a2e..0547d351065 100644
--- a/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'gitlab:terraform_states', :silence_stdout do
let(:logger) { instance_double(Logger) }
let(:helper) { double }
- before(:all) do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/terraform/migrate'
end
diff --git a/spec/tasks/gitlab/user_management_rake_spec.rb b/spec/tasks/gitlab/user_management_rake_spec.rb
index b13b004aaa4..e8de4511c1d 100644
--- a/spec/tasks/gitlab/user_management_rake_spec.rb
+++ b/spec/tasks/gitlab/user_management_rake_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-RSpec.describe 'gitlab:user_management tasks', :silence_stdout do
+RSpec.describe 'gitlab:user_management tasks', :silence_stdout, feature_category: :groups_and_projects do
before do
Rake.application.rake_require 'tasks/gitlab/user_management'
end
@@ -18,7 +18,7 @@ RSpec.describe 'gitlab:user_management tasks', :silence_stdout do
context 'with users' do
let(:user_1) { create(:user, projects_limit: 10, can_create_group: true) }
- let(:user_2) { create(:user, projects_limit: 10, can_create_group: true) }
+ let(:user_2) { create(:user, :blocked, projects_limit: 10, can_create_group: true) }
let(:user_other) { create(:user, projects_limit: 10, can_create_group: true) }
shared_examples 'updates proper users' do
@@ -78,6 +78,19 @@ RSpec.describe 'gitlab:user_management tasks', :silence_stdout do
it_behaves_like 'updates proper users'
end
+
+ context 'when updated rows do not match the member count' do
+ before do
+ group.add_developer(user_1)
+ group.add_developer(user_2)
+
+ allow(User).to receive_message_chain(:where, :update_all).and_return(1)
+ end
+
+ it 'returns an error message' do
+ expect { run_rake }.to output(/.*Something went wrong.*/).to_stdout
+ end
+ end
end
end
end
diff --git a/spec/tasks/gitlab/workhorse_rake_spec.rb b/spec/tasks/gitlab/workhorse_rake_spec.rb
index 4255e16b0e4..17f3133ecdc 100644
--- a/spec/tasks/gitlab/workhorse_rake_spec.rb
+++ b/spec/tasks/gitlab/workhorse_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:workhorse namespace rake task', :silence_stdout, feature_category: :source_code_management do
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/workhorse'
end
diff --git a/spec/tasks/gitlab/x509/update_rake_spec.rb b/spec/tasks/gitlab/x509/update_rake_spec.rb
index dca4f07cda7..abf8316d978 100644
--- a/spec/tasks/gitlab/x509/update_rake_spec.rb
+++ b/spec/tasks/gitlab/x509/update_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:x509 namespace rake task', :silence_stdout do
- before :all do
+ before_all do
Rake.application.rake_require 'tasks/gitlab/x509/update'
end
diff --git a/spec/tasks/migrate/schema_check_rake_spec.rb b/spec/tasks/migrate/schema_check_rake_spec.rb
index ede55f23ba8..5afad752982 100644
--- a/spec/tasks/migrate/schema_check_rake_spec.rb
+++ b/spec/tasks/migrate/schema_check_rake_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'schema_version_check rake task', :silence_stdout do
include StubENV
let(:valid_schema_version) { 20211004170422 }
- before :all do
+ before_all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/migrate/schema_check'
diff --git a/spec/tooling/danger/bulk_database_actions_spec.rb b/spec/tooling/danger/bulk_database_actions_spec.rb
new file mode 100644
index 00000000000..620b4ac2b18
--- /dev/null
+++ b/spec/tooling/danger/bulk_database_actions_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'gitlab-dangerfiles'
+require 'danger'
+require 'danger/plugins/internal/helper'
+require 'gitlab/dangerfiles/spec_helper'
+require 'rspec-parameterized'
+
+require_relative '../../../tooling/danger/bulk_database_actions'
+require_relative '../../../tooling/danger/project_helper'
+
+RSpec.describe Tooling::Danger::BulkDatabaseActions, feature_category: :tooling do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:fake_project_helper) { instance_double(Tooling::Danger::ProjectHelper) }
+
+ let(:mr_url) { 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1' }
+ let(:doc_link) { described_class::DOCUMENTATION_LINK }
+
+ let(:comment_text) { "\n#{described_class::COMMENT_TEXT}" }
+
+ let(:file_lines) do
+ file_diff.map { |line| line.delete_prefix('+') }
+ end
+
+ before do
+ allow(bulk_database_actions).to receive(:project_helper).and_return(fake_project_helper)
+ allow(bulk_database_actions.project_helper).to receive(:file_lines).and_return(file_lines)
+ allow(bulk_database_actions.helper).to receive(:added_files).and_return([filename])
+ allow(bulk_database_actions.helper).to receive(:changed_lines).with(filename).and_return(file_diff)
+ allow(bulk_database_actions.helper).to receive(:mr_web_url).and_return(mr_url)
+ end
+
+ subject(:bulk_database_actions) { fake_danger.new(helper: fake_helper) }
+
+ shared_examples 'no Danger comment' do
+ it 'does not comment on the bulk update action usage' do
+ expect(bulk_database_actions).not_to receive(:markdown)
+
+ bulk_database_actions.add_comment_for_bulk_database_action_method_usage
+ end
+ end
+
+ describe '#add_comment_for_bulk_database_action_method_usage' do
+ context 'for single line method call' do
+ let(:file_diff) do
+ [
+ "+ def execute",
+ "+ pat_family.active.#{method_call}",
+ "+",
+ "+ ServiceResponse.success",
+ "+ end"
+ ]
+ end
+
+ context 'when file is a non-spec Ruby file' do
+ let(:filename) { 'app/services/personal_access_tokens/revoke_token_family_service.rb' }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:method_call, :expect_comment?) do
+ 'update_all(revoked: true)' | true
+ 'destroy_all' | true
+ 'delete_all' | true
+ 'update(revoked: true)' | true
+ 'delete' | true
+ 'update_two_factor' | false
+ 'delete_keys(key)' | false
+ 'destroy_hook(hook)' | false
+ 'destroy_all_merged' | false
+ 'update_all_mirrors' | false
+ end
+
+ with_them do
+ it "correctly handles potential bulk database action" do
+ if expect_comment?
+ expect(bulk_database_actions).to receive(:markdown).with(comment_text, file: filename, line: 2)
+ else
+ expect(bulk_database_actions).not_to receive(:markdown)
+ end
+
+ bulk_database_actions.add_comment_for_bulk_database_action_method_usage
+ end
+ end
+ end
+
+ context 'for spec directories' do
+ let(:method_call) { 'update_all(revoked: true)' }
+
+ context 'for FOSS spec file' do
+ let(:filename) { 'spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }
+
+ it_behaves_like 'no Danger comment'
+ end
+
+ context 'for EE spec file' do
+ let(:filename) { 'ee/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }
+
+ it_behaves_like 'no Danger comment'
+ end
+
+ context 'for JiHu spec file' do
+ let(:filename) { 'jh/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }
+
+ it_behaves_like 'no Danger comment'
+ end
+ end
+ end
+
+ context 'for strings' do
+ let(:filename) { 'app/services/personal_access_tokens/revoke_token_family_service.rb' }
+ let(:file_diff) do
+ [
+ '+ expect { subject }.to output(',
+ '+ "ERROR: Could not update tag"',
+ '+ ).to_stderr'
+ ]
+ end
+
+ it_behaves_like 'no Danger comment'
+ end
+ end
+end
diff --git a/spec/tooling/danger/database_spec.rb b/spec/tooling/danger/database_spec.rb
index ddcfa279dc3..a342014cf6b 100644
--- a/spec/tooling/danger/database_spec.rb
+++ b/spec/tooling/danger/database_spec.rb
@@ -1,5 +1,6 @@
# frozen_string_literal: true
+require 'rspec-parameterized'
require 'gitlab-dangerfiles'
require 'danger'
require 'danger/plugins/internal/helper'
@@ -41,11 +42,98 @@ RSpec.describe Tooling::Danger::Database, feature_category: :tooling do
let(:cutoff) { Date.parse('2022-10-01') - 21 }
- subject(:database) { fake_danger.new }
+ subject(:database) { fake_danger.new(helper: fake_helper) }
describe '#find_migration_files_before' do
it 'returns migrations that are before the cutoff' do
expect(database.find_migration_files_before(migration_files, cutoff).length).to eq(8)
end
end
+
+ describe '#changes' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ 'with database changes to a migration file' => {
+ modified_files: %w[
+ db/migrate/20230720114001_test_migration.rb
+ db/schema_migrations/20230720114001
+ db/structure.sql
+ app/models/test.rb
+ ],
+ changed_lines: [],
+ changes_by_category: {
+ database: %w[
+ db/migrate/20230720114001_test_migration.rb
+ db/schema_migrations/20230720114001
+ db/structure.sql
+ ]
+ },
+ impacted_files: %w[
+ db/migrate/20230720114001_test_migration.rb
+ db/schema_migrations/20230720114001
+ db/structure.sql
+ ]
+ },
+ 'with non-database changes' => {
+ modified_files: %w[
+ app/models/test.rb
+ ],
+ changed_lines: %w[
+ +# Comment explaining scope :blah
+ ],
+ changes_by_category: {
+ database: []
+ },
+ impacted_files: []
+ },
+ 'with database changes in a doc' => {
+ modified_files: %w[doc/development/database/test.md],
+ changed_lines: [
+ '+scope :blah, ->() { where(hidden: false) }'
+ ],
+ changes_by_category: {
+ database: []
+ },
+ impacted_files: []
+ },
+ 'with database changes in a model' => {
+ modified_files: %w[app/models/test.rb],
+ changed_lines: [
+ '+# Comment explaining scope :blah',
+ '+scope :blah, ->() { where(hidden: false) }'
+ ],
+ changes_by_category: {
+ database: []
+ },
+ impacted_files: %w[app/models/test.rb]
+ },
+ 'with database changes in a concern' => {
+ modified_files: %w[app/models/concerns/test.rb],
+ changed_lines: [
+ '- .where(hidden: false)',
+ '+ .where(hidden: true)'
+ ],
+ changes_by_category: {
+ database: []
+ },
+ impacted_files: %w[app/models/concerns/test.rb]
+ }
+ }
+ end
+
+ with_them do
+ before do
+ allow(fake_helper).to receive(:modified_files).and_return(modified_files)
+ allow(fake_helper).to receive(:all_changed_files).and_return(modified_files)
+ allow(fake_helper).to receive(:changed_lines).and_return(changed_lines)
+ allow(fake_helper).to receive(:changes_by_category).and_return(changes_by_category)
+ end
+
+ it 'returns database changes' do
+ expect(database.changes).to match impacted_files
+ end
+ end
+ end
end
diff --git a/spec/tooling/danger/model_validations_spec.rb b/spec/tooling/danger/model_validations_spec.rb
new file mode 100644
index 00000000000..18ff4b83b6e
--- /dev/null
+++ b/spec/tooling/danger/model_validations_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'gitlab-dangerfiles'
+require 'danger'
+require 'danger/plugins/internal/helper'
+require 'gitlab/dangerfiles/spec_helper'
+
+require_relative '../../../tooling/danger/model_validations'
+require_relative '../../../tooling/danger/project_helper'
+
+RSpec.describe Tooling::Danger::ModelValidations, feature_category: :tooling do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:fake_project_helper) { instance_double(Tooling::Danger::ProjectHelper) }
+
+ subject(:model_validations) { fake_danger.new(helper: fake_helper) }
+
+ before do
+ allow(model_validations).to receive(:project_helper).and_return(fake_project_helper)
+ end
+
+ describe '#add_comment_for_added_validations' do
+ let(:file_lines) { file_diff.map { |line| line.delete_prefix('+').delete_prefix('-') } }
+ let(:filename) { 'app/models/user.rb' }
+ let(:added_filename) { 'app/models/user.rb' }
+
+ before do
+ allow(model_validations.project_helper).to receive(:file_lines).and_return(file_lines)
+ allow(model_validations.helper).to receive(:added_files).and_return([added_filename])
+ allow(model_validations.helper).to receive(:modified_files).and_return([filename])
+ allow(model_validations.helper).to receive(:changed_lines).with(filename).and_return(file_diff)
+ end
+
+ context 'when model has a newly added validation' do
+ let(:file_diff) do
+ [
+ "+ scope :admins, -> { where(admin: true) }",
+ "+ validates :name, presence: true, length: { maximum: 255 }",
+ "+ validates_with UserValidator",
+ "+ validate :check_password_weakness",
+ "+ validates_each :restricted_visibility_levels do |record, attr, value|",
+ "+ validates_associated :members",
+ "+ with_options if: :is_admin? do |admin|",
+ "+ admin.validates :password, length: { minimum: 10 }",
+ "+ admin.validates :email, presence: true",
+ "+ end",
+ "+ with_options if: :is_admin? { |admin| admin.validates :email, presence: true }",
+ "- validates :first_name, length: { maximum: 127 }"
+ ]
+ end
+
+ it 'adds suggestions at the correct line' do
+ suggested_line = "\n#{described_class::SUGGEST_MR_COMMENT.chomp}"
+
+ matching_line_numbers = [*2..6, 8, 9, 11]
+ matching_line_numbers.each do |line_number|
+ expect(model_validations).to receive(:markdown).with(suggested_line, file: filename, line: line_number)
+ end
+
+ model_validations.add_comment_for_added_validations
+ end
+ end
+
+ context 'when model does not have a newly added validation' do
+ let(:file_diff) do
+ [
+ "+ scope :admins, -> { where(admin: true) }",
+ "- validates :first_name, length: { maximum: 127 }"
+ ]
+ end
+
+ it 'does not add suggestion' do
+ expect(model_validations).not_to receive(:markdown)
+
+ model_validations.add_comment_for_added_validations
+ end
+ end
+ end
+
+ describe '#changed_model_files' do
+ let(:expected_files) do
+ %w[
+ app/models/user.rb
+ app/models/users/user_follow_user.rb
+ ee/app/models/ee/user.rb
+ ee/app/models/sca/license_policy.rb
+ app/models/concerns/presentable.rb
+ ]
+ end
+
+ before do
+ added_files = %w[app/models/user_preferences.rb app/models/concerns/presentable.rb]
+ modified_files = %w[
+ app/models/user.rb
+ app/models/users/user_follow_user.rb
+ ee/app/models/ee/user.rb
+ ee/app/models/sca/license_policy.rb
+ config/metrics/count_7d/new_metric.yml
+ app/assets/index.js
+ ]
+
+ allow(model_validations.helper).to receive(:added_files).and_return(added_files)
+ allow(model_validations.helper).to receive(:modified_files).and_return(modified_files)
+ end
+
+ it 'returns added and modified files' do
+ expect(model_validations.changed_model_files).to match_array(expected_files)
+ end
+ end
+end
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 5ae0a8695eb..28b8b2278d0 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -75,8 +75,6 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/spec/frontend/bar' | [:frontend]
'ee/spec/frontend_integration/bar' | [:frontend]
- '.gitlab/ci/frontend.gitlab-ci.yml' | %i[frontend tooling]
-
'app/models/foo' | [:backend]
'bin/foo' | [:backend]
'config/foo' | [:backend]
@@ -116,57 +114,28 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'Rakefile' | [:backend]
'FOO_VERSION' | [:backend]
- 'scripts/glfm/bar.rb' | [:backend]
- 'scripts/glfm/bar.js' | [:frontend]
- 'scripts/remote_development/run-smoke-test-suite.sh' | [:remote_development_be]
- 'scripts/lib/glfm/bar.rb' | [:backend]
- 'scripts/lib/glfm/bar.js' | [:frontend]
- 'scripts/bar.rb' | [:backend, :tooling]
- 'scripts/bar.js' | [:frontend, :tooling]
- 'scripts/subdir/bar.rb' | [:backend, :tooling]
- 'scripts/subdir/bar.js' | [:frontend, :tooling]
- 'scripts/foo' | [:tooling]
-
- 'Dangerfile' | [:tooling]
- 'danger/bundle_size/Dangerfile' | [:tooling]
- 'ee/danger/bundle_size/Dangerfile' | [:tooling]
- 'danger/bundle_size/' | [:tooling]
- 'ee/danger/bundle_size/' | [:tooling]
- '.gitlab-ci.yml' | [:tooling]
- '.gitlab/ci/cng.gitlab-ci.yml' | [:tooling]
- '.gitlab/ci/ee-specific-checks.gitlab-ci.yml' | [:tooling]
- 'tooling/danger/foo' | [:tooling]
- 'ee/tooling/danger/foo' | [:tooling]
- 'lefthook.yml' | [:tooling]
- '.editorconfig' | [:tooling]
- 'tooling/bin/find_foss_tests' | [:tooling]
- '.codeclimate.yml' | [:tooling]
- '.gitlab/CODEOWNERS' | [:tooling]
- 'gems/gem.gitlab-ci.yml' | [:tooling]
- 'gems/config/rubocop.yml' | [:tooling]
-
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | [:ci_template]
'lib/gitlab/ci/templates/dotNET-Core.yml' | [:ci_template]
'ee/FOO_VERSION' | [:none]
- 'db/schema.rb' | [:database]
- 'db/structure.sql' | [:database]
- 'db/migrate/foo' | [:database, :migration]
- 'db/post_migrate/foo' | [:database, :migration]
- 'ee/db/geo/migrate/foo' | [:database, :migration]
- 'ee/db/geo/post_migrate/foo' | [:database, :migration]
- 'app/models/project_authorization.rb' | [:database, :backend]
- 'app/services/users/refresh_authorized_projects_service.rb' | [:database, :backend]
+ 'db/schema.rb' | [:database]
+ 'db/structure.sql' | [:database]
+ 'db/migrate/foo' | [:database]
+ 'db/post_migrate/foo' | [:database]
+ 'ee/db/geo/migrate/foo' | [:database]
+ 'ee/db/geo/post_migrate/foo' | [:database]
+ 'app/models/project_authorization.rb' | [:database, :backend]
+ 'app/services/users/refresh_authorized_projects_service.rb' | [:database, :backend]
'app/services/authorized_project_update/find_records_due_for_refresh_service.rb' | [:database, :backend]
- 'lib/gitlab/background_migration.rb' | [:database, :backend]
- 'lib/gitlab/background_migration/foo' | [:database, :backend]
- 'ee/lib/gitlab/background_migration/foo' | [:database, :backend]
- 'lib/gitlab/database.rb' | [:database, :backend]
- 'lib/gitlab/database/foo' | [:database, :backend]
- 'ee/lib/gitlab/database/foo' | [:database, :backend]
- 'lib/gitlab/sql/foo' | [:database, :backend]
- 'rubocop/cop/migration/foo' | [:database]
+ 'lib/gitlab/background_migration.rb' | [:database, :backend]
+ 'lib/gitlab/background_migration/foo' | [:database, :backend]
+ 'ee/lib/gitlab/background_migration/foo' | [:database, :backend]
+ 'lib/gitlab/database.rb' | [:database, :backend]
+ 'lib/gitlab/database/foo' | [:database, :backend]
+ 'ee/lib/gitlab/database/foo' | [:database, :backend]
+ 'lib/gitlab/sql/foo' | [:database, :backend]
+ 'rubocop/cop/migration/foo' | [:database]
'db/fixtures/foo.rb' | [:backend]
'ee/db/fixtures/foo.rb' | [:backend]
@@ -281,11 +250,11 @@ RSpec.describe Tooling::Danger::ProjectHelper do
[:backend, :analytics_instrumentation] | '+ count(User.active)' | ['lib/gitlab/usage_data/topology.rb']
[:backend, :analytics_instrumentation] | '+ foo_count(User.active)' | ['lib/gitlab/usage_data.rb']
[:backend] | '+ count(User.active)' | ['user.rb']
- [:import_integrate_be, :database, :migration] | '+ add_column :integrations, :foo, :text' | ['db/migrate/foo.rb']
- [:import_integrate_be, :database, :migration] | '+ create_table :zentao_tracker_data do |t|' | ['ee/db/post_migrate/foo.rb']
- [:import_integrate_be, :backend] | '+ Integrations::Foo' | ['app/foo/bar.rb']
- [:import_integrate_be, :backend] | '+ project.execute_hooks(foo, :bar)' | ['ee/lib/ee/foo.rb']
- [:import_integrate_be, :backend] | '+ project.execute_integrations(foo, :bar)' | ['app/foo.rb']
+ [:import_integrate_be, :database] | '+ add_column :integrations, :foo, :text' | ['db/migrate/foo.rb']
+ [:import_integrate_be, :database] | '+ create_table :zentao_tracker_data do |t|' | ['ee/db/post_migrate/foo.rb']
+ [:import_integrate_be, :backend] | '+ Integrations::Foo' | ['app/foo/bar.rb']
+ [:import_integrate_be, :backend] | '+ project.execute_hooks(foo, :bar)' | ['ee/lib/ee/foo.rb']
+ [:import_integrate_be, :backend] | '+ project.execute_integrations(foo, :bar)' | ['app/foo.rb']
[:frontend, :analytics_instrumentation] | '+ api.trackRedisCounterEvent("foo")' | ['app/assets/javascripts/telemetry.js', 'ee/app/assets/javascripts/mr_widget.vue']
[:frontend, :analytics_instrumentation] | '+ api.trackRedisHllUserEvent("bar")' | ['app/assets/javascripts/telemetry.js', 'ee/app/assets/javascripts/mr_widget.vue']
end
diff --git a/spec/tooling/danger/required_stops_spec.rb b/spec/tooling/danger/required_stops_spec.rb
new file mode 100644
index 00000000000..7a90f19ac09
--- /dev/null
+++ b/spec/tooling/danger/required_stops_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'gitlab-dangerfiles'
+require 'danger'
+require 'danger/plugins/internal/helper'
+require 'gitlab/dangerfiles/spec_helper'
+
+require_relative '../../../tooling/danger/required_stops'
+require_relative '../../../tooling/danger/project_helper'
+
+RSpec.describe Tooling::Danger::RequiredStops, feature_category: :tooling do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:fake_project_helper) { instance_double(Tooling::Danger::ProjectHelper) }
+ let(:warning_comment) { described_class::WARNING_COMMENT.chomp }
+
+ subject(:required_stops) { fake_danger.new(helper: fake_helper) }
+
+ before do
+ allow(required_stops).to receive(:project_helper).and_return(fake_project_helper)
+ end
+
+ describe '#add_comment_for_finalized_migrations' do
+ let(:file_lines) { file_diff.map { |line| line.delete_prefix('+').delete_prefix('-') } }
+
+ before do
+ allow(required_stops.project_helper).to receive(:file_lines).and_return(file_lines)
+ allow(required_stops.helper).to receive(:all_changed_files).and_return([filename])
+ allow(required_stops.helper).to receive(:changed_lines).with(filename).and_return(file_diff)
+ end
+
+ shared_examples "adds comment to added migration finalizations" do
+ context 'when model has a newly added migration finalization' do
+ let(:file_diff) do
+ [
+ "+ def up",
+ "+ finalize_background_migration(MIGRATION)",
+ "+ end",
+ "+ def up",
+ "+ finalize_background_migration('MyMigration')",
+ "+ end",
+ "+ def up",
+ "+ ensure_batched_background_migration_is_finished(",
+ "+ end",
+ "+ def up",
+ "+ ensure_batched_background_migration_is_finished('MyMigration')",
+ "+ end",
+ "+ def up",
+ "+ finalize_batched_background_migration(",
+ "+ end",
+ "+ def up",
+ "+ finalize_batched_background_migration('MyMigration')",
+ "+ end"
+ ]
+ end
+
+ it 'adds comment at the correct line' do
+ matching_line_numbers = [2, 5, 8, 11, 14, 17]
+ matching_line_numbers.each do |line_number|
+ expect(required_stops).to receive(:markdown).with("\n#{warning_comment}", file: filename, line: line_number)
+ end
+
+ required_stops.add_comment_for_finalized_migrations
+ end
+ end
+
+ context 'when model does not have migration finalization statement' do
+ let(:file_diff) do
+ [
+ "+ queue_batched_background_migration(",
+ "- ensure_batched_background_migration_is_finished("
+ ]
+ end
+
+ it 'does not add comment' do
+ expect(required_stops).not_to receive(:markdown)
+
+ required_stops.add_comment_for_finalized_migrations
+ end
+ end
+ end
+
+ context 'when model has a newly added migration finalization' do
+ context 'with regular migration' do
+ let(:filename) { 'db/migrate/my_migration.rb' }
+
+ include_examples 'adds comment to added migration finalizations'
+ end
+
+ context 'with post migration' do
+ let(:filename) { 'db/post_migrate/my_migration.rb' }
+
+ include_examples 'adds comment to added migration finalizations'
+ end
+ end
+ end
+end
diff --git a/spec/tooling/danger/specs/project_factory_suggestion_spec.rb b/spec/tooling/danger/specs/project_factory_suggestion_spec.rb
index 9b10ab1a6f4..b765d5073af 100644
--- a/spec/tooling/danger/specs/project_factory_suggestion_spec.rb
+++ b/spec/tooling/danger/specs/project_factory_suggestion_spec.rb
@@ -18,17 +18,16 @@ RSpec.describe Tooling::Danger::Specs::ProjectFactorySuggestion, feature_categor
%<suggested_line>s
```
- Project creations are very slow. Using `let_it_be`, `build` or `build_stubbed` can improve test performance.
+ Project creations are very slow. To improve test performance, consider using `let_it_be`, `build`, or `build_stubbed` instead.
- Warning: `let_it_be` may not be suitable if your test modifies data as this could result in state leaks!
+ ⚠️ **Warning**: If your test modifies data, `let_it_be` may be unsuitable, and cause state leaks! Use `let_it_be_with_reload` or `let_it_be_with_refind` instead.
- In those cases, please use `let_it_be_with_reload` or `let_it_be_with_refind` instead.
-
- If your are unsure which is the right method to use,
- please refer to [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage)
+ Unsure which method to use? See the [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage)
for background information and alternative options for optimizing factory usage.
- Feel free to ignore this comment if you know `let` or `let!` are the better options and/or worry about causing state leaks.
+ If you're concerned about causing state leaks, or if you know `let` or `let!` are the better options, ignore this comment.
+
+ ([Improve this message?](https://gitlab.com/gitlab-org/gitlab/-/blob/master/tooling/danger/specs/project_factory_suggestion.rb))
MARKDOWN
end
diff --git a/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb b/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb
index 996fe16dc7f..27ab01c0334 100644
--- a/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb
+++ b/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Import::GitlabProjects::RemoteFileValidator, :aggregate_failures do
+RSpec.describe ::Import::GitlabProjects::RemoteFileValidator, :aggregate_failures, feature_category: :importers do
let(:validated_class) do
Class.new do
include ActiveModel::Validations
@@ -20,11 +20,15 @@ RSpec.describe ::Import::GitlabProjects::RemoteFileValidator, :aggregate_failure
end
end
- let(:validated_object) { validated_class.new(content_length: 1.gigabytes, content_type: 'application/gzip') }
+ let(:validated_object) { validated_class.new(content_length: 10.megabytes, content_type: 'application/gzip') }
subject { described_class.new }
- it 'does nothing when the oject is valid' do
+ before do
+ stub_application_setting(max_import_remote_file_size: 100)
+ end
+
+ it 'does nothing when the object is valid' do
subject.validate(validated_object)
expect(validated_object.errors.full_messages).to be_empty
@@ -41,12 +45,24 @@ RSpec.describe ::Import::GitlabProjects::RemoteFileValidator, :aggregate_failure
end
it 'is invalid with file too large' do
- validated_object.content_length = (described_class::FILE_SIZE_LIMIT + 1).gigabytes
+ validated_object.content_length = 200.megabytes
subject.validate(validated_object)
expect(validated_object.errors.full_messages)
- .to include('Content length is too big (should be at most 10 GiB)')
+ .to include('Content length is too big (should be at most 100 MiB)')
+ end
+
+ context 'when max_import_remote_file_size is 0' do
+ it 'does not validate file size' do
+ stub_application_setting(max_import_remote_file_size: 0)
+
+ validated_object.content_length = 200.megabytes
+
+ subject.validate(validated_object)
+
+ expect(validated_object.errors.full_messages).to be_empty
+ end
end
end
diff --git a/spec/views/admin/application_settings/_ai_access.html.haml_spec.rb b/spec/views/admin/application_settings/_ai_access.html.haml_spec.rb
deleted file mode 100644
index e9e640f7cc6..00000000000
--- a/spec/views/admin/application_settings/_ai_access.html.haml_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'admin/application_settings/_ai_access.html.haml', feature_category: :code_suggestions do
- let_it_be(:admin) { build_stubbed(:admin) }
- let(:page) { Capybara::Node::Simple.new(rendered) }
-
- before do
- allow(::Gitlab).to receive(:org_or_com?).and_return(false) # Will not render partial for .com or .org
- assign(:application_setting, application_setting)
- allow(view).to receive(:current_user) { admin }
- allow(view).to receive(:expanded).and_return(true)
- end
-
- context 'when ai_access_token is not set' do
- let(:application_setting) { build(:application_setting) }
-
- it 'renders an empty password field' do
- render
- expect(rendered).to have_field('Personal access token', type: 'password')
- expect(page.find_field('Personal access token').value).to be_blank
- end
- end
-
- context 'when ai_access_token is set' do
- let(:application_setting) do
- build(:application_setting, ai_access_token: 'ai_access_token',
- instance_level_code_suggestions_enabled: true)
- end
-
- it 'renders masked password field' do
- render
- expect(rendered).to have_field('Enter new personal access token', type: 'password')
- expect(page.find_field('Enter new personal access token').value).to eq(ApplicationSettingMaskedAttrs::MASK)
- end
- end
-end
diff --git a/spec/views/admin/application_settings/general.html.haml_spec.rb b/spec/views/admin/application_settings/general.html.haml_spec.rb
index ee518041fbd..3b3a8a675a0 100644
--- a/spec/views/admin/application_settings/general.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/general.html.haml_spec.rb
@@ -114,29 +114,31 @@ RSpec.describe 'admin/application_settings/general.html.haml' do
end
end
- describe 'instance-level code suggestions settings', feature_category: :code_suggestions do
+ # for the licensed tests, refer to ee/spec/views/admin/application_settings/general.html.haml_spec.rb
+ describe 'instance-level code suggestions settings', :without_license, feature_category: :code_suggestions do
before do
allow(::Gitlab).to receive(:org_or_com?).and_return(gitlab_org_or_com?)
render
end
- context 'when on .com or .org' do
- let(:gitlab_org_or_com?) { true }
-
+ shared_examples 'does not render the form' do
it 'does not render the form' do
expect(rendered).not_to have_field('application_setting_instance_level_code_suggestions_enabled')
expect(rendered).not_to have_field('application_setting_ai_access_token')
end
end
+ context 'when on .com or .org' do
+ let(:gitlab_org_or_com?) { true }
+
+ it_behaves_like 'does not render the form'
+ end
+
context 'when not on .com and not on .org' do
let(:gitlab_org_or_com?) { false }
- it 'renders the form' do
- expect(rendered).to have_field('application_setting_instance_level_code_suggestions_enabled')
- expect(rendered).to have_field('application_setting_ai_access_token')
- end
+ it_behaves_like 'does not render the form'
end
end
end
diff --git a/spec/views/devise/registrations/new.html.haml_spec.rb b/spec/views/devise/registrations/new.html.haml_spec.rb
deleted file mode 100644
index 55025424573..00000000000
--- a/spec/views/devise/registrations/new.html.haml_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'devise/registrations/new', feature_category: :user_management do
- describe 'broadcast messaging' do
- before do
- allow(view).to receive(:devise_mapping).and_return(Devise.mappings[:user])
- allow(view).to receive(:resource).and_return(build(:user))
- allow(view).to receive(:resource_name).and_return(:user)
- allow(view).to receive(:registration_path_params).and_return({})
- allow(view).to receive(:glm_tracking_params).and_return({})
- allow(view).to receive(:arkose_labs_enabled?).and_return(true)
- end
-
- it 'does not render the broadcast layout' do
- render
-
- expect(rendered).not_to render_template('layouts/_broadcast')
- end
-
- context 'when SaaS', :saas do
- it 'does not render the broadcast layout' do
- render
-
- expect(rendered).not_to render_template('layouts/_broadcast')
- end
- end
- end
-end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index adfe68824c5..70ca0bb2195 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -102,27 +102,6 @@ RSpec.describe 'devise/sessions/new' do
end
end
- describe 'broadcast messaging' do
- before do
- stub_devise
- disable_captcha
- end
-
- it 'renders the broadcast layout' do
- render
-
- expect(rendered).to render_template('layouts/_broadcast')
- end
-
- context 'when SaaS', :saas do
- it 'does not render the broadcast layout' do
- render
-
- expect(rendered).not_to render_template('layouts/_broadcast')
- end
- end
- end
-
def disable_other_signin_methods
allow(view).to receive(:password_authentication_enabled_for_web?).and_return(false)
allow(view).to receive(:omniauth_enabled?).and_return(false)
diff --git a/spec/views/layouts/application.html.haml_spec.rb b/spec/views/layouts/application.html.haml_spec.rb
index a3613329984..825e295b73d 100644
--- a/spec/views/layouts/application.html.haml_spec.rb
+++ b/spec/views/layouts/application.html.haml_spec.rb
@@ -3,74 +3,91 @@
require 'spec_helper'
RSpec.describe 'layouts/application' do
- let(:user) { create(:user) }
+ context 'when user is signed in' do
+ let(:user) { create(:user) }
- before do
- allow(view).to receive(:current_user).and_return(user)
- allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
- end
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
+ end
- it_behaves_like 'a layout which reflects the application theme setting'
- it_behaves_like 'a layout which reflects the preferred language'
+ it_behaves_like 'a layout which reflects the application theme setting'
+ it_behaves_like 'a layout which reflects the preferred language'
- describe "visual review toolbar" do
- context "ENV['REVIEW_APPS_ENABLED'] is set to true" do
- before do
- stub_env(
- 'REVIEW_APPS_ENABLED' => true,
- 'REVIEW_APPS_MERGE_REQUEST_IID' => '123'
- )
+ describe "visual review toolbar" do
+ context "ENV['REVIEW_APPS_ENABLED'] is set to true" do
+ before do
+ stub_env(
+ 'REVIEW_APPS_ENABLED' => true,
+ 'REVIEW_APPS_MERGE_REQUEST_IID' => '123'
+ )
+ end
+
+ it 'renders the visual review toolbar' do
+ render
+
+ expect(rendered).to include('review-app-toolbar-script')
+ end
end
- it 'renders the visual review toolbar' do
- render
+ context "ENV['REVIEW_APPS_ENABLED'] is set to false" do
+ before do
+ stub_env('REVIEW_APPS_ENABLED', false)
+ end
- expect(rendered).to include('review-app-toolbar-script')
+ it 'does not render the visual review toolbar' do
+ render
+
+ expect(rendered).not_to include('review-app-toolbar-script')
+ end
end
end
- context "ENV['REVIEW_APPS_ENABLED'] is set to false" do
- before do
- stub_env('REVIEW_APPS_ENABLED', false)
+ context 'body data elements for pageview context' do
+ let(:body_data) do
+ {
+ body_data_page: 'projects:issues:show',
+ body_data_page_type_id: '1',
+ body_data_project_id: '2',
+ body_data_namespace_id: '3'
+ }
end
- it 'does not render the visual review toolbar' do
+ before do
+ allow(view).to receive(:body_data).and_return(body_data)
render
-
- expect(rendered).not_to include('review-app-toolbar-script')
end
- end
- end
- context 'body data elements for pageview context' do
- let(:body_data) do
- {
- body_data_page: 'projects:issues:show',
- body_data_page_type_id: '1',
- body_data_project_id: '2',
- body_data_namespace_id: '3'
- }
- end
+ it 'includes the body element page' do
+ expect(rendered).to include('data-page="projects:issues:show"')
+ end
- before do
- allow(view).to receive(:body_data).and_return(body_data)
- render
- end
+ it 'includes the body element page_type_id' do
+ expect(rendered).to include('data-page-type-id="1"')
+ end
- it 'includes the body element page' do
- expect(rendered).to include('data-page="projects:issues:show"')
- end
+ it 'includes the body element project_id' do
+ expect(rendered).to include('data-project-id="2"')
+ end
- it 'includes the body element page_type_id' do
- expect(rendered).to include('data-page-type-id="1"')
+ it 'includes the body element namespace_id' do
+ expect(rendered).to include('data-namespace-id="3"')
+ end
end
+ end
- it 'includes the body element project_id' do
- expect(rendered).to include('data-project-id="2"')
+ context 'when user is not signed in' do
+ before do
+ allow(view).to receive(:current_user).and_return(nil)
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(nil))
+ Feature.enable(:super_sidebar_logged_out)
end
- it 'includes the body element namespace_id' do
- expect(rendered).to include('data-namespace-id="3"')
+ it 'renders the new marketing header for logged-out users' do
+ allow(view).to receive(:render)
+ allow(view).to receive(:render).with({ template: "layouts/application" }, {}).and_call_original
+ render
+ expect(view).to have_received(:render).with({ partial: "layouts/header/super_sidebar_logged_out" })
end
end
end
diff --git a/spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb b/spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb
new file mode 100644
index 00000000000..89a03d72a90
--- /dev/null
+++ b/spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/header/_super_sidebar_logged_out', feature_category: :navigation do
+ before do
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(nil))
+ Feature.enable(:super_sidebar_logged_out)
+ end
+
+ context 'on gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ render
+ end
+
+ it 'renders marketing links' do
+ expect(rendered).to have_content('Why GitLab')
+ expect(rendered).to have_content('Pricing')
+ expect(rendered).to have_content('Contact Sales')
+ end
+ end
+
+ context 'on self-managed' do
+ it 'does not render marketing links' do
+ render
+ expect(rendered).not_to have_content('Why GitLab')
+ expect(rendered).not_to have_content('Pricing')
+ expect(rendered).not_to have_content('Contact Sales')
+ end
+ end
+
+ it 'renders links to Explore and Sign-in and Register' do
+ render
+ expect(rendered).to have_content('Explore')
+ expect(rendered).to have_content('Sign in')
+ expect(rendered).to have_content('Register')
+ end
+end
diff --git a/spec/views/profiles/keys/_key.html.haml_spec.rb b/spec/views/profiles/keys/_key.html.haml_spec.rb
index 4d14ce7c909..9ce6779d3ae 100644
--- a/spec/views/profiles/keys/_key.html.haml_spec.rb
+++ b/spec/views/profiles/keys/_key.html.haml_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'profiles/keys/_key.html.haml', feature_category: :system_access
it 'renders "Unavailable" for last used' do
render
- expect(rendered).to have_text('Last used: Unavailable')
+ expect(rendered).to have_text('Unavailable')
end
end
@@ -62,7 +62,7 @@ RSpec.describe 'profiles/keys/_key.html.haml', feature_category: :system_access
it 'renders "Never" for last used' do
render
- expect(rendered).to have_text('Last used: Never')
+ expect(rendered).to have_text('Never')
end
end
end
@@ -85,11 +85,11 @@ RSpec.describe 'profiles/keys/_key.html.haml', feature_category: :system_access
expect(rendered).to have_text(usage_type_text)
displayed_buttons.each do |button|
- expect(rendered).to have_text(button)
+ expect(rendered).to have_css("button[aria-label=#{button}]")
end
hidden_buttons.each do |button|
- expect(rendered).not_to have_text(button)
+ expect(rendered).not_to have_css("button[aria-label=#{button}]")
end
end
end
@@ -103,17 +103,17 @@ RSpec.describe 'profiles/keys/_key.html.haml', feature_category: :system_access
it 'renders "Never" for expires' do
render
- expect(rendered).to have_text('Expires: Never')
+ expect(rendered).to have_text('Never')
end
end
context 'when the key has expired' do
let_it_be(:key) { create(:personal_key, :expired, user: user) }
- it 'renders "Expired:" as the expiration date label' do
+ it 'renders "Expired" as the expiration date label' do
render
- expect(rendered).to have_text('Expired:')
+ expect(rendered).to have_text('Expired')
end
end
diff --git a/spec/views/projects/commits/show.html.haml_spec.rb b/spec/views/projects/commits/show.html.haml_spec.rb
index e5e9906a798..9393ba046dc 100644
--- a/spec/views/projects/commits/show.html.haml_spec.rb
+++ b/spec/views/projects/commits/show.html.haml_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe 'projects/commits/show.html.haml' do
- let(:project) { create(:project, :repository) }
- let(:commits) { [project.commit] }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:commits) { [commit] }
+ let(:commit) { project.commit }
let(:path) { 'path/to/doc.md' }
before do
@@ -32,4 +34,32 @@ RSpec.describe 'projects/commits/show.html.haml' do
expect(rendered).to have_link(href: "#{project_commits_path(project, path)}?format=atom")
end
end
+
+ context 'commits date headers' do
+ let(:user) { build(:user, timezone: timezone) }
+ let(:committed_date) { Time.find_zone('UTC').parse('2023-01-01') }
+
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ allow(commit).to receive(:committed_date).and_return(committed_date)
+
+ render
+ end
+
+ context 'when timezone is UTC' do
+ let(:timezone) { 'UTC' }
+
+ it "renders commit date header in user's timezone" do
+ expect(rendered).to include('data-day="2023-01-01"')
+ end
+ end
+
+ context 'when timezone is UTC-6' do
+ let(:timezone) { 'America/Mexico_City' }
+
+ it "renders commit date header in user's timezone" do
+ expect(rendered).to include('data-day="2022-12-31"')
+ end
+ end
+ end
end
diff --git a/spec/views/projects/issues/show.html.haml_spec.rb b/spec/views/projects/issues/show.html.haml_spec.rb
index 3f1496a24ce..e316ff58b95 100644
--- a/spec/views/projects/issues/show.html.haml_spec.rb
+++ b/spec/views/projects/issues/show.html.haml_spec.rb
@@ -5,129 +5,6 @@ require 'spec_helper'
RSpec.describe 'projects/issues/show' do
include_context 'project show action'
- context 'when the issue is closed' do
- before do
- allow(issue).to receive(:closed?).and_return(true)
- allow(view).to receive(:current_user).and_return(user)
- end
-
- context 'when the issue was moved' do
- let(:new_issue) { create(:issue, project: project, author: user) }
-
- before do
- issue.moved_to = new_issue
- end
-
- context 'when user can see the moved issue' do
- before do
- project.add_developer(user)
- end
-
- it 'shows "Closed (moved)" if an issue has been moved and closed' do
- render
-
- expect(rendered).to have_selector('.issuable-status-badge-closed:not(.hidden)', text: 'Closed (moved)')
- end
-
- it 'shows "Closed (moved)" if an issue has been moved and discussion is locked' do
- allow(issue).to receive(:discussion_locked).and_return(true)
- render
-
- expect(rendered).to have_selector('.issuable-status-badge-closed:not(.hidden)', text: 'Closed (moved)')
- end
-
- it 'links "moved" to the new issue the original issue was moved to' do
- render
-
- expect(rendered).to have_selector("a[href=\"#{issue_path(new_issue)}\"]", text: 'moved')
- end
-
- it 'does not show "closed (moved)" if an issue has been moved and reopened (not closed)' do
- allow(issue).to receive(:closed?).and_return(false)
-
- render
-
- expect(rendered).not_to have_selector('.issuable-status-badge-closed:not(.hidden)', text: 'Closed (moved)')
- end
- end
-
- context 'when user cannot see moved issue' do
- it 'does not show moved issue link' do
- render
-
- expect(rendered).not_to have_selector("a[href=\"#{issue_path(new_issue)}\"]", text: 'moved')
- end
- end
- end
-
- context 'when the issue was duplicated' do
- let(:new_issue) { create(:issue, project: project, author: user) }
-
- before do
- issue.duplicated_to = new_issue
- end
-
- context 'when user can see the duplicated issue' do
- before do
- project.add_developer(user)
- end
-
- it 'shows "Closed (duplicated)" if an issue has been duplicated' do
- render
-
- expect(rendered).to have_selector('.issuable-status-badge-closed:not(.hidden)', text: 'Closed (duplicated)')
- end
-
- it 'links "duplicated" to the new issue the original issue was duplicated to' do
- render
-
- expect(rendered).to have_selector("a[href=\"#{issue_path(new_issue)}\"]", text: 'duplicated')
- end
- end
-
- context 'when user cannot see duplicated issue' do
- it 'does not show duplicated issue link' do
- render
-
- expect(rendered).not_to have_selector("a[href=\"#{issue_path(new_issue)}\"]", text: 'duplicated')
- end
- end
- end
-
- it 'shows "Closed" if an issue has not been moved or duplicated' do
- render
-
- expect(rendered).to have_selector('.issuable-status-badge-closed:not(.hidden)', text: 'Closed')
- end
-
- it 'shows "Closed" if discussion is locked' do
- allow(issue).to receive(:discussion_locked).and_return(true)
- render
-
- expect(rendered).to have_selector('.issuable-status-badge-closed:not(.hidden)', text: 'Closed')
- end
- end
-
- context 'when the issue is open' do
- before do
- allow(issue).to receive(:closed?).and_return(false)
- allow(issue).to receive(:discussion_locked).and_return(false)
- end
-
- it 'shows "Open" if an issue has been moved' do
- render
-
- expect(rendered).to have_selector('.issuable-status-badge-open:not(.hidden)', text: 'Open')
- end
-
- it 'shows "Open" if discussion is locked' do
- allow(issue).to receive(:discussion_locked).and_return(true)
- render
-
- expect(rendered).to have_selector('.issuable-status-badge-open:not(.hidden)', text: 'Open')
- end
- end
-
context 'when the issue is related to a sentry error' do
it 'renders a stack trace' do
sentry_issue = double(:sentry_issue, sentry_issue_identifier: '1066622')
diff --git a/spec/views/projects/pages/_pages_settings.html.haml_spec.rb b/spec/views/projects/pages/_pages_settings.html.haml_spec.rb
new file mode 100644
index 00000000000..4f54ddbdb60
--- /dev/null
+++ b/spec/views/projects/pages/_pages_settings.html.haml_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/pages/_pages_settings', feature_category: :pages do
+ let_it_be(:project) { build_stubbed(:project, :repository) }
+ let_it_be(:user) { build_stubbed(:user) }
+
+ before do
+ assign(:project, project)
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ context 'for pages unique domain' do
+ it 'shows the unique domain toggle' do
+ render
+
+ expect(rendered).to have_content('Use unique domain')
+ end
+ end
+end
diff --git a/spec/views/pwa/manifest.json.erb_spec.rb b/spec/views/pwa/manifest.json.erb_spec.rb
new file mode 100644
index 00000000000..a5075bfe6fe
--- /dev/null
+++ b/spec/views/pwa/manifest.json.erb_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'pwa/manifest', feature_category: :navigation do
+ describe 'view caching', :use_clean_rails_memory_store_fragment_caching do
+ let(:appearance) { build_stubbed(:appearance, pwa_name: 'My GitLab') }
+
+ context 'when appearance is unchanged' do
+ it 'reuses the cached view' do
+ allow(view).to receive(:current_appearance).and_return(appearance)
+ allow(view).to receive(:appearance_pwa_name).and_call_original
+ render
+ render
+
+ expect(view).to have_received(:appearance_pwa_name).once
+ end
+ end
+
+ context 'when appearance has changed' do
+ let(:changed_appearance) { build_stubbed(:appearance, pwa_name: 'My new GitLab') }
+
+ it 'does not use the cached view' do
+ allow(view).to receive(:current_appearance).and_return(appearance)
+ allow(view).to receive(:appearance_pwa_name).and_call_original
+ render
+
+ allow(view).to receive(:current_appearance).and_return(changed_appearance)
+ render
+
+ expect(view).to have_received(:appearance_pwa_name).twice
+ expect(rendered).to have_content 'My new GitLab'
+ end
+ end
+ end
+end
diff --git a/spec/views/registrations/welcome/show.html.haml_spec.rb b/spec/views/registrations/welcome/show.html.haml_spec.rb
index 866f4f62493..4188bd7e956 100644
--- a/spec/views/registrations/welcome/show.html.haml_spec.rb
+++ b/spec/views/registrations/welcome/show.html.haml_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'registrations/welcome/show', feature_category: :onboarding do
let_it_be(:user) { create(:user) }
before do
+ allow(view).to receive(:onboarding_status).and_return(Onboarding::Status.new({}, {}, user))
allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:welcome_update_params).and_return({})
diff --git a/spec/views/shared/_label_row.html.haml_spec.rb b/spec/views/shared/_label_row.html.haml_spec.rb
index eb277930c1d..ef5a479d736 100644
--- a/spec/views/shared/_label_row.html.haml_spec.rb
+++ b/spec/views/shared/_label_row.html.haml_spec.rb
@@ -25,8 +25,8 @@ RSpec.describe 'shared/_label_row.html.haml' do
expect(rendered).to have_text(label.title)
end
- it 'has a non-linked label title' do
- expect(rendered).not_to have_link(label.title)
+ it 'has a linked label title' do
+ expect(rendered).to have_link(label.title)
end
it 'has Issues link' do
@@ -57,8 +57,8 @@ RSpec.describe 'shared/_label_row.html.haml' do
expect(rendered).to have_text(label.title)
end
- it 'has a non-linked label title' do
- expect(rendered).not_to have_link(label.title)
+ it 'has a linked label title' do
+ expect(rendered).to have_link(label.title)
end
it 'has Issues link' do
@@ -85,8 +85,8 @@ RSpec.describe 'shared/_label_row.html.haml' do
expect(rendered).to have_text(label.title)
end
- it 'has a non-linked label title' do
- expect(rendered).not_to have_link(label.title)
+ it 'has a linked label title' do
+ expect(rendered).to have_link(label.title)
end
it 'has Issues link' do
@@ -111,8 +111,8 @@ RSpec.describe 'shared/_label_row.html.haml' do
expect(rendered).to have_text(label.title)
end
- it 'has a non-linked label title' do
- expect(rendered).not_to have_link(label.title)
+ it 'has a linked label title' do
+ expect(rendered).to have_link(label.title)
end
it 'does not show Issues link' do
diff --git a/spec/workers/background_migration/ci_database_worker_spec.rb b/spec/workers/background_migration/ci_database_worker_spec.rb
index 3f2977a0aaa..496e7830c94 100644
--- a/spec/workers/background_migration/ci_database_worker_spec.rb
+++ b/spec/workers/background_migration/ci_database_worker_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state, feature_category: :database do
+RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state,
+ :clean_gitlab_redis_cluster_shared_state, feature_category: :database do
before do
skip_if_shared_database(:ci)
end
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index 32ee6708736..4cffbe5be97 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -2,6 +2,7 @@
require 'spec_helper'
-RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state, feature_category: :database do
+RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state,
+ :clean_gitlab_redis_cluster_shared_state, feature_category: :database do
it_behaves_like 'it runs background migration jobs', 'main'
end
diff --git a/spec/workers/batched_git_ref_updates/cleanup_scheduler_worker_spec.rb b/spec/workers/batched_git_ref_updates/cleanup_scheduler_worker_spec.rb
new file mode 100644
index 00000000000..a52043993b7
--- /dev/null
+++ b/spec/workers/batched_git_ref_updates/cleanup_scheduler_worker_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BatchedGitRefUpdates::CleanupSchedulerWorker, feature_category: :gitaly do
+ let(:stats) { { total_projects: 456 } }
+ let(:service) { instance_double(BatchedGitRefUpdates::CleanupSchedulerService, execute: stats) }
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ before do
+ allow(BatchedGitRefUpdates::CleanupSchedulerService).to receive(:new).and_return(service)
+ end
+
+ it 'delegates to CleanupSchedulerService' do
+ expect(service).to receive(:execute)
+
+ worker.perform
+ end
+
+ it 'logs stats' do
+ worker.perform
+
+ expect(worker.logging_extras).to eq({
+ "extra.batched_git_ref_updates_cleanup_scheduler_worker.stats" => { total_projects: 456 }
+ })
+ end
+ end
+
+ it_behaves_like 'an idempotent worker'
+end
diff --git a/spec/workers/batched_git_ref_updates/project_cleanup_worker_spec.rb b/spec/workers/batched_git_ref_updates/project_cleanup_worker_spec.rb
new file mode 100644
index 00000000000..5442b9dd051
--- /dev/null
+++ b/spec/workers/batched_git_ref_updates/project_cleanup_worker_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BatchedGitRefUpdates::ProjectCleanupWorker, feature_category: :gitaly do
+ let(:stats) { { total_deletes: 456 } }
+ let(:service) { instance_double(BatchedGitRefUpdates::ProjectCleanupService, execute: stats) }
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ before do
+ allow(BatchedGitRefUpdates::ProjectCleanupService).to receive(:new).with(123).and_return(service)
+ end
+
+ it 'delegates to ProjectCleanupService' do
+ expect(service).to receive(:execute)
+
+ worker.perform(123)
+ end
+
+ it 'logs stats' do
+ worker.perform(123)
+
+ expect(worker.logging_extras).to eq({
+ "extra.batched_git_ref_updates_project_cleanup_worker.stats" => { total_deletes: 456 }
+ })
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [123] }
+ end
+end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 320f62dc93e..6318e925da6 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -511,40 +511,6 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- context 'when job reaches timeout' do
- it 'marks as failed and logs the error' do
- old_created_at = pipeline_tracker.created_at
- pipeline_tracker.update!(created_at: (BulkImports::Pipeline::NDJSON_EXPORT_TIMEOUT + 1.hour).ago)
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:error)
- .with(
- hash_including(
- 'pipeline_name' => 'NdjsonPipeline',
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'class' => 'BulkImports::PipelineWorker',
- 'exception.backtrace' => anything,
- 'exception.class' => 'BulkImports::Pipeline::ExpiredError',
- 'exception.message' => 'Pipeline timeout',
- 'importer' => 'gitlab_migration',
- 'message' => 'Pipeline failed',
- 'source_version' => entity.bulk_import.source_version_info.to_s
- )
- )
- end
-
- subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
-
- expect(pipeline_tracker.reload.status_name).to eq(:failed)
-
- entity.update!(created_at: old_created_at)
- end
- end
-
context 'when export status is failed' do
it 'marks as failed and logs the error' do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
diff --git a/spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb
index 70821f3a833..60a34fdab53 100644
--- a/spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb
+++ b/spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb
@@ -69,4 +69,43 @@ RSpec.describe Ci::PipelineSuccessUnlockArtifactsWorker, feature_category: :buil
end
end
end
+
+ describe '.database_health_check_attrs' do
+ it 'defines expected db health check attrs' do
+ expect(described_class.database_health_check_attrs).to eq(
+ gitlab_schema: :gitlab_ci,
+ delay_by: described_class::DEFAULT_DEFER_DELAY,
+ tables: [:ci_job_artifacts]
+ )
+ end
+ end
+
+ context 'with stop signal from database health check' do
+ let(:pipeline_id) { non_existing_record_id }
+ let(:health_signal_attrs) { described_class.database_health_check_attrs }
+
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add Gitlab::SidekiqMiddleware::SkipJobs
+ Sidekiq::Testing.inline! { example.run }
+ end
+ end
+
+ before do
+ stub_feature_flags("drop_sidekiq_jobs_#{described_class.name}": false)
+
+ stop_signal = instance_double("Gitlab::Database::HealthStatus::Signals::Stop", stop?: true)
+ allow(Gitlab::Database::HealthStatus).to receive(:evaluate).and_return([stop_signal])
+ end
+
+ it 'defers the job by set time' do
+ expect_next_instance_of(described_class) do |worker|
+ expect(worker).not_to receive(:perform).with(pipeline_id)
+ end
+
+ expect(described_class).to receive(:perform_in).with(health_signal_attrs[:delay_by], pipeline_id)
+
+ described_class.perform_async(pipeline_id)
+ end
+ end
end
diff --git a/spec/workers/click_house/events_sync_worker_spec.rb b/spec/workers/click_house/events_sync_worker_spec.rb
new file mode 100644
index 00000000000..8f328839cfd
--- /dev/null
+++ b/spec/workers/click_house/events_sync_worker_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_management do
+ let(:databases) { { main: :some_db } }
+ let(:worker) { described_class.new }
+
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return(databases)
+ end
+
+ include_examples 'an idempotent worker' do
+ context 'when the event_sync_worker_for_click_house feature flag is on' do
+ before do
+ stub_feature_flags(event_sync_worker_for_click_house: true)
+ end
+
+ it 'returns true' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :processed })
+
+ worker.perform
+ end
+
+ context 'when no ClickHouse databases are configured' do
+ let(:databases) { {} }
+
+ it 'skips execution' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :disabled })
+
+ worker.perform
+ end
+ end
+
+ context 'when exclusive lease error happens' do
+ it 'skips execution' do
+ expect(worker).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :skipped })
+
+ worker.perform
+ end
+ end
+ end
+
+ context 'when the event_sync_worker_for_click_house feature flag is off' do
+ before do
+ stub_feature_flags(event_sync_worker_for_click_house: false)
+ end
+
+ it 'skips execution' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :disabled })
+
+ worker.perform
+ end
+ end
+ end
+end
diff --git a/spec/workers/clusters/agents/notify_git_push_worker_spec.rb b/spec/workers/clusters/agents/notify_git_push_worker_spec.rb
index 561a66b86e9..c6ef8dc3338 100644
--- a/spec/workers/clusters/agents/notify_git_push_worker_spec.rb
+++ b/spec/workers/clusters/agents/notify_git_push_worker_spec.rb
@@ -25,17 +25,5 @@ RSpec.describe Clusters::Agents::NotifyGitPushWorker, feature_category: :deploym
expect { subject }.not_to raise_error
end
end
-
- context 'when the :notify_kas_on_git_push feature flag is disabled' do
- before do
- stub_feature_flags(notify_kas_on_git_push: false)
- end
-
- it 'does not notify KAS' do
- expect(Gitlab::Kas::Client).not_to receive(:new)
-
- subject
- end
- end
end
end
diff --git a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
index cdaff2fc1f4..6475be0243c 100644
--- a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
@@ -4,7 +4,14 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::ReschedulingMethods, feature_category: :importers do
let(:worker) do
- Class.new { include(Gitlab::GithubImport::ReschedulingMethods) }.new
+ Class.new do
+ def self.name
+ 'MockImportWorker'
+ end
+
+ include ApplicationWorker
+ include Gitlab::GithubImport::ReschedulingMethods
+ end.new
end
describe '#perform' do
diff --git a/spec/workers/concerns/packages/error_handling_spec.rb b/spec/workers/concerns/packages/error_handling_spec.rb
new file mode 100644
index 00000000000..3db32778a88
--- /dev/null
+++ b/spec/workers/concerns/packages/error_handling_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::ErrorHandling, feature_category: :build_artifacts do
+ let_it_be(:worker_class) do
+ Class.new do
+ def self.name
+ 'Gitlab::Foo::Bar::DummyWorker'
+ end
+
+ include ApplicationWorker
+ include ::Packages::ErrorHandling
+ end
+ end
+
+ let(:worker) { worker_class.new }
+
+ describe '#process_package_file_error' do
+ let_it_be_with_reload(:package) { create(:generic_package, :processing, :with_zip_file) }
+
+ let(:package_file) { package.package_files.first }
+ let(:package_name) { 'TempProject.TempPackage' }
+ let(:exception) { StandardError.new('42') }
+ let(:extra_log_payload) { { answer: 42 } }
+ let(:expected_log_payload) do
+ {
+ project_id: package_file.project_id,
+ package_file_id: package_file.id,
+ answer: 42
+ }
+ end
+
+ subject do
+ worker.process_package_file_error(
+ package_file: package_file,
+ exception: exception,
+ extra_log_payload: extra_log_payload
+ )
+ end
+
+ it 'logs the error with the correct parameters' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(exception, expected_log_payload)
+
+ subject
+ end
+
+ shared_examples 'updates the package status and status message' do |error_message:|
+ it :aggregate_failures do
+ expect { subject }
+ .to change { package.status }.to('error')
+ .and change { package.status_message }.to(error_message)
+ end
+ end
+
+ described_class::CONTROLLED_ERRORS.each do |exception_class|
+ context "with controlled exception #{exception_class}" do
+ let(:exception) { exception_class.new }
+
+ it_behaves_like 'updates the package status and status message', error_message: exception_class.new.message
+ end
+ end
+
+ context 'with all other errors' do
+ let(:exception) { StandardError.new('String that will not appear in status_message') }
+
+ it_behaves_like 'updates the package status and status message',
+ error_message: 'Unexpected error: StandardError'
+ end
+
+ context 'with a very long error message' do
+ let(:exception) { ArgumentError.new('a' * 1000) }
+
+ it 'truncates the error message' do
+ subject
+
+ expect(package.status_message.length).to eq(::Packages::Package::STATUS_MESSAGE_MAX_LENGTH)
+ end
+ end
+ end
+end
diff --git a/spec/workers/concerns/worker_attributes_spec.rb b/spec/workers/concerns/worker_attributes_spec.rb
index 959cb62c6fb..90c07a9c959 100644
--- a/spec/workers/concerns/worker_attributes_spec.rb
+++ b/spec/workers/concerns/worker_attributes_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe WorkerAttributes, feature_category: :shared do
:worker_has_external_dependencies? | :worker_has_external_dependencies! | false | [] | true
:idempotent? | :idempotent! | false | [] | true
:big_payload? | :big_payload! | false | [] | true
- :database_health_check_attrs | :defer_on_database_health_signal | nil | [:gitlab_main, 1.minute, [:users]] | { gitlab_schema: :gitlab_main, delay_by: 1.minute, tables: [:users] }
+ :database_health_check_attrs | :defer_on_database_health_signal | nil | [:gitlab_main, [:users], 1.minute] | { gitlab_schema: :gitlab_main, tables: [:users], delay_by: 1.minute }
end
# rubocop: enable Layout/LineLength
@@ -148,7 +148,7 @@ RSpec.describe WorkerAttributes, feature_category: :shared do
context 'when defer_on_database_health_signal is set' do
before do
- worker.defer_on_database_health_signal(:gitlab_main, 1.minute, [:users])
+ worker.defer_on_database_health_signal(:gitlab_main, [:users], 1.minute)
end
it { is_expected.to be(true) }
diff --git a/spec/workers/build_success_worker_spec.rb b/spec/workers/environments/stop_job_success_worker_spec.rb
index be9802eb2ce..3a2db8cfb77 100644
--- a/spec/workers/build_success_worker_spec.rb
+++ b/spec/workers/environments/stop_job_success_worker_spec.rb
@@ -2,13 +2,13 @@
require 'spec_helper'
-RSpec.describe BuildSuccessWorker, feature_category: :continuous_integration do
+RSpec.describe Environments::StopJobSuccessWorker, feature_category: :continuous_delivery do
describe '#perform' do
subject { described_class.new.perform(build.id) }
context 'when build exists' do
context 'when the build will stop an environment' do
- let!(:build) { create(:ci_build, :stop_review_app, environment: environment.name, project: environment.project, status: :success) }
+ let!(:build) { create(:ci_build, :stop_review_app, environment: environment.name, project: environment.project, status: :success) } # rubocop:disable Layout/LineLength
let(:environment) { create(:environment, state: :available) }
it 'stops the environment' do
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 38959b6d764..3cd030e678d 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -398,7 +398,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'PipelineProcessWorker' => 3,
'PostReceive' => 3,
'ProcessCommitWorker' => 3,
- 'ProductAnalytics::InitializeAnalyticsWorker' => 3,
'ProductAnalytics::InitializeSnowplowProductAnalyticsWorker' => 1,
'ProjectCacheWorker' => 3,
'ProjectDestroyWorker' => 3,
diff --git a/spec/workers/integrations/group_mention_worker_spec.rb b/spec/workers/integrations/group_mention_worker_spec.rb
index 111e3f5a107..e79b654184d 100644
--- a/spec/workers/integrations/group_mention_worker_spec.rb
+++ b/spec/workers/integrations/group_mention_worker_spec.rb
@@ -37,12 +37,10 @@ RSpec.describe Integrations::GroupMentionWorker, :clean_gitlab_redis_shared_stat
context 'when mentionable_type is not supported' do
let(:args) do
- {
+ super().merge(
mentionable_type: 'Unsupported',
- mentionable_id: 23,
- hook_data: {},
- is_confidential: false
- }
+ mentionable_id: 23
+ )
end
it 'does not execute the service' do
@@ -61,5 +59,15 @@ RSpec.describe Integrations::GroupMentionWorker, :clean_gitlab_redis_shared_stat
worker.perform(args)
end
end
+
+ context 'when mentionable cannot be found' do
+ let(:args) { super().merge(mentionable_id: non_existing_record_id) }
+
+ it 'does not execute the service' do
+ expect(service_class).not_to receive(:new)
+
+ worker.perform(args)
+ end
+ end
end
end
diff --git a/spec/workers/integrations/slack_event_worker_spec.rb b/spec/workers/integrations/slack_event_worker_spec.rb
index 6754801a2bd..019d68b40e0 100644
--- a/spec/workers/integrations/slack_event_worker_spec.rb
+++ b/spec/workers/integrations/slack_event_worker_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Integrations::SlackEventWorker, :clean_gitlab_redis_shared_state, feature_category: :integrations do
+RSpec.describe Integrations::SlackEventWorker, :clean_gitlab_redis_shared_state,
+ :clean_gitlab_redis_cluster_shared_state, feature_category: :integrations do
describe '.event?' do
subject { described_class.event?(event) }
diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
index e49b4707eb3..2e77f38e221 100644
--- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
+++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe LooseForeignKeys::CleanupWorker, feature_category: :cell do
let(:loose_fk_child_table_1_2) { table(:_test_loose_fk_child_table_1_2) }
let(:loose_fk_child_table_2_1) { table(:_test_loose_fk_child_table_2_1) }
- before(:all) do
+ before_all do
create_table_structure
end
diff --git a/spec/workers/members/expiring_email_notification_worker_spec.rb b/spec/workers/members/expiring_email_notification_worker_spec.rb
new file mode 100644
index 00000000000..600a81b37b8
--- /dev/null
+++ b/spec/workers/members/expiring_email_notification_worker_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::ExpiringEmailNotificationWorker, type: :worker, feature_category: :system_access do
+ subject(:worker) { described_class.new }
+
+ let_it_be(:member) { create(:project_member, :guest, expires_at: 7.days.from_now.to_date) }
+ let_it_be(:notified_member) do
+ create(:project_member, :guest, expires_at: 7.days.from_now.to_date, expiry_notified_at: Date.today)
+ end
+
+ describe '#perform' do
+ context "with not notified member" do
+ it "notify member" do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).to receive(:member_about_to_expire).with(member)
+ end
+
+ worker.perform(member.id)
+
+ expect(member.reload.expiry_notified_at).to be_present
+ end
+ end
+
+ context "with notified member" do
+ it "not notify member" do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).not_to receive(:member_about_to_expire).with(notified_member)
+ end
+
+ worker.perform(notified_member.id)
+ end
+ end
+
+ context "when feature member_expiring_email_notification is disabled" do
+ before do
+ stub_feature_flags(member_expiring_email_notification: false)
+ end
+
+ it "not notify member" do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).not_to receive(:member_about_to_expire).with(member)
+ end
+
+ worker.perform(member.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/members/expiring_worker_spec.rb b/spec/workers/members/expiring_worker_spec.rb
new file mode 100644
index 00000000000..3f46548dbb3
--- /dev/null
+++ b/spec/workers/members/expiring_worker_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::ExpiringWorker, type: :worker, feature_category: :system_access do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ let_it_be(:expiring_7_days_project_member) { create(:project_member, :guest, expires_at: 7.days.from_now) }
+ let_it_be(:expiring_7_days_group_member) { create(:group_member, :guest, expires_at: 7.days.from_now) }
+ let_it_be(:expiring_10_days_project_member) { create(:project_member, :guest, expires_at: 10.days.from_now) }
+ let_it_be(:expiring_5_days_project_member) { create(:project_member, :guest, expires_at: 5.days.from_now) }
+ let_it_be(:expiring_7_days_blocked_project_member) do
+ create(:project_member, :guest, :blocked, expires_at: 7.days.from_now)
+ end
+
+ let(:notifiy_worker) { Members::ExpiringEmailNotificationWorker }
+
+ it "notifies only active users with membership expiring in less than 7 days" do
+ expect(notifiy_worker).to receive(:perform_async).with(expiring_7_days_project_member.id)
+ expect(notifiy_worker).to receive(:perform_async).with(expiring_7_days_group_member.id)
+ expect(notifiy_worker).to receive(:perform_async).with(expiring_5_days_project_member.id)
+
+ worker.perform
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb b/spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb
index 828ffb0c811..35d06ea6e86 100644
--- a/spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb
+++ b/spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb
@@ -40,25 +40,6 @@ RSpec.describe MergeRequests::MergeabilityCheckBatchWorker, feature_category: :c
subject.perform([merge_request_1.id, merge_request_2.id, merge_request_3.id, 1234], user.id)
end
- context 'when restrict_merge_status_recheck FF is off' do
- before do
- stub_feature_flags(restrict_merge_status_recheck: false)
- end
-
- it 'executes MergeabilityCheckService on merge requests that needs to be checked' do
- expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_1) do |service|
- expect(service).to receive(:execute).and_return(ServiceResponse.success)
- end
- expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_2) do |service|
- expect(service).to receive(:execute).and_return(ServiceResponse.success)
- end
- expect(MergeRequests::MergeabilityCheckService).not_to receive(:new).with(merge_request_3.id)
- expect(MergeRequests::MergeabilityCheckService).not_to receive(:new).with(1234)
-
- subject.perform([merge_request_1.id, merge_request_2.id, merge_request_3.id, 1234], user.id)
- end
- end
-
it 'structurally logs a failed mergeability check' do
expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_1) do |service|
expect(service).to receive(:execute).and_return(ServiceResponse.error(message: "solar flares"))
diff --git a/spec/workers/packages/debian/process_package_file_worker_spec.rb b/spec/workers/packages/debian/process_package_file_worker_spec.rb
index 1ef3119ecd3..0c60633ef45 100644
--- a/spec/workers/packages/debian/process_package_file_worker_spec.rb
+++ b/spec/workers/packages/debian/process_package_file_worker_spec.rb
@@ -29,6 +29,17 @@ RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, featur
subject { worker.perform(package_file_id, distribution_name, component_name) }
+ shared_context 'with changes file' do
+ let(:package) { temp_with_changes }
+ let(:package_file) { package.package_files.first }
+ let(:distribution_name) { nil }
+ let(:component_name) { nil }
+
+ before do
+ distribution.update! suite: 'unstable'
+ end
+ end
+
context 'with non existing package file' do
let(:package_file_id) { non_existing_record_id }
@@ -59,13 +70,12 @@ RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, featur
end
end
- context 'when the service raises an error' do
- let(:package_file) { package.package_files.with_file_name('sample_1.2.3~alpha2.tar.xz').first }
-
+ shared_examples 'handling error' do |error_message:, error_class:|
it 'marks the package as errored', :aggregate_failures do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- instance_of(ArgumentError),
+ instance_of(error_class),
package_file_id: package_file_id,
+ project_id: package.project_id,
distribution_name: distribution_name,
component_name: component_name
)
@@ -75,21 +85,128 @@ RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, featur
.and not_change { package.package_files.count }
.and change { package_file.reload.status }.to('error')
.and change { package.reload.status }.from('processing').to('error')
+ .and change { package.status_message }.to(error_message)
end
end
- context 'with a Debian changes file' do
- let(:package) { temp_with_changes }
- let(:package_file) { package.package_files.first }
- let(:distribution_name) { nil }
- let(:component_name) { nil }
+ context 'with controlled errors' do
+ context 'with a package file' do
+ context 'when component name is blank' do
+ let(:component_name) { '' }
+
+ it_behaves_like 'handling error',
+ error_message: 'missing component name',
+ error_class: ArgumentError
+ end
+
+ context 'when distribution name is blank' do
+ let(:distribution_name) { '' }
+
+ it_behaves_like 'handling error',
+ error_message: 'missing distribution name',
+ error_class: ArgumentError
+ end
+
+ context 'when package file is not deb, ddeb or udeb' do
+ let(:package_file) { package.package_files.with_file_name('sample_1.2.3~alpha2.tar.xz').first }
+
+ it_behaves_like 'handling error',
+ error_message: 'invalid package file type: source',
+ error_class: ArgumentError
+ end
+ end
+
+ context 'with changes file' do
+ include_context 'with changes file'
+
+ let(:file_metadata_source) { 'src' }
+ let(:file_metadata_version) { '0.1' }
+ let(:file_metadata_distribution) { 'Breezy Badger' }
+ let(:file_metadata) do
+ {
+ fields: {
+ 'Source' => file_metadata_source,
+ 'Version' => file_metadata_version,
+ 'Distribution' => file_metadata_distribution
+ }
+ }
+ end
+
+ context 'when component name is blank' do
+ let(:component_name) { '' }
+
+ it_behaves_like 'handling error',
+ error_message: 'unwanted component name',
+ error_class: ArgumentError
+ end
+
+ context 'with distribution name is blank' do
+ let(:distribution_name) { '' }
+
+ it_behaves_like 'handling error',
+ error_message: 'unwanted distribution name',
+ error_class: ArgumentError
+ end
+ context 'with missing file metadata fields' do
+ before do
+ allow_next_instance_of(::Packages::Debian::ExtractChangesMetadataService) do |instance|
+ allow(instance).to receive(:execute).and_return(file_metadata)
+ end
+ end
+
+ context 'when source field is missing' do
+ before do
+ file_metadata[:fields].delete('Source')
+ end
+
+ it_behaves_like 'handling error',
+ error_message: 'missing Source field',
+ error_class: ArgumentError
+ end
+
+ context 'when Version field is missing' do
+ before do
+ file_metadata[:fields].delete('Version')
+ end
+
+ it_behaves_like 'handling error',
+ error_message: 'missing Version field',
+ error_class: ArgumentError
+ end
+
+ context 'when Distribution field is missing' do
+ before do
+ file_metadata[:fields].delete('Distribution')
+ end
+
+ it_behaves_like 'handling error',
+ error_message: 'missing Distribution field',
+ error_class: ArgumentError
+ end
+ end
+ end
+ end
+
+ context 'with uncontrolled errors' do
before do
- distribution.update! suite: 'unstable'
+ allow_next_instance_of(::Packages::Debian::ProcessPackageFileService) do |instance|
+ allow(instance).to receive(:execute).and_raise(StandardError.new('Boom'))
+ end
end
- it_behaves_like 'an idempotent worker' do
- let(:job_args) { [package_file.id, distribution_name, component_name] }
+ it_behaves_like 'handling error',
+ error_message: 'Unexpected error: StandardError',
+ error_class: StandardError
+ end
+
+ context 'with correct job arguments' do
+ let(:job_args) { [package_file.id, distribution_name, component_name] }
+
+ it_behaves_like 'an idempotent worker'
+
+ context 'with a Debian changes file' do
+ include_context 'with changes file'
it 'sets the Debian file type to changes', :aggregate_failures do
expect(::Packages::Debian::GenerateDistributionWorker)
@@ -110,39 +227,38 @@ RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, featur
.and not_change { debian_file_metadatum.component }
end
end
- end
-
- using RSpec::Parameterized::TableSyntax
- where(:case_name, :expected_file_type, :file_name, :component_name) do
- 'with a deb' | 'deb' | 'libsample0_1.2.3~alpha2_amd64.deb' | 'main'
- 'with an udeb' | 'udeb' | 'sample-udeb_1.2.3~alpha2_amd64.udeb' | 'contrib'
- 'with a ddeb' | 'ddeb' | 'sample-ddeb_1.2.3~alpha2_amd64.ddeb' | 'main'
- end
+ context 'with Debian files' do
+ using RSpec::Parameterized::TableSyntax
- with_them do
- let(:package_file) { package.package_files.with_file_name(file_name).first }
+ where(:case_name, :expected_file_type, :file_name, :component_name) do
+ 'with a deb' | 'deb' | 'libsample0_1.2.3~alpha2_amd64.deb' | 'main'
+ 'with an udeb' | 'udeb' | 'sample-udeb_1.2.3~alpha2_amd64.udeb' | 'contrib'
+ 'with a ddeb' | 'ddeb' | 'sample-ddeb_1.2.3~alpha2_amd64.ddeb' | 'main'
+ end
- it_behaves_like 'an idempotent worker' do
- let(:job_args) { [package_file.id, distribution_name, component_name] }
+ with_them do
+ let(:package_file) { package.package_files.with_file_name(file_name).first }
+ let(:job_args) { [package_file.id, distribution_name, component_name] }
- it 'sets the correct Debian file type', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker)
- .to receive(:perform_async).with(:project, distribution.id)
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ it 'sets the correct Debian file type', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker)
+ .to receive(:perform_async).with(:project, distribution.id)
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
- # Using subject inside this block will process the job multiple times
- expect { subject }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and change { Packages::Debian::Publication.count }.by(1)
- .and not_change(package.package_files, :count)
- .and change { package.reload.name }.to('sample')
- .and change { package.version }.to('1.2.3~alpha2')
- .and change { package.status }.from('processing').to('default')
- .and change { package.debian_publication }.from(nil)
- .and change { debian_file_metadatum.reload.file_type }.from('unknown').to(expected_file_type)
- .and change { debian_file_metadatum.component }.from(nil).to(component_name)
+ # Using subject inside this block will process the job multiple times
+ expect { subject }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and change { Packages::Debian::Publication.count }.by(1)
+ .and not_change(package.package_files, :count)
+ .and change { package.reload.name }.to('sample')
+ .and change { package.version }.to('1.2.3~alpha2')
+ .and change { package.status }.from('processing').to('default')
+ .and change { package.debian_publication }.from(nil)
+ .and change { debian_file_metadatum.reload.file_type }.from('unknown').to(expected_file_type)
+ .and change { debian_file_metadatum.component }.from(nil).to(component_name)
+ end
end
end
end
diff --git a/spec/workers/packages/helm/extraction_worker_spec.rb b/spec/workers/packages/helm/extraction_worker_spec.rb
index a764c2ad939..d6c1bdfcd6a 100644
--- a/spec/workers/packages/helm/extraction_worker_spec.rb
+++ b/spec/workers/packages/helm/extraction_worker_spec.rb
@@ -23,16 +23,22 @@ RSpec.describe Packages::Helm::ExtractionWorker, type: :worker, feature_category
subject { described_class.new.perform(channel, package_file_id) }
- shared_examples 'handling error' do |error_class = Packages::Helm::ExtractFileMetadataService::ExtractionError|
+ shared_examples 'handling error' do |error_message:,
+ error_class: Packages::Helm::ExtractFileMetadataService::ExtractionError|
it 'mark the package as errored', :aggregate_failures do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
instance_of(error_class),
- project_id: package_file.package.project_id
+ {
+ package_file_id: package_file.id,
+ project_id: package_file.project_id
+ }
)
expect { subject }
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and change { package.reload.status }.from('processing').to('error')
+
+ expect(package.status_message).to match(error_message)
end
end
@@ -69,34 +75,46 @@ RSpec.describe Packages::Helm::ExtractionWorker, type: :worker, feature_category
end
end
- context 'with an empty package file' do
- before do
- expect_next_instance_of(Gem::Package::TarReader) do |tar_reader|
- expect(tar_reader).to receive(:each).and_return([])
+ context 'with controlled errors' do
+ context 'with an empty package file' do
+ before do
+ expect_next_instance_of(Gem::Package::TarReader) do |tar_reader|
+ expect(tar_reader).to receive(:each).and_return([])
+ end
end
- end
- it_behaves_like 'handling error'
- end
+ it_behaves_like 'handling error', error_message: /Chart.yaml not found/
+ end
- context 'with an invalid YAML' do
- before do
- expect_next_instance_of(Gem::Package::TarReader::Entry) do |entry|
- expect(entry).to receive(:read).and_return('{')
+ context 'with an invalid YAML' do
+ before do
+ expect_next_instance_of(Gem::Package::TarReader::Entry) do |entry|
+ expect(entry).to receive(:read).and_return('{')
+ end
end
+
+ it_behaves_like 'handling error', error_message: /Error while parsing Chart.yaml/
end
- it_behaves_like 'handling error'
+ context 'with an invalid Chart.yaml' do
+ before do
+ expect_next_instance_of(Gem::Package::TarReader::Entry) do |entry|
+ expect(entry).to receive(:read).and_return('{}')
+ end
+ end
+
+ it_behaves_like 'handling error', error_class: ActiveRecord::RecordInvalid, error_message: /Validation failed/
+ end
end
- context 'with an invalid Chart.yaml' do
+ context 'with uncontrolled errors' do
before do
- expect_next_instance_of(Gem::Package::TarReader::Entry) do |entry|
- expect(entry).to receive(:read).and_return('{}')
+ allow_next_instance_of(::Packages::Helm::ProcessFileService) do |instance|
+ allow(instance).to receive(:execute).and_raise(StandardError.new('Boom'))
end
end
- it_behaves_like 'handling error', ActiveRecord::RecordInvalid
+ it_behaves_like 'handling error', error_class: StandardError, error_message: 'Unexpected error: StandardError'
end
end
end
diff --git a/spec/workers/packages/nuget/extraction_worker_spec.rb b/spec/workers/packages/nuget/extraction_worker_spec.rb
index 11eaa1b5dde..d261002a339 100644
--- a/spec/workers/packages/nuget/extraction_worker_spec.rb
+++ b/spec/workers/packages/nuget/extraction_worker_spec.rb
@@ -13,16 +13,21 @@ RSpec.describe Packages::Nuget::ExtractionWorker, type: :worker, feature_categor
subject { described_class.new.perform(package_file_id) }
- shared_examples 'handling the metadata error' do |exception_class: ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError|
+ shared_examples 'handling error' do |error_message:,
+ error_class: ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError|
it 'updates package status to error', :aggregate_failures do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- instance_of(exception_class),
- project_id: package.project_id
+ instance_of(error_class),
+ {
+ package_file_id: package_file.id,
+ project_id: package.project_id
+ }
)
subject
expect(package.reload).to be_error
+ expect(package.status_message).to match(error_message)
end
end
@@ -56,64 +61,101 @@ RSpec.describe Packages::Nuget::ExtractionWorker, type: :worker, feature_categor
end
end
- context 'with package file not containing a nuspec file' do
- before do
- allow_any_instance_of(Zip::File).to receive(:glob).and_return([])
- end
-
- it_behaves_like 'handling the metadata error', exception_class: ::Packages::Nuget::ExtractMetadataFileService::ExtractionError
- end
+ context 'with controlled errors' do
+ context 'with package file not containing a nuspec file' do
+ before do
+ allow_any_instance_of(Zip::File).to receive(:glob).and_return([])
+ end
- context 'with package with an invalid package name' do
- invalid_names = [
- '',
- 'My/package',
- '../../../my_package',
- '%2e%2e%2fmy_package'
- ]
+ it_behaves_like 'handling error',
+ error_class: ::Packages::Nuget::ExtractMetadataFileService::ExtractionError,
+ error_message: 'nuspec file not found'
+ end
- invalid_names.each do |invalid_name|
- context "with #{invalid_name}" do
+ context 'with invalid metadata' do
+ shared_context 'with a blank attribute' do
before do
allow_next_instance_of(::Packages::Nuget::UpdatePackageFromMetadataService) do |service|
- allow(service).to receive(:package_name).and_return(invalid_name)
+ allow(service).to receive(attribute).and_return('')
end
end
+ end
+
+ context 'with a blank package name' do
+ include_context 'with a blank attribute' do
+ let(:attribute) { :package_name }
- it_behaves_like 'handling the metadata error'
+ it_behaves_like 'handling error', error_message: /not found in metadata/
+ end
end
- end
- end
- context 'with package with an invalid package version' do
- invalid_versions = [
- '',
- '555',
- '1./2.3',
- '../../../../../1.2.3',
- '%2e%2e%2f1.2.3'
- ]
-
- invalid_versions.each do |invalid_version|
- context "with #{invalid_version}" do
- before do
- allow_next_instance_of(::Packages::Nuget::UpdatePackageFromMetadataService) do |service|
- allow(service).to receive(:package_version).and_return(invalid_version)
+ context 'with package with an invalid package name' do
+ invalid_names = [
+ 'My/package',
+ '../../../my_package',
+ '%2e%2e%2fmy_package'
+ ]
+
+ invalid_names.each do |invalid_name|
+ context "with #{invalid_name}" do
+ before do
+ allow_next_instance_of(::Packages::Nuget::UpdatePackageFromMetadataService) do |service|
+ allow(service).to receive(:package_name).and_return(invalid_name)
+ end
+ end
+
+ it_behaves_like 'handling error', error_message: 'Validation failed: Name is invalid'
+ end
+ end
+ end
+
+ context 'with package with a blank package version' do
+ include_context 'with a blank attribute' do
+ let(:attribute) { :package_version }
+
+ it_behaves_like 'handling error', error_message: /not found in metadata/
+ end
+ end
+
+ context 'with package with an invalid package version' do
+ invalid_versions = [
+ '555',
+ '1./2.3',
+ '../../../../../1.2.3',
+ '%2e%2e%2f1.2.3'
+ ]
+
+ invalid_versions.each do |invalid_version|
+ context "with #{invalid_version}" do
+ before do
+ allow_next_instance_of(::Packages::Nuget::UpdatePackageFromMetadataService) do |service|
+ allow(service).to receive(:package_version).and_return(invalid_version)
+ end
+ end
+
+ it_behaves_like 'handling error', error_message: 'Validation failed: Version is invalid'
end
end
+ end
+ end
- it_behaves_like 'handling the metadata error'
+ context 'handling a Zip::Error exception' do
+ before do
+ allow_any_instance_of(::Packages::UpdatePackageFileService).to receive(:execute).and_raise(::Zip::Error)
end
+
+ it_behaves_like 'handling error',
+ error_class: ::Packages::Nuget::UpdatePackageFromMetadataService::ZipError,
+ error_message: 'Could not open the .nupkg file'
end
end
- context 'handles a processing an unaccounted for error' do
+ context 'with uncontrolled errors' do
before do
- expect(::Packages::Nuget::UpdatePackageFromMetadataService).to receive(:new)
- .and_raise(Zip::Error)
+ allow_any_instance_of(::Packages::Nuget::UpdatePackageFromMetadataService).to receive(:execute).and_raise(StandardError.new('Boom'))
end
- it_behaves_like 'handling the metadata error', exception_class: Zip::Error
+ it_behaves_like 'handling error', error_class: StandardError, error_message: 'Unexpected error: StandardError'
end
end
end
diff --git a/spec/workers/packages/rubygems/extraction_worker_spec.rb b/spec/workers/packages/rubygems/extraction_worker_spec.rb
index 8ad4c2e6447..4ae8f729117 100644
--- a/spec/workers/packages/rubygems/extraction_worker_spec.rb
+++ b/spec/workers/packages/rubygems/extraction_worker_spec.rb
@@ -14,41 +14,71 @@ RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker, feature_cate
subject { described_class.new.perform(*job_args) }
- it 'processes the gem', :aggregate_failures do
- expect { subject }
- .to change { Packages::Package.count }.by(0)
- .and change { Packages::PackageFile.count }.by(1)
+ context 'without errors' do
+ let_it_be(:package_for_processing) { create(:rubygems_package, :processing) }
+ let(:package_file) { package_for_processing.package_files.first }
- expect(Packages::Package.last.id).to be(package.id)
- expect(package.name).not_to be(package_name)
- end
+ it 'processes the gem', :aggregate_failures do
+ expect { subject }
+ .to change { Packages::Package.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(1)
- it 'handles a processing failure', :aggregate_failures do
- expect(::Packages::Rubygems::ProcessGemService).to receive(:new)
- .and_raise(::Packages::Rubygems::ProcessGemService::ExtractionError)
+ expect(Packages::Package.last.id).to be(package_for_processing.id)
+ expect(package_for_processing.name).not_to be(package_name)
+ end
+ end
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- instance_of(::Packages::Rubygems::ProcessGemService::ExtractionError),
- project_id: package.project_id
- )
+ shared_examples 'handling error' do |error_message:, error_class:|
+ it 'mark the package as errored', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(error_class),
+ {
+ package_file_id: package_file.id,
+ project_id: package.project_id
+ }
+ )
- subject
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and change { package.reload.status }.from('processing').to('error')
- expect(package.reload).to be_error
+ expect(package.status_message).to match(error_message)
+ end
end
- it 'handles processing an unaccounted for error', :aggregate_failures do
- expect(::Packages::Rubygems::ProcessGemService).to receive(:new)
- .and_raise(Zip::Error)
+ context 'with controlled errors' do
+ context 'handling metadata with invalid size' do
+ include_context 'with invalid Rubygems metadata'
+
+ it_behaves_like 'handling error',
+ error_class: ::Packages::Rubygems::ProcessGemService::InvalidMetadataError,
+ error_message: 'Invalid metadata'
+ end
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- instance_of(Zip::Error),
- project_id: package.project_id
- )
+ context 'handling a file error' do
+ before do
+ package_file.file = nil
+ end
- subject
+ it_behaves_like 'handling error',
+ error_class: ::Packages::Rubygems::ProcessGemService::ExtractionError,
+ error_message: 'Unable to read gem file'
+ end
+ end
- expect(package.reload).to be_error
+ context 'with uncontrolled errors' do
+ [Zip::Error, StandardError].each do |exception|
+ context "handling #{exception}", :aggregate_failures do
+ before do
+ allow(::Packages::Rubygems::ProcessGemService).to receive(:new).and_raise(exception)
+ end
+
+ it_behaves_like 'handling error',
+ error_class: exception,
+ error_message: "Unexpected error: #{exception}"
+ end
+ end
end
context 'returns when there is no package file' do
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index c95119b0d02..02221285ad3 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -9,6 +9,11 @@ RSpec.describe ProcessCommitWorker, feature_category: :source_code_management do
let(:issue) { create(:issue, project: project, author: user) }
let(:commit) { project.commit }
+ it "is deduplicated" do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ expect(described_class.get_deduplication_options).to include(feature_flag: :deduplicate_process_commit_worker)
+ end
+
describe '#perform' do
it 'does not process the commit when the project does not exist' do
expect(worker).not_to receive(:close_issues)
diff --git a/spec/workers/redis_migration_worker_spec.rb b/spec/workers/redis_migration_worker_spec.rb
index ad0186e929d..9f29c84a948 100644
--- a/spec/workers/redis_migration_worker_spec.rb
+++ b/spec/workers/redis_migration_worker_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe RedisMigrationWorker, :clean_gitlab_redis_shared_state, feature_c
end
it 'runs migration logic on scanned keys' do
- expect(migrator).to receive(:perform)
+ expect(migrator).to receive(:perform).at_least(:once)
subject.perform(job_class_name, '0')
end
diff --git a/spec/workers/service_desk/custom_email_verification_cleanup_worker_spec.rb b/spec/workers/service_desk/custom_email_verification_cleanup_worker_spec.rb
new file mode 100644
index 00000000000..7bb641a2102
--- /dev/null
+++ b/spec/workers/service_desk/custom_email_verification_cleanup_worker_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe ServiceDesk::CustomEmailVerificationCleanupWorker, type: :worker, feature_category: :service_desk do
+ describe '#perform' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
+ let!(:credential) { create(:service_desk_custom_email_credential, project: project) }
+ let!(:settings) { create(:service_desk_setting, project: project, custom_email: 'user@example.com') }
+ let!(:verification) { create(:service_desk_custom_email_verification, :overdue, project: project) }
+
+ it 'calls the custom email verification update service' do
+ expect_next_instance_of(ServiceDesk::CustomEmailVerifications::UpdateService) do |instance|
+ expect(instance.project).to eq project
+ expect(instance).to receive(:execute).once
+ end
+
+ described_class.new.perform
+ end
+
+ context 'with more than one verification being overdue' do
+ let!(:other_credential) { create(:service_desk_custom_email_credential, project: other_project) }
+ let!(:other_settings) do
+ create(:service_desk_setting, project: other_project, custom_email: 'support@example.com')
+ end
+
+ let!(:other_verification) { create(:service_desk_custom_email_verification, :overdue, project: other_project) }
+
+ it 'calls the custom email verification update service for each project' do
+ project_id_call_order = []
+ expect_next_instances_of(ServiceDesk::CustomEmailVerifications::UpdateService, 2) do |instance|
+ project_id_call_order << instance.project.id
+ expect(instance).to receive(:execute).once
+ end
+
+ described_class.new.perform
+
+ # Also check for order as find_each oders by primary key (project_id) for batching
+ expect(project_id_call_order).to eq [project.id, other_project.id]
+ end
+ end
+ end
+end
diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
index 39d282a6e18..c28be165fd7 100644
--- a/spec/workers/users/deactivate_dormant_users_worker_spec.rb
+++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
@@ -10,6 +10,13 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
let_it_be(:inactive) { create(:user, last_activity_on: nil, created_at: User::MINIMUM_DAYS_CREATED.days.ago.to_date) }
let_it_be(:inactive_recently_created) { create(:user, last_activity_on: nil, created_at: (User::MINIMUM_DAYS_CREATED - 1).days.ago.to_date) }
+ let(:admin_bot) { create(:user, :admin_bot) }
+ let(:deactivation_service) { instance_spy(Users::DeactivateService) }
+
+ before do
+ allow(Users::DeactivateService).to receive(:new).and_return(deactivation_service)
+ end
+
subject(:worker) { described_class.new }
it 'does not run for SaaS', :saas do
@@ -17,8 +24,7 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
worker.perform
- expect(User.dormant.count).to eq(1)
- expect(User.with_no_activity.count).to eq(1)
+ expect(deactivation_service).not_to have_received(:execute)
end
context 'when automatic deactivation of dormant users is enabled' do
@@ -29,29 +35,33 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
it 'deactivates dormant users' do
worker.perform
- expect(User.dormant.count).to eq(0)
- expect(User.with_no_activity.count).to eq(0)
+ expect(deactivation_service).to have_received(:execute).twice
end
where(:user_type, :expected_state) do
- :human | 'deactivated'
- :support_bot | 'active'
- :alert_bot | 'active'
+ :human | 'deactivated'
+ :support_bot | 'active'
+ :alert_bot | 'active'
:visual_review_bot | 'active'
- :service_user | 'deactivated'
- :ghost | 'active'
- :project_bot | 'active'
- :migration_bot | 'active'
- :security_bot | 'active'
- :automation_bot | 'active'
+ :service_user | 'deactivated'
+ :ghost | 'active'
+ :project_bot | 'active'
+ :migration_bot | 'active'
+ :security_bot | 'active'
+ :automation_bot | 'active'
end
+
with_them do
it 'deactivates certain user types' do
user = create(:user, user_type: user_type, state: :active, last_activity_on: Gitlab::CurrentSettings.deactivate_dormant_users_period.days.ago.to_date)
worker.perform
- expect(user.reload.state).to eq(expected_state)
+ if expected_state == 'deactivated'
+ expect(deactivation_service).to have_received(:execute).with(user)
+ else
+ expect(deactivation_service).not_to have_received(:execute).with(user)
+ end
end
end
@@ -61,22 +71,14 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
worker.perform
- expect(human_user.reload.state).to eq('blocked')
- expect(service_user.reload.state).to eq('blocked')
+ expect(deactivation_service).not_to have_received(:execute).with(human_user)
+ expect(deactivation_service).not_to have_received(:execute).with(service_user)
end
it 'does not deactivate recently created users' do
worker.perform
- expect(inactive_recently_created.reload.state).to eq('active')
- end
-
- it 'triggers update of highest user role for deactivated users', :clean_gitlab_redis_shared_state do
- [dormant, inactive].each do |user|
- expect(UpdateHighestRoleWorker).to receive(:perform_in).with(anything, user.id)
- end
-
- worker.perform
+ expect(deactivation_service).not_to have_received(:execute).with(inactive_recently_created)
end
end
@@ -88,8 +90,7 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
it 'does nothing' do
worker.perform
- expect(User.dormant.count).to eq(1)
- expect(User.with_no_activity.count).to eq(1)
+ expect(deactivation_service).not_to have_received(:execute)
end
end
end
diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb
index be43b83ec0a..cd58dd93b80 100644
--- a/spec/workers/web_hook_worker_spec.rb
+++ b/spec/workers/web_hook_worker_spec.rb
@@ -7,10 +7,14 @@ RSpec.describe WebHookWorker, feature_category: :integrations do
let_it_be(:project_hook) { create(:project_hook) }
let_it_be(:data) { { foo: 'bar' } }
let_it_be(:hook_name) { 'push_hooks' }
+ let_it_be(:response) { ServiceResponse.success }
describe '#perform' do
it 'delegates to WebHookService' do
- expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name, anything).to receive(:execute)
+ expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name, anything)
+ .to receive(:execute).and_return(response)
+ expect(subject).to receive(:log_extra_metadata_on_done).with(:response_status, response.status)
+ expect(subject).to receive(:log_extra_metadata_on_done).with(:http_status, response[:http_status])
subject.perform(project_hook.id, data, hook_name)
end
@@ -23,7 +27,11 @@ RSpec.describe WebHookWorker, feature_category: :integrations do
uuid = SecureRandom.uuid
params = { recursion_detection_request_uuid: uuid }
- expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name, anything).to receive(:execute)
+ expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name, anything)
+ .to receive(:execute).and_return(response)
+ expect(subject).to receive(:log_extra_metadata_on_done).with(:response_status, response.status)
+ expect(subject).to receive(:log_extra_metadata_on_done).with(:http_status, response[:http_status])
+
expect { subject.perform(project_hook.id, data, hook_name, params) }
.to change { Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid }.to(uuid)
end