Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-12-19 14:01:45 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2023-12-19 14:01:45 +0300
commit9297025d0b7ddf095eb618dfaaab2ff8f2018d8b (patch)
tree865198c01d1824a9b098127baa3ab980c9cd2c06 /spec
parent6372471f43ee03c05a7c1f8b0c6ac6b8a7431dbe (diff)
Add latest changes from gitlab-org/gitlab@16-7-stable-eev16.7.0-rc42
Diffstat (limited to 'spec')
-rw-r--r--spec/channels/application_cable/connection_spec.rb10
-rw-r--r--spec/click_house/migration_support/migration_context_spec.rb233
-rw-r--r--spec/commands/sidekiq_cluster/cli_spec.rb6
-rw-r--r--spec/components/pajamas/banner_component_spec.rb19
-rw-r--r--spec/components/previews/pajamas/banner_component_preview.rb3
-rw-r--r--spec/components/projects/ml/models_index_component_spec.rb2
-rw-r--r--spec/components/projects/ml/show_ml_model_component_spec.rb37
-rw-r--r--spec/components/projects/ml/show_ml_model_version_component_spec.rb35
-rw-r--r--spec/controllers/activity_pub/projects/releases_controller_spec.rb186
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb41
-rw-r--r--spec/controllers/admin/runner_projects_controller_spec.rb2
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb2
-rw-r--r--spec/controllers/application_controller_spec.rb4
-rw-r--r--spec/controllers/concerns/autocomplete_sources/expires_in_spec.rb69
-rw-r--r--spec/controllers/concerns/product_analytics_tracking_spec.rb27
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb80
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb2
-rw-r--r--spec/controllers/groups_controller_spec.rb2
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb2
-rw-r--r--spec/controllers/ldap/omniauth_callbacks_controller_spec.rb2
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb2
-rw-r--r--spec/controllers/profiles/preferences_controller_spec.rb4
-rw-r--r--spec/controllers/profiles_controller_spec.rb24
-rw-r--r--spec/controllers/projects/deploy_keys_controller_spec.rb104
-rw-r--r--spec/controllers/projects/discussions_controller_spec.rb26
-rw-r--r--spec/controllers/projects/group_links_controller_spec.rb44
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb16
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb20
-rw-r--r--spec/controllers/projects/merge_requests/drafts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb232
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb4
-rw-r--r--spec/controllers/projects/runner_projects_controller_spec.rb2
-rw-r--r--spec/controllers/projects/runners_controller_spec.rb2
-rw-r--r--spec/controllers/projects_controller_spec.rb68
-rw-r--r--spec/controllers/search_controller_spec.rb21
-rw-r--r--spec/controllers/user_settings/active_sessions_controller_spec.rb (renamed from spec/controllers/profiles/active_sessions_controller_spec.rb)2
-rw-r--r--spec/controllers/user_settings/personal_access_tokens_controller_spec.rb (renamed from spec/controllers/profiles/personal_access_tokens_controller_spec.rb)4
-rw-r--r--spec/db/docs_spec.rb4
-rw-r--r--spec/db/schema_spec.rb13
-rw-r--r--spec/experiments/application_experiment_spec.rb35
-rw-r--r--spec/factories/abuse_reports.rb2
-rw-r--r--spec/factories/achievements/achievements.rb4
-rw-r--r--spec/factories/ci/builds.rb6
-rw-r--r--spec/factories/ci/catalog/resources.rb4
-rw-r--r--spec/factories/ci/catalog/resources/sync_events.rb8
-rw-r--r--spec/factories/ci/pipelines.rb4
-rw-r--r--spec/factories/ci/processable.rb10
-rw-r--r--spec/factories/ci/reports/sbom/sources.rb53
-rw-r--r--spec/factories/container_registry/protection/rules.rb2
-rw-r--r--spec/factories/deploy_tokens.rb2
-rw-r--r--spec/factories/deployments.rb4
-rw-r--r--spec/factories/environments.rb8
-rw-r--r--spec/factories/gitaly/commit.rb10
-rw-r--r--spec/factories/integrations.rb6
-rw-r--r--spec/factories/keys.rb2
-rw-r--r--spec/factories/ml/candidates.rb17
-rw-r--r--spec/factories/ml/experiments.rb8
-rw-r--r--spec/factories/ml/model_versions.rb5
-rw-r--r--spec/factories/ml/models.rb4
-rw-r--r--spec/factories/namespace_package_settings.rb2
-rw-r--r--spec/factories/organizations/organization_details.rb10
-rw-r--r--spec/factories/packages/nuget/symbol.rb6
-rw-r--r--spec/factories/personal_access_tokens.rb6
-rw-r--r--spec/factories/product_analytics_event.rb24
-rw-r--r--spec/factories/project_feature_usage.rb4
-rw-r--r--spec/factories/project_group_links.rb1
-rw-r--r--spec/factories/projects.rb54
-rw-r--r--spec/factories/releases.rb10
-rw-r--r--spec/factories/topics.rb4
-rw-r--r--spec/factories/uploads.rb2
-rw-r--r--spec/factories/user_custom_attributes.rb5
-rw-r--r--spec/factories/user_preferences.rb1
-rw-r--r--spec/factories/users.rb4
-rw-r--r--spec/factories/users/phone_number_validations.rb4
-rw-r--r--spec/features/admin/admin_abuse_reports_spec.rb18
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb4
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb2
-rw-r--r--spec/features/admin/admin_mode_spec.rb2
-rw-r--r--spec/features/admin/admin_runners_spec.rb34
-rw-r--r--spec/features/admin/admin_settings_spec.rb9
-rw-r--r--spec/features/admin/users/user_spec.rb2
-rw-r--r--spec/features/boards/boards_spec.rb2
-rw-r--r--spec/features/boards/user_adds_lists_to_board_spec.rb2
-rw-r--r--spec/features/clusters/create_agent_spec.rb2
-rw-r--r--spec/features/dashboard/issues_filter_spec.rb3
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb20
-rw-r--r--spec/features/dashboard/projects_spec.rb4
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb10
-rw-r--r--spec/features/environments/environments_folder_spec.rb142
-rw-r--r--spec/features/explore/catalog/catalog_settings_spec.rb78
-rw-r--r--spec/features/explore/catalog/catalog_spec.rb137
-rw-r--r--spec/features/explore/catalog_spec.rb80
-rw-r--r--spec/features/explore/navbar_spec.rb12
-rw-r--r--spec/features/explore/user_explores_projects_spec.rb2
-rw-r--r--spec/features/file_uploads/graphql_add_design_spec.rb2
-rw-r--r--spec/features/frequently_visited_projects_and_groups_spec.rb2
-rw-r--r--spec/features/groups/board_sidebar_spec.rb4
-rw-r--r--spec/features/groups/board_spec.rb2
-rw-r--r--spec/features/groups/clusters/user_spec.rb25
-rw-r--r--spec/features/groups/dependency_proxy_spec.rb2
-rw-r--r--spec/features/groups/group_runners_spec.rb30
-rw-r--r--spec/features/groups/group_settings_spec.rb8
-rw-r--r--spec/features/groups/import_export/connect_instance_spec.rb2
-rw-r--r--spec/features/groups/import_export/migration_history_spec.rb2
-rw-r--r--spec/features/groups/issues_spec.rb4
-rw-r--r--spec/features/groups/members/leave_group_spec.rb20
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb2
-rw-r--r--spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb2
-rw-r--r--spec/features/groups/members/request_access_spec.rb42
-rw-r--r--spec/features/groups/members/search_members_spec.rb2
-rw-r--r--spec/features/groups/members/sort_members_spec.rb2
-rw-r--r--spec/features/groups/members/tabs_spec.rb4
-rw-r--r--spec/features/groups/merge_requests_spec.rb2
-rw-r--r--spec/features/groups/milestone_spec.rb4
-rw-r--r--spec/features/groups/packages_spec.rb2
-rw-r--r--spec/features/groups/settings/ci_cd_spec.rb2
-rw-r--r--spec/features/groups/settings/group_badges_spec.rb2
-rw-r--r--spec/features/groups/settings/packages_and_registries_spec.rb8
-rw-r--r--spec/features/groups/show_spec.rb8
-rw-r--r--spec/features/groups_spec.rb8
-rw-r--r--spec/features/incidents/incident_details_spec.rb25
-rw-r--r--spec/features/incidents/incident_timeline_events_spec.rb4
-rw-r--r--spec/features/integrations_settings_spec.rb6
-rw-r--r--spec/features/invites_spec.rb111
-rw-r--r--spec/features/issuables/shortcuts_issuable_spec.rb2
-rw-r--r--spec/features/issuables/sorting_list_spec.rb4
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb6
-rw-r--r--spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb2
-rw-r--r--spec/features/issues/discussion_lock_spec.rb254
-rw-r--r--spec/features/issues/form_spec.rb22
-rw-r--r--spec/features/issues/incident_issue_spec.rb2
-rw-r--r--spec/features/issues/issue_detail_spec.rb8
-rw-r--r--spec/features/issues/service_desk_spec.rb6
-rw-r--r--spec/features/issues/user_interacts_with_awards_spec.rb12
-rw-r--r--spec/features/issues/user_sorts_issues_spec.rb4
-rw-r--r--spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb10
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_rebases_merge_request_spec.rb6
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb10
-rw-r--r--spec/features/merge_request/user_resolves_wip_mr_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb5
-rw-r--r--spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb5
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb9
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb8
-rw-r--r--spec/features/merge_requests/user_sorts_merge_requests_spec.rb6
-rw-r--r--spec/features/profile_spec.rb4
-rw-r--r--spec/features/profiles/keys_spec.rb2
-rw-r--r--spec/features/profiles/two_factor_auths_spec.rb2
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb14
-rw-r--r--spec/features/profiles/user_manages_applications_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb11
-rw-r--r--spec/features/projects/blobs/edit_spec.rb2
-rw-r--r--spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb2
-rw-r--r--spec/features/projects/branches/download_buttons_spec.rb9
-rw-r--r--spec/features/projects/branches/new_branch_ref_dropdown_spec.rb2
-rw-r--r--spec/features/projects/branches/user_creates_branch_spec.rb2
-rw-r--r--spec/features/projects/branches/user_deletes_branch_spec.rb2
-rw-r--r--spec/features/projects/branches/user_views_branches_spec.rb2
-rw-r--r--spec/features/projects/branches_spec.rb2
-rw-r--r--spec/features/projects/cluster_agents_spec.rb4
-rw-r--r--spec/features/projects/clusters/user_spec.rb19
-rw-r--r--spec/features/projects/clusters_spec.rb2
-rw-r--r--spec/features/projects/commits/user_browses_commits_spec.rb4
-rw-r--r--spec/features/projects/environments/environment_spec.rb2
-rw-r--r--spec/features/projects/environments/environments_spec.rb16
-rw-r--r--spec/features/projects/files/download_buttons_spec.rb12
-rw-r--r--spec/features/projects/files/find_file_keyboard_spec.rb2
-rw-r--r--spec/features/projects/files/project_owner_creates_license_file_spec.rb2
-rw-r--r--spec/features/projects/files/template_selector_menu_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_lfs_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_creates_directory_spec.rb2
-rw-r--r--spec/features/projects/files/user_creates_files_spec.rb7
-rw-r--r--spec/features/projects/files/user_deletes_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_edits_files_spec.rb10
-rw-r--r--spec/features/projects/files/user_find_file_spec.rb2
-rw-r--r--spec/features/projects/files/user_reads_pipeline_status_spec.rb2
-rw-r--r--spec/features/projects/files/user_replaces_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_searches_for_files_spec.rb2
-rw-r--r--spec/features/projects/fork_spec.rb2
-rw-r--r--spec/features/projects/gfm_autocomplete_load_spec.rb2
-rw-r--r--spec/features/projects/graph_spec.rb2
-rw-r--r--spec/features/projects/infrastructure_registry_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_issue_tracker_spec.rb6
-rw-r--r--spec/features/projects/integrations/user_activates_jira_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb2
-rw-r--r--spec/features/projects/issuable_templates_spec.rb2
-rw-r--r--spec/features/projects/jobs/permissions_spec.rb5
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb2
-rw-r--r--spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb9
-rw-r--r--spec/features/projects/members/manage_groups_spec.rb2
-rw-r--r--spec/features/projects/members/manage_members_spec.rb11
-rw-r--r--spec/features/projects/members/member_leaves_project_spec.rb7
-rw-r--r--spec/features/projects/members/user_requests_access_spec.rb25
-rw-r--r--spec/features/projects/new_project_spec.rb20
-rw-r--r--spec/features/projects/packages_spec.rb2
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb2
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb60
-rw-r--r--spec/features/projects/settings/branch_names_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/forked_project_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/merge_requests_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/monitor_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb2
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/secure_files_spec.rb2
-rw-r--r--spec/features/projects/settings/service_desk_setting_spec.rb6
-rw-r--r--spec/features/projects/settings/user_changes_default_branch_spec.rb2
-rw-r--r--spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb2
-rw-r--r--spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/webhooks_settings_spec.rb2
-rw-r--r--spec/features/projects/show/download_buttons_spec.rb18
-rw-r--r--spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_git_instructions_spec.rb1
-rw-r--r--spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb3
-rw-r--r--spec/features/projects/tags/download_buttons_spec.rb12
-rw-r--r--spec/features/projects/tags/user_views_tags_spec.rb3
-rw-r--r--spec/features/projects/terraform_spec.rb2
-rw-r--r--spec/features/projects/user_creates_project_spec.rb39
-rw-r--r--spec/features/projects/user_sees_sidebar_spec.rb2
-rw-r--r--spec/features/projects/work_items/work_item_children_spec.rb2
-rw-r--r--spec/features/projects_spec.rb10
-rw-r--r--spec/features/registrations/oauth_registration_spec.rb4
-rw-r--r--spec/features/registrations/registration_spec.rb29
-rw-r--r--spec/features/runners_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb55
-rw-r--r--spec/features/search/user_searches_for_issues_spec.rb6
-rw-r--r--spec/features/search/user_searches_for_merge_requests_spec.rb6
-rw-r--r--spec/features/search/user_searches_for_milestones_spec.rb9
-rw-r--r--spec/features/search/user_searches_for_projects_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_wiki_pages_spec.rb6
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb2
-rw-r--r--spec/features/search/user_uses_search_filters_spec.rb34
-rw-r--r--spec/features/uploads/user_uploads_avatar_to_group_spec.rb2
-rw-r--r--spec/features/uploads/user_uploads_avatar_to_profile_spec.rb6
-rw-r--r--spec/features/user_sees_active_nav_items_spec.rb2
-rw-r--r--spec/features/user_settings/active_sessions_spec.rb (renamed from spec/features/profiles/active_sessions_spec.rb)6
-rw-r--r--spec/features/user_settings/password_spec.rb (renamed from spec/features/profiles/password_spec.rb)44
-rw-r--r--spec/features/user_settings/personal_access_tokens_spec.rb (renamed from spec/features/profiles/personal_access_tokens_spec.rb)32
-rw-r--r--spec/features/user_sorts_things_spec.rb17
-rw-r--r--spec/features/users/login_spec.rb11
-rw-r--r--spec/features/users/show_spec.rb4
-rw-r--r--spec/features/users/signup_spec.rb33
-rw-r--r--spec/finders/branches_finder_spec.rb59
-rw-r--r--spec/finders/ci/catalog/resources/versions_finder_spec.rb23
-rw-r--r--spec/finders/ci/runners_finder_spec.rb2
-rw-r--r--spec/finders/concerns/packages/finder_helper_spec.rb109
-rw-r--r--spec/finders/deploy_keys/deploy_keys_finder_spec.rb78
-rw-r--r--spec/finders/groups/custom_emoji_finder_spec.rb65
-rw-r--r--spec/finders/groups_finder_spec.rb32
-rw-r--r--spec/finders/members_finder_spec.rb21
-rw-r--r--spec/finders/milestones_finder_spec.rb11
-rw-r--r--spec/finders/organizations/groups_finder_spec.rb84
-rw-r--r--spec/finders/packages/maven/package_finder_spec.rb24
-rw-r--r--spec/finders/packages/pypi/packages_finder_spec.rb2
-rw-r--r--spec/finders/projects/ml/model_finder_spec.rb3
-rw-r--r--spec/finders/projects_finder_spec.rb13
-rw-r--r--spec/finders/releases_finder_spec.rb15
-rw-r--r--spec/finders/repositories/tree_finder_spec.rb56
-rw-r--r--spec/finders/tags_finder_spec.rb60
-rw-r--r--spec/finders/timelogs/timelogs_finder_spec.rb172
-rw-r--r--spec/fixtures/achievements.yml10
-rw-r--r--spec/fixtures/api/schemas/entities/diff_viewer.json6
-rw-r--r--spec/fixtures/api/schemas/graphql/container_repository.json46
-rw-r--r--spec/fixtures/api/schemas/graphql/container_repository_details.json31
-rw-r--r--spec/fixtures/api/schemas/group_link/group_group_link.json36
-rw-r--r--spec/fixtures/api/schemas/group_link/group_link.json12
-rw-r--r--spec/fixtures/api/schemas/group_link/project_group_link.json31
-rw-r--r--spec/fixtures/api/schemas/ml/get_model.json6
-rw-r--r--spec/fixtures/api/schemas/ml/get_model_version.json75
-rw-r--r--spec/fixtures/api/schemas/ml/list_models.json53
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/project_hook.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json33
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json33
-rw-r--r--spec/fixtures/click_house/migrations/migrations_over_multiple_databases/1_create_some_table_on_main_db.rb15
-rw-r--r--spec/fixtures/click_house/migrations/migrations_over_multiple_databases/2_create_some_table_on_another_db.rb16
-rw-r--r--spec/fixtures/click_house/migrations/migrations_over_multiple_databases/3_change_some_table_on_main_db.rb11
-rw-r--r--spec/fixtures/click_house/migrations/table_creation_with_down_method/2_create_another_table.rb (renamed from spec/fixtures/click_house/migrations/plain_table_creation_on_invalid_database/1_create_some_table.rb)12
-rw-r--r--spec/fixtures/csv_complex.csv4
-rw-r--r--spec/fixtures/csv_gitlab_export.csv2
-rw-r--r--spec/fixtures/importers/bitbucket_server/activities.json2246
-rw-r--r--spec/fixtures/scripts/internal_events/events/ee_event_without_identifiers.yml14
-rw-r--r--spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml20
-rw-r--r--spec/fixtures/scripts/internal_events/events/keyboard_smashed_event.yml20
-rw-r--r--spec/fixtures/scripts/internal_events/events/secondary_event_with_identifiers.yml20
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml25
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml25
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml25
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml28
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml28
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml31
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml31
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/total_single_event.yml27
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml28
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml28
-rw-r--r--spec/fixtures/scripts/internal_events/new_events.yml183
-rw-r--r--spec/fixtures/scripts/internal_events/new_metrics.yml196
-rw-r--r--spec/fixtures/scripts/internal_events/stages.yml78
-rw-r--r--spec/frontend/__helpers__/mock_observability_client.js1
-rw-r--r--spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap2
-rw-r--r--spec/frontend/access_tokens/components/access_token_table_app_spec.js10
-rw-r--r--spec/frontend/access_tokens/components/new_access_token_app_spec.js14
-rw-r--r--spec/frontend/admin/abuse_report/components/abuse_report_notes_spec.js12
-rw-r--r--spec/frontend/admin/abuse_report/components/notes/abuse_report_add_note_spec.js227
-rw-r--r--spec/frontend/admin/abuse_report/components/notes/abuse_report_comment_form_spec.js214
-rw-r--r--spec/frontend/admin/abuse_report/components/notes/abuse_report_discussion_spec.js72
-rw-r--r--spec/frontend/admin/abuse_report/components/notes/abuse_report_edit_note_spec.js129
-rw-r--r--spec/frontend/admin/abuse_report/components/notes/abuse_report_note_actions_spec.js79
-rw-r--r--spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js126
-rw-r--r--spec/frontend/admin/abuse_report/mock_data.js90
-rw-r--r--spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js4
-rw-r--r--spec/frontend/analytics/cycle_analytics/mock_data.js12
-rw-r--r--spec/frontend/analytics/cycle_analytics/store/actions_spec.js14
-rw-r--r--spec/frontend/analytics/cycle_analytics/utils_spec.js14
-rw-r--r--spec/frontend/api/user_api_spec.js28
-rw-r--r--spec/frontend/authentication/password/components/password_input_spec.js1
-rw-r--r--spec/frontend/badges/store/actions_spec.js12
-rw-r--r--spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js12
-rw-r--r--spec/frontend/behaviors/shortcuts/shortcuts_spec.js (renamed from spec/frontend/shortcuts_spec.js)124
-rw-r--r--spec/frontend/blob_edit/edit_blob_spec.js29
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js42
-rw-r--r--spec/frontend/boards/board_list_helper.js61
-rw-r--r--spec/frontend/boards/board_list_spec.js35
-rw-r--r--spec/frontend/boards/components/board_add_new_column_spec.js193
-rw-r--r--spec/frontend/boards/components/board_app_spec.js87
-rw-r--r--spec/frontend/boards/components/board_card_move_to_position_spec.js69
-rw-r--r--spec/frontend/boards/components/board_card_spec.js101
-rw-r--r--spec/frontend/boards/components/board_column_spec.js52
-rw-r--r--spec/frontend/boards/components/board_content_sidebar_spec.js79
-rw-r--r--spec/frontend/boards/components/board_content_spec.js139
-rw-r--r--spec/frontend/boards/components/board_filtered_search_spec.js73
-rw-r--r--spec/frontend/boards/components/board_form_spec.js74
-rw-r--r--spec/frontend/boards/components/board_list_header_spec.js137
-rw-r--r--spec/frontend/boards/components/board_new_issue_spec.js117
-rw-r--r--spec/frontend/boards/components/board_settings_sidebar_spec.js113
-rw-r--r--spec/frontend/boards/components/board_top_bar_spec.js83
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js29
-rw-r--r--spec/frontend/boards/components/issue_board_filtered_search_spec.js3
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js93
-rw-r--r--spec/frontend/boards/mock_data.js71
-rw-r--r--spec/frontend/boards/stores/actions_spec.js58
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js672
-rw-r--r--spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js20
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js16
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js2
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js8
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js3
-rw-r--r--spec/frontend/ci/catalog/components/list/catalog_header_spec.js18
-rw-r--r--spec/frontend/ci/catalog/components/list/catalog_search_spec.js103
-rw-r--r--spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js118
-rw-r--r--spec/frontend/ci/catalog/components/list/empty_state_spec.js64
-rw-r--r--spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js8
-rw-r--r--spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js118
-rw-r--r--spec/frontend/ci/catalog/mock.js194
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js195
-rw-r--r--spec/frontend/ci/common/pipelines_table_spec.js2
-rw-r--r--spec/frontend/ci/job_details/components/job_header_spec.js2
-rw-r--r--spec/frontend/ci/job_details/components/job_log_controllers_spec.js76
-rw-r--r--spec/frontend/ci/job_details/components/log/collapsible_section_spec.js103
-rw-r--r--spec/frontend/ci/job_details/components/log/line_header_spec.js8
-rw-r--r--spec/frontend/ci/job_details/components/log/log_spec.js96
-rw-r--r--spec/frontend/ci/job_details/components/log/mock_data.js243
-rw-r--r--spec/frontend/ci/job_details/components/manual_variables_form_spec.js31
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/job_container_item_spec.js2
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js37
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/stages_dropdown_spec.js2
-rw-r--r--spec/frontend/ci/job_details/job_app_spec.js10
-rw-r--r--spec/frontend/ci/job_details/store/actions_spec.js252
-rw-r--r--spec/frontend/ci/job_details/store/mutations_spec.js56
-rw-r--r--spec/frontend/ci/job_details/store/utils_spec.js683
-rw-r--r--spec/frontend/ci/jobs_mock_data.js50
-rw-r--r--spec/frontend/ci/jobs_page/components/job_cells/actions_cell_spec.js2
-rw-r--r--spec/frontend/ci/jobs_page/components/jobs_table_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/job_name_component_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_details/header/pipeline_details_header_spec.js130
-rw-r--r--spec/frontend/ci/pipeline_details/mock_data.js3
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/stores/utils_spec.js28
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js22
-rw-r--r--spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js45
-rw-r--r--spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js1
-rw-r--r--spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js15
-rw-r--r--spec/frontend/ci/pipeline_editor/mock_data.js3
-rw-r--r--spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js23
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js2
-rw-r--r--spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js47
-rw-r--r--spec/frontend/ci/pipelines_page/pipelines_spec.js1
-rw-r--r--spec/frontend/ci/reports/codequality_report/store/actions_spec.js190
-rw-r--r--spec/frontend/ci/reports/codequality_report/store/getters_spec.js94
-rw-r--r--spec/frontend/ci/reports/codequality_report/store/mutations_spec.js100
-rw-r--r--spec/frontend/ci/reports/codequality_report/utils/codequality_parser_spec.js (renamed from spec/frontend/ci/reports/codequality_report/store/utils/codequality_parser_spec.js)2
-rw-r--r--spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js6
-rw-r--r--spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js21
-rw-r--r--spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js27
-rw-r--r--spec/frontend/ci/runner/components/runner_job_count_spec.js74
-rw-r--r--spec/frontend/ci/runner/components/runner_managers_detail_spec.js8
-rw-r--r--spec/frontend/ci/runner/group_runners/group_runners_app_spec.js6
-rw-r--r--spec/frontend/ci/runner/mock_data.js10
-rw-r--r--spec/frontend/clusters/agents/components/integration_status_spec.js4
-rw-r--r--spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap2
-rw-r--r--spec/frontend/commit/commit_pipeline_status_spec.js2
-rw-r--r--spec/frontend/commit/components/commit_box_pipeline_status_spec.js2
-rw-r--r--spec/frontend/commons/nav/user_merge_requests_spec.js154
-rw-r--r--spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap2
-rw-r--r--spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js1
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js14
-rw-r--r--spec/frontend/content_editor/components/suggestions_dropdown_spec.js188
-rw-r--r--spec/frontend/content_editor/components/wrappers/code_block_spec.js7
-rw-r--r--spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js3
-rw-r--r--spec/frontend/content_editor/extensions/copy_paste_spec.js19
-rw-r--r--spec/frontend/content_editor/extensions/reference_spec.js56
-rw-r--r--spec/frontend/content_editor/services/__snapshots__/data_source_factory_spec.js.snap256
-rw-r--r--spec/frontend/content_editor/services/asset_resolver_spec.js55
-rw-r--r--spec/frontend/content_editor/services/autocomplete_mock_data.js967
-rw-r--r--spec/frontend/content_editor/services/data_source_factory_spec.js202
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js107
-rw-r--r--spec/frontend/content_editor/services/markdown_sourcemap_spec.js109
-rw-r--r--spec/frontend/content_editor/test_constants.js15
-rw-r--r--spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap60
-rw-r--r--spec/frontend/contributors/component/contributor_area_chart_spec.js92
-rw-r--r--spec/frontend/contributors/component/contributors_spec.js2
-rw-r--r--spec/frontend/deploy_freeze/store/actions_spec.js8
-rw-r--r--spec/frontend/deploy_keys/components/key_spec.js6
-rw-r--r--spec/frontend/deploy_keys/graphql/resolvers_spec.js249
-rw-r--r--spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap4
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js24
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js6
-rw-r--r--spec/frontend/diffs/components/merge_conflict_warning_spec.js58
-rw-r--r--spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap225
-rw-r--r--spec/frontend/diffs/components/shared/findings_drawer_spec.js118
-rw-r--r--spec/frontend/diffs/mock_data/findings_drawer.js51
-rw-r--r--spec/frontend/diffs/mock_data/inline_findings.js10
-rw-r--r--spec/frontend/diffs/store/actions_spec.js54
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js11
-rw-r--r--spec/frontend/dropzone_input_spec.js40
-rw-r--r--spec/frontend/editor/schema/ci/ci_schema_spec.js10
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml4
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml38
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml14
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml4
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml4
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml41
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml7
-rw-r--r--spec/frontend/editor/source_editor_security_policy_schema_ext_spec.js181
-rw-r--r--spec/frontend/emoji/components/emoji_group_spec.js15
-rw-r--r--spec/frontend/emoji/index_spec.js27
-rw-r--r--spec/frontend/environments/deploy_board_wrapper_spec.js4
-rw-r--r--spec/frontend/environments/deployment_spec.js10
-rw-r--r--spec/frontend/environments/environment_flux_resource_selector_spec.js2
-rw-r--r--spec/frontend/environments/environment_folder_spec.js4
-rw-r--r--spec/frontend/environments/environment_form_spec.js168
-rw-r--r--spec/frontend/environments/environment_namespace_selector_spec.js217
-rw-r--r--spec/frontend/environments/folder/environments_folder_app_spec.js131
-rw-r--r--spec/frontend/environments/folder/environments_folder_view_spec.js5
-rw-r--r--spec/frontend/environments/graphql/mock_data.js157
-rw-r--r--spec/frontend/environments/graphql/resolvers/base_spec.js34
-rw-r--r--spec/frontend/environments/graphql/resolvers/kubernetes_spec.js297
-rw-r--r--spec/frontend/environments/helpers/k8s_integration_helper_spec.js225
-rw-r--r--spec/frontend/environments/kubernetes_overview_spec.js23
-rw-r--r--spec/frontend/environments/kubernetes_pods_spec.js57
-rw-r--r--spec/frontend/environments/kubernetes_summary_spec.js23
-rw-r--r--spec/frontend/environments/kubernetes_tabs_spec.js7
-rw-r--r--spec/frontend/environments/new_environment_item_spec.js10
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js2
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js30
-rw-r--r--spec/frontend/error_tracking/store/list/actions_spec.js12
-rw-r--r--spec/frontend/feature_flags/mock_data.js2
-rw-r--r--spec/frontend/feature_highlight/feature_highlight_helper_spec.js42
-rw-r--r--spec/frontend/feature_highlight/feature_highlight_popover_spec.js75
-rw-r--r--spec/frontend/filtered_search/add_extra_tokens_for_merge_requests_spec.js30
-rw-r--r--spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js22
-rw-r--r--spec/frontend/fixtures/deploy_keys.rb42
-rw-r--r--spec/frontend/fixtures/pipeline_header.rb35
-rw-r--r--spec/frontend/fixtures/runner.rb2
-rw-r--r--spec/frontend/fixtures/static/whats_new_notification.html7
-rw-r--r--spec/frontend/frequent_items/components/app_spec.js286
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_item_spec.js161
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_spec.js121
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_search_input_spec.js74
-rw-r--r--spec/frontend/frequent_items/mock_data.js169
-rw-r--r--spec/frontend/frequent_items/store/actions_spec.js304
-rw-r--r--spec/frontend/frequent_items/store/getters_spec.js24
-rw-r--r--spec/frontend/frequent_items/store/mutations_spec.js152
-rw-r--r--spec/frontend/frequent_items/utils_spec.js131
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js80
-rw-r--r--spec/frontend/groups/components/app_spec.js6
-rw-r--r--spec/frontend/groups/components/groups_spec.js13
-rw-r--r--spec/frontend/groups/service/archived_projects_service_spec.js2
-rw-r--r--spec/frontend/groups_projects/components/more_actions_dropdown_spec.js173
-rw-r--r--spec/frontend/header_search/components/app_spec.js517
-rw-r--r--spec/frontend/header_search/components/header_search_autocomplete_items_spec.js236
-rw-r--r--spec/frontend/header_search/components/header_search_default_items_spec.js103
-rw-r--r--spec/frontend/header_search/components/header_search_scoped_items_spec.js121
-rw-r--r--spec/frontend/header_search/init_spec.js54
-rw-r--r--spec/frontend/header_search/mock_data.js400
-rw-r--r--spec/frontend/header_search/store/actions_spec.js113
-rw-r--r--spec/frontend/header_search/store/getters_spec.js333
-rw-r--r--spec/frontend/header_search/store/mutations_spec.js63
-rw-r--r--spec/frontend/header_spec.js107
-rw-r--r--spec/frontend/ide/components/ide_sidebar_nav_spec.js2
-rw-r--r--spec/frontend/ide/components/ide_status_bar_spec.js6
-rw-r--r--spec/frontend/ide/components/panes/collapsible_sidebar_spec.js68
-rw-r--r--spec/frontend/ide/components/pipelines/list_spec.js2
-rw-r--r--spec/frontend/ide/components/repo_commit_section_spec.js7
-rw-r--r--spec/frontend/ide/init_gitlab_web_ide_spec.js28
-rw-r--r--spec/frontend/ide/lib/gitlab_web_ide/get_oauth_config_spec.js16
-rw-r--r--spec/frontend/ide/mock_data.js1
-rw-r--r--spec/frontend/ide/mount_oauth_callback_spec.js53
-rw-r--r--spec/frontend/ide/stores/modules/editor/actions_spec.js6
-rw-r--r--spec/frontend/import/details/components/bulk_import_details_app_spec.js14
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_history_link_spec.js34
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js92
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js4
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/fixtures.js3
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/services/local_storage_cache_spec.js4
-rw-r--r--spec/frontend/import_entities/import_groups/utils_spec.js2
-rw-r--r--spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js37
-rw-r--r--spec/frontend/import_entities/import_projects/store/actions_spec.js39
-rw-r--r--spec/frontend/import_entities/import_projects/store/mutations_spec.js10
-rw-r--r--spec/frontend/integrations/edit/components/dynamic_field_spec.js2
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_actions_spec.js2
-rw-r--r--spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js18
-rw-r--r--spec/frontend/integrations/edit/components/trigger_field_spec.js17
-rw-r--r--spec/frontend/integrations/edit/components/trigger_fields_spec.js17
-rw-r--r--spec/frontend/invite_members/components/invite_groups_modal_spec.js10
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js17
-rw-r--r--spec/frontend/invite_members/components/invite_members_trigger_spec.js13
-rw-r--r--spec/frontend/invite_members/components/invite_modal_base_spec.js55
-rw-r--r--spec/frontend/invite_members/components/members_token_select_spec.js94
-rw-r--r--spec/frontend/invite_members/mock_data/member_modal.js3
-rw-r--r--spec/frontend/invite_members/mock_data/modal_base.js2
-rw-r--r--spec/frontend/issuable/popover/components/mr_popover_spec.js2
-rw-r--r--spec/frontend/issues/dashboard/components/index_spec.js18
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js8
-rw-r--r--spec/frontend/issues/list/mock_data.js33
-rw-r--r--spec/frontend/issues/list/utils_spec.js13
-rw-r--r--spec/frontend/issues/show/components/app_spec.js27
-rw-r--r--spec/frontend/issues/show/components/header_actions_spec.js18
-rw-r--r--spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_gitlab_multiversion/setup_instructions_spec.js9
-rw-r--r--spec/frontend/kubernetes_dashboard/components/page_title_spec.js35
-rw-r--r--spec/frontend/kubernetes_dashboard/components/workload_details_item_spec.js34
-rw-r--r--spec/frontend/kubernetes_dashboard/components/workload_details_spec.js53
-rw-r--r--spec/frontend/kubernetes_dashboard/components/workload_layout_spec.js141
-rw-r--r--spec/frontend/kubernetes_dashboard/components/workload_stats_spec.js43
-rw-r--r--spec/frontend/kubernetes_dashboard/components/workload_table_spec.js128
-rw-r--r--spec/frontend/kubernetes_dashboard/graphql/mock_data.js353
-rw-r--r--spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js459
-rw-r--r--spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js93
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/app_spec.js40
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/daemon_sets_page_spec.js106
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/deployments_page_spec.js106
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/pods_page_spec.js102
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/replica_sets_page_spec.js106
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/stateful_sets_page_spec.js106
-rw-r--r--spec/frontend/lib/utils/breadcrumbs_spec.js22
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js2
-rw-r--r--spec/frontend/lib/utils/datetime/date_format_utility_spec.js12
-rw-r--r--spec/frontend/lib/utils/datetime/locale_dateformat_spec.js177
-rw-r--r--spec/frontend/lib/utils/datetime/timeago_utility_spec.js6
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js15
-rw-r--r--spec/frontend/lib/utils/secret_detection_spec.js1
-rw-r--r--spec/frontend/lib/utils/vuex_module_mappers_spec.js133
-rw-r--r--spec/frontend/loading_icon_for_legacy_js_spec.js4
-rw-r--r--spec/frontend/logo_spec.js55
-rw-r--r--spec/frontend/members/components/table/max_role_spec.js (renamed from spec/frontend/members/components/table/role_dropdown_spec.js)93
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js18
-rw-r--r--spec/frontend/members/mock_data.js1
-rw-r--r--spec/frontend/members/store/actions_spec.js29
-rw-r--r--spec/frontend/members/store/mutations_spec.js13
-rw-r--r--spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js17
-rw-r--r--spec/frontend/merge_conflicts/store/actions_spec.js10
-rw-r--r--spec/frontend/milestones/stores/actions_spec.js29
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js203
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js41
-rw-r--r--spec/frontend/ml/model_registry/apps/index_ml_models_spec.js20
-rw-r--r--spec/frontend/ml/model_registry/apps/show_ml_model_spec.js45
-rw-r--r--spec/frontend/ml/model_registry/apps/show_ml_model_version_spec.js16
-rw-r--r--spec/frontend/ml/model_registry/components/candidate_detail_row_spec.js (renamed from spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js)2
-rw-r--r--spec/frontend/ml/model_registry/components/candidate_detail_spec.js191
-rw-r--r--spec/frontend/ml/model_registry/components/candidate_list_row_spec.js39
-rw-r--r--spec/frontend/ml/model_registry/components/candidate_list_spec.js182
-rw-r--r--spec/frontend/ml/model_registry/components/empty_state_spec.js47
-rw-r--r--spec/frontend/ml/model_registry/components/model_version_detail_spec.js66
-rw-r--r--spec/frontend/ml/model_registry/components/model_version_list_spec.js184
-rw-r--r--spec/frontend/ml/model_registry/components/model_version_row_spec.js37
-rw-r--r--spec/frontend/ml/model_registry/graphql_mock_data.js116
-rw-r--r--spec/frontend/ml/model_registry/mock_data.js58
-rw-r--r--spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js29
-rw-r--r--spec/frontend/nav/components/new_nav_toggle_spec.js214
-rw-r--r--spec/frontend/nav/components/responsive_app_spec.js122
-rw-r--r--spec/frontend/nav/components/responsive_header_spec.js63
-rw-r--r--spec/frontend/nav/components/responsive_home_spec.js133
-rw-r--r--spec/frontend/nav/components/top_nav_app_spec.js68
-rw-r--r--spec/frontend/nav/components/top_nav_container_view_spec.js120
-rw-r--r--spec/frontend/nav/components/top_nav_dropdown_menu_spec.js146
-rw-r--r--spec/frontend/nav/components/top_nav_menu_item_spec.js145
-rw-r--r--spec/frontend/nav/components/top_nav_menu_sections_spec.js138
-rw-r--r--spec/frontend/nav/components/top_nav_new_dropdown_spec.js142
-rw-r--r--spec/frontend/nav/mock_data.js39
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js6
-rw-r--r--spec/frontend/notes/components/discussion_filter_spec.js49
-rw-r--r--spec/frontend/notes/components/note_actions_spec.js12
-rw-r--r--spec/frontend/notes/stores/actions_spec.js9
-rw-r--r--spec/frontend/observability/client_spec.js122
-rw-r--r--spec/frontend/organizations/index/components/app_spec.js202
-rw-r--r--spec/frontend/organizations/index/components/organizations_list_spec.js68
-rw-r--r--spec/frontend/organizations/index/components/organizations_view_spec.js28
-rw-r--r--spec/frontend/organizations/settings/general/components/advanced_settings_spec.js25
-rw-r--r--spec/frontend/organizations/settings/general/components/app_spec.js7
-rw-r--r--spec/frontend/organizations/settings/general/components/change_url_spec.js191
-rw-r--r--spec/frontend/organizations/settings/general/components/organization_settings_spec.js108
-rw-r--r--spec/frontend/organizations/shared/components/new_edit_form_spec.js28
-rw-r--r--spec/frontend/organizations/shared/components/organization_url_field_spec.js66
-rw-r--r--spec/frontend/organizations/users/components/app_spec.js84
-rw-r--r--spec/frontend/organizations/users/components/users_view_spec.js68
-rw-r--r--spec/frontend/organizations/users/mock_data.js34
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js7
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap6
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap4
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap6
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js9
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js10
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap6
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js17
-rw-r--r--spec/frontend/packages_and_registries/package_registry/mock_data.js3
-rw-r--r--spec/frontend/packages_and_registries/package_registry/pages/list_spec.js9
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js18
-rw-r--r--spec/frontend/packages_and_registries/shared/utils_spec.js17
-rw-r--r--spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js99
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js26
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js177
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/mock_data.js13
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js8
-rw-r--r--spec/frontend/pages/shared/nav/sidebar_tracking_spec.js160
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_export_spec.js48
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_more_dropdown_spec.js83
-rw-r--r--spec/frontend/persistent_user_callout_spec.js52
-rw-r--r--spec/frontend/profile/edit/components/profile_edit_app_spec.js6
-rw-r--r--spec/frontend/profile/edit/components/user_avatar_spec.js3
-rw-r--r--spec/frontend/projects/commit/store/actions_spec.js12
-rw-r--r--spec/frontend/projects/commits/store/actions_spec.js4
-rw-r--r--spec/frontend/projects/components/shared/delete_modal_spec.js2
-rw-r--r--spec/frontend/projects/settings/components/default_branch_selector_spec.js3
-rw-r--r--spec/frontend/projects/settings/components/new_access_dropdown_spec.js16
-rw-r--r--spec/frontend/projects/settings_service_desk/components/custom_email_spec.js23
-rw-r--r--spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js27
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js21
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js38
-rw-r--r--spec/frontend/read_more_spec.js8
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js28
-rw-r--r--spec/frontend/ref/stores/actions_spec.js14
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js8
-rw-r--r--spec/frontend/repository/commits_service_spec.js19
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js31
-rw-r--r--spec/frontend/repository/components/blob_controls_spec.js3
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap6
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js21
-rw-r--r--spec/frontend/repository/mixins/highlight_mixin_spec.js74
-rw-r--r--spec/frontend/search/sidebar/components/all_scopes_start_filters_spec.js28
-rw-r--r--spec/frontend/search/sidebar/components/app_spec.js121
-rw-r--r--spec/frontend/search/sidebar/components/archived_filter_spec.js10
-rw-r--r--spec/frontend/search/sidebar/components/blobs_filters_spec.js34
-rw-r--r--spec/frontend/search/sidebar/components/confidentiality_filter_spec.js20
-rw-r--r--spec/frontend/search/sidebar/components/filters_template_spec.js5
-rw-r--r--spec/frontend/search/sidebar/components/group_filter_spec.js (renamed from spec/frontend/search/topbar/components/group_filter_spec.js)34
-rw-r--r--spec/frontend/search/sidebar/components/issues_filters_spec.js71
-rw-r--r--spec/frontend/search/sidebar/components/label_filter_spec.js5
-rw-r--r--spec/frontend/search/sidebar/components/merge_requests_filters_spec.js34
-rw-r--r--spec/frontend/search/sidebar/components/project_filter_spec.js (renamed from spec/frontend/search/topbar/components/project_filter_spec.js)36
-rw-r--r--spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js145
-rw-r--r--spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js7
-rw-r--r--spec/frontend/search/sidebar/components/searchable_dropdown_spec.js117
-rw-r--r--spec/frontend/search/sidebar/components/small_screen_drawer_navigation_spec.js68
-rw-r--r--spec/frontend/search/sidebar/components/status_filter_spec.js20
-rw-r--r--spec/frontend/search/store/mutations_spec.js4
-rw-r--r--spec/frontend/search/topbar/components/app_spec.js63
-rw-r--r--spec/frontend/search/topbar/components/search_type_indicator_spec.js128
-rw-r--r--spec/frontend/search/topbar/components/searchable_dropdown_item_spec.js93
-rw-r--r--spec/frontend/search/topbar/components/searchable_dropdown_spec.js220
-rw-r--r--spec/frontend/security_configuration/components/app_spec.js6
-rw-r--r--spec/frontend/security_configuration/components/continuous_vulnerability_scan_spec.js124
-rw-r--r--spec/frontend/security_configuration/components/feature_card_spec.js20
-rw-r--r--spec/frontend/security_configuration/components/training_provider_list_spec.js7
-rw-r--r--spec/frontend/security_configuration/mock_data.js2
-rw-r--r--spec/frontend/security_configuration/utils_spec.js2
-rw-r--r--spec/frontend/set_status_modal/set_status_form_spec.js18
-rw-r--r--spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js4
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js23
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js2
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_value_spec.js25
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_widget/embedded_labels_list_spec.js16
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js10
-rw-r--r--spec/frontend/sidebar/components/reviewers/sidebar_reviewers_spec.js18
-rw-r--r--spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js27
-rw-r--r--spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js93
-rw-r--r--spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js8
-rw-r--r--spec/frontend/sidebar/sidebar_mediator_spec.js1
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap2
-rw-r--r--spec/frontend/snippets/components/snippet_blob_edit_spec.js10
-rw-r--r--spec/frontend/snippets/components/snippet_title_spec.js123
-rw-r--r--spec/frontend/snippets/test_utils.js1
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js34
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js43
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js157
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js34
-rw-r--r--spec/frontend/super_sidebar/components/help_center_spec.js8
-rw-r--r--spec/frontend/super_sidebar/components/nav_item_link_spec.js2
-rw-r--r--spec/frontend/super_sidebar/components/nav_item_router_link_spec.js4
-rw-r--r--spec/frontend/super_sidebar/components/scroll_scrim_spec.js60
-rw-r--r--spec/frontend/super_sidebar/components/super_sidebar_spec.js66
-rw-r--r--spec/frontend/super_sidebar/components/user_menu_spec.js157
-rw-r--r--spec/frontend/super_sidebar/mock_data.js51
-rw-r--r--spec/frontend/super_sidebar/user_counts_manager_spec.js30
-rw-r--r--spec/frontend/super_sidebar/utils_spec.js134
-rw-r--r--spec/frontend/task_list_spec.js56
-rw-r--r--spec/frontend/tracking/internal_events_spec.js48
-rw-r--r--spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js34
-rw-r--r--spec/frontend/user_lists/components/user_lists_table_spec.js3
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js36
-rw-r--r--spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js196
-rw-r--r--spec/frontend/vue_merge_request_widget/components/checks/rebase_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js3
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_status_icon_spec.js7
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_conflicts_spec.js173
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_merging_spec.js23
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js8
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js82
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/work_in_progress_spec.js16
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap7
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js224
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js101
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_details_spec.js103
-rw-r--r--spec/frontend/vue_shared/components/ci_icon/ci_icon_spec.js (renamed from spec/frontend/vue_shared/components/ci_icon_spec.js)2
-rw-r--r--spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/entity_select/entity_select_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/entity_select/organization_select_spec.js155
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js193
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js13
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js23
-rw-r--r--spec/frontend/vue_shared/components/keep_alive_slots_spec.js118
-rw-r--r--spec/frontend/vue_shared/components/list_selector/deploy_key_item_spec.js61
-rw-r--r--spec/frontend/vue_shared/components/list_selector/index_spec.js51
-rw-r--r--spec/frontend/vue_shared/components/markdown/header_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/metric_images/store/actions_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/number_to_human_size/number_to_human_size_spec.js47
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js35
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/mock_data.js1
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js38
-rw-r--r--spec/frontend/vue_shared/components/time_ago_tooltip_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/vuex_module_provider_spec.js39
-rw-r--r--spec/frontend/vue_shared/directives/track_event_spec.js55
-rw-r--r--spec/frontend/vue_shared/issuable/create/components/issuable_label_selector_spec.js2
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js31
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js6
-rw-r--r--spec/frontend/vue_shared/issuable/sidebar/components/issuable_sidebar_root_spec.js16
-rw-r--r--spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js37
-rw-r--r--spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js2
-rw-r--r--spec/frontend/webhooks/components/form_url_app_spec.js20
-rw-r--r--spec/frontend/whats_new/store/actions_spec.js22
-rw-r--r--spec/frontend/whats_new/utils/notification_spec.js73
-rw-r--r--spec/frontend/work_items/components/item_title_spec.js9
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_actions_spec.js5
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js82
-rw-r--r--spec/frontend/work_items/components/notes/work_item_notes_activity_header_spec.js18
-rw-r--r--spec/frontend/work_items/components/work_item_ancestors/disclosure_hierarchy_item_spec.js53
-rw-r--r--spec/frontend/work_items/components/work_item_ancestors/disclosure_hierarchy_spec.js99
-rw-r--r--spec/frontend/work_items/components/work_item_ancestors/mock_data.js197
-rw-r--r--spec/frontend/work_items/components/work_item_ancestors/work_item_ancestors_spec.js117
-rw-r--r--spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js73
-rw-r--r--spec/frontend/work_items/components/work_item_detail_modal_spec.js1
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js219
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_actions_split_button_spec.js (renamed from spec/frontend/work_items/components/work_item_links/okr_actions_split_button_spec.js)34
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js44
-rw-r--r--spec/frontend/work_items/components/work_item_notes_spec.js27
-rw-r--r--spec/frontend/work_items/components/work_item_parent_inline_spec.js (renamed from spec/frontend/work_items/components/work_item_parent_spec.js)6
-rw-r--r--spec/frontend/work_items/components/work_item_parent_with_edit_spec.js409
-rw-r--r--spec/frontend/work_items/components/work_item_state_toggle_spec.js (renamed from spec/frontend/work_items/components/work_item_state_toggle_button_spec.js)0
-rw-r--r--spec/frontend/work_items/components/work_item_sticky_header_spec.js59
-rw-r--r--spec/frontend/work_items/components/work_item_title_spec.js34
-rw-r--r--spec/frontend/work_items/mock_data.js51
-rw-r--r--spec/frontend/work_items/notes/award_utils_spec.js18
-rw-r--r--spec/frontend/work_items/pages/create_work_item_spec.js2
-rw-r--r--spec/frontend/work_items/pages/work_item_root_spec.js1
-rw-r--r--spec/frontend_integration/fly_out_nav_browser_spec.js366
-rw-r--r--spec/graphql/mutations/ci/runner/bulk_delete_spec.rb2
-rw-r--r--spec/graphql/mutations/ci/runner/delete_spec.rb2
-rw-r--r--spec/graphql/mutations/ci/runner/update_spec.rb2
-rw-r--r--spec/graphql/mutations/design_management/delete_spec.rb69
-rw-r--r--spec/graphql/mutations/namespace/package_settings/update_spec.rb9
-rw-r--r--spec/graphql/mutations/projects/star_spec.rb73
-rw-r--r--spec/graphql/resolvers/blame_resolver_spec.rb28
-rw-r--r--spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb69
-rw-r--r--spec/graphql/resolvers/ci/catalog/resources_resolver_spec.rb103
-rw-r--r--spec/graphql/resolvers/ci/catalog/versions_resolver_spec.rb66
-rw-r--r--spec/graphql/resolvers/ci/group_runners_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/project_runners_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runner_groups_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runner_job_count_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runner_status_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runners_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/container_repository_tags_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/group_milestones_resolver_spec.rb73
-rw-r--r--spec/graphql/resolvers/group_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/ml/model_detail_resolver_spec.rb41
-rw-r--r--spec/graphql/resolvers/project_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/timelog_resolver_spec.rb22
-rw-r--r--spec/graphql/types/analytics/cycle_analytics/value_stream_type_spec.rb2
-rw-r--r--spec/graphql/types/analytics/cycle_analytics/value_streams/stage_type_spec.rb15
-rw-r--r--spec/graphql/types/ci/catalog/resource_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/catalog/resources/component_type_spec.rb18
-rw-r--r--spec/graphql/types/ci/catalog/resources/components/input_type_spec.rb17
-rw-r--r--spec/graphql/types/ci/catalog/resources/version_sort_enum_spec.rb13
-rw-r--r--spec/graphql/types/ci/catalog/resources/version_type_spec.rb22
-rw-r--r--spec/graphql/types/ci/job_base_field_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_countable_connection_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_manager_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_platform_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_setup_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_upgrade_status_enum_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_web_url_edge_spec.rb2
-rw-r--r--spec/graphql/types/container_registry/protection/rule_type_spec.rb4
-rw-r--r--spec/graphql/types/container_repository_details_type_spec.rb2
-rw-r--r--spec/graphql/types/container_repository_tag_type_spec.rb6
-rw-r--r--spec/graphql/types/container_repository_type_spec.rb6
-rw-r--r--spec/graphql/types/current_user_type_spec.rb11
-rw-r--r--spec/graphql/types/group_type_spec.rb33
-rw-r--r--spec/graphql/types/issue_type_enum_spec.rb2
-rw-r--r--spec/graphql/types/ml/candidate_links_type_spec.rb11
-rw-r--r--spec/graphql/types/ml/candidate_type_spec.rb13
-rw-r--r--spec/graphql/types/ml/model_type_spec.rb13
-rw-r--r--spec/graphql/types/ml/model_version_links_type_spec.rb11
-rw-r--r--spec/graphql/types/ml/model_version_type_spec.rb13
-rw-r--r--spec/graphql/types/namespace/package_settings_type_spec.rb1
-rw-r--r--spec/graphql/types/organizations/organization_type_spec.rb2
-rw-r--r--spec/graphql/types/permission_types/abuse_report_spec.rb15
-rw-r--r--spec/graphql/types/permission_types/container_repository_spec.rb11
-rw-r--r--spec/graphql/types/permission_types/container_repository_tag_spec.rb11
-rw-r--r--spec/graphql/types/project_feature_access_level_enum_spec.rb11
-rw-r--r--spec/graphql/types/project_feature_access_level_type_spec.rb14
-rw-r--r--spec/graphql/types/project_type_spec.rb65
-rw-r--r--spec/graphql/types/projects/service_type_enum_spec.rb1
-rw-r--r--spec/graphql/types/query_type_spec.rb18
-rw-r--r--spec/graphql/types/root_storage_statistics_type_spec.rb2
-rw-r--r--spec/graphql/types/user_preferences_type_spec.rb1
-rw-r--r--spec/graphql/types/work_item_state_counts_type_spec.rb12
-rw-r--r--spec/graphql/types/work_items/widget_definition_interface_spec.rb35
-rw-r--r--spec/graphql/types/work_items/widget_definitions/assignees_type_spec.rb13
-rw-r--r--spec/graphql/types/work_items/widget_definitions/generic_type_spec.rb11
-rw-r--r--spec/helpers/application_helper_spec.rb50
-rw-r--r--spec/helpers/artifacts_helper_spec.rb3
-rw-r--r--spec/helpers/avatars_helper_spec.rb32
-rw-r--r--spec/helpers/ci/jobs_helper_spec.rb9
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb3
-rw-r--r--spec/helpers/ci/pipelines_helper_spec.rb38
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb38
-rw-r--r--spec/helpers/dashboard_helper_spec.rb20
-rw-r--r--spec/helpers/explore_helper_spec.rb8
-rw-r--r--spec/helpers/groups_helper_spec.rb41
-rw-r--r--spec/helpers/issuables_helper_spec.rb33
-rw-r--r--spec/helpers/issues_helper_spec.rb98
-rw-r--r--spec/helpers/json_helper_spec.rb36
-rw-r--r--spec/helpers/nav/new_dropdown_helper_spec.rb6
-rw-r--r--spec/helpers/nav/top_nav_helper_spec.rb487
-rw-r--r--spec/helpers/notes_helper_spec.rb17
-rw-r--r--spec/helpers/notifications_helper_spec.rb6
-rw-r--r--spec/helpers/organizations/organization_helper_spec.rb14
-rw-r--r--spec/helpers/projects/pipeline_helper_spec.rb16
-rw-r--r--spec/helpers/projects/terraform_helper_spec.rb2
-rw-r--r--spec/helpers/projects_helper_spec.rb7
-rw-r--r--spec/helpers/sidebars_helper_spec.rb10
-rw-r--r--spec/helpers/stat_anchors_helper_spec.rb4
-rw-r--r--spec/helpers/storage_helper_spec.rb2
-rw-r--r--spec/helpers/todos_helper_spec.rb13
-rw-r--r--spec/helpers/webpack_helper_spec.rb2
-rw-r--r--spec/initializers/6_validations_spec.rb14
-rw-r--r--spec/initializers/circuitbox_spec.rb7
-rw-r--r--spec/initializers/forbid_sidekiq_in_transactions_spec.rb4
-rw-r--r--spec/keeps/helpers/postgres_ai_spec.rb57
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb2
-rw-r--r--spec/lib/api/entities/ci/job_request/image_spec.rb9
-rw-r--r--spec/lib/api/entities/ci/job_request/service_spec.rb2
-rw-r--r--spec/lib/api/entities/hook_spec.rb25
-rw-r--r--spec/lib/api/entities/ml/mlflow/run_info_spec.rb19
-rw-r--r--spec/lib/api/entities/project_import_status_spec.rb1
-rw-r--r--spec/lib/api/entities/projects/repository_storage_move_spec.rb2
-rw-r--r--spec/lib/api/entities/snippets/repository_storage_move_spec.rb2
-rw-r--r--spec/lib/api/helpers/import_github_helpers_spec.rb3
-rw-r--r--spec/lib/api/ml/mlflow/api_helpers_spec.rb76
-rw-r--r--spec/lib/backup/database_configuration_spec.rb239
-rw-r--r--spec/lib/backup/database_connection_spec.rb103
-rw-r--r--spec/lib/backup/database_model_spec.rb55
-rw-r--r--spec/lib/backup/database_spec.rb127
-rw-r--r--spec/lib/backup/dump/postgres_spec.rb76
-rw-r--r--spec/lib/backup/files_spec.rb56
-rw-r--r--spec/lib/backup/repositories_spec.rb40
-rw-r--r--spec/lib/banzai/filter/custom_emoji_filter_spec.rb8
-rw-r--r--spec/lib/banzai/filter/markdown_filter_spec.rb16
-rw-r--r--spec/lib/banzai/filter/quick_action_filter_spec.rb37
-rw-r--r--spec/lib/banzai/filter/references/alert_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/commit_reference_filter_spec.rb14
-rw-r--r--spec/lib/banzai/filter/references/label_reference_filter_spec.rb12
-rw-r--r--spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/references/project_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/references/reference_cache_spec.rb12
-rw-r--r--spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/pipeline/quick_action_pipeline_spec.rb62
-rw-r--r--spec/lib/bitbucket/connection_spec.rb45
-rw-r--r--spec/lib/bitbucket/exponential_backoff_spec.rb62
-rw-r--r--spec/lib/bitbucket/representation/pull_request_spec.rb7
-rw-r--r--spec/lib/bitbucket_server/client_spec.rb21
-rw-r--r--spec/lib/bitbucket_server/connection_spec.rb147
-rw-r--r--spec/lib/bitbucket_server/representation/activity_spec.rb16
-rw-r--r--spec/lib/bitbucket_server/representation/user_spec.rb19
-rw-r--r--spec/lib/bitbucket_server/retry_with_delay_spec.rb60
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb12
-rw-r--r--spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb9
-rw-r--r--spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/logger_spec.rb49
-rw-r--r--spec/lib/bulk_imports/network_error_spec.rb4
-rw-r--r--spec/lib/bulk_imports/pipeline/runner_spec.rb24
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb263
-rw-r--r--spec/lib/bulk_imports/projects/stage_spec.rb5
-rw-r--r--spec/lib/click_house/connection_spec.rb54
-rw-r--r--spec/lib/click_house/iterator_spec.rb43
-rw-r--r--spec/lib/click_house/migration_support/exclusive_lock_spec.rb140
-rw-r--r--spec/lib/click_house/migration_support/migration_context_spec.rb203
-rw-r--r--spec/lib/click_house/migration_support/sidekiq_middleware_spec.rb61
-rw-r--r--spec/lib/extracts_ref_spec.rb8
-rw-r--r--spec/lib/feature_spec.rb1512
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt4
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt1
-rw-r--r--spec/lib/generators/gitlab/analytics/group_fetcher_spec.rb100
-rw-r--r--spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb213
-rw-r--r--spec/lib/gitlab/access/branch_protection_spec.rb8
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb86
-rw-r--r--spec/lib/gitlab/application_context_spec.rb8
-rw-r--r--spec/lib/gitlab/auth/saml/config_spec.rb35
-rw-r--r--spec/lib/gitlab/auth_spec.rb56
-rw-r--r--spec/lib/gitlab/background_migration/backfill_branch_protection_namespace_setting_spec.rb76
-rw-r--r--spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_merge_request_diffs_project_id_spec.rb43
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb53
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb1
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb124
-rw-r--r--spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_vs_code_settings_uuid_spec.rb74
-rw-r--r--spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb80
-rw-r--r--spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb105
-rw-r--r--spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/destroy_invalid_project_members_spec.rb124
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb23
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb559
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb12
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb9
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb132
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb37
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb65
-rw-r--r--spec/lib/gitlab/cache/import/caching_spec.rb10
-rw-r--r--spec/lib/gitlab/checks/container_moved_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/force_push_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/global_file_size_check_spec.rb5
-rw-r--r--spec/lib/gitlab/checks/lfs_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/lfs_integrity_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/matching_merge_request_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/project_created_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/push_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/push_file_count_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/single_change_access_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/snippet_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/tag_check_spec.rb129
-rw-r--r--spec/lib/gitlab/checks/timed_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/image_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/components/instance_path_spec.rb186
-rw-r--r--spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/config/entry/image_spec.rb58
-rw-r--r--spec/lib/gitlab/ci/config/entry/includes_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb89
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/retry_spec.rb86
-rw-r--r--spec/lib/gitlab/ci/config/entry/service_spec.rb59
-rw-r--r--spec/lib/gitlab/ci/config/entry/workflow_spec.rb110
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/external/file/remote_spec.rb46
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb221
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/text_template_spec.rb105
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/jwt_v2_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb73
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb268
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb172
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/source_spec.rb102
-rw-r--r--spec/lib/gitlab/ci/runner_instructions_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/runner_releases_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/runner_upgrade_check_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Diffblue_Cover_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/templates/templates_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb77
-rw-r--r--spec/lib/gitlab/ci/variables/downstream/generator_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb96
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb110
-rw-r--r--spec/lib/gitlab/circuit_breaker/notifier_spec.rb37
-rw-r--r--spec/lib/gitlab/circuit_breaker/store_spec.rb201
-rw-r--r--spec/lib/gitlab/circuit_breaker_spec.rb120
-rw-r--r--spec/lib/gitlab/contributions_calendar_spec.rb73
-rw-r--r--spec/lib/gitlab/counters/buffered_counter_spec.rb20
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_background_migration_dictionary_spec.rb13
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb14
-rw-r--r--spec/lib/gitlab/database/decomposition/migrate_spec.rb180
-rw-r--r--spec/lib/gitlab/database/dictionary_spec.rb123
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb45
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb31
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb13
-rw-r--r--spec/lib/gitlab/database/migration_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb11
-rw-r--r--spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb16
-rw-r--r--spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb12
-rw-r--r--spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb6
-rw-r--r--spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb33
-rw-r--r--spec/lib/gitlab/database/postgres_index_spec.rb9
-rw-r--r--spec/lib/gitlab/database/postgres_sequences_spec.rb35
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb4
-rw-r--r--spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb17
-rw-r--r--spec/lib/gitlab/database/schema_migrations/context_spec.rb5
-rw-r--r--spec/lib/gitlab/database/sharding_key_spec.rb153
-rw-r--r--spec/lib/gitlab/database/transaction/observer_spec.rb2
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb4
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb27
-rw-r--r--spec/lib/gitlab/doctor/reset_tokens_spec.rb2
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb120
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb119
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb61
-rw-r--r--spec/lib/gitlab/email/service_desk/custom_email_spec.rb37
-rw-r--r--spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb25
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb37
-rw-r--r--spec/lib/gitlab/event_store/event_spec.rb39
-rw-r--r--spec/lib/gitlab/event_store/store_spec.rb70
-rw-r--r--spec/lib/gitlab/event_store/subscription_spec.rb142
-rw-r--r--spec/lib/gitlab/exclusive_lease_spec.rb2
-rw-r--r--spec/lib/gitlab/experiment/rollout/feature_spec.rb65
-rw-r--r--spec/lib/gitlab/file_detector_spec.rb6
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb25
-rw-r--r--spec/lib/gitlab/git/compare_spec.rb110
-rw-r--r--spec/lib/gitlab/git/diff_collection_spec.rb95
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb31
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb95
-rw-r--r--spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb23
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb44
-rw-r--r--spec/lib/gitlab/gitaly_client/storage_settings_spec.rb14
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb39
-rw-r--r--spec/lib/gitlab/github_import/client_pool_spec.rb41
-rw-r--r--spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb3
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb24
-rw-r--r--spec/lib/gitlab/github_import/importer/events/merged_spec.rb74
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb63
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/importer/issues_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/notes_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb9
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb27
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/issuable_finder_spec.rb28
-rw-r--r--spec/lib/gitlab/github_import/job_delay_calculator_spec.rb33
-rw-r--r--spec/lib/gitlab/github_import/label_finder_spec.rb28
-rw-r--r--spec/lib/gitlab/github_import/milestone_finder_spec.rb30
-rw-r--r--spec/lib/gitlab/github_import/object_counter_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb15
-rw-r--r--spec/lib/gitlab/github_import/representation/representable_spec.rb43
-rw-r--r--spec/lib/gitlab/github_import/settings_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import_spec.rb23
-rw-r--r--spec/lib/gitlab/hook_data/project_builder_spec.rb120
-rw-r--r--spec/lib/gitlab/http_spec.rb39
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml7
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml2
-rw-r--r--spec/lib/gitlab/import_sources_spec.rb32
-rw-r--r--spec/lib/gitlab/instrumentation/connection_pool_spec.rb69
-rw-r--r--spec/lib/gitlab/instrumentation/redis_base_spec.rb14
-rw-r--r--spec/lib/gitlab/instrumentation/redis_helper_spec.rb136
-rw-r--r--spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb19
-rw-r--r--spec/lib/gitlab/internal_events_spec.rb247
-rw-r--r--spec/lib/gitlab/issuables_count_for_state_spec.rb19
-rw-r--r--spec/lib/gitlab/kas/client_spec.rb14
-rw-r--r--spec/lib/gitlab/markdown_cache/redis/store_spec.rb2
-rw-r--r--spec/lib/gitlab/memory/watchdog_spec.rb13
-rw-r--r--spec/lib/gitlab/metrics/system_spec.rb363
-rw-r--r--spec/lib/gitlab/middleware/path_traversal_check_spec.rb23
-rw-r--r--spec/lib/gitlab/middleware/request_context_spec.rb2
-rw-r--r--spec/lib/gitlab/nav/top_nav_menu_header_spec.rb16
-rw-r--r--spec/lib/gitlab/omniauth_initializer_spec.rb117
-rw-r--r--spec/lib/gitlab/pages/deployment_update_spec.rb35
-rw-r--r--spec/lib/gitlab/pages/url_builder_spec.rb157
-rw-r--r--spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb87
-rw-r--r--spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb9
-rw-r--r--spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb73
-rw-r--r--spec/lib/gitlab/puma/error_handler_spec.rb27
-rw-r--r--spec/lib/gitlab/quick_actions/extractor_spec.rb133
-rw-r--r--spec/lib/gitlab/redis/buffered_counter_spec.rb7
-rw-r--r--spec/lib/gitlab/redis/db_load_balancing_spec.rb36
-rw-r--r--spec/lib/gitlab/redis/sidekiq_status_spec.rb56
-rw-r--r--spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb52
-rw-r--r--spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb2
-rw-r--r--spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb18
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control/strategies/click_house_migration_spec.rb66
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control/workers_map_spec.rb37
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb18
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb36
-rw-r--r--spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb4
-rw-r--r--spec/lib/gitlab/tracking/event_definition_spec.rb27
-rw-r--r--spec/lib/gitlab/tracking_spec.rb51
-rw-r--r--spec/lib/gitlab/url_blocker_spec.rb13
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb71
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/bulk_imports_users_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_service_desk_custom_email_enabled_metric_spec.rb16
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/csv_imports_users_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/gitlab_config_metric_spec.rb31
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/gitlab_settings_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/group_imports_users_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb55
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb52
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_users_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/omniauth_enabled_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/project_imports_creators_metric_spec.rb31
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/prometheus_enabled_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/prometheus_metrics_enabled_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/reply_by_email_enabled_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb90
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/unique_users_all_imports_metric_spec.rb53
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb10
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb21
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb87
-rw-r--r--spec/lib/gitlab/utils/file_info_spec.rb4
-rw-r--r--spec/lib/gitlab/web_ide/default_oauth_application_spec.rb87
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb57
-rw-r--r--spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb98
-rw-r--r--spec/lib/integrations/google_cloud_platform/jwt_spec.rb86
-rw-r--r--spec/lib/organization/current_organization_spec.rb66
-rw-r--r--spec/lib/peek/views/click_house_spec.rb6
-rw-r--r--spec/lib/product_analytics/event_params_spec.rb59
-rw-r--r--spec/lib/sbom/package_url_spec.rb2
-rw-r--r--spec/lib/sidebars/concerns/container_with_html_options_spec.rb6
-rw-r--r--spec/lib/sidebars/explore/menus/catalog_menu_spec.rb34
-rw-r--r--spec/lib/sidebars/groups/menus/scope_menu_spec.rb6
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb1
-rw-r--r--spec/lib/sidebars/organizations/menus/manage_menu_spec.rb20
-rw-r--r--spec/lib/sidebars/projects/menus/repository_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/scope_menu_spec.rb6
-rw-r--r--spec/lib/sidebars/projects/menus/shimo_menu_spec.rb44
-rw-r--r--spec/lib/sidebars/projects/panel_spec.rb22
-rw-r--r--spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb4
-rw-r--r--spec/lib/sidebars/user_settings/menus/password_menu_spec.rb2
-rw-r--r--spec/lib/system_check/base_check_spec.rb2
-rw-r--r--spec/lib/system_check/orphans/namespace_check_spec.rb61
-rw-r--r--spec/lib/system_check/orphans/repository_check_spec.rb68
-rw-r--r--spec/lib/uploaded_file_spec.rb4
-rw-r--r--spec/lib/vite_gdk_spec.rb63
-rw-r--r--spec/mailers/emails/profile_spec.rb4
-rw-r--r--spec/mailers/emails/service_desk_spec.rb66
-rw-r--r--spec/mailers/notify_spec.rb101
-rw-r--r--spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb76
-rw-r--r--spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb73
-rw-r--r--spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb37
-rw-r--r--spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb95
-rw-r--r--spec/migrations/20221021145820_create_routing_table_for_builds_metadata_v2_spec.rb36
-rw-r--r--spec/migrations/20221025043930_change_default_value_on_password_last_changed_at_to_user_details_spec.rb37
-rw-r--r--spec/migrations/20221028022627_add_index_on_password_last_changed_at_to_user_details_spec.rb16
-rw-r--r--spec/migrations/20221101032521_add_default_preferred_language_to_application_settings_spec.rb27
-rw-r--r--spec/migrations/20221101032600_add_text_limit_to_default_preferred_language_on_application_settings_spec.rb29
-rw-r--r--spec/migrations/20221102090940_create_next_ci_partitions_record_spec.rb63
-rw-r--r--spec/migrations/20221102090943_create_second_partition_for_builds_metadata_spec.rb61
-rw-r--r--spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb2
-rw-r--r--spec/migrations/20231107092912_queue_backfill_branch_protection_namespace_setting_spec.rb26
-rw-r--r--spec/migrations/20231115025547_queue_backfill_merge_request_diffs_project_id_spec.rb (renamed from spec/migrations/20230912105945_queue_backfill_finding_id_in_vulnerabilities_spec.rb)4
-rw-r--r--spec/migrations/20231129105945_requeue_backfill_finding_id_in_vulnerabilities3_spec.rb (renamed from spec/migrations/20231001105945_requeue_backfill_finding_id_in_vulnerabilities_spec.rb)2
-rw-r--r--spec/migrations/20231130140901_queue_backfill_vs_code_settings_uuid_spec.rb (renamed from spec/migrations/queue_backfill_user_details_fields_spec.rb)8
-rw-r--r--spec/migrations/20231201171229_detect_and_fix_duplicate_organizations_path_spec.rb31
-rw-r--r--spec/migrations/20231201204712_requeue2_backfill_has_remediations_of_vulnerability_reads_spec.rb (renamed from spec/migrations/20231011142714_queue_backfill_has_remediations_of_vulnerability_reads_spec.rb)2
-rw-r--r--spec/migrations/20231207194620_backfill_catalog_resources_visibility_level_spec.rb27
-rw-r--r--spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb50
-rw-r--r--spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb73
-rw-r--r--spec/migrations/finalize_invalid_member_cleanup_spec.rb76
-rw-r--r--spec/migrations/fix_broken_user_achievements_awarded_spec.rb46
-rw-r--r--spec/migrations/fix_broken_user_achievements_revoked_spec.rb44
-rw-r--r--spec/migrations/queue_populate_projects_star_count_spec.rb24
-rw-r--r--spec/migrations/recount_epic_cache_counts_spec.rb32
-rw-r--r--spec/migrations/reschedule_migrate_shared_vulnerability_scanners_spec.rb41
-rw-r--r--spec/migrations/set_email_confirmation_setting_from_send_user_confirmation_email_setting_spec.rb41
-rw-r--r--spec/migrations/sync_new_amount_used_for_ci_namespace_monthly_usages_spec.rb43
-rw-r--r--spec/migrations/sync_new_amount_used_for_ci_project_monthly_usages_spec.rb43
-rw-r--r--spec/models/abuse_report_spec.rb3
-rw-r--r--spec/models/achievements/achievement_spec.rb15
-rw-r--r--spec/models/activity_pub/releases_subscription_spec.rb24
-rw-r--r--spec/models/admin/abuse_report_assignee_spec.rb29
-rw-r--r--spec/models/application_setting_spec.rb53
-rw-r--r--spec/models/award_emoji_spec.rb15
-rw-r--r--spec/models/blob_spec.rb30
-rw-r--r--spec/models/bulk_import_spec.rb33
-rw-r--r--spec/models/bulk_imports/batch_tracker_spec.rb15
-rw-r--r--spec/models/bulk_imports/entity_spec.rb25
-rw-r--r--spec/models/bulk_imports/export_status_spec.rb117
-rw-r--r--spec/models/bulk_imports/export_upload_spec.rb16
-rw-r--r--spec/models/bulk_imports/tracker_spec.rb26
-rw-r--r--spec/models/ci/bridge_spec.rb220
-rw-r--r--spec/models/ci/build_need_spec.rb18
-rw-r--r--spec/models/ci/build_spec.rb36
-rw-r--r--spec/models/ci/catalog/components_project_spec.rb1
-rw-r--r--spec/models/ci/catalog/listing_spec.rb292
-rw-r--r--spec/models/ci/catalog/resource_spec.rb270
-rw-r--r--spec/models/ci/catalog/resources/sync_event_spec.rb190
-rw-r--r--spec/models/ci/catalog/resources/version_spec.rb50
-rw-r--r--spec/models/ci/job_artifact_spec.rb10
-rw-r--r--spec/models/ci/job_token/scope_spec.rb12
-rw-r--r--spec/models/ci/pipeline_metadata_spec.rb18
-rw-r--r--spec/models/ci/pipeline_spec.rb126
-rw-r--r--spec/models/ci/processable_spec.rb9
-rw-r--r--spec/models/ci/runner_manager_build_spec.rb2
-rw-r--r--spec/models/ci/runner_manager_spec.rb2
-rw-r--r--spec/models/ci/runner_version_spec.rb2
-rw-r--r--spec/models/concerns/ci/partitionable_spec.rb84
-rw-r--r--spec/models/concerns/disables_sti_spec.rb15
-rw-r--r--spec/models/concerns/enums/sbom_spec.rb62
-rw-r--r--spec/models/concerns/ignorable_columns_spec.rb18
-rw-r--r--spec/models/concerns/pg_full_text_searchable_spec.rb14
-rw-r--r--spec/models/concerns/routable_spec.rb139
-rw-r--r--spec/models/concerns/transitionable_spec.rb17
-rw-r--r--spec/models/concerns/triggerable_hooks_spec.rb2
-rw-r--r--spec/models/concerns/vulnerability_finding_helpers_spec.rb27
-rw-r--r--spec/models/container_registry/protection/rule_spec.rb6
-rw-r--r--spec/models/container_repository_spec.rb4
-rw-r--r--spec/models/custom_emoji_spec.rb41
-rw-r--r--spec/models/deploy_token_spec.rb8
-rw-r--r--spec/models/deployment_spec.rb13
-rw-r--r--spec/models/diff_viewer/base_spec.rb22
-rw-r--r--spec/models/event_spec.rb71
-rw-r--r--spec/models/every_model_spec.rb11
-rw-r--r--spec/models/group_spec.rb68
-rw-r--r--spec/models/hooks/system_hook_spec.rb2
-rw-r--r--spec/models/integration_spec.rb3
-rw-r--r--spec/models/integrations/base_third_party_wiki_spec.rb46
-rw-r--r--spec/models/integrations/field_spec.rb16
-rw-r--r--spec/models/integrations/irker_spec.rb9
-rw-r--r--spec/models/integrations/shimo_spec.rb69
-rw-r--r--spec/models/issue_spec.rb27
-rw-r--r--spec/models/key_spec.rb52
-rw-r--r--spec/models/member_spec.rb4
-rw-r--r--spec/models/members/project_member_spec.rb25
-rw-r--r--spec/models/merge_request_diff_spec.rb141
-rw-r--r--spec/models/merge_request_spec.rb65
-rw-r--r--spec/models/milestone_spec.rb20
-rw-r--r--spec/models/ml/candidate_spec.rb23
-rw-r--r--spec/models/ml/model_spec.rb36
-rw-r--r--spec/models/namespace/package_setting_spec.rb3
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb2
-rw-r--r--spec/models/namespace_spec.rb7
-rw-r--r--spec/models/notification_recipient_spec.rb4
-rw-r--r--spec/models/notification_setting_spec.rb10
-rw-r--r--spec/models/onboarding/progress_spec.rb24
-rw-r--r--spec/models/organizations/organization_detail_spec.rb26
-rw-r--r--spec/models/organizations/organization_spec.rb20
-rw-r--r--spec/models/packages/nuget/symbol_spec.rb58
-rw-r--r--spec/models/packages/package_spec.rb24
-rw-r--r--spec/models/packages/tag_spec.rb26
-rw-r--r--spec/models/pages/lookup_path_spec.rb144
-rw-r--r--spec/models/pages/virtual_domain_spec.rb92
-rw-r--r--spec/models/preloaders/runner_manager_policy_preloader_spec.rb2
-rw-r--r--spec/models/product_analytics_event_spec.rb52
-rw-r--r--spec/models/project_authorization_spec.rb33
-rw-r--r--spec/models/project_authorizations/changes_spec.rb69
-rw-r--r--spec/models/project_feature_spec.rb1
-rw-r--r--spec/models/project_group_link_spec.rb32
-rw-r--r--spec/models/project_repository_spec.rb26
-rw-r--r--spec/models/project_spec.rb204
-rw-r--r--spec/models/project_statistics_spec.rb4
-rw-r--r--spec/models/projects/repository_storage_move_spec.rb2
-rw-r--r--spec/models/projects/topic_spec.rb27
-rw-r--r--spec/models/release_spec.rb68
-rw-r--r--spec/models/repository_spec.rb145
-rw-r--r--spec/models/route_spec.rb4
-rw-r--r--spec/models/service_desk/custom_email_credential_spec.rb6
-rw-r--r--spec/models/snippets/repository_storage_move_spec.rb2
-rw-r--r--spec/models/tree_spec.rb79
-rw-r--r--spec/models/user_custom_attribute_spec.rb76
-rw-r--r--spec/models/user_highest_role_spec.rb20
-rw-r--r--spec/models/user_interacted_project_spec.rb52
-rw-r--r--spec/models/user_preference_spec.rb30
-rw-r--r--spec/models/user_spec.rb233
-rw-r--r--spec/models/users/in_product_marketing_email_spec.rb11
-rw-r--r--spec/models/users/phone_number_validation_spec.rb24
-rw-r--r--spec/models/work_item_spec.rb17
-rw-r--r--spec/models/work_items/dates_source_spec.rb23
-rw-r--r--spec/models/work_items/type_spec.rb81
-rw-r--r--spec/models/work_items/widgets/assignees_spec.rb26
-rw-r--r--spec/policies/abuse_report_policy_spec.rb2
-rw-r--r--spec/policies/ci/runner_manager_policy_spec.rb142
-rw-r--r--spec/policies/ci/runner_policy_spec.rb162
-rw-r--r--spec/policies/global_policy_spec.rb8
-rw-r--r--spec/policies/group_policy_spec.rb159
-rw-r--r--spec/policies/merge_request_policy_spec.rb31
-rw-r--r--spec/policies/organizations/organization_policy_spec.rb5
-rw-r--r--spec/policies/project_group_link_policy_spec.rb164
-rw-r--r--spec/policies/project_policy_spec.rb129
-rw-r--r--spec/policies/protected_branch_access_policy_spec.rb10
-rw-r--r--spec/policies/protected_branch_policy_spec.rb40
-rw-r--r--spec/presenters/issue_presenter_spec.rb20
-rw-r--r--spec/presenters/ml/candidate_details_presenter_spec.rb96
-rw-r--r--spec/presenters/ml/candidate_presenter_spec.rb21
-rw-r--r--spec/presenters/ml/model_presenter_spec.rb14
-rw-r--r--spec/presenters/project_presenter_spec.rb21
-rw-r--r--spec/requests/acme_challenges_controller_spec.rb9
-rw-r--r--spec/requests/api/admin/dictionary_spec.rb18
-rw-r--r--spec/requests/api/ci/job_artifacts_spec.rb44
-rw-r--r--spec/requests/api/ci/pipeline_schedules_spec.rb2
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb57
-rw-r--r--spec/requests/api/ci/runner/jobs_request_yamls_spec.rb64
-rw-r--r--spec/requests/api/ci/runner/runners_delete_spec.rb2
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb2
-rw-r--r--spec/requests/api/ci/runner/runners_reset_spec.rb2
-rw-r--r--spec/requests/api/ci/runner/yamls/README.md15
-rw-r--r--spec/requests/api/ci/runner/yamls/image-basic.yml19
-rw-r--r--spec/requests/api/ci/runner/yamls/image-executor_opts-platform.yml25
-rw-r--r--spec/requests/api/ci/runner/yamls/service-basic.yml23
-rw-r--r--spec/requests/api/ci/runner/yamls/service-executor_opts-platform.yml27
-rw-r--r--spec/requests/api/ci/runner/yamls/service-variables.yml30
-rw-r--r--spec/requests/api/ci/runners_reset_registration_token_spec.rb2
-rw-r--r--spec/requests/api/ci/runners_spec.rb2
-rw-r--r--spec/requests/api/commits_spec.rb28
-rw-r--r--spec/requests/api/deploy_tokens_spec.rb1
-rw-r--r--spec/requests/api/deployments_spec.rb68
-rw-r--r--spec/requests/api/environments_spec.rb67
-rw-r--r--spec/requests/api/events_spec.rb14
-rw-r--r--spec/requests/api/graphql/abuse_report_spec.rb6
-rw-r--r--spec/requests/api/graphql/ci/catalog/resource_spec.rb188
-rw-r--r--spec/requests/api/graphql/ci/catalog/resources_spec.rb76
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb72
-rw-r--r--spec/requests/api/graphql/container_repository/container_repository_details_spec.rb3
-rw-r--r--spec/requests/api/graphql/custom_emoji_query_spec.rb4
-rw-r--r--spec/requests/api/graphql/group/issues_spec.rb36
-rw-r--r--spec/requests/api/graphql/group/work_item_state_counts_spec.rb107
-rw-r--r--spec/requests/api/graphql/group/work_item_types_spec.rb55
-rw-r--r--spec/requests/api/graphql/milestone_spec.rb14
-rw-r--r--spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb4
-rw-r--r--spec/requests/api/graphql/mutations/branch_rules/update_spec.rb95
-rw-r--r--spec/requests/api/graphql/mutations/ci/catalog/resources/destroy_spec.rb41
-rw-r--r--spec/requests/api/graphql/mutations/ci/catalog/unpublish_spec.rb52
-rw-r--r--spec/requests/api/graphql/mutations/ci/runner/create_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/container_registry/protection/rule/create_spec.rb26
-rw-r--r--spec/requests/api/graphql/mutations/container_registry/protection/rule/delete_spec.rb102
-rw-r--r--spec/requests/api/graphql/mutations/container_registry/protection/rule/update_spec.rb143
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb4
-rw-r--r--spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb11
-rw-r--r--spec/requests/api/graphql/mutations/organizations/create_spec.rb43
-rw-r--r--spec/requests/api/graphql/mutations/organizations/update_spec.rb120
-rw-r--r--spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb20
-rw-r--r--spec/requests/api/graphql/mutations/packages/destroy_spec.rb11
-rw-r--r--spec/requests/api/graphql/mutations/packages/protection/rule/delete_spec.rb10
-rw-r--r--spec/requests/api/graphql/mutations/packages/protection/rule/update_spec.rb134
-rw-r--r--spec/requests/api/graphql/mutations/user_preferences/update_spec.rb42
-rw-r--r--spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb79
-rw-r--r--spec/requests/api/graphql/organizations/organization_query_spec.rb9
-rw-r--r--spec/requests/api/graphql/project/alert_management/integrations_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/cluster_agents_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/value_streams_spec.rb105
-rw-r--r--spec/requests/api/graphql/project/work_item_state_counts_spec.rb123
-rw-r--r--spec/requests/api/graphql/project/work_item_types_spec.rb55
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb12
-rw-r--r--spec/requests/api/graphql/work_items_by_reference_spec.rb130
-rw-r--r--spec/requests/api/group_export_spec.rb32
-rw-r--r--spec/requests/api/group_milestones_spec.rb110
-rw-r--r--spec/requests/api/import_github_spec.rb25
-rw-r--r--spec/requests/api/integrations_spec.rb3
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb117
-rw-r--r--spec/requests/api/issues/issues_spec.rb2
-rw-r--r--spec/requests/api/maven_packages_spec.rb58
-rw-r--r--spec/requests/api/ml/mlflow/model_versions_spec.rb99
-rw-r--r--spec/requests/api/ml/mlflow/registered_models_spec.rb98
-rw-r--r--spec/requests/api/ml_model_packages_spec.rb42
-rw-r--r--spec/requests/api/npm_project_packages_spec.rb8
-rw-r--r--spec/requests/api/nuget_group_packages_spec.rb8
-rw-r--r--spec/requests/api/nuget_project_packages_spec.rb6
-rw-r--r--spec/requests/api/project_attributes.yml2
-rw-r--r--spec/requests/api/project_events_spec.rb45
-rw-r--r--spec/requests/api/project_export_spec.rb32
-rw-r--r--spec/requests/api/project_milestones_spec.rb28
-rw-r--r--spec/requests/api/project_templates_spec.rb15
-rw-r--r--spec/requests/api/projects_spec.rb47
-rw-r--r--spec/requests/api/remote_mirrors_spec.rb2
-rw-r--r--spec/requests/api/settings_spec.rb35
-rw-r--r--spec/requests/api/snippets_spec.rb4
-rw-r--r--spec/requests/api/terraform/modules/v1/packages_spec.rb218
-rw-r--r--spec/requests/api/terraform/modules/v1/project_packages_spec.rb205
-rw-r--r--spec/requests/api/user_runners_spec.rb2
-rw-r--r--spec/requests/api/users_spec.rb36
-rw-r--r--spec/requests/application_controller_spec.rb15
-rw-r--r--spec/requests/chaos_controller_spec.rb14
-rw-r--r--spec/requests/clusters/agents/dashboard_controller_spec.rb28
-rw-r--r--spec/requests/concerns/membership_actions_shared_examples.rb67
-rw-r--r--spec/requests/content_security_policy_spec.rb79
-rw-r--r--spec/requests/explore/catalog_controller_spec.rb54
-rw-r--r--spec/requests/external_redirect/external_redirect_controller_spec.rb5
-rw-r--r--spec/requests/groups/group_members_controller_spec.rb20
-rw-r--r--spec/requests/health_controller_spec.rb8
-rw-r--r--spec/requests/ide_controller_spec.rb122
-rw-r--r--spec/requests/jwks_controller_spec.rb21
-rw-r--r--spec/requests/jwt_controller_spec.rb56
-rw-r--r--spec/requests/legacy_routes_spec.rb56
-rw-r--r--spec/requests/metrics_controller_spec.rb9
-rw-r--r--spec/requests/oauth/authorizations_controller_spec.rb4
-rw-r--r--spec/requests/organizations/organizations_controller_spec.rb47
-rw-r--r--spec/requests/organizations/settings_controller_spec.rb2
-rw-r--r--spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb136
-rw-r--r--spec/requests/projects/gcp/artifact_registry/setup_controller_spec.rb73
-rw-r--r--spec/requests/projects/integrations/shimos_controller_spec.rb37
-rw-r--r--spec/requests/projects/merge_requests/content_spec.rb8
-rw-r--r--spec/requests/projects/ml/candidates_controller_spec.rb20
-rw-r--r--spec/requests/projects/pipelines_controller_spec.rb53
-rw-r--r--spec/requests/projects/project_members_controller_spec.rb23
-rw-r--r--spec/requests/projects/service_desk/custom_email_controller_spec.rb16
-rw-r--r--spec/requests/projects/service_desk_controller_spec.rb31
-rw-r--r--spec/requests/projects/tags_controller_spec.rb19
-rw-r--r--spec/requests/registrations_controller_spec.rb6
-rw-r--r--spec/requests/runner_setup_controller_spec.rb2
-rw-r--r--spec/requests/sessions_spec.rb6
-rw-r--r--spec/requests/user_settings_spec.rb31
-rw-r--r--spec/requests/well_known_routing_spec.rb13
-rw-r--r--spec/requests/well_known_spec.rb55
-rw-r--r--spec/routing/routing_spec.rb21
-rw-r--r--spec/routing/uploads_routing_spec.rb95
-rw-r--r--spec/rubocop/cop/background_migration/dictionary_file_spec.rb60
-rw-r--r--spec/rubocop/cop/database/avoid_using_pluck_without_limit_spec.rb144
-rw-r--r--spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/versioned_migration_class_spec.rb12
-rw-r--r--spec/rubocop_spec_helper.rb2
-rw-r--r--spec/scripts/duo_chat/reporter_spec.rb270
-rw-r--r--spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb6
-rw-r--r--spec/scripts/internal_events/cli_spec.rb866
-rw-r--r--spec/scripts/lib/glfm/update_specification_spec.rb3
-rw-r--r--spec/scripts/trigger-build_spec.rb60
-rw-r--r--spec/serializers/admin/abuse_report_details_entity_spec.rb3
-rw-r--r--spec/serializers/admin/abuse_report_details_serializer_spec.rb3
-rw-r--r--spec/serializers/build_details_entity_spec.rb12
-rw-r--r--spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb13
-rw-r--r--spec/serializers/discussion_entity_spec.rb7
-rw-r--r--spec/serializers/group_link/group_group_link_entity_spec.rb8
-rw-r--r--spec/serializers/group_link/group_link_entity_spec.rb6
-rw-r--r--spec/serializers/group_link/project_group_link_entity_spec.rb88
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb3
-rw-r--r--spec/serializers/personal_access_token_entity_spec.rb2
-rw-r--r--spec/services/activity_pub/projects/releases_follow_service_spec.rb145
-rw-r--r--spec/services/activity_pub/projects/releases_unfollow_service_spec.rb152
-rw-r--r--spec/services/auth/dependency_proxy_authentication_service_spec.rb87
-rw-r--r--spec/services/bulk_imports/batched_relation_export_service_spec.rb10
-rw-r--r--spec/services/bulk_imports/create_service_spec.rb15
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb24
-rw-r--r--spec/services/bulk_imports/process_service_spec.rb16
-rw-r--r--spec/services/ci/catalog/resources/destroy_service_spec.rb38
-rw-r--r--spec/services/ci/components/fetch_service_spec.rb34
-rw-r--r--spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb169
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb4
-rw-r--r--spec/services/ci/generate_coverage_reports_service_spec.rb19
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb52
-rw-r--r--spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb28
-rw-r--r--spec/services/ci/process_sync_events_service_spec.rb65
-rw-r--r--spec/services/ci/runners/assign_runner_service_spec.rb2
-rw-r--r--spec/services/ci/runners/bulk_delete_runners_service_spec.rb2
-rw-r--r--spec/services/ci/runners/create_runner_service_spec.rb2
-rw-r--r--spec/services/ci/runners/process_runner_version_update_service_spec.rb2
-rw-r--r--spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb2
-rw-r--r--spec/services/ci/runners/register_runner_service_spec.rb2
-rw-r--r--spec/services/ci/runners/reset_registration_token_service_spec.rb2
-rw-r--r--spec/services/ci/runners/set_runner_associated_projects_service_spec.rb2
-rw-r--r--spec/services/ci/runners/stale_managers_cleanup_service_spec.rb2
-rw-r--r--spec/services/ci/runners/unassign_runner_service_spec.rb2
-rw-r--r--spec/services/ci/runners/unregister_runner_manager_service_spec.rb2
-rw-r--r--spec/services/ci/runners/unregister_runner_service_spec.rb2
-rw-r--r--spec/services/ci/runners/update_runner_service_spec.rb2
-rw-r--r--spec/services/ci/stuck_builds/drop_pending_service_spec.rb2
-rw-r--r--spec/services/ci/stuck_builds/drop_running_service_spec.rb2
-rw-r--r--spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb2
-rw-r--r--spec/services/container_registry/protection/create_rule_service_spec.rb22
-rw-r--r--spec/services/container_registry/protection/delete_rule_service_spec.rb106
-rw-r--r--spec/services/container_registry/protection/update_rule_service_spec.rb167
-rw-r--r--spec/services/design_management/delete_designs_service_spec.rb2
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb6
-rw-r--r--spec/services/groups/participants_service_spec.rb6
-rw-r--r--spec/services/import/github_service_spec.rb14
-rw-r--r--spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb91
-rw-r--r--spec/services/issue_email_participants/create_service_spec.rb161
-rw-r--r--spec/services/issues/import_csv_service_spec.rb5
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb1
-rw-r--r--spec/services/merge_requests/close_service_spec.rb2
-rw-r--r--spec/services/merge_requests/create_service_spec.rb2
-rw-r--r--spec/services/merge_requests/mergeability/check_base_service_spec.rb16
-rw-r--r--spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb4
-rw-r--r--spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb4
-rw-r--r--spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb4
-rw-r--r--spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb5
-rw-r--r--spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb4
-rw-r--r--spec/services/merge_requests/mergeability/check_open_status_service_spec.rb4
-rw-r--r--spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb4
-rw-r--r--spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb14
-rw-r--r--spec/services/merge_requests/mergeability/run_checks_service_spec.rb6
-rw-r--r--spec/services/merge_requests/post_merge_service_spec.rb12
-rw-r--r--spec/services/merge_requests/reopen_service_spec.rb2
-rw-r--r--spec/services/ml/create_candidate_service_spec.rb2
-rw-r--r--spec/services/ml/create_model_service_spec.rb13
-rw-r--r--spec/services/ml/create_model_version_service_spec.rb79
-rw-r--r--spec/services/ml/destroy_model_service_spec.rb28
-rw-r--r--spec/services/ml/find_or_create_model_version_service_spec.rb18
-rw-r--r--spec/services/ml/increment_version_service_spec.rb56
-rw-r--r--spec/services/ml/model_versions/delete_service_spec.rb55
-rw-r--r--spec/services/ml/model_versions/update_model_version_service_spec.rb44
-rw-r--r--spec/services/namespaces/package_settings/update_service_spec.rb9
-rw-r--r--spec/services/notes/create_service_spec.rb21
-rw-r--r--spec/services/organizations/create_service_spec.rb43
-rw-r--r--spec/services/organizations/update_service_spec.rb76
-rw-r--r--spec/services/packages/mark_package_for_destruction_service_spec.rb2
-rw-r--r--spec/services/packages/mark_packages_for_destruction_service_spec.rb71
-rw-r--r--spec/services/packages/ml_model/create_package_file_service_spec.rb51
-rw-r--r--spec/services/packages/npm/create_package_service_spec.rb2
-rw-r--r--spec/services/packages/npm/generate_metadata_service_spec.rb16
-rw-r--r--spec/services/packages/protection/update_rule_service_spec.rb150
-rw-r--r--spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb24
-rw-r--r--spec/services/product_analytics/build_activity_graph_service_spec.rb33
-rw-r--r--spec/services/product_analytics/build_graph_service_spec.rb27
-rw-r--r--spec/services/projects/after_rename_service_spec.rb56
-rw-r--r--spec/services/projects/create_service_spec.rb25
-rw-r--r--spec/services/projects/destroy_service_spec.rb6
-rw-r--r--spec/services/projects/fork_service_spec.rb2
-rw-r--r--spec/services/projects/group_links/create_service_spec.rb46
-rw-r--r--spec/services/projects/group_links/destroy_service_spec.rb56
-rw-r--r--spec/services/projects/group_links/update_service_spec.rb88
-rw-r--r--spec/services/projects/hashed_storage/base_attachment_service_spec.rb2
-rw-r--r--spec/services/projects/import_service_spec.rb153
-rw-r--r--spec/services/projects/lfs_pointers/lfs_link_service_spec.rb18
-rw-r--r--spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb4
-rw-r--r--spec/services/projects/unlink_fork_service_spec.rb20
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb36
-rw-r--r--spec/services/projects/update_service_spec.rb87
-rw-r--r--spec/services/protected_branches/create_service_spec.rb14
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb151
-rw-r--r--spec/services/releases/destroy_service_spec.rb2
-rw-r--r--spec/services/service_desk/custom_email_verifications/create_service_spec.rb38
-rw-r--r--spec/services/service_desk/custom_email_verifications/update_service_spec.rb85
-rw-r--r--spec/services/service_desk/custom_emails/create_service_spec.rb11
-rw-r--r--spec/services/service_desk/custom_emails/destroy_service_spec.rb11
-rw-r--r--spec/services/service_desk_settings/update_service_spec.rb11
-rw-r--r--spec/services/snippets/update_repository_storage_service_spec.rb5
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb4
-rw-r--r--spec/services/users/in_product_marketing_email_records_spec.rb55
-rw-r--r--spec/services/users/migrate_records_to_ghost_user_service_spec.rb17
-rw-r--r--spec/services/work_items/delete_task_service_spec.rb88
-rw-r--r--spec/services/work_items/task_list_reference_removal_service_spec.rb152
-rw-r--r--spec/spec_helper.rb13
-rw-r--r--spec/support/database/click_house/hooks.rb18
-rw-r--r--spec/support/helpers/after_next_helpers.rb2
-rw-r--r--spec/support/helpers/click_house_test_helpers.rb15
-rw-r--r--spec/support/helpers/database/duplicate_indexes.yml52
-rw-r--r--spec/support/helpers/design_management_test_helpers.rb6
-rw-r--r--spec/support/helpers/email_helpers.rb6
-rw-r--r--spec/support/helpers/features/invite_members_modal_helpers.rb11
-rw-r--r--spec/support/helpers/features/runners_helpers.rb6
-rw-r--r--spec/support/helpers/features/sorting_helpers.rb8
-rw-r--r--spec/support/helpers/features/top_nav_spec_helpers.rb33
-rw-r--r--spec/support/helpers/login_helpers.rb15
-rw-r--r--spec/support/helpers/migrations_helpers.rb7
-rw-r--r--spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb2
-rw-r--r--spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb13
-rw-r--r--spec/support/helpers/navbar_structure_helper.rb2
-rw-r--r--spec/support/helpers/next_found_instance_of.rb61
-rw-r--r--spec/support/helpers/next_instance_of.rb39
-rw-r--r--spec/support/helpers/search_helpers.rb14
-rw-r--r--spec/support/helpers/sign_up_helpers.rb10
-rw-r--r--spec/support/helpers/snippet_helpers.rb2
-rw-r--r--spec/support/helpers/stub_configuration.rb7
-rw-r--r--spec/support/helpers/stub_feature_flags.rb2
-rw-r--r--spec/support/helpers/stub_gitlab_calls.rb8
-rw-r--r--spec/support/helpers/stub_requests.rb14
-rw-r--r--spec/support/helpers/stub_saas_features.rb20
-rw-r--r--spec/support/helpers/usage_data_helpers.rb10
-rw-r--r--spec/support/matchers/not_enqueue_mail_matcher.rb3
-rw-r--r--spec/support/rspec.rb1
-rw-r--r--spec/support/rspec_order.rb25
-rw-r--r--spec/support/rspec_order_todo.yml88
-rw-r--r--spec/support/rspec_run_time.rb107
-rw-r--r--spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb8
-rw-r--r--spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb6
-rw-r--r--spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb5
-rw-r--r--spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb19
-rw-r--r--spec/support/shared_contexts/graphql/types/query_type_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/merge_request_edit_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb23
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/policies/project_policy_table_shared_context.rb12
-rw-r--r--spec/support/shared_contexts/requests/api/graphql/work_items/work_item_types_shared_context.rb64
-rw-r--r--spec/support/shared_contexts/requests/api/terraform_modules_shared_context.rb27
-rw-r--r--spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb3
-rw-r--r--spec/support/shared_contexts/single_change_access_checks_shared_context.rb2
-rw-r--r--spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb4
-rw-r--r--spec/support/shared_examples/ci/stuck_builds_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb80
-rw-r--r--spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/controllers/unique_visits_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb71
-rw-r--r--spec/support/shared_examples/features/editable_merge_request_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/inviting_members_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/features/runners_shared_examples.rb120
-rw-r--r--spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/features/work_items_shared_examples.rb26
-rw-r--r--spec/support/shared_examples/finders/issues_finder_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/graphql/mutations/boards_list_create_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/graphql/resolvers/users/pages_visits_resolvers_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/keys/meets_ssh_key_restrictions_shared_examples.rb63
-rw-r--r--spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb51
-rw-r--r--spec/support/shared_examples/metrics_instrumentation_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb9
-rw-r--r--spec/support/shared_examples/models/concerns/avatarable_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/models/disable_sti_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/npm_sync_metadata_cache_worker_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/policies/project_policy_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/policies/protected_branches.rb21
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/redis/redis_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/requests/api/discussions_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/requests/api/graphql/work_item_type_list_shared_examples.rb49
-rw-r--r--spec/support/shared_examples/requests/api/graphql_rest/milestones_shared_examples.rb83
-rw-r--r--spec/support/shared_examples/requests/api/hooks_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb118
-rw-r--r--spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb68
-rw-r--r--spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/security/policies_shared_examples.rb48
-rw-r--r--spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/services/mergeability_checks_service_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/protected_branches_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/workers/idempotency_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/workers/update_repository_move_shared_examples.rb157
-rw-r--r--spec/support_specs/helpers/stub_saas_features_spec.rb50
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb5
-rw-r--r--spec/tasks/gitlab/check_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/click_house/migration_rake_spec.rb248
-rw-r--r--spec/tasks/gitlab/db/decomposition/migrate_rake_spec.rb39
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb52
-rw-r--r--spec/tasks/gitlab/doctor/secrets_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/ldap_rake_spec.rb15
-rw-r--r--spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/shell_rake_spec.rb6
-rw-r--r--spec/tasks/rubocop_rake_spec.rb2
-rw-r--r--spec/tooling/danger/outdated_todo_spec.rb135
-rw-r--r--spec/tooling/danger/project_helper_spec.rb5
-rw-r--r--spec/tooling/lib/tooling/api/job_spec.rb95
-rw-r--r--spec/tooling/lib/tooling/api/pipeline_spec.rb63
-rw-r--r--spec/tooling/lib/tooling/api/request_spec.rb53
-rw-r--r--spec/tooling/lib/tooling/debug_spec.rb76
-rw-r--r--spec/tooling/lib/tooling/helpers/duration_formatter_spec.rb50
-rw-r--r--spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb103
-rw-r--r--spec/tooling/lib/tooling/predictive_tests_spec.rb3
-rw-r--r--spec/tooling/merge_request_spec.rb2
-rw-r--r--spec/tooling/quality/test_level_spec.rb8
-rw-r--r--spec/uploaders/gitlab_uploader_spec.rb6
-rw-r--r--spec/validators/ip_cidr_array_validator_spec.rb2
-rw-r--r--spec/validators/json_schema_validator_spec.rb25
-rw-r--r--spec/validators/kubernetes_container_resources_validator_spec.rb42
-rw-r--r--spec/views/admin/application_settings/_security_txt.html.haml_spec.rb38
-rw-r--r--spec/views/admin/application_settings/general.html.haml_spec.rb3
-rw-r--r--spec/views/dashboard/projects/index.html.haml_spec.rb1
-rw-r--r--spec/views/devise/shared/_signup_box.html.haml_spec.rb4
-rw-r--r--spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb10
-rw-r--r--spec/views/groups/_home_panel.html.haml_spec.rb6
-rw-r--r--spec/views/layouts/_header_search.html.haml_spec.rb113
-rw-r--r--spec/views/layouts/fullscreen.html.haml_spec.rb54
-rw-r--r--spec/views/layouts/header/_gitlab_version.html.haml_spec.rb36
-rw-r--r--spec/views/layouts/header/_new_dropdown.haml_spec.rb204
-rw-r--r--spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb161
-rw-r--r--spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb187
-rw-r--r--spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb30
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb958
-rw-r--r--spec/views/profiles/keys/_form.html.haml_spec.rb4
-rw-r--r--spec/views/projects/_files.html.haml_spec.rb6
-rw-r--r--spec/views/projects/_home_panel.html.haml_spec.rb34
-rw-r--r--spec/views/projects/edit.html.haml_spec.rb26
-rw-r--r--spec/views/projects/pages/_pages_settings.html.haml_spec.rb2
-rw-r--r--spec/views/projects/tags/index.html.haml_spec.rb33
-rw-r--r--spec/views/shared/nav/_sidebar.html.haml_spec.rb43
-rw-r--r--spec/views/shared/runners/_runner_details.html.haml_spec.rb2
-rw-r--r--spec/views/shared/snippets/_snippet.html.haml_spec.rb33
-rw-r--r--spec/views/user_settings/user_settings/authentication_log.html.haml_spec.rb (renamed from spec/views/profiles/audit_log.html.haml_spec.rb)2
-rw-r--r--spec/workers/abuse/trust_score_worker_spec.rb46
-rw-r--r--spec/workers/background_migration/ci_database_worker_spec.rb2
-rw-r--r--spec/workers/background_migration_worker_spec.rb2
-rw-r--r--spec/workers/bulk_imports/entity_worker_spec.rb26
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb13
-rw-r--r--spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb46
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb251
-rw-r--r--spec/workers/bulk_imports/stuck_import_worker_spec.rb59
-rw-r--r--spec/workers/bulk_imports/transform_references_worker_spec.rb257
-rw-r--r--spec/workers/ci/catalog/resources/process_sync_events_worker_spec.rb52
-rw-r--r--spec/workers/ci/low_urgency_cancel_redundant_pipelines_worker_spec.rb9
-rw-r--r--spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb4
-rw-r--r--spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb4
-rw-r--r--spec/workers/ci/runners/process_runner_version_update_worker_spec.rb2
-rw-r--r--spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb2
-rw-r--r--spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb2
-rw-r--r--spec/workers/click_house/events_sync_worker_spec.rb34
-rw-r--r--spec/workers/concerns/click_house_worker_spec.rb88
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb19
-rw-r--r--spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb26
-rw-r--r--spec/workers/concerns/worker_attributes_spec.rb2
-rw-r--r--spec/workers/delete_user_worker_spec.rb77
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb58
-rw-r--r--spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb15
-rw-r--r--spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb77
-rw-r--r--spec/workers/gitlab/github_import/advance_stage_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb20
-rw-r--r--spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb10
-rw-r--r--spec/workers/gitlab/jira_import/import_issue_worker_spec.rb8
-rw-r--r--spec/workers/integrations/slack_event_worker_spec.rb2
-rw-r--r--spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb4
-rw-r--r--spec/workers/packages/cleanup_package_registry_worker_spec.rb22
-rw-r--r--spec/workers/packages/npm/create_metadata_cache_worker_spec.rb8
-rw-r--r--spec/workers/packages/nuget/cleanup_stale_symbols_worker_spec.rb77
-rw-r--r--spec/workers/pages/deactivate_mr_deployments_worker_spec.rb44
-rw-r--r--spec/workers/pipeline_schedule_worker_spec.rb52
-rw-r--r--spec/workers/process_commit_worker_spec.rb254
-rw-r--r--spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb2
-rw-r--r--spec/workers/projects/update_repository_storage_worker_spec.rb2
1807 files changed, 49608 insertions, 30571 deletions
diff --git a/spec/channels/application_cable/connection_spec.rb b/spec/channels/application_cable/connection_spec.rb
index 4943669bde0..fa2518e1970 100644
--- a/spec/channels/application_cable/connection_spec.rb
+++ b/spec/channels/application_cable/connection_spec.rb
@@ -43,6 +43,16 @@ RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_sessions do
end
end
+ context 'when bearer header is provided' do
+ let(:user_pat) { create(:personal_access_token) }
+
+ it 'finds user by PAT' do
+ connect(ActionCable.server.config.mount_path, headers: { Authorization: "Bearer #{user_pat.token}" })
+
+ expect(connection.current_user).to eq(user_pat.user)
+ end
+ end
+
context 'when session cookie is not set' do
it 'sets current_user to nil' do
connect
diff --git a/spec/click_house/migration_support/migration_context_spec.rb b/spec/click_house/migration_support/migration_context_spec.rb
deleted file mode 100644
index 48ad9d9e3fa..00000000000
--- a/spec/click_house/migration_support/migration_context_spec.rb
+++ /dev/null
@@ -1,233 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_relative '../../../lib/click_house/migration_support/migration_error'
-
-RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
- click_house: :without_migrations, feature_category: :database do
- include ClickHouseTestHelpers
-
- # We don't need to delete data since we don't modify Postgres data
- self.use_transactional_tests = false
-
- let_it_be(:schema_migration) { ClickHouse::MigrationSupport::SchemaMigration }
-
- let(:migrations_base_dir) { 'click_house/migrations' }
- let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
- let(:migration_context) { described_class.new(migrations_dir, schema_migration) }
- let(:target_version) { nil }
-
- after do
- clear_consts(expand_fixture_path(migrations_base_dir))
- end
-
- describe 'performs migrations' do
- subject(:migration) { migrate(target_version, migration_context) }
-
- describe 'when creating a table' do
- let(:migrations_dirname) { 'plain_table_creation' }
-
- it 'creates a table' do
- expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
-
- table_schema = describe_table('some')
- expect(schema_migrations).to contain_exactly(a_hash_including(version: '1', active: 1))
- expect(table_schema).to match({
- id: a_hash_including(type: 'UInt64'),
- date: a_hash_including(type: 'Date')
- })
- end
- end
-
- describe 'when dropping a table' do
- let(:migrations_dirname) { 'drop_table' }
- let(:target_version) { 2 }
-
- it 'drops table' do
- migrate(1, migration_context)
- expect(table_names).to include('some')
-
- migration
- expect(table_names).not_to include('some')
- end
- end
-
- context 'when a migration raises an error' do
- let(:migrations_dirname) { 'migration_with_error' }
-
- it 'passes the error to caller as a StandardError' do
- expect { migration }.to raise_error StandardError,
- "An error has occurred, all later migrations canceled:\n\nA migration error happened"
- expect(schema_migrations).to be_empty
- end
- end
-
- context 'when a migration targets an unknown database' do
- let(:migrations_dirname) { 'plain_table_creation_on_invalid_database' }
-
- it 'raises ConfigurationError' do
- expect { migration }.to raise_error ClickHouse::Client::ConfigurationError,
- "The database 'unknown_database' is not configured"
- end
- end
-
- context 'when migrations target multiple databases' do
- let_it_be(:config) { ClickHouse::Client::Configuration.new }
- let_it_be(:main_db_config) { [:main, config] }
- let_it_be(:another_db_config) { [:another_db, config] }
- let_it_be(:another_database_name) { 'gitlab_clickhouse_test_2' }
-
- let(:migrations_dirname) { 'migrations_over_multiple_databases' }
-
- before(:context) do
- # Ensure we have a second database to run the test on
- clone_database_configuration(:main, :another_db, another_database_name, config)
-
- with_net_connect_allowed do
- ClickHouse::Client.execute("CREATE DATABASE IF NOT EXISTS #{another_database_name}", :main, config)
- end
- end
-
- after(:context) do
- with_net_connect_allowed do
- ClickHouse::Client.execute("DROP DATABASE #{another_database_name}", :another_db, config)
- end
- end
-
- around do |example|
- clear_db(config)
-
- previous_config = ClickHouse::Migration.client_configuration
- ClickHouse::Migration.client_configuration = config
-
- example.run
- ensure
- ClickHouse::Migration.client_configuration = previous_config
- end
-
- def clone_database_configuration(source_db_identifier, target_db_identifier, target_db_name, target_config)
- raw_config = Rails.application.config_for(:click_house)
- raw_config.each do |database_identifier, db_config|
- register_database(target_config, database_identifier, db_config)
- end
-
- target_db_config = raw_config[source_db_identifier].merge(database: target_db_name)
- register_database(target_config, target_db_identifier, target_db_config)
- target_config.http_post_proc = ClickHouse::Client.configuration.http_post_proc
- target_config.json_parser = ClickHouse::Client.configuration.json_parser
- target_config.logger = ::Logger.new(IO::NULL)
- end
-
- it 'registers migrations on respective database', :aggregate_failures do
- expect { migrate(2, migration_context) }
- .to change { active_schema_migrations_count(*main_db_config) }.from(0).to(1)
- .and change { active_schema_migrations_count(*another_db_config) }.from(0).to(1)
-
- expect(schema_migrations(*another_db_config)).to contain_exactly(a_hash_including(version: '2', active: 1))
- expect(table_names(*main_db_config)).not_to include('some_on_another_db')
- expect(table_names(*another_db_config)).not_to include('some')
-
- expect(describe_table('some', *main_db_config)).to match({
- id: a_hash_including(type: 'UInt64'),
- date: a_hash_including(type: 'Date')
- })
- expect(describe_table('some_on_another_db', *another_db_config)).to match({
- id: a_hash_including(type: 'UInt64'),
- date: a_hash_including(type: 'Date')
- })
-
- expect { migrate(nil, migration_context) }
- .to change { active_schema_migrations_count(*main_db_config) }.to(2)
- .and not_change { active_schema_migrations_count(*another_db_config) }
-
- expect(schema_migrations(*main_db_config)).to match([
- a_hash_including(version: '1', active: 1),
- a_hash_including(version: '3', active: 1)
- ])
- expect(schema_migrations(*another_db_config)).to match_array(a_hash_including(version: '2', active: 1))
-
- expect(describe_table('some', *main_db_config)).to match({
- id: a_hash_including(type: 'UInt64'),
- timestamp: a_hash_including(type: 'Date')
- })
- end
- end
-
- context 'when target_version is incorrect' do
- let(:target_version) { 2 }
- let(:migrations_dirname) { 'plain_table_creation' }
-
- it 'raises UnknownMigrationVersionError' do
- expect { migration }.to raise_error ClickHouse::MigrationSupport::UnknownMigrationVersionError
-
- expect(active_schema_migrations_count).to eq 0
- end
- end
-
- context 'when migrations with duplicate name exist' do
- let(:migrations_dirname) { 'duplicate_name' }
-
- it 'raises DuplicateMigrationNameError' do
- expect { migration }.to raise_error ClickHouse::MigrationSupport::DuplicateMigrationNameError
-
- expect(active_schema_migrations_count).to eq 0
- end
- end
-
- context 'when migrations with duplicate version exist' do
- let(:migrations_dirname) { 'duplicate_version' }
-
- it 'raises DuplicateMigrationVersionError' do
- expect { migration }.to raise_error ClickHouse::MigrationSupport::DuplicateMigrationVersionError
-
- expect(active_schema_migrations_count).to eq 0
- end
- end
- end
-
- describe 'performs rollbacks' do
- subject(:migration) { rollback(target_version, migration_context) }
-
- before do
- migrate(nil, migration_context)
- end
-
- context 'when migrating back all the way to 0' do
- let(:target_version) { 0 }
-
- context 'when down method is present' do
- let(:migrations_dirname) { 'table_creation_with_down_method' }
-
- it 'removes migration and performs down method' do
- expect(table_names).to include('some')
-
- expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
-
- expect(table_names).not_to include('some')
- expect(schema_migrations).to contain_exactly(a_hash_including(version: '1', active: 0))
- end
- end
-
- context 'when down method is missing' do
- let(:migrations_dirname) { 'plain_table_creation' }
-
- it 'removes migration ignoring missing down method' do
- expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
- .and not_change { table_names & %w[some] }.from(%w[some])
- end
- end
- end
-
- context 'when target_version is incorrect' do
- let(:target_version) { -1 }
- let(:migrations_dirname) { 'plain_table_creation' }
-
- it 'raises UnknownMigrationVersionError' do
- expect { migration }.to raise_error ClickHouse::MigrationSupport::UnknownMigrationVersionError
-
- expect(active_schema_migrations_count).to eq 1
- end
- end
- end
-end
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index d5fa88e72a7..ceee61fb302 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
+require 'gitlab/rspec/next_instance_of'
require_relative '../../support/stub_settings_source'
require_relative '../../../sidekiq_cluster/cli'
-require_relative '../../support/helpers/next_instance_of'
RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_settings_source: true do # rubocop:disable RSpec/FilePath
include NextInstanceOf
@@ -248,13 +248,13 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
if Gitlab.ee?
[
%w[incident_management_close_incident status_page_publish] + described_class::DEFAULT_QUEUES,
- %w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export] +
+ %w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_batch_export bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export] +
described_class::DEFAULT_QUEUES
]
else
[
%w[incident_management_close_incident] + described_class::DEFAULT_QUEUES,
- %w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import] +
+ %w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_batch_export bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import] +
described_class::DEFAULT_QUEUES
]
end
diff --git a/spec/components/pajamas/banner_component_spec.rb b/spec/components/pajamas/banner_component_spec.rb
index 47dc9042913..aa9c5c4fdf8 100644
--- a/spec/components/pajamas/banner_component_spec.rb
+++ b/spec/components/pajamas/banner_component_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
end
it 'renders its title' do
- expect(page).to have_css "h1[class='gl-banner-title']", text: title
+ expect(page).to have_css "h2[class='gl-banner-title']", text: title
end
it 'renders a close button' do
@@ -62,26 +62,11 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
end
end
- describe 'embedded' do
- context 'by default (false)' do
- it 'keeps the banner\'s borders' do
- expect(page).not_to have_css ".gl-banner.gl-border-none"
- end
- end
-
- context 'when set to true' do
- let(:options) { { embedded: true } }
-
- it 'removes the banner\'s borders' do
- expect(page).to have_css ".gl-banner.gl-border-none"
- end
- end
- end
-
describe 'variant' do
context 'by default (promotion)' do
it 'does not apply introduction class' do
expect(page).not_to have_css ".gl-banner-introduction"
+ expect(page).to have_css ".gl-banner.gl-bg-gray-10\\!"
end
end
diff --git a/spec/components/previews/pajamas/banner_component_preview.rb b/spec/components/previews/pajamas/banner_component_preview.rb
index db9bf2c51d6..4c1d64e32f6 100644
--- a/spec/components/previews/pajamas/banner_component_preview.rb
+++ b/spec/components/previews/pajamas/banner_component_preview.rb
@@ -8,19 +8,16 @@ module Pajamas
# @param button_text text
# @param button_link text
# @param content textarea
- # @param embedded toggle
# @param variant select {{ Pajamas::BannerComponent::VARIANT_OPTIONS }}
def default(
button_text: "Learn more",
button_link: "https://about.gitlab.com/",
content: "Add your message here.",
- embedded: false,
variant: :promotion
)
render(Pajamas::BannerComponent.new(
button_text: button_text,
button_link: button_link,
- embedded: embedded,
svg_path: "illustrations/autodevops.svg",
variant: variant
)) do |c|
diff --git a/spec/components/projects/ml/models_index_component_spec.rb b/spec/components/projects/ml/models_index_component_spec.rb
index b662e8c0a08..f020ae5bbef 100644
--- a/spec/components/projects/ml/models_index_component_spec.rb
+++ b/spec/components/projects/ml/models_index_component_spec.rb
@@ -43,12 +43,14 @@ RSpec.describe Projects::Ml::ModelsIndexComponent, type: :component, feature_cat
{
'name' => model1.name,
'version' => model1.latest_version.version,
+ 'path' => "/#{project.full_path}/-/ml/models/#{model1.id}",
'versionPackagePath' => "/#{project.full_path}/-/packages/#{model1.latest_version.package_id}",
'versionPath' => "/#{project.full_path}/-/ml/models/#{model1.id}/versions/#{model1.latest_version.id}",
'versionCount' => 1
},
{
'name' => model2.name,
+ 'path' => "/#{project.full_path}/-/ml/models/#{model2.id}",
'version' => nil,
'versionPackagePath' => nil,
'versionPath' => nil,
diff --git a/spec/components/projects/ml/show_ml_model_component_spec.rb b/spec/components/projects/ml/show_ml_model_component_spec.rb
index ec125851d3d..34b8cbe96ca 100644
--- a/spec/components/projects/ml/show_ml_model_component_spec.rb
+++ b/spec/components/projects/ml/show_ml_model_component_spec.rb
@@ -4,14 +4,22 @@ require "spec_helper"
RSpec.describe Projects::Ml::ShowMlModelComponent, type: :component, feature_category: :mlops do
let_it_be(:project) { build_stubbed(:project) }
- let_it_be(:model1) { build_stubbed(:ml_models, :with_latest_version_and_package, project: project) }
+ let_it_be(:model1) do
+ build_stubbed(:ml_models, :with_latest_version_and_package, project: project, description: "A description")
+ end
+
+ let_it_be(:experiment) { model1.default_experiment.tap { |e| e.iid = 100 } }
+ let_it_be(:candidate) { model1.latest_version.candidate.tap { |c| c.iid = 101 } }
+ let_it_be(:candidates) { Array.new(2) { build_stubbed(:ml_candidates, experiment: experiment) } }
subject(:component) do
- described_class.new(model: model1)
+ described_class.new(model: model1, current_user: model1.user)
end
describe 'rendered' do
before do
+ allow(model1).to receive(:candidates).and_return(candidates)
+
render_inline component
end
@@ -23,11 +31,30 @@ RSpec.describe Projects::Ml::ShowMlModelComponent, type: :component, feature_cat
'id' => model1.id,
'name' => model1.name,
'path' => "/#{project.full_path}/-/ml/models/#{model1.id}",
- 'description' => 'This is a placeholder for the short description',
+ 'description' => 'A description',
'latestVersion' => {
- 'version' => model1.latest_version.version
+ 'version' => model1.latest_version.version,
+ 'description' => model1.latest_version.description,
+ 'projectPath' => "/#{project.full_path}",
+ 'packageId' => model1.latest_version.package_id,
+ 'candidate' => {
+ 'info' => {
+ 'iid' => candidate.iid,
+ 'eid' => candidate.eid,
+ 'pathToArtifact' => nil,
+ 'experimentName' => candidate.experiment.name,
+ 'pathToExperiment' => "/#{project.full_path}/-/ml/experiments/#{experiment.iid}",
+ 'status' => 'running',
+ 'path' => "/#{project.full_path}/-/ml/candidates/#{candidate.iid}",
+ 'ciJob' => nil
+ },
+ 'metrics' => [],
+ 'params' => [],
+ 'metadata' => []
+ }
},
- 'versionCount' => 1
+ 'versionCount' => 1,
+ 'candidateCount' => 2
}
})
end
diff --git a/spec/components/projects/ml/show_ml_model_version_component_spec.rb b/spec/components/projects/ml/show_ml_model_version_component_spec.rb
index 973d8123c45..8c7e40d31a2 100644
--- a/spec/components/projects/ml/show_ml_model_version_component_spec.rb
+++ b/spec/components/projects/ml/show_ml_model_version_component_spec.rb
@@ -4,11 +4,24 @@ require "spec_helper"
RSpec.describe Projects::Ml::ShowMlModelVersionComponent, type: :component, feature_category: :mlops do
let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:user) { project.owner }
let_it_be(:model) { build_stubbed(:ml_models, project: project) }
- let_it_be(:version) { build_stubbed(:ml_model_versions, model: model) }
+ let_it_be(:experiment) do
+ model.default_experiment.iid = 100
+ model.default_experiment
+ end
+
+ let_it_be(:candidate) do
+ build_stubbed(:ml_candidates, :with_artifact, experiment: experiment, user: user, project: project,
+ internal_id: 100)
+ end
+
+ let_it_be(:version) do
+ build_stubbed(:ml_model_versions, :with_package, model: model, candidate: candidate, description: 'abc')
+ end
subject(:component) do
- described_class.new(model_version: version)
+ described_class.new(model_version: version, current_user: user)
end
describe 'rendered' do
@@ -23,10 +36,28 @@ RSpec.describe Projects::Ml::ShowMlModelVersionComponent, type: :component, feat
'modelVersion' => {
'id' => version.id,
'version' => version.version,
+ 'description' => 'abc',
+ 'projectPath' => "/#{project.full_path}",
'path' => "/#{project.full_path}/-/ml/models/#{model.id}/versions/#{version.id}",
+ 'packageId' => version.package_id,
'model' => {
'name' => model.name,
'path' => "/#{project.full_path}/-/ml/models/#{model.id}"
+ },
+ 'candidate' => {
+ 'info' => {
+ 'iid' => candidate.iid,
+ 'eid' => candidate.eid,
+ 'pathToArtifact' => "/#{project.full_path}/-/packages/#{candidate.artifact.id}",
+ 'experimentName' => candidate.experiment.name,
+ 'pathToExperiment' => "/#{project.full_path}/-/ml/experiments/#{experiment.iid}",
+ 'status' => 'running',
+ 'path' => "/#{project.full_path}/-/ml/candidates/#{candidate.iid}",
+ 'ciJob' => nil
+ },
+ 'metrics' => [],
+ 'params' => [],
+ 'metadata' => []
}
}
})
diff --git a/spec/controllers/activity_pub/projects/releases_controller_spec.rb b/spec/controllers/activity_pub/projects/releases_controller_spec.rb
index 8719756b260..4102789ee43 100644
--- a/spec/controllers/activity_pub/projects/releases_controller_spec.rb
+++ b/spec/controllers/activity_pub/projects/releases_controller_spec.rb
@@ -11,13 +11,15 @@ RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :gro
let_it_be(:release_1) { create(:release, project: project, released_at: Time.zone.parse('2018-10-18')) }
let_it_be(:release_2) { create(:release, project: project, released_at: Time.zone.parse('2019-10-19')) }
+ let(:request_body) { '' }
+
before_all do
project.add_developer(developer)
end
shared_examples 'common access controls' do
it 'renders a 200' do
- get(action, params: params)
+ perform_action(verb, action, params, request_body)
expect(response).to have_gitlab_http_status(:ok)
end
@@ -27,7 +29,7 @@ RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :gro
context 'when user is not logged in' do
it 'renders a 404' do
- get(action, params: params)
+ perform_action(verb, action, params, request_body)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -39,7 +41,7 @@ RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :gro
end
it 'still renders a 404' do
- get(action, params: params)
+ perform_action(verb, action, params, request_body)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -52,7 +54,7 @@ RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :gro
end
it 'renders a 404' do
- get(action, params: params)
+ perform_action(verb, action, params, request_body)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -64,7 +66,7 @@ RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :gro
end
it 'renders a 404' do
- get(action, params: params)
+ perform_action(verb, action, params, request_body)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -83,9 +85,10 @@ RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :gro
describe 'GET #index' do
before do
- get(action, params: params)
+ perform_action(verb, action, params)
end
+ let(:verb) { :get }
let(:action) { :index }
let(:params) { { namespace_id: project.namespace, project_id: project } }
@@ -99,9 +102,10 @@ RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :gro
describe 'GET #outbox' do
before do
- get(action, params: params)
+ perform_action(verb, action, params)
end
+ let(:verb) { :get }
let(:action) { :outbox }
let(:params) { { namespace_id: project.namespace, project_id: project, page: page } }
@@ -131,4 +135,172 @@ RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :gro
end
end
end
+
+ describe 'POST #inbox' do
+ before do
+ allow(ActivityPub::Projects::ReleasesFollowService).to receive(:new) { follow_service }
+ allow(ActivityPub::Projects::ReleasesUnfollowService).to receive(:new) { unfollow_service }
+ end
+
+ let(:verb) { :post }
+ let(:action) { :inbox }
+ let(:params) { { namespace_id: project.namespace, project_id: project } }
+
+ let(:follow_service) do
+ instance_double(ActivityPub::Projects::ReleasesFollowService, execute: true, errors: ['an error'])
+ end
+
+ let(:unfollow_service) do
+ instance_double(ActivityPub::Projects::ReleasesUnfollowService, execute: true, errors: ['an error'])
+ end
+
+ context 'with a follow activity' do
+ before do
+ perform_action(verb, action, params, request_body)
+ end
+
+ let(:request_body) do
+ {
+ "@context": "https://www.w3.org/ns/activitystreams",
+ id: "http://localhost:3001/6233e6c2-d285-4aa4-bd71-ddf1824d87f8",
+ type: "Follow",
+ actor: "http://localhost:3001/users/admin",
+ object: "http://127.0.0.1:3000/flightjs/Flight/-/releases"
+ }.to_json
+ end
+
+ it_behaves_like 'common access controls'
+
+ context 'with successful subscription initialization' do
+ it 'calls the subscription service' do
+ expect(follow_service).to have_received :execute
+ end
+
+ it 'returns a successful response' do
+ expect(json_response['success']).to be_truthy
+ end
+
+ it 'does not fill any error' do
+ expect(json_response).not_to have_key 'errors'
+ end
+ end
+
+ context 'with unsuccessful subscription initialization' do
+ let(:follow_service) do
+ instance_double(ActivityPub::Projects::ReleasesFollowService, execute: false, errors: ['an error'])
+ end
+
+ it 'calls the subscription service' do
+ expect(follow_service).to have_received :execute
+ end
+
+ it 'returns a successful response' do
+ expect(json_response['success']).to be_falsey
+ end
+
+ it 'fills an error' do
+ expect(json_response['errors']).to include 'an error'
+ end
+ end
+ end
+
+ context 'with an unfollow activity' do
+ before do
+ perform_action(verb, action, params, request_body)
+ end
+
+ let(:unfollow_service) do
+ instance_double(ActivityPub::Projects::ReleasesSubscriptionService, execute: true, errors: ['an error'])
+ end
+
+ let(:request_body) do
+ {
+ "@context": "https://www.w3.org/ns/activitystreams",
+ id: "http://localhost:3001/users/admin#follows/8/undo",
+ type: "Undo",
+ actor: "http://localhost:3001/users/admin",
+ object: {
+ id: "http://localhost:3001/d4358269-71a9-4746-ac16-9a909f12ee5b",
+ type: "Follow",
+ actor: "http://localhost:3001/users/admin",
+ object: "http://127.0.0.1:3000/flightjs/Flight/-/releases"
+ }
+ }.to_json
+ end
+
+ it_behaves_like 'common access controls'
+
+ context 'with successful unfollow' do
+ it 'calls the subscription service' do
+ expect(unfollow_service).to have_received :execute
+ end
+
+ it 'returns a successful response' do
+ expect(json_response['success']).to be_truthy
+ end
+
+ it 'does not fill any error' do
+ expect(json_response).not_to have_key 'errors'
+ end
+ end
+
+ context 'with unsuccessful unfollow' do
+ let(:unfollow_service) do
+ instance_double(ActivityPub::Projects::ReleasesUnfollowService, execute: false, errors: ['an error'])
+ end
+
+ it 'calls the subscription service' do
+ expect(unfollow_service).to have_received :execute
+ end
+
+ it 'returns a successful response' do
+ expect(json_response['success']).to be_falsey
+ end
+
+ it 'fills an error' do
+ expect(json_response['errors']).to include 'an error'
+ end
+ end
+ end
+
+ context 'with an unknown activity' do
+ before do
+ perform_action(verb, action, params, request_body)
+ end
+
+ let(:request_body) do
+ {
+ "@context": "https://www.w3.org/ns/activitystreams",
+ id: "http://localhost:3001/6233e6c2-d285-4aa4-bd71-ddf1824d87f8",
+ type: "Like",
+ actor: "http://localhost:3001/users/admin",
+ object: "http://127.0.0.1:3000/flightjs/Flight/-/releases"
+ }.to_json
+ end
+
+ it 'does not call the subscription service' do
+ expect(follow_service).not_to have_received :execute
+ expect(unfollow_service).not_to have_received :execute
+ end
+
+ it 'returns a successful response' do
+ expect(json_response['success']).to be_truthy
+ end
+
+ it 'does not fill any error' do
+ expect(json_response).not_to have_key 'errors'
+ end
+ end
+
+ context 'with no activity' do
+ it 'renders a 422' do
+ perform_action(verb, action, params, request_body)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+ end
+end
+
+def perform_action(verb, action, params, body = nil)
+ send(verb, action, params: params, body: body)
end
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 8dbdd8db99b..6d3492e5a6c 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -258,6 +258,7 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
it_behaves_like 'updates boolean attribute', :user_defaults_to_private_profile
it_behaves_like 'updates boolean attribute', :can_create_group
+ it_behaves_like 'updates boolean attribute', :can_create_organization
it_behaves_like 'updates boolean attribute', :admin_mode
it_behaves_like 'updates boolean attribute', :require_admin_approval_after_user_signup
it_behaves_like 'updates boolean attribute', :remember_me_enabled
@@ -377,46 +378,6 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
end
end
- describe 'user_email_lookup_limit aliasing' do
- let(:application_setting) { ApplicationSetting.current }
- let(:user_email_lookup_limit) { 8675 }
- let(:search_rate_limit) { 309 }
-
- context 'when search_rate_limit is specified' do
- let(:settings_params) do
- {
- user_email_lookup_limit: user_email_lookup_limit,
- search_rate_limit: search_rate_limit
- }
- end
-
- it 'updates search_rate_limit with correct value' do
- expect(application_setting.search_rate_limit).not_to eq user_email_lookup_limit
- expect(application_setting.search_rate_limit).not_to eq search_rate_limit
-
- put :update, params: { application_setting: settings_params }
-
- expect(application_setting.reload.search_rate_limit).to eq search_rate_limit
- end
- end
-
- context 'when search_rate_limit is not specified' do
- let(:settings_params) do
- {
- user_email_lookup_limit: search_rate_limit
- }
- end
-
- it 'applies user_email_lookup_limit value to search_rate_limit' do
- expect(application_setting.search_rate_limit).not_to eq search_rate_limit
-
- put :update, params: { application_setting: settings_params }
-
- expect(application_setting.reload.search_rate_limit).to eq search_rate_limit
- end
- end
- end
-
context 'pipeline creation rate limiting' do
let(:application_settings) { ApplicationSetting.current }
diff --git a/spec/controllers/admin/runner_projects_controller_spec.rb b/spec/controllers/admin/runner_projects_controller_spec.rb
index 06a73984ac0..5cfa1287e3b 100644
--- a/spec/controllers/admin/runner_projects_controller_spec.rb
+++ b/spec/controllers/admin/runner_projects_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Admin::RunnerProjectsController, feature_category: :runner_fleet do
+RSpec.describe Admin::RunnerProjectsController, feature_category: :fleet_visibility do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 5f98004e9cf..d88fe41a869 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Admin::RunnersController, feature_category: :runner_fleet do
+RSpec.describe Admin::RunnersController, feature_category: :fleet_visibility do
let_it_be(:runner) { create(:ci_runner) }
let_it_be(:user) { create(:admin) }
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index b7ee01ce6b3..f4384dbaa69 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe ApplicationController, feature_category: :shared do
expect(user.ldap_user?).to be_falsey
allow(controller).to receive(:current_user).and_return(user)
expect(controller).to receive(:redirect_to)
- expect(controller).to receive(:new_profile_password_path)
+ expect(controller).to receive(:new_user_settings_password_path)
controller.send(:check_password_expiration)
end
@@ -470,7 +470,7 @@ RSpec.describe ApplicationController, feature_category: :shared do
enforce_terms
- expect { get :index }.not_to exceed_query_limit(control)
+ expect { get :index }.not_to exceed_query_limit(control.count).with_threshold(1)
end
context 'when terms are enforced' do
diff --git a/spec/controllers/concerns/autocomplete_sources/expires_in_spec.rb b/spec/controllers/concerns/autocomplete_sources/expires_in_spec.rb
new file mode 100644
index 00000000000..e1ebe9bcd80
--- /dev/null
+++ b/spec/controllers/concerns/autocomplete_sources/expires_in_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AutocompleteSources::ExpiresIn, feature_category: :global_search do
+ controller(ActionController::Base) do
+ include AutocompleteSources::ExpiresIn
+
+ def members
+ render json: []
+ end
+
+ def commands
+ render json: []
+ end
+
+ def labels
+ render json: []
+ end
+
+ def not_cached
+ render json: []
+ end
+ end
+
+ before do
+ routes.draw do
+ get "members" => "anonymous#members"
+ get "commands" => "anonymous#commands"
+ get "labels" => "anonymous#labels"
+ get "not_cached" => "anonymous#not_cached"
+ end
+ end
+
+ let(:expected_cache_control) { "max-age=#{described_class::AUTOCOMPLETE_EXPIRES_IN}, private" }
+
+ described_class::AUTOCOMPLETE_CACHED_ACTIONS.each do |action|
+ context "when action is #{action} with feature flag enabled" do
+ it "sets correct cache-control" do
+ get action
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Cache-Control']).to eq(expected_cache_control)
+ end
+ end
+
+ context "when action is #{action} with feature flag disabled" do
+ before do
+ stub_feature_flags("cache_autocomplete_sources_#{action}" => false)
+ end
+
+ it 'does not set cache-control' do
+ get action
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Cache-Control']).to be_nil
+ end
+ end
+ end
+
+ context 'when action is not in AUTOCOMPLETE_CACHED_ACTIONS' do
+ it 'does not set cache-control' do
+ get :not_cached
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Cache-Control']).to be_nil
+ end
+ end
+end
diff --git a/spec/controllers/concerns/product_analytics_tracking_spec.rb b/spec/controllers/concerns/product_analytics_tracking_spec.rb
index 5c69af48e65..7b48782be98 100644
--- a/spec/controllers/concerns/product_analytics_tracking_spec.rb
+++ b/spec/controllers/concerns/product_analytics_tracking_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
include ProductAnalyticsTracking
skip_before_action :authenticate_user!, only: [:index]
- track_internal_event :index, :show, name: 'g_compliance_dashboard', conditions: [:custom_condition?]
+ track_internal_event :index, :show, name: 'an_event', conditions: [:custom_condition?]
def index
render html: 'index'
@@ -47,7 +47,7 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
end
def expect_internal_tracking(tracked_user: user)
- expect(Gitlab::InternalEvents).to receive(:track_event).with('g_compliance_dashboard',
+ expect(Gitlab::InternalEvents).to receive(:track_event).with(event_name,
user: tracked_user,
project: project,
namespace: project.namespace).once
@@ -57,6 +57,10 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
expect(Gitlab::InternalEvents).not_to receive(:track_event)
end
+ before do
+ allow(Gitlab::InternalEvents::EventDefinitions).to receive(:known_event?).with('an_event').and_return(true)
+ end
+
context 'when user is logged in' do
let(:namespace) { project.namespace }
@@ -96,8 +100,22 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
end
context 'when user is not logged in' do
- it 'does not track the event' do
- expect_no_internal_tracking
+ let(:user) { nil }
+
+ it 'tracks internal event' do
+ expect_internal_tracking
+
+ get :index
+ end
+
+ it 'tracks total Redis counters' do
+ expect(Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric).to receive(:redis_key).twice # total and 7d
+
+ get :index
+ end
+
+ it 'does not update unique counter' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
get :index
end
@@ -107,7 +125,6 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
describe '.track_event' do
before do
allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
- stub_const("#{described_class}::MIGRATED_EVENTS", ['an_event'])
end
controller(ApplicationController) do
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index 6bb791d2fd4..9b977248435 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -60,6 +60,42 @@ RSpec.describe Groups::DependencyProxyForContainersController, feature_category:
it { is_expected.to have_gitlab_http_status(:not_found) }
end
+ context 'with invalid group access token' do
+ let_it_be(:user) { create(:user, :project_bot) }
+
+ context 'not under the group' do
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'with sufficient scopes, but not active' do
+ %i[expired revoked].each do |status|
+ context status.to_s do
+ let_it_be(:pat) do
+ create(:personal_access_token, status, user: user).tap do |pat|
+ pat.update_column(:scopes, Gitlab::Auth::REGISTRY_SCOPES)
+ end
+ end
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+ end
+ end
+
+ context 'with insufficient scopes' do
+ let_it_be(:pat) { create(:personal_access_token, user: user, scopes: [Gitlab::Auth::READ_API_SCOPE]) }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+
+ context 'packages_dependency_proxy_containers_scope_check disabled' do
+ before do
+ stub_feature_flags(packages_dependency_proxy_containers_scope_check: false)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+ end
+ end
+
context 'with deploy token from a different group,' do
let_it_be(:user) { create(:deploy_token, :group, :dependency_proxy_scopes) }
@@ -119,11 +155,7 @@ RSpec.describe Groups::DependencyProxyForContainersController, feature_category:
end
shared_examples 'authorize action with permission' do
- context 'with a valid user' do
- before do
- group.add_guest(user)
- end
-
+ shared_examples 'sends Workhorse instructions' do
it 'sends Workhorse local file instructions', :aggregate_failures do
subject
@@ -144,6 +176,32 @@ RSpec.describe Groups::DependencyProxyForContainersController, feature_category:
expect(json_response['MaximumSize']).to eq(maximum_size)
end
end
+
+ before do
+ group.add_guest(user)
+ end
+
+ context 'with a valid user' do
+ it_behaves_like 'sends Workhorse instructions'
+ end
+
+ context 'with a valid group access token' do
+ let_it_be(:user) { create(:user, :project_bot) }
+ let_it_be_with_reload(:token) { create(:personal_access_token, user: user) }
+
+ before do
+ token.update_column(:scopes, Gitlab::Auth::REGISTRY_SCOPES)
+ end
+
+ it_behaves_like 'sends Workhorse instructions'
+ end
+
+ context 'with a deploy token' do
+ let_it_be(:user) { create(:deploy_token, :dependency_proxy_scopes, :group) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ it_behaves_like 'sends Workhorse instructions'
+ end
end
shared_examples 'namespace statistics refresh' do
@@ -277,6 +335,18 @@ RSpec.describe Groups::DependencyProxyForContainersController, feature_category:
it_behaves_like 'a successful manifest pull'
end
end
+
+ context 'a valid group access token' do
+ let_it_be(:user) { create(:user, :project_bot) }
+ let_it_be(:token) { create(:personal_access_token, :dependency_proxy_scopes, user: user) }
+
+ before do
+ group.add_guest(user)
+ end
+
+ it_behaves_like 'a successful manifest pull'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_manifest', false
+ end
end
it_behaves_like 'not found when disabled'
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index d48e9ff0d51..e732281a406 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
+RSpec.describe Groups::RunnersController, feature_category: :fleet_visibility do
let_it_be(:user) { create(:user) }
let_it_be(:namespace_settings) { create(:namespace_settings, runner_registration_enabled: true) }
let_it_be(:group) { create(:group, namespace_settings: namespace_settings) }
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 31257fd3a30..01bb3761a25 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -251,7 +251,7 @@ RSpec.describe GroupsController, factory_default: :keep, feature_category: :code
post :create, params: { group: { parent_id: group.id, path: 'subgroup' } }
expect(response).to be_redirect
- expect(response.body).to match(%r{http://test.host/#{group.path}/subgroup})
+ expect(response.location).to eq("http://test.host/#{group.path}/subgroup")
end
end
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index 57c723829e3..9b41089f4b8 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -333,7 +333,7 @@ RSpec.describe Import::BulkImportsController, feature_category: :importers do
it 'returns bulk imports created by current user' do
get :realtime_changes
- expect(json_response).to eq([{ 'id' => bulk_import.id, 'status_name' => bulk_import.status_name.to_s }])
+ expect(json_response).to match_array([{ 'id' => bulk_import.id, 'status_name' => bulk_import.status_name.to_s, 'has_failures' => false }])
end
it 'sets a Poll-Interval header' do
diff --git a/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb b/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb
index 29678706bba..b16f8ad1a58 100644
--- a/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ldap::OmniauthCallbacksController do
+RSpec.describe Ldap::OmniauthCallbacksController, type: :controller, feature_category: :system_access do
include_context 'Ldap::OmniauthCallbacksController'
it 'allows sign in' do
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index 847f7aeae7c..5b1fdd6388a 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
sign_in user
end
- it "sets the session varible for provider 2FA" do
+ it "sets the session variable for provider 2FA" do
post :saml, params: { SAMLResponse: mock_saml_response }
expect(session[:provider_2FA]).to eq(true)
diff --git a/spec/controllers/profiles/preferences_controller_spec.rb b/spec/controllers/profiles/preferences_controller_spec.rb
index aaf169cd42b..505f5b2d417 100644
--- a/spec/controllers/profiles/preferences_controller_spec.rb
+++ b/spec/controllers/profiles/preferences_controller_spec.rb
@@ -3,10 +3,12 @@
require 'spec_helper'
RSpec.describe Profiles::PreferencesController do
+ let_it_be(:home_organization) { create(:organization) }
let(:user) { create(:user) }
before do
sign_in(user)
+ create(:organization_user, organization: home_organization, user: user)
allow(subject).to receive(:current_user).and_return(user)
end
@@ -28,6 +30,7 @@ RSpec.describe Profiles::PreferencesController do
params.reverse_merge!(
color_scheme_id: '1',
dashboard: 'stars',
+ home_organization_id: home_organization.id,
theme_id: '1'
)
@@ -49,6 +52,7 @@ RSpec.describe Profiles::PreferencesController do
diffs_deletion_color: '#123456',
diffs_addition_color: '#abcdef',
dashboard: 'stars',
+ home_organization_id: home_organization.id.to_s,
theme_id: '2',
first_day_of_week: '1',
preferred_language: 'jp',
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index 4f350ddf1ef..26144edb670 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -140,30 +140,6 @@ RSpec.describe ProfilesController, :request_store do
end
end
- describe 'GET audit_log' do
- let(:auth_event) { create(:authentication_event, user: user) }
-
- it 'tracks search event', :snowplow do
- sign_in(user)
-
- get :audit_log
-
- expect_snowplow_event(
- category: 'ProfilesController',
- action: 'search_audit_event',
- user: user
- )
- end
-
- it 'loads page correctly' do
- sign_in(user)
-
- get :audit_log
-
- expect(response).to have_gitlab_http_status(:success)
- end
- end
-
describe 'PUT update_username' do
let(:namespace) { user.namespace }
let(:gitlab_shell) { Gitlab::Shell.new }
diff --git a/spec/controllers/projects/deploy_keys_controller_spec.rb b/spec/controllers/projects/deploy_keys_controller_spec.rb
index 96addb4b6c5..a285a84ca0b 100644
--- a/spec/controllers/projects/deploy_keys_controller_spec.rb
+++ b/spec/controllers/projects/deploy_keys_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::DeployKeysController do
+RSpec.describe Projects::DeployKeysController, feature_category: :continuous_delivery do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:admin) { create(:admin) }
@@ -13,60 +13,94 @@ RSpec.describe Projects::DeployKeysController do
sign_in(user)
end
- describe 'GET index' do
- let(:params) do
- { namespace_id: project.namespace, project_id: project }
- end
+ describe 'GET actions' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
- context 'when html requested' do
- it 'redirects to project settings with the correct anchor' do
- get :index, params: params
+ let_it_be(:accessible_project) { create(:project, :internal).tap { |p| p.add_developer(user) } }
+ let_it_be(:inaccessible_project) { create(:project, :internal) }
+ let_it_be(:project_private) { create(:project, :private) }
- expect(response).to redirect_to(project_settings_repository_path(project, anchor: 'js-deploy-keys-settings'))
- end
+ let_it_be(:deploy_key_for_target_project) do
+ create(:deploy_keys_project, project: project, deploy_key: create(:deploy_key))
end
- context 'when json requested' do
- let(:project2) { create(:project, :internal) }
- let(:project_private) { create(:project, :private) }
+ let_it_be(:deploy_key_for_accessible_project) do
+ create(:deploy_keys_project, project: accessible_project, deploy_key: create(:deploy_key))
+ end
- let(:deploy_key_internal) { create(:deploy_key) }
- let(:deploy_key_actual) { create(:deploy_key) }
- let!(:deploy_key_public) { create(:deploy_key, public: true) }
+ let_it_be(:deploy_key_for_inaccessible_project) do
+ create(:deploy_keys_project, project: inaccessible_project, deploy_key: create(:deploy_key))
+ end
- let!(:deploy_keys_project_internal) do
- create(:deploy_keys_project, project: project2, deploy_key: deploy_key_internal)
- end
+ let_it_be(:deploy_keys_project_private) do
+ create(:deploy_keys_project, project: project_private, deploy_key: create(:another_deploy_key))
+ end
- let!(:deploy_keys_project_actual) do
- create(:deploy_keys_project, project: project, deploy_key: deploy_key_actual)
- end
+ let_it_be(:deploy_key_public) { create(:deploy_key, public: true) }
- let!(:deploy_keys_project_private) do
- create(:deploy_keys_project, project: project_private, deploy_key: create(:another_deploy_key))
+ describe 'GET index' do
+ let(:params) do
+ { namespace_id: project.namespace, project_id: project }
end
- context 'when user has access to all projects where deploy keys are used' do
- before do
- project2.add_developer(user)
+ context 'when html requested' do
+ it 'redirects to project settings with the correct anchor' do
+ get :index, params: params
+
+ expect(response).to redirect_to(project_settings_repository_path(project, anchor: 'js-deploy-keys-settings'))
end
+ end
+ context 'when json requested' do
it 'returns json in a correct format' do
get :index, params: params.merge(format: :json)
expect(json_response.keys).to match_array(%w[enabled_keys available_project_keys public_keys])
- expect(json_response['enabled_keys'].count).to eq(1)
- expect(json_response['available_project_keys'].count).to eq(1)
- expect(json_response['public_keys'].count).to eq(1)
+ expect(json_response['enabled_keys'].pluck('id')).to match_array(
+ [deploy_key_for_target_project.deploy_key_id]
+ )
+ expect(json_response['available_project_keys'].pluck('id')).to match_array(
+ [deploy_key_for_accessible_project.deploy_key_id]
+ )
+ expect(json_response['public_keys'].pluck('id')).to match_array([deploy_key_public.id])
end
end
+ end
- context 'when user has no access to all projects where deploy keys are used' do
- it 'returns json in a correct format' do
- get :index, params: params.merge(format: :json)
+ describe 'GET enabled_keys' do
+ let(:params) do
+ { namespace_id: project.namespace, project_id: project }
+ end
- expect(json_response['available_project_keys'].count).to eq(0)
- end
+ it 'returns only enabled keys' do
+ get :enabled_keys, params: params.merge(format: :json)
+
+ expect(json_response['keys'].pluck("id")).to match_array([deploy_key_for_target_project.deploy_key_id])
+ end
+ end
+
+ describe 'GET available_project_keys' do
+ let(:params) do
+ { namespace_id: project.namespace, project_id: project }
+ end
+
+ it 'returns available project keys' do
+ get :available_project_keys, params: params.merge(format: :json)
+
+ expect(json_response['keys'].pluck("id")).to match_array([deploy_key_for_accessible_project.deploy_key_id])
+ end
+ end
+
+ describe 'GET available_public_keys' do
+ let(:params) do
+ { namespace_id: project.namespace, project_id: project }
+ end
+
+ it 'returns available public keys' do
+ get :available_public_keys, params: params.merge(format: :json)
+
+ expect(json_response['keys'].pluck("id")).to match_array([deploy_key_public.id])
end
end
end
diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb
index 8251b963563..bf4557dbd8a 100644
--- a/spec/controllers/projects/discussions_controller_spec.rb
+++ b/spec/controllers/projects/discussions_controller_spec.rb
@@ -162,19 +162,6 @@ RSpec.describe Projects::DiscussionsController, feature_category: :team_planning
expect(note.reload.resolved_at).not_to be_nil
expect(response).to have_gitlab_http_status(:ok)
end
-
- context 'when resolvable_issue_threads is disabled' do
- before do
- stub_feature_flags(resolvable_issue_threads: false)
- end
-
- it 'does not resolve the discussion and returns status 404' do
- post :resolve, params: request_params
-
- expect(note.reload.resolved_at).to be_nil
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
end
end
end
@@ -261,19 +248,6 @@ RSpec.describe Projects::DiscussionsController, feature_category: :team_planning
expect(note.reload.resolved_at).to be_nil
expect(response).to have_gitlab_http_status(:ok)
end
-
- context 'when resolvable_issue_threads is disabled' do
- before do
- stub_feature_flags(resolvable_issue_threads: false)
- end
-
- it 'does not unresolve the discussion and returns status 404' do
- delete :unresolve, params: request_params
-
- expect(note.reload.resolved_at).not_to be_nil
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
end
end
end
diff --git a/spec/controllers/projects/group_links_controller_spec.rb b/spec/controllers/projects/group_links_controller_spec.rb
index 4510e9e646e..e7a08c55a70 100644
--- a/spec/controllers/projects/group_links_controller_spec.rb
+++ b/spec/controllers/projects/group_links_controller_spec.rb
@@ -76,6 +76,17 @@ RSpec.describe Projects::GroupLinksController, feature_category: :system_access
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Not Found')
end
+
+ context 'when MAINTAINER tries to update the link to OWNER access' do
+ let(:group_access) { Gitlab::Access::OWNER }
+
+ it 'returns 403' do
+ update_link
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('Forbidden')
+ end
+ end
end
describe '#destroy' do
@@ -167,12 +178,41 @@ RSpec.describe Projects::GroupLinksController, feature_category: :system_access
sign_in(user)
end
- it 'renders 404' do
- destroy_link
+ it 'returns 404' do
+ expect { destroy_link }.to not_change { project.reload.project_group_links.count }
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
+
+ context 'when the user is a project maintainer' do
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ context 'when they try to destroy a link with OWNER access level' do
+ let(:group_access) { Gitlab::Access::OWNER }
+
+ it 'does not destroy the link' do
+ expect { destroy_link }.to not_change { project.reload.project_group_links.count }
+
+ expect(response).to redirect_to(project_project_members_path(project, tab: :groups))
+ expect(flash[:alert]).to include('The project-group link could not be removed.')
+ end
+
+ context 'when format is js' do
+ let(:format) { :js }
+
+ it 'returns 403' do
+ expect { destroy_link }.to not_change { project.reload.project_group_links.count }
+
+ expect(json_response).to eq({ "message" => "Forbidden" })
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 5024698a769..b29a172f5b1 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1079,10 +1079,10 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
end
context 'when trying to create a task' do
- it 'defaults to issue type' do
+ it 'sets the correct issue_type' do
issue = post_new_issue(issue_type: 'task')
- expect(issue.work_item_type.base_type).to eq('issue')
+ expect(issue.work_item_type.base_type).to eq('task')
end
end
@@ -1797,18 +1797,6 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion discussion_path individual_note resolvable commit_id for_commit project_id confidential resolve_path resolved resolved_at resolved_by resolved_by_push])
end
- context 'when resolvable_issue_threads is disabled' do
- before do
- stub_feature_flags(resolvable_issue_threads: false)
- end
-
- it 'returns discussion json without resolved fields' do
- get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
-
- expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion discussion_path individual_note resolvable commit_id for_commit project_id confidential])
- end
- end
-
it 'renders the author status html if there is a status' do
create(:user_status, user: discussion.author)
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 9905b6ba512..b2b591d7929 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -625,7 +625,7 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
end
context 'when ck param is present' do
- let(:cache_key) { merge_request.merge_head_diff.id }
+ let(:cache_key) { 'abc123' }
before do
create(:merge_request_diff, :merge_head, merge_request: merge_request)
@@ -657,5 +657,23 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
end
end
end
+
+ context 'when collapse_generated_diff_files FF is enabled' do
+ it 'sets generated' do
+ go
+ expect(json_response["diff_files"][0]["viewer"]["generated"]).to eq(false)
+ end
+ end
+
+ context 'when collapse_generated_diff_files FF is disabled' do
+ before do
+ stub_feature_flags(collapse_generated_diff_files: false)
+ end
+
+ it 'sets generated as nil' do
+ go
+ expect(json_response["diff_files"][0]["viewer"]["generated"]).to be_nil
+ end
+ end
end
end
diff --git a/spec/controllers/projects/merge_requests/drafts_controller_spec.rb b/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
index 505f9f5b19b..cc558aa2d1d 100644
--- a/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
@@ -87,6 +87,7 @@ RSpec.describe Projects::MergeRequests::DraftsController, feature_category: :cod
end
it 'creates a draft note with quick actions' do
+ stub_commonmark_sourcepos_enabled
create_draft_note(draft_overrides: { note: "#{user2.to_reference}\n/assign #{user.to_reference}" })
expect(response).to have_gitlab_http_status(:ok)
@@ -354,6 +355,7 @@ RSpec.describe Projects::MergeRequests::DraftsController, feature_category: :cod
end
it 'publishes a draft note with quick actions and applies them', :sidekiq_inline do
+ stub_commonmark_sourcepos_enabled
project.add_developer(user2)
create(:draft_note, merge_request: merge_request, author: user, note: "/assign #{user2.to_reference}")
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 539c6d17e0e..55741a82862 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -18,6 +18,27 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
sign_in(user)
end
+ shared_examples 'a 400 response' do
+ it 'does not send polling interval' do
+ expect(Gitlab::PollingInterval).not_to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 400 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns an error string' do
+ subject
+
+ expect(json_response['status_reason']).to eq error_string
+ expect(json_response['errors']).to match_array [error_string]
+ end
+ end
+
describe 'GET commit_change_content' do
it 'renders commit_change_content template' do
get :commit_change_content,
@@ -100,13 +121,13 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
w: '0',
page: '0',
per_page: '5',
- ck: merge_request.merge_head_diff.id))
+ ck: merge_request.merge_head_diff.patch_id_sha))
end
it 'sets diffs_batch_cache_key' do
go
- expect(assigns['diffs_batch_cache_key']).to eq(merge_request.merge_head_diff.id)
+ expect(assigns['diffs_batch_cache_key']).to eq(merge_request.merge_head_diff.patch_id_sha)
end
context 'when diffs_batch_cache_with_max_age feature flag is disabled' do
@@ -1228,20 +1249,10 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
end
context 'when user created corrupted coverage reports' do
- let(:report) { { status: :error, status_reason: 'Failed to parse coverage reports' } }
-
- it 'does not send polling interval' do
- expect(Gitlab::PollingInterval).not_to receive(:set_header)
-
- subject
- end
-
- it 'returns 400 HTTP status' do
- subject
+ let(:report) { { status: :error, status_reason: error_string } }
+ let(:error_string) { 'Failed to parse coverage reports' }
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'status_reason' => 'Failed to parse coverage reports' })
- end
+ it_behaves_like 'a 400 response'
end
end
@@ -1475,20 +1486,10 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
end
context 'when user created corrupted terraform reports' do
- let(:report) { { status: :error, status_reason: 'Failed to parse terraform reports' } }
-
- it 'does not send polling interval' do
- expect(Gitlab::PollingInterval).not_to receive(:set_header)
-
- subject
- end
+ let(:report) { { status: :error, status_reason: error_string } }
+ let(:error_string) { 'Failed to parse terraform reports' }
- it 'returns 400 HTTP status' do
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'status_reason' => 'Failed to parse terraform reports' })
- end
+ it_behaves_like 'a 400 response'
end
end
@@ -1603,20 +1604,10 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
end
context 'when user created corrupted test reports' do
- let(:comparison_status) { { status: :error, status_reason: 'Failed to parse test reports' } }
+ let(:error_string) { 'Failed to parse test reports' }
+ let(:comparison_status) { { status: :error, status_reason: error_string } }
- it 'does not send polling interval' do
- expect(Gitlab::PollingInterval).not_to receive(:set_header)
-
- subject
- end
-
- it 'returns 400 HTTP status' do
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'status_reason' => 'Failed to parse test reports' })
- end
+ it_behaves_like 'a 400 response'
end
end
@@ -1723,20 +1714,10 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
end
context 'when user created corrupted accessibility reports' do
- let(:accessibility_comparison) { { status: :error, status_reason: 'This merge request does not have accessibility reports' } }
-
- it 'does not send polling interval' do
- expect(Gitlab::PollingInterval).not_to receive(:set_header)
+ let(:error_string) { 'This merge request does not have accessibility reports' }
+ let(:accessibility_comparison) { { status: :error, status_reason: error_string } }
- subject
- end
-
- it 'returns 400 HTTP status' do
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'status_reason' => 'This merge request does not have accessibility reports' })
- end
+ it_behaves_like 'a 400 response'
end
end
end
@@ -1845,14 +1826,10 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
end
context 'when pipeline has job without a codequality report' do
- let(:codequality_comparison) { { status: :error, status_reason: 'no codequality report' } }
-
- it 'returns a 400' do
- subject
+ let(:error_string) { 'no codequality report' }
+ let(:codequality_comparison) { { status: :error, status_reason: error_string } }
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'status_reason' => 'no codequality report' })
- end
+ it_behaves_like 'a 400 response'
end
end
@@ -2305,121 +2282,58 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
end
context 'highlight preloading' do
- context 'when only_highlight_discussions_requested is false' do
- before do
- stub_feature_flags(only_highlight_discussions_requested: false)
+ context 'with commit diff notes' do
+ let!(:first_commit_diff_note) do
+ create(:diff_note_on_commit, project: merge_request.project)
end
- context 'with commit diff notes' do
- let!(:first_commit_diff_note) do
- create(:diff_note_on_commit, project: merge_request.project)
- end
-
- let!(:second_commit_diff_note) do
- create(:diff_note_on_commit, project: merge_request.project)
- end
-
- it 'preloads all of the notes diffs highlights' do
- expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
- first_note_diff_file = first_commit_diff_note.note_diff_file
- second_note_diff_file = second_commit_diff_note.note_diff_file
+ let!(:second_commit_diff_note) do
+ create(:diff_note_on_commit, project: merge_request.project)
+ end
- expect(collection).to receive(:load_highlight).and_call_original
- expect(collection).to receive(:find_by_id).with(first_note_diff_file.id).and_call_original
- expect(collection).to receive(:find_by_id).with(second_note_diff_file.id).and_call_original
- end
+ it 'preloads all of the notes diffs highlights' do
+ expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
+ first_note_diff_file = first_commit_diff_note.note_diff_file
+ second_note_diff_file = second_commit_diff_note.note_diff_file
- get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid,
- per_page: 2 }
+ expect(collection).to receive(:load_highlight).with(diff_note_ids: [first_commit_diff_note.id, second_commit_diff_note.id]).and_call_original
+ expect(collection).to receive(:find_by_id).with(first_note_diff_file.id).and_call_original
+ expect(collection).to receive(:find_by_id).with(second_note_diff_file.id).and_call_original
end
- it 'preloads all of the notes diffs highlights when per_page is 1' do
- expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
- first_note_diff_file = first_commit_diff_note.note_diff_file
- second_note_diff_file = second_commit_diff_note.note_diff_file
-
- expect(collection).to receive(:load_highlight).and_call_original
- expect(collection).to receive(:find_by_id).with(first_note_diff_file.id).and_call_original
- expect(collection).not_to receive(:find_by_id).with(second_note_diff_file.id)
- end
-
- get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid,
- per_page: 1 }
- end
+ get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid,
+ per_page: 2 }
end
- context 'with diff notes' do
- let!(:diff_note) do
- create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.project)
- end
-
- it 'preloads notes diffs highlights' do
- expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
- note_diff_file = diff_note.note_diff_file
-
- expect(collection).to receive(:load_highlight).and_call_original
- expect(collection).to receive(:find_by_id).with(note_diff_file.id).and_call_original
- end
+ it 'preloads all of the notes diffs highlights when per_page is 1' do
+ expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
+ first_note_diff_file = first_commit_diff_note.note_diff_file
+ second_note_diff_file = second_commit_diff_note.note_diff_file
- get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
+ expect(collection).to receive(:load_highlight).with(diff_note_ids: [first_commit_diff_note.id]).and_call_original
+ expect(collection).to receive(:find_by_id).with(first_note_diff_file.id).and_call_original
+ expect(collection).not_to receive(:find_by_id).with(second_note_diff_file.id)
end
+
+ get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid,
+ per_page: 1 }
end
end
- context 'when only_highlight_discussions_requested is true' do
- context 'with commit diff notes' do
- let!(:first_commit_diff_note) do
- create(:diff_note_on_commit, project: merge_request.project)
- end
-
- let!(:second_commit_diff_note) do
- create(:diff_note_on_commit, project: merge_request.project)
- end
-
- it 'preloads all of the notes diffs highlights' do
- expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
- first_note_diff_file = first_commit_diff_note.note_diff_file
- second_note_diff_file = second_commit_diff_note.note_diff_file
-
- expect(collection).to receive(:load_highlight).with(diff_note_ids: [first_commit_diff_note.id, second_commit_diff_note.id]).and_call_original
- expect(collection).to receive(:find_by_id).with(first_note_diff_file.id).and_call_original
- expect(collection).to receive(:find_by_id).with(second_note_diff_file.id).and_call_original
- end
-
- get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid,
- per_page: 2 }
- end
-
- it 'preloads all of the notes diffs highlights when per_page is 1' do
- expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
- first_note_diff_file = first_commit_diff_note.note_diff_file
- second_note_diff_file = second_commit_diff_note.note_diff_file
-
- expect(collection).to receive(:load_highlight).with(diff_note_ids: [first_commit_diff_note.id]).and_call_original
- expect(collection).to receive(:find_by_id).with(first_note_diff_file.id).and_call_original
- expect(collection).not_to receive(:find_by_id).with(second_note_diff_file.id)
- end
-
- get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid,
- per_page: 1 }
- end
+ context 'with diff notes' do
+ let!(:diff_note) do
+ create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.project)
end
- context 'with diff notes' do
- let!(:diff_note) do
- create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.project)
- end
-
- it 'preloads notes diffs highlights' do
- expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
- note_diff_file = diff_note.note_diff_file
+ it 'preloads notes diffs highlights' do
+ expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
+ note_diff_file = diff_note.note_diff_file
- expect(collection).to receive(:load_highlight).with(diff_note_ids: [diff_note.id]).and_call_original
- expect(collection).to receive(:find_by_id).with(note_diff_file.id).and_call_original
- end
-
- get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
+ expect(collection).to receive(:load_highlight).with(diff_note_ids: [diff_note.id]).and_call_original
+ expect(collection).to receive(:find_by_id).with(note_diff_file.id).and_call_original
end
+
+ get :discussions, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
end
end
end
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 35aa01cdfad..678991b91a5 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -536,7 +536,7 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
context 'when creating a note with quick actions' do
context 'with commands that return changes' do
- let(:note_text) { "/award :thumbsup:\n/estimate 1d\n/spend 3h" }
+ let(:note_text) { "/react :thumbsup:\n/estimate 1d\n/spend 3h" }
let(:extra_request_params) { { format: :json } }
it 'includes changes in commands_changes' do
@@ -551,7 +551,7 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
create!
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['command_names']).to include('award', 'estimate', 'spend')
+ expect(json_response['command_names']).to include('react', 'estimate', 'spend')
end
end
diff --git a/spec/controllers/projects/runner_projects_controller_spec.rb b/spec/controllers/projects/runner_projects_controller_spec.rb
index beedaad0fa9..42ceeb32f51 100644
--- a/spec/controllers/projects/runner_projects_controller_spec.rb
+++ b/spec/controllers/projects/runner_projects_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::RunnerProjectsController, feature_category: :runner_fleet do
+RSpec.describe Projects::RunnerProjectsController, feature_category: :fleet_visibility do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index 3602a2df959..35cfe093323 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
+RSpec.describe Projects::RunnersController, feature_category: :fleet_visibility do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index dea359e8fee..88d9d1228e3 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -362,6 +362,25 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
end
end
+ context 'when project default branch is corrupted' do
+ let_it_be(:corrupted_project) { create(:project, :small_repo, :public) }
+
+ before do
+ sign_in(user)
+
+ expect_next_instance_of(Repository) do |repository|
+ expect(repository).to receive(:root_ref).and_raise(Gitlab::Git::CommandError, 'get default branch').twice
+ end
+ end
+
+ it 'renders the missing default branch view' do
+ get :show, params: { namespace_id: corrupted_project.namespace, id: corrupted_project }
+
+ expect(response).to render_template('projects/missing_default_branch')
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ end
+ end
+
context "rendering default project view" do
let_it_be(:public_project) { create(:project, :public, :repository) }
@@ -873,12 +892,50 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
create(:container_repository, project: project, name: :image)
end
- it 'does not allow to rename the project' do
- expect { update_project path: 'renamed_path' }
- .not_to change { project.reload.path }
+ let(:message) { 'UpdateProject|Cannot rename project because it contains container registry tags!' }
- expect(controller).to set_flash[:alert].to(s_('UpdateProject|Cannot rename project because it contains container registry tags!'))
- expect(response).to have_gitlab_http_status(:ok)
+ shared_examples 'not allowing the rename of the project' do
+ it 'does not allow to rename the project' do
+ expect { update_project path: 'renamed_path' }
+ .not_to change { project.reload.path }
+
+ expect(controller).to set_flash[:alert].to(s_(message))
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when Gitlab API is not supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false)
+ end
+
+ it_behaves_like 'not allowing the rename of the project'
+ end
+
+ context 'when Gitlab API is supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+ end
+
+ it 'allows the rename of the project' do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:rename_base_repository_path).and_return(:accepted, :ok)
+
+ expect { update_project path: 'renamed_path' }
+ .to change { project.reload.path }
+
+ expect(project.path).to eq('renamed_path')
+ expect(response).to have_gitlab_http_status(:found)
+ end
+
+ context 'when rename base repository dry run in the registry fails' do
+        let(:message) { 'UpdateProject|Cannot rename project, the container registry path rename validation failed: Bad Request' }
+
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:rename_base_repository_path).and_return(:bad_request)
+ end
+
+ it_behaves_like 'not allowing the rename of the project'
+ end
end
end
@@ -1036,6 +1093,7 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
monitor_access_level
infrastructure_access_level
model_experiments_access_level
+ model_registry_access_level
]
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 9453520341b..10fe15558c5 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -189,6 +189,25 @@ RSpec.describe SearchController, feature_category: :global_search do
end
end
+ context 'when allow_anonymous_searches is disabled' do
+ before do
+ stub_feature_flags(allow_anonymous_searches: false)
+ end
+
+ context 'for unauthenticated user' do
+ before do
+ sign_out(user)
+ end
+
+ it 'redirects to login page' do
+ get :show, params: { scope: 'projects', search: '*' }
+
+ expect(response).to redirect_to new_user_session_path
+ expect(flash[:alert]).to match(/You must be logged in/)
+ end
+ end
+ end
+
context 'tab feature flags' do
subject { get :show, params: { scope: scope, search: 'term' }, format: :html }
@@ -583,12 +602,14 @@ RSpec.describe SearchController, feature_category: :global_search do
expect(payload[:metadata]['meta.search.type']).to eq('basic')
expect(payload[:metadata]['meta.search.level']).to eq('global')
expect(payload[:metadata]['meta.search.filters.language']).to eq('ruby')
+ expect(payload[:metadata]['meta.search.page']).to eq('2')
end
get :show, params: {
scope: 'issues',
search: 'hello world',
group_id: '123',
+ page: '2',
project_id: '456',
project_ids: %w[456 789],
confidential: true,
diff --git a/spec/controllers/profiles/active_sessions_controller_spec.rb b/spec/controllers/user_settings/active_sessions_controller_spec.rb
index 12cf4f982e9..01c1095fef5 100644
--- a/spec/controllers/profiles/active_sessions_controller_spec.rb
+++ b/spec/controllers/user_settings/active_sessions_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Profiles::ActiveSessionsController do
+RSpec.describe UserSettings::ActiveSessionsController, feature_category: :system_access do
describe 'DELETE destroy' do
let_it_be(:user) { create(:user) }
diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/user_settings/personal_access_tokens_controller_spec.rb
index 9c9a9a28879..b1d6fc6f479 100644
--- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
+++ b/spec/controllers/user_settings/personal_access_tokens_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Profiles::PersonalAccessTokensController do
+RSpec.describe UserSettings::PersonalAccessTokensController, feature_category: :system_access do
let(:access_token_user) { create(:user) }
let(:token_attributes) { attributes_for(:personal_access_token) }
@@ -49,7 +49,7 @@ RSpec.describe Profiles::PersonalAccessTokensController do
end
end
- describe 'GET /-/profile/personal_access_tokens' do
+ describe 'GET /-/user_settings/personal_access_tokens' do
let(:get_access_tokens) do
get :index
response
diff --git a/spec/db/docs_spec.rb b/spec/db/docs_spec.rb
index 19edf3da0d5..03f944735a1 100644
--- a/spec/db/docs_spec.rb
+++ b/spec/db/docs_spec.rb
@@ -15,6 +15,8 @@ RSpec.shared_examples 'validate dictionary' do |objects, directory_path, require
milestone
gitlab_schema
schema_inconsistencies
+ sharding_key
+ desired_sharding_key
]
end
@@ -184,7 +186,7 @@ RSpec.describe 'Tables documentation', feature_category: :database do
database_base_models = Gitlab::Database.database_base_models.reject { |k, _| k.in?(excluded) }
tables = database_base_models.flat_map { |_, m| m.connection.tables }.sort.uniq
directory_path = File.join('db', 'docs')
- required_fields = %i[feature_categories table_name gitlab_schema]
+ required_fields = %i[feature_categories table_name gitlab_schema milestone]
include_examples 'validate dictionary', tables, directory_path, required_fields
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index ac1137e5387..7e3f2a3b61e 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -10,11 +10,13 @@ RSpec.describe 'Database schema', feature_category: :database do
let(:columns_name_with_jsonb) { retrieve_columns_name_with_jsonb }
IGNORED_INDEXES_ON_FKS = {
+ application_settings: %w[instance_administration_project_id instance_administrators_group_id],
# `search_index_id index_type` is the composite foreign key configured for `search_namespace_index_assignments`,
# but in Search::NamespaceIndexAssignment model, only `search_index_id` is used as foreign key and indexed
search_namespace_index_assignments: [%w[search_index_id index_type]],
slack_integrations_scopes: [%w[slack_api_scope_id]],
- notes: %w[namespace_id] # this index is added in an async manner, hence it needs to be ignored in the first phase.
+ notes: %w[namespace_id], # this index is added in an async manner, hence it needs to be ignored in the first phase.
+ users: [%w[accepted_term_id]]
}.with_indifferent_access.freeze
TABLE_PARTITIONS = %w[ci_builds_metadata].freeze
@@ -52,6 +54,7 @@ RSpec.describe 'Database schema', feature_category: :database do
ci_sources_pipelines: %w[partition_id source_partition_id source_job_id],
ci_stages: %w[partition_id],
ci_trigger_requests: %w[commit_id],
+ ci_job_artifact_states: %w[partition_id],
cluster_providers_aws: %w[security_group_id vpc_id access_key_id],
cluster_providers_gcp: %w[gcp_project_id operation_id],
compliance_management_frameworks: %w[group_id],
@@ -80,6 +83,7 @@ RSpec.describe 'Database schema', feature_category: :database do
members: %w[source_id created_by_id],
merge_requests: %w[last_edited_by_id state_id],
merge_requests_compliance_violations: %w[target_project_id],
+ merge_request_diffs: %w[project_id],
merge_request_diff_commits: %w[commit_author_id committer_id],
namespaces: %w[owner_id parent_id],
notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id confirmed_by_id discussion_id namespace_id],
@@ -89,6 +93,7 @@ RSpec.describe 'Database schema', feature_category: :database do
oauth_applications: %w[owner_id],
p_ci_builds: %w[erased_by_id trigger_request_id partition_id auto_canceled_by_partition_id],
p_batched_git_ref_updates_deletions: %w[project_id partition_id],
+ p_catalog_resource_sync_events: %w[catalog_resource_id project_id partition_id],
p_ci_finished_build_ch_sync_events: %w[build_id],
product_analytics_events_experimental: %w[event_id txn_id user_id],
project_build_artifacts_size_refreshes: %w[last_job_artifact_id],
@@ -111,7 +116,7 @@ RSpec.describe 'Database schema', feature_category: :database do
todos: %w[target_id commit_id],
uploads: %w[model_id],
user_agent_details: %w[subject_id],
- users: %w[color_scheme_id created_by_id theme_id email_opted_in_source_id managing_group_id],
+ users: %w[color_scheme_id created_by_id theme_id managing_group_id],
users_star_projects: %w[user_id],
vulnerability_identifiers: %w[external_id],
vulnerability_scanners: %w[external_id],
@@ -354,11 +359,11 @@ RSpec.describe 'Database schema', feature_category: :database do
context 'for CI partitioned table' do
# Check that each partitionable model with more than 1 column has the partition_id column at the trailing
- # position. Using PARTITIONABLE_MODELS instead of iterating tables since when partitioning existing tables,
+ # position. Using .partitionable_models instead of iterating tables since when partitioning existing tables,
# the routing table only gets created after the PK has already been created, which would be too late for a check.
skip_tables = %w[]
- partitionable_models = Ci::Partitionable::Testing::PARTITIONABLE_MODELS
+ partitionable_models = Ci::Partitionable::Testing.partitionable_models
(partitionable_models - skip_tables).each do |klass|
model = klass.safe_constantize
table_name = model.table_name
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 00370a5b7e3..24e08241fe7 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -24,6 +24,12 @@ RSpec.describe ApplicationExperiment, :experiment, feature_category: :acquisitio
expect { experiment(:example) {} }.not_to raise_error
end
+ describe ".available?" do
+ it 'is false for foss' do
+ expect(described_class).not_to be_available
+ end
+ end
+
describe "#publish" do
it "tracks the assignment", :snowplow do
expect(application_experiment).to receive(:track).with(:assignment)
@@ -169,33 +175,6 @@ RSpec.describe ApplicationExperiment, :experiment, feature_category: :acquisitio
end
describe "#process_redirect_url" do
- using RSpec::Parameterized::TableSyntax
-
- where(:url, :processed_url) do
- 'https://about.gitlab.com/' | 'https://about.gitlab.com/'
- 'https://gitlab.com/' | 'https://gitlab.com/'
- 'http://docs.gitlab.com' | 'http://docs.gitlab.com'
- 'https://docs.gitlab.com/some/path?foo=bar' | 'https://docs.gitlab.com/some/path?foo=bar'
- 'http://badgitlab.com' | nil
- 'https://gitlab.com.nefarious.net' | nil
- 'https://unknown.gitlab.com' | nil
- "https://badplace.com\nhttps://gitlab.com" | nil
- 'https://gitlabbcom' | nil
- 'https://gitlabbcom/' | nil
- 'http://gdk.test/foo/bar' | 'http://gdk.test/foo/bar'
- 'http://localhost:3000/foo/bar' | 'http://localhost:3000/foo/bar'
- end
-
- with_them do
- it "returns the url or nil if invalid on SaaS", :saas do
- expect(application_experiment.process_redirect_url(url)).to eq(processed_url)
- end
-
- it "considers all urls invalid when not on SaaS" do
- expect(application_experiment.process_redirect_url(url)).to be_nil
- end
- end
-
it "generates the correct urls based on where the engine was mounted" do
url = Rails.application.routes.url_helpers.experiment_redirect_url(application_experiment, url: 'https://docs.gitlab.com')
expect(url).to include("/-/experiment/namespaced%2Fstub:#{application_experiment.context.key}?https://docs.gitlab.com")
@@ -227,7 +206,7 @@ RSpec.describe ApplicationExperiment, :experiment, feature_category: :acquisitio
it "tracks an event", :snowplow do
experiment(:top) { |e| e.control { experiment(:nested) {} } }
- expect(Gitlab::Tracking).to have_received(:event).with( # rubocop:disable RSpec/ExpectGitlabTracking
+ expect(Gitlab::Tracking).to have_received(:event).with( # rubocop:disable RSpec/ExpectGitlabTracking -- Testing nested functionality
'top',
:nested,
hash_including(label: 'nested')
diff --git a/spec/factories/abuse_reports.rb b/spec/factories/abuse_reports.rb
index 14a44d1108a..343ce8489f7 100644
--- a/spec/factories/abuse_reports.rb
+++ b/spec/factories/abuse_reports.rb
@@ -18,7 +18,7 @@ FactoryBot.define do
end
trait :with_assignee do
- assignee factory: :user
+ assignees { [association(:assignee)] }
end
trait :with_evidence do
diff --git a/spec/factories/achievements/achievements.rb b/spec/factories/achievements/achievements.rb
index 080a0376999..3f4d0ef082a 100644
--- a/spec/factories/achievements/achievements.rb
+++ b/spec/factories/achievements/achievements.rb
@@ -5,5 +5,9 @@ FactoryBot.define do
namespace
name { generate(:name) }
+
+ trait :with_avatar do
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
+ end
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 18415a6079f..6662e83b564 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -637,11 +637,5 @@ FactoryBot.define do
build.build_runner_session(url: 'https://gitlab.example.com')
end
end
-
- trait :interruptible do
- after(:build) do |build|
- build.metadata.interruptible = true
- end
- end
end
end
diff --git a/spec/factories/ci/catalog/resources.rb b/spec/factories/ci/catalog/resources.rb
index c663164d449..981328dc16e 100644
--- a/spec/factories/ci/catalog/resources.rb
+++ b/spec/factories/ci/catalog/resources.rb
@@ -3,5 +3,9 @@
FactoryBot.define do
factory :ci_catalog_resource, class: 'Ci::Catalog::Resource' do
project factory: :project
+
+ trait :published do
+ state { :published }
+ end
end
end
diff --git a/spec/factories/ci/catalog/resources/sync_events.rb b/spec/factories/ci/catalog/resources/sync_events.rb
new file mode 100644
index 00000000000..0579cec648e
--- /dev/null
+++ b/spec/factories/ci/catalog/resources/sync_events.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_catalog_resource_sync_event, class: 'Ci::Catalog::Resources::SyncEvent' do
+ catalog_resource factory: :ci_catalog_resource
+ project { catalog_resource.project }
+ end
+end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index ef65cb3ec33..df4ffc4f027 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -87,6 +87,10 @@ FactoryBot.define do
status { :running }
end
+ trait :pending do
+ status { :pending }
+ end
+
trait :canceled do
status { :canceled }
end
diff --git a/spec/factories/ci/processable.rb b/spec/factories/ci/processable.rb
index 49756433713..c84a2d5d93d 100644
--- a/spec/factories/ci/processable.rb
+++ b/spec/factories/ci/processable.rb
@@ -11,6 +11,10 @@ FactoryBot.define do
scheduling_type { 'stage' }
partition_id { pipeline.partition_id }
+ options do
+ {}
+ end
+
# This factory was updated to help with the efforts of the removal of `ci_builds.stage`:
# https://gitlab.com/gitlab-org/gitlab/-/issues/364377
# These additions can be removed once the specs that use the stage attribute have been updated
@@ -52,5 +56,11 @@ FactoryBot.define do
processable.resource_group = create(:ci_resource_group, project: processable.project)
end
end
+
+ trait :interruptible do
+ after(:build) do |processable|
+ processable.metadata.interruptible = true
+ end
+ end
end
end
diff --git a/spec/factories/ci/reports/sbom/sources.rb b/spec/factories/ci/reports/sbom/sources.rb
index 688c0250b5f..a82dac1d7e2 100644
--- a/spec/factories/ci/reports/sbom/sources.rb
+++ b/spec/factories/ci/reports/sbom/sources.rb
@@ -2,21 +2,50 @@
FactoryBot.define do
factory :ci_reports_sbom_source, class: '::Gitlab::Ci::Reports::Sbom::Source' do
- type { :dependency_scanning }
+ dependency_scanning
- transient do
- sequence(:input_file_path) { |n| "subproject-#{n}/package-lock.json" }
- sequence(:source_file_path) { |n| "subproject-#{n}/package.json" }
+ trait :dependency_scanning do
+ type { :dependency_scanning }
+
+ transient do
+ sequence(:input_file_path) { |n| "subproject-#{n}/package-lock.json" }
+ sequence(:source_file_path) { |n| "subproject-#{n}/package.json" }
+ end
+
+ data do
+ {
+ 'category' => 'development',
+ 'input_file' => { 'path' => input_file_path },
+ 'source_file' => { 'path' => source_file_path },
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' }
+ }
+ end
end
- data do
- {
- 'category' => 'development',
- 'input_file' => { 'path' => input_file_path },
- 'source_file' => { 'path' => source_file_path },
- 'package_manager' => { 'name' => 'npm' },
- 'language' => { 'name' => 'JavaScript' }
- }
+ trait :container_scanning do
+ type { :container_scanning }
+
+ transient do
+ image_name { 'photon' }
+ sequence(:image_tag) { |n| "5.#{n}-12345678" }
+ operating_system_name { 'Photon OS' }
+ sequence(:operating_system_version) { |n| "5.#{n}" }
+ end
+
+ data do
+ {
+ 'category' => 'development',
+ 'image' => {
+ 'name' => image_name,
+ 'tag' => image_tag
+ },
+ 'operating_system' => {
+ 'name' => operating_system_name,
+ 'version' => operating_system_version
+ }
+ }
+ end
end
skip_create
diff --git a/spec/factories/container_registry/protection/rules.rb b/spec/factories/container_registry/protection/rules.rb
index cbd5c9d8652..4d2fb1411c3 100644
--- a/spec/factories/container_registry/protection/rules.rb
+++ b/spec/factories/container_registry/protection/rules.rb
@@ -3,7 +3,7 @@
FactoryBot.define do
factory :container_registry_protection_rule, class: 'ContainerRegistry::Protection::Rule' do
project
- container_path_pattern { '@my_scope/my_container' }
+ repository_path_pattern { 'my_project/my_container' }
delete_protected_up_to_access_level { :developer }
push_protected_up_to_access_level { :developer }
end
diff --git a/spec/factories/deploy_tokens.rb b/spec/factories/deploy_tokens.rb
index 45e92869e22..379178f1a41 100644
--- a/spec/factories/deploy_tokens.rb
+++ b/spec/factories/deploy_tokens.rb
@@ -22,7 +22,7 @@ FactoryBot.define do
end
trait :expired do
- expires_at { Date.today - 1.month }
+ expires_at { Date.current - 1.month }
end
trait :group do
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index 7d80ab7b15d..db56b754623 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -69,5 +69,9 @@ FactoryBot.define do
deployment.succeed!
end
end
+
+ trait :with_bridge do
+ deployable { association :ci_bridge, environment: environment.name, pipeline: association(:ci_pipeline, project: environment.project) }
+ end
end
end
diff --git a/spec/factories/environments.rb b/spec/factories/environments.rb
index 6f2cd4bf596..c4b318a5aee 100644
--- a/spec/factories/environments.rb
+++ b/spec/factories/environments.rb
@@ -35,6 +35,14 @@ FactoryBot.define do
name { 'development' }
end
+ trait :with_folders do |environment|
+ sequence(:name) { |n| "#{folder}/environment#{n}" }
+
+ transient do
+ folder { 'folder' }
+ end
+ end
+
trait :with_review_app do |environment|
sequence(:name) { |n| "review/#{n}" }
diff --git a/spec/factories/gitaly/commit.rb b/spec/factories/gitaly/commit.rb
index 4e8220e449a..ecf3e4e065e 100644
--- a/spec/factories/gitaly/commit.rb
+++ b/spec/factories/gitaly/commit.rb
@@ -16,5 +16,15 @@ FactoryBot.define do
body { subject + "\nMy body" }
author { association(:gitaly_commit_author) }
committer { association(:gitaly_commit_author) }
+
+ trailers do
+ trailers = body.lines.keep_if { |l| l =~ /.*: / }.map do |l|
+ key, value = *l.split(":").map(&:strip)
+
+ Gitaly::CommitTrailer.new(key: key, value: value)
+ end
+
+ Google::Protobuf::RepeatedField.new(:message, Gitaly::CommitTrailer, trailers)
+ end
end
end
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index 68751e68f05..74dfea585f2 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -168,12 +168,6 @@ FactoryBot.define do
end
end
- factory :shimo_integration, class: 'Integrations::Shimo' do
- project
- active { true }
- external_wiki_url { 'https://shimo.example.com/desktop' }
- end
-
factory :confluence_integration, class: 'Integrations::Confluence' do
project
active { true }
diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb
index 4bd41c1faa1..2cfe1372200 100644
--- a/spec/factories/keys.rb
+++ b/spec/factories/keys.rb
@@ -17,7 +17,7 @@ FactoryBot.define do
trait :expired_today do
to_create { |key| key.save!(validate: false) }
- expires_at { Date.today.beginning_of_day + 3.hours }
+ expires_at { Date.current.beginning_of_day + 3.hours }
end
trait :without_md5_fingerprint do
diff --git a/spec/factories/ml/candidates.rb b/spec/factories/ml/candidates.rb
index 9bfb78066bd..27b31524b1f 100644
--- a/spec/factories/ml/candidates.rb
+++ b/spec/factories/ml/candidates.rb
@@ -1,10 +1,9 @@
# frozen_string_literal: true
FactoryBot.define do
factory :ml_candidates, class: '::Ml::Candidate' do
- association :project, factory: :project
- association :user
-
- experiment { association :ml_experiments, project_id: project.id }
+ project { association :project }
+ user { project.owner }
+ experiment { association :ml_experiments, project_id: project.id, user: project.owner }
trait :with_metrics_and_params do
metrics { Array.new(2) { association(:ml_candidate_metrics, candidate: instance) } }
@@ -16,13 +15,9 @@ FactoryBot.define do
end
trait :with_artifact do
- after(:create) do |candidate|
- candidate.package = FactoryBot.create(
- :generic_package,
- name: candidate.package_name,
- version: candidate.package_version,
- project: candidate.project
- )
+ artifact do
+ association(:generic_package, name: instance.package_name, version: instance.package_version || '1',
+ project: project)
end
end
end
diff --git a/spec/factories/ml/experiments.rb b/spec/factories/ml/experiments.rb
index 0acb4c5c5fc..419a530bdcc 100644
--- a/spec/factories/ml/experiments.rb
+++ b/spec/factories/ml/experiments.rb
@@ -11,5 +11,13 @@ FactoryBot.define do
e.metadata = FactoryBot.create_list(:ml_experiment_metadata, 2, experiment: e) # rubocop:disable StrategyInCallback
end
end
+
+ trait :with_candidates do
+ candidates do
+ Array.new(2) do
+ association(:ml_candidates, project: project)
+ end
+ end
+ end
end
end
diff --git a/spec/factories/ml/model_versions.rb b/spec/factories/ml/model_versions.rb
index 456d1b1e913..fd7ed857ee2 100644
--- a/spec/factories/ml/model_versions.rb
+++ b/spec/factories/ml/model_versions.rb
@@ -6,6 +6,11 @@ FactoryBot.define do
model { association :ml_models }
project { model.project }
+ description { 'Some description' }
+
+ candidate do
+ association :ml_candidates, experiment: model.default_experiment, project: project, model_version: instance
+ end
trait :with_package do
package do
diff --git a/spec/factories/ml/models.rb b/spec/factories/ml/models.rb
index 3377a54f265..ae00ade9054 100644
--- a/spec/factories/ml/models.rb
+++ b/spec/factories/ml/models.rb
@@ -20,9 +20,7 @@ FactoryBot.define do
end
trait :with_metadata do
- after(:create) do |model|
- model.metadata = FactoryBot.create_list(:ml_model_metadata, 2, model: model) # rubocop:disable StrategyInCallback
- end
+ metadata { Array.new(2) { association(:ml_model_metadata, model: instance) } }
end
end
end
diff --git a/spec/factories/namespace_package_settings.rb b/spec/factories/namespace_package_settings.rb
index 9d794e794a4..33f290d0a2d 100644
--- a/spec/factories/namespace_package_settings.rb
+++ b/spec/factories/namespace_package_settings.rb
@@ -13,6 +13,8 @@ FactoryBot.define do
nuget_duplicates_allowed { true }
nuget_duplicate_exception_regex { 'foo' }
+ nuget_symbol_server_enabled { false }
+
trait :group do
namespace { association(:group) }
end
diff --git a/spec/factories/organizations/organization_details.rb b/spec/factories/organizations/organization_details.rb
new file mode 100644
index 00000000000..4e01d2076b3
--- /dev/null
+++ b/spec/factories/organizations/organization_details.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :organization_detail, class: 'Organizations::OrganizationDetail' do
+ association(:organization)
+
+ description { '_description_' }
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
+ end
+end
diff --git a/spec/factories/packages/nuget/symbol.rb b/spec/factories/packages/nuget/symbol.rb
index 665535de939..77d53ed1439 100644
--- a/spec/factories/packages/nuget/symbol.rb
+++ b/spec/factories/packages/nuget/symbol.rb
@@ -8,5 +8,11 @@ FactoryBot.define do
size { 100.bytes }
sequence(:signature) { |n| "b91a152048fc4b3883bf3cf73fbc03f#{n}FFFFFFFF" }
file_sha256 { 'dd1aaf26c557685cc37f93f53a2b6befb2c2e679f5ace6ec7a26d12086f358be' }
+
+ trait :stale do
+ after(:create) do |entry|
+ entry.update_attribute(:package_id, nil)
+ end
+ end
end
end
diff --git a/spec/factories/personal_access_tokens.rb b/spec/factories/personal_access_tokens.rb
index c7361b11633..7e6fe96d3ed 100644
--- a/spec/factories/personal_access_tokens.rb
+++ b/spec/factories/personal_access_tokens.rb
@@ -36,5 +36,11 @@ FactoryBot.define do
trait :no_prefix do
after(:build) { |personal_access_token| personal_access_token.set_token(Devise.friendly_token) }
end
+
+ trait :dependency_proxy_scopes do
+ before(:create) do |personal_access_token|
+ personal_access_token.scopes = (personal_access_token.scopes + Gitlab::Auth::REPOSITORY_SCOPES).uniq
+ end
+ end
end
end
diff --git a/spec/factories/product_analytics_event.rb b/spec/factories/product_analytics_event.rb
deleted file mode 100644
index 168b255f6ca..00000000000
--- a/spec/factories/product_analytics_event.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :product_analytics_event do
- project
- platform { 'web' }
- collector_tstamp { DateTime.now }
- dvce_created_tstamp { DateTime.now }
- event_id { SecureRandom.uuid }
- name_tracker { 'sp' }
- v_tracker { 'js-2.14.0' }
- v_collector { 'GitLab 13.1.0-pre' }
- v_etl { 'GitLab 13.1.0-pre' }
- domain_userid { SecureRandom.uuid }
- domain_sessionidx { 4 }
- page_url { 'http://localhost:3333/products/123' }
- br_lang { 'en-US' }
- br_cookies { true }
- br_colordepth { '24' }
- os_timezone { 'America/Los_Angeles' }
- doc_charset { 'UTF-8' }
- domain_sessionid { SecureRandom.uuid }
- end
-end
diff --git a/spec/factories/project_feature_usage.rb b/spec/factories/project_feature_usage.rb
index 8265ea04392..eb9605d08d6 100644
--- a/spec/factories/project_feature_usage.rb
+++ b/spec/factories/project_feature_usage.rb
@@ -4,10 +4,6 @@ FactoryBot.define do
factory :project_feature_usage do
project
- trait :dvcs_cloud do
- jira_dvcs_cloud_last_sync_at { Time.current }
- end
-
trait :dvcs_server do
jira_dvcs_server_last_sync_at { Time.current }
end
diff --git a/spec/factories/project_group_links.rb b/spec/factories/project_group_links.rb
index 5edd57d5fe1..89204f81a6b 100644
--- a/spec/factories/project_group_links.rb
+++ b/spec/factories/project_group_links.rb
@@ -11,6 +11,7 @@ FactoryBot.define do
trait(:reporter) { group_access { Gitlab::Access::REPORTER } }
trait(:developer) { group_access { Gitlab::Access::DEVELOPER } }
trait(:maintainer) { group_access { Gitlab::Access::MAINTAINER } }
+ trait(:owner) { group_access { Gitlab::Access::OWNER } }
after(:create) do |project_group_link|
project_group_link.run_after_commit_or_now do
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 1e3ade779af..a2848bd0256 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -43,6 +43,7 @@ FactoryBot.define do
releases_access_level { ProjectFeature::ENABLED }
infrastructure_access_level { ProjectFeature::ENABLED }
model_experiments_access_level { ProjectFeature::ENABLED }
+ model_registry_access_level { ProjectFeature::ENABLED }
# we can't assign the delegated `#ci_cd_settings` attributes directly, as the
# `#ci_cd_settings` relation needs to be created first
@@ -177,12 +178,6 @@ FactoryBot.define do
import_status { :canceled }
end
- trait :jira_dvcs_cloud do
- before(:create) do |project|
- create(:project_feature_usage, :dvcs_cloud, project: project)
- end
- end
-
trait :jira_dvcs_server do
before(:create) do |project|
create(:project_feature_usage, :dvcs_server, project: project)
@@ -241,10 +236,11 @@ FactoryBot.define do
trait :custom_repo do
transient do
files { {} }
+ object_format { Repository::FORMAT_SHA1 }
end
after :create do |project, evaluator|
- raise "Failed to create repository!" unless project.repository.exists? || project.create_repository
+ raise "Failed to create repository!" unless project.repository.exists? || project.create_repository(object_format: evaluator.object_format)
evaluator.files.each do |filename, content|
project.repository.create_file(
@@ -258,6 +254,24 @@ FactoryBot.define do
end
end
+ trait :pipeline_refs do
+ transient do
+ object_format { Repository::FORMAT_SHA1 }
+ pipeline_count { 10 }
+ end
+
+ after :create do |project, evaluator|
+ raise "Failed to create repository!" unless project.repository.exists? || project.create_repository(object_format: evaluator.object_format)
+
+ project.repository.create_file(project.creator, "README.md", "Test", message: "Test file", branch_name: project.default_branch || 'master')
+
+ evaluator.pipeline_count.times do |x|
+ project.repository.create_ref(project.repository.head_commit.id, "refs/pipelines/#{x}")
+ project.repository.create_ref(project.repository.head_commit.id, "refs/head/foo-#{x}")
+ end
+ end
+ end
+
# A catalog resource repository with a file structure set up for ci components.
trait :catalog_resource_with_components do
small_repo
@@ -348,6 +362,12 @@ FactoryBot.define do
branch_name: 'master')
project.repository.create_file(
project.creator,
+ ".gitlab/#{templates_path}/(test).md",
+ 'parentheses',
+ message: 'test 3',
+ branch_name: 'master')
+ project.repository.create_file(
+ project.creator,
".gitlab/#{templates_path}/template_test.md",
'template_test',
message: 'test 1',
@@ -381,8 +401,12 @@ FactoryBot.define do
end
trait :empty_repo do
- after(:create) do |project|
- raise "Failed to create repository!" unless project.create_repository
+ transient do
+ object_format { Repository::FORMAT_SHA1 }
+ end
+
+ after(:create) do |project, evaluator|
+ raise "Failed to create repository!" unless project.create_repository(object_format: evaluator.object_format)
end
end
@@ -584,4 +608,16 @@ FactoryBot.define do
path { 'gitlab-profile' }
files { { 'README.md' => 'Hello World' } }
end
+
+ trait :with_code_suggestions_enabled do
+ after(:create) do |project|
+ project.project_setting.update!(code_suggestions: true)
+ end
+ end
+
+ trait :with_code_suggestions_disabled do
+ after(:create) do |project|
+ project.project_setting.update!(code_suggestions: false)
+ end
+ end
end
diff --git a/spec/factories/releases.rb b/spec/factories/releases.rb
index a07d4ef6c2e..c4da8474cb7 100644
--- a/spec/factories/releases.rb
+++ b/spec/factories/releases.rb
@@ -35,5 +35,15 @@ FactoryBot.define do
create_list(:milestone, evaluator.milestones_count, project: evaluator.project, releases: [release])
end
end
+
+ trait :with_catalog_resource_version do
+ catalog_resource_version do
+ if instance.project&.catalog_resource
+ association :ci_catalog_resource_version,
+ catalog_resource: instance.project&.catalog_resource,
+ release: instance
+ end
+ end
+ end
end
end
diff --git a/spec/factories/topics.rb b/spec/factories/topics.rb
index a6e614e0c66..d5da2dd4938 100644
--- a/spec/factories/topics.rb
+++ b/spec/factories/topics.rb
@@ -4,5 +4,9 @@ FactoryBot.define do
factory :topic, class: 'Projects::Topic' do
name { generate(:name) }
title { generate(:title) }
+
+ trait :with_avatar do
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
+ end
end
end
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index 85237e2d791..31ae7354a43 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -69,7 +69,7 @@ FactoryBot.define do
trait :design_action_image_v432x230_upload do
mount_point { :image_v432x230 }
model { association(:design_action) }
- uploader { ::DesignManagement::DesignV432x230Uploader.name }
+ uploader { DesignManagement::DesignV432x230Uploader.name }
end
end
end
diff --git a/spec/factories/user_custom_attributes.rb b/spec/factories/user_custom_attributes.rb
index 7bd5c06f4ef..c068da9a39f 100644
--- a/spec/factories/user_custom_attributes.rb
+++ b/spec/factories/user_custom_attributes.rb
@@ -5,5 +5,10 @@ FactoryBot.define do
user
sequence(:key) { |n| "key#{n}" }
sequence(:value) { |n| "value#{n}" }
+
+ trait :assumed_high_risk_reason do
+ key { UserCustomAttribute::ASSUMED_HIGH_RISK_REASON }
+ value { 'reason' }
+ end
end
end
diff --git a/spec/factories/user_preferences.rb b/spec/factories/user_preferences.rb
index 19059a93625..2cfdd12d44b 100644
--- a/spec/factories/user_preferences.rb
+++ b/spec/factories/user_preferences.rb
@@ -3,6 +3,7 @@
FactoryBot.define do
factory :user_preference do
user
+ home_organization { association(:organization, :default) }
trait :only_comments do
issue_notes_filter { UserPreference::NOTES_FILTERS[:only_comments] }
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 8b42631040e..15c140954d5 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -12,6 +12,10 @@ FactoryBot.define do
can_create_group { true }
color_scheme_id { 1 }
+ after(:build) do |user, evaluator|
+ user.assign_personal_namespace if Feature.enabled?(:create_personal_ns_outside_model, Feature.current_request)
+ end
+
trait :admin do
admin { true }
end
diff --git a/spec/factories/users/phone_number_validations.rb b/spec/factories/users/phone_number_validations.rb
index b7e6e819127..f4e373dd9bd 100644
--- a/spec/factories/users/phone_number_validations.rb
+++ b/spec/factories/users/phone_number_validations.rb
@@ -7,5 +7,9 @@ FactoryBot.define do
international_dial_code { 1 }
phone_number { '555' }
telesign_reference_xid { FFaker::Guid.guid }
+
+ trait(:validated) do
+ validated_at { Time.zone.now }
+ end
end
end
diff --git a/spec/features/admin/admin_abuse_reports_spec.rb b/spec/features/admin/admin_abuse_reports_spec.rb
index 973988560b3..5e98d2ffcf3 100644
--- a/spec/features/admin/admin_abuse_reports_spec.rb
+++ b/spec/features/admin/admin_abuse_reports_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe "Admin::AbuseReports", :js, feature_category: :insider_threat do
+ include Features::SortingHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create(:admin) }
@@ -79,7 +81,7 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :insider_threat do
expect(report_rows[1].text).to include(report_text(open_report2))
# updated_at asc
- sort_by 'Updated date'
+ sort_by 'Updated date', from: 'Created date'
expect(report_rows[0].text).to include(report_text(open_report2))
expect(report_rows[1].text).to include(report_text(open_report))
@@ -120,7 +122,7 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :insider_threat do
expect(report_rows[1].text).to include(report_text(open_report2))
# created_at desc
- sort_by 'Created date'
+ sort_by 'Created date', from: 'Number of Reports'
expect(report_rows[0].text).to include(report_text(open_report2))
expect(report_rows[1].text).to include(aggregated_report_text(open_report, 2))
@@ -131,7 +133,7 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :insider_threat do
expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
expect(report_rows[1].text).to include(report_text(open_report2))
- sort_by 'Updated date'
+ sort_by 'Updated date', from: 'Created date'
# updated_at asc
expect(report_rows[0].text).to include(report_text(open_report2))
@@ -193,14 +195,10 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :insider_threat do
select_tokens(*tokens, submit: true, input_text: 'Filter reports')
end
- def sort_by(sort)
+ def sort_by(sort, from: 'Number of Reports')
page.within('.vue-filtered-search-bar-container .sort-dropdown-container') do
- page.find('.gl-dropdown-toggle').click
-
- page.within('.dropdown-menu') do
- click_button sort
- wait_for_requests
- end
+ pajamas_sort_by sort, from: from
+ wait_for_requests
end
end
end
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
index 72c7083f459..f2262464386 100644
--- a/spec/features/admin/admin_mode/login_spec.rb
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Admin Mode Login', feature_category: :system_access do
repeated_otp = user.current_otp
enter_code(repeated_otp)
- gitlab_enable_admin_mode_sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user, use_mock_admin_mode: false)
expect(page).to have_content(_('Enter verification code'))
@@ -47,7 +47,7 @@ RSpec.describe 'Admin Mode Login', feature_category: :system_access do
expect(page).to have_content('Enter verification code')
enter_code(user.current_otp)
- gitlab_enable_admin_mode_sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user, use_mock_admin_mode: false)
expect(page).to have_content(_('Enter verification code'))
end
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
index 7a33256e7a8..584151726a6 100644
--- a/spec/features/admin/admin_mode/logout_spec.rb
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Admin Mode Logout', :js, feature_category: :system_access do
# TODO: This used to use gitlab_sign_in, instead of sign_in, but that is buggy. See
# this issue to look into why: https://gitlab.com/gitlab-org/gitlab/-/issues/331851
sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user, use_mock_admin_mode: false)
visit admin_root_path
end
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
index b1b44ce143f..2a655cdb1f4 100644
--- a/spec/features/admin/admin_mode_spec.rb
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe 'Admin mode', :js, feature_category: :shared do
context 'when in admin_mode' do
before do
- gitlab_enable_admin_mode_sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin, use_mock_admin_mode: false)
end
it 'contains link to leave admin mode' do
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 750f5f8d4b9..653458710e3 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe "Admin Runners", feature_category: :runner_fleet do
+RSpec.describe "Admin Runners", feature_category: :fleet_visibility do
+ include Features::SortingHelpers
include Features::RunnersHelpers
include Spec::Support::Helpers::ModalHelpers
@@ -11,8 +12,6 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
before do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
-
- wait_for_requests
end
describe "Admin Runners page", :js do
@@ -21,7 +20,7 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:project) { create(:project, namespace: namespace, creator: user) }
- describe "runners creation" do
+ describe "runners creation and registration" do
before do
visit admin_runners_path
end
@@ -29,12 +28,6 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
it 'shows a create button' do
expect(page).to have_link s_('Runner|New instance runner'), href: new_admin_runner_path
end
- end
-
- describe "runners registration" do
- before do
- visit admin_runners_path
- end
it_behaves_like "shows and resets runner registration token" do
let(:dropdown_text) { s_('Runners|Register an instance runner') }
@@ -50,11 +43,7 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
visit admin_runners_path
end
- it_behaves_like 'shows runner in list' do
- let(:runner) { instance_runner }
- end
-
- it_behaves_like 'shows runner details from list' do
+ it_behaves_like 'shows runner summary and navigates to details' do
let(:runner) { instance_runner }
let(:runner_page_path) { admin_runner_path(instance_runner) }
end
@@ -404,11 +393,8 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
it_behaves_like 'shows no runners found'
- it 'shows active tab with no runner' do
+ it 'shows active tab' do
expect(page).to have_link('Instance', class: 'active')
-
- expect(page).not_to have_content 'runner-project'
- expect(page).not_to have_content 'runner-group'
end
end
end
@@ -443,10 +429,6 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
end
it_behaves_like 'shows no runners found'
-
- it 'shows no runner' do
- expect(page).not_to have_content 'runner-blue'
- end
end
it 'shows correct runner when tag is selected and search term is entered' do
@@ -480,8 +462,7 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
end
end
- click_on 'Created date' # Open "sort by" dropdown
- click_on 'Last contact'
+ pajamas_sort_by 'Last contact', from: 'Created date'
click_on 'Sort direction: Descending'
within_testid('runner-list') do
@@ -602,8 +583,6 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
before do
visit edit_admin_runner_path(project_runner)
-
- wait_for_requests
end
it_behaves_like 'submits edit runner form' do
@@ -633,7 +612,6 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
context 'when a runner is updated', :js do
before do
click_on _('Save changes')
- wait_for_requests
end
it 'show success alert and redirects to runner page' do
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 4e0198b1f2b..77707a67d58 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin, use_mock_admin_mode: false)
end
context 'General page' do
@@ -261,7 +261,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
expect(page).to have_content "Application settings saved successfully"
end
- it 'terms of Service' do
+ it 'terms of Service', :js do
# Already have the admin accept terms, so they don't need to accept in this spec.
_existing_terms = create(:term)
accept_terms(admin)
@@ -274,7 +274,10 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
expect(current_settings.enforce_terms).to be(true)
expect(current_settings.terms).to eq 'Be nice!'
- expect(page).to have_content 'Application settings saved successfully'
+
+ click_button 'Accept terms'
+
+ expect(page).to have_current_path(general_admin_application_settings_path, ignore_query: true)
end
it 'modify oauth providers' do
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index b8dc725c17f..d0110b3e013 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Admin::Users::User', feature_category: :user_management do
before do
sign_in(current_user)
- gitlab_enable_admin_mode_sign_in(current_user)
+ gitlab_enable_admin_mode_sign_in(current_user, use_mock_admin_mode: false)
end
describe 'GET /admin/users/:id' do
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index 48b978f7245..314a3fbd5f2 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -38,8 +38,6 @@ RSpec.describe 'Project issue boards', :js, feature_category: :team_planning do
project.add_maintainer(user2)
sign_in(user)
-
- set_cookie('sidebar_collapsed', 'true')
end
context 'no lists' do
diff --git a/spec/features/boards/user_adds_lists_to_board_spec.rb b/spec/features/boards/user_adds_lists_to_board_spec.rb
index d202c2a1f7d..22ee43895f4 100644
--- a/spec/features/boards/user_adds_lists_to_board_spec.rb
+++ b/spec/features/boards/user_adds_lists_to_board_spec.rb
@@ -31,8 +31,6 @@ RSpec.describe 'User adds lists', :js, feature_category: :team_planning do
before do
sign_in(user)
- set_cookie('sidebar_collapsed', 'true')
-
case board_type
when :project
visit project_board_path(project, project_board)
diff --git a/spec/features/clusters/create_agent_spec.rb b/spec/features/clusters/create_agent_spec.rb
index 79eaecdf582..960d5c78d43 100644
--- a/spec/features/clusters/create_agent_spec.rb
+++ b/spec/features/clusters/create_agent_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Cluster agent registration', :js, feature_category: :deployment_
double(agent_name: 'example-agent-1', path: '.gitlab/agents/example-agent-1/config.yaml'),
double(agent_name: 'example-agent-2', path: '.gitlab/agents/example-agent-2/config.yaml')
])
- allow(client).to receive(:get_connected_agents).and_return([])
+ allow(client).to receive(:get_connected_agents_by_agent_ids).and_return([])
end
allow(Devise).to receive(:friendly_token).and_return(token)
diff --git a/spec/features/dashboard/issues_filter_spec.rb b/spec/features/dashboard/issues_filter_spec.rb
index ab3aa29a3aa..9bc412d6bf4 100644
--- a/spec/features/dashboard/issues_filter_spec.rb
+++ b/spec/features/dashboard/issues_filter_spec.rb
@@ -111,8 +111,7 @@ RSpec.describe 'Dashboard Issues filtering', :js, feature_category: :team_planni
end
it 'keeps sorting issues after visiting Projects Issues page' do
- click_button 'Created date'
- click_button 'Due date'
+ pajamas_sort_by 'Due date', from: 'Created date'
visit project_issues_path(project)
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index 8a7652858b8..ba42b3e099b 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -41,6 +41,18 @@ RSpec.describe 'Dashboard Merge Requests', :js, feature_category: :code_review_w
expect(page).not_to have_selector('#js-dropdown-target-branch', visible: false)
end
+ it 'disables releases filter' do
+ visit merge_requests_dashboard_path
+
+ expect(page).not_to have_selector('#js-dropdown-release', visible: false)
+ end
+
+ it 'disables environments filter' do
+ visit merge_requests_dashboard_path
+
+ expect(page).not_to have_selector('#js-dropdown-environment', visible: false)
+ end
+
context 'new merge request dropdown' do
let(:project_with_disabled_merge_requests) { create(:project, :merge_requests_disabled) }
@@ -199,19 +211,19 @@ RSpec.describe 'Dashboard Merge Requests', :js, feature_category: :code_review_w
end
it 'shows sorted merge requests' do
- pajamas_sort_by(s_('SortOptions|Created date'))
+ pajamas_sort_by(s_('SortOptions|Priority'), from: s_('SortOptions|Created date'))
visit merge_requests_dashboard_path(assignee_username: current_user.username)
- expect(find('.issues-filters')).to have_content('Created date')
+ expect(find('.issues-filters')).to have_content(s_('SortOptions|Priority'))
end
it 'keeps sorting merge requests after visiting Projects MR page' do
- pajamas_sort_by(s_('SortOptions|Created date'))
+ pajamas_sort_by(s_('SortOptions|Priority'), from: s_('SortOptions|Created date'))
visit project_merge_requests_path(project)
- expect(find('.issues-filters')).to have_content('Created date')
+ expect(find('.issues-filters')).to have_content(s_('SortOptions|Priority'))
end
end
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 5379dabc713..e9b55ab2900 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -151,7 +151,7 @@ RSpec.describe 'Dashboard Projects', :js, feature_category: :groups_and_projects
it 'shows that the last pipeline passed' do
visit dashboard_projects_path
- page.within('[data-testid="project_controls"]') do
+ within_testid('project_controls') do
expect(page).to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
expect(page).to have_css("[data-testid='ci-icon']")
expect(page).to have_css('[data-testid="status_success_borderless-icon"]')
@@ -163,7 +163,7 @@ RSpec.describe 'Dashboard Projects', :js, feature_category: :groups_and_projects
it 'does not show the pipeline status' do
visit dashboard_projects_path
- page.within('[data-testid="project_controls"]') do
+ within_testid('project_controls') do
expect(page).not_to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
expect(page).not_to have_css("[data-testid='ci-icon']")
expect(page).not_to have_css('[data-testid="status_success_borderless-icon"]')
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 59ce873905a..21dfa1cbc0b 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe 'Dashboard Todos', :js, feature_category: :team_planning do
shared_examples 'deleting the todo' do
before do
within first('.todo') do
- find('[data-testid="check-icon"]').click
+ find_by_testid('check-icon').click
end
end
@@ -121,9 +121,9 @@ RSpec.describe 'Dashboard Todos', :js, feature_category: :team_planning do
shared_examples 'deleting and restoring the todo' do
before do
within first('.todo') do
- find('[data-testid="check-icon"]').click
+ find_by_testid('check-icon').click
wait_for_requests
- find('[data-testid="redo-icon"]').click
+ find_by_testid('redo-icon').click
end
end
@@ -301,7 +301,7 @@ RSpec.describe 'Dashboard Todos', :js, feature_category: :team_planning do
describe 'restoring the todo' do
before do
within first('.todo') do
- find('[data-testid="todo-add-icon"]').click
+ find_by_testid('todo-add-icon').click
end
end
@@ -407,7 +407,7 @@ RSpec.describe 'Dashboard Todos', :js, feature_category: :team_planning do
context 'User has deleted a todo' do
before do
within first('.todo') do
- find('[data-testid="check-icon"]').click
+ find_by_testid('check-icon').click
end
end
diff --git a/spec/features/environments/environments_folder_spec.rb b/spec/features/environments/environments_folder_spec.rb
new file mode 100644
index 00000000000..da9de6fba0c
--- /dev/null
+++ b/spec/features/environments/environments_folder_spec.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Environments Folder page', :js, feature_category: :environment_management do
+ let(:folder_name) { 'folder' }
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let!(:envs) { create_list(:environment, 4, :with_folders, project: project, folder: folder_name) }
+ let!(:stopped_env) { create(:environment, :stopped, :with_folders, project: project, folder: folder_name) }
+
+ def get_env_name(environment)
+ environment.name.split('/').last
+ end
+
+ def find_env_element(environment)
+ find_by_id(environment.name)
+ end
+
+ def stop_environment(environment)
+ environment_item = find_env_element(environment)
+ within(environment_item) do
+ click_button 'Stop'
+ end
+
+ within('.modal') do
+ click_button 'Stop environment'
+ end
+
+ wait_for_requests
+ end
+
+ def redeploy_environment(environment)
+ environment_item = find_env_element(environment)
+ within(environment_item) do
+ click_button 'More actions'
+ click_button 'Delete environment'
+ end
+
+ within('.modal') do
+ click_button 'Delete environment'
+ end
+
+ wait_for_requests
+ end
+
+ before_all do
+ project.add_role(user, :developer)
+ end
+
+ before do
+ create(:environment, :production, project: project)
+ end
+
+ describe 'new folders page' do
+ before do
+ sign_in(user)
+ visit folder_project_environments_path(project, folder_name)
+ wait_for_requests
+ end
+
+ it 'renders the header with a folder name' do
+ expect(page).to have_content("Environments / #{folder_name}")
+ end
+
+ it 'renders the environments' do
+ expect(page).not_to have_content('production')
+ envs.each { |env| expect(page).to have_content(get_env_name(env)) }
+ end
+
+ it 'shows scope tabs' do
+ expect(page).to have_content("Active")
+ expect(page).to have_content("Stopped")
+ end
+
+ it 'can stop the environment' do
+ environment_to_stop = envs.first
+
+ stop_environment(environment_to_stop)
+
+ expect(page).not_to have_content(get_env_name(environment_to_stop))
+ end
+
+ describe 'stopped environments tab' do
+ before do
+ element = find('a', text: 'Stopped')
+ element.click
+ wait_for_requests
+ end
+
+ it 'shows stopped environments on stopped tab' do
+ expect(page).to have_content(get_env_name(stopped_env))
+ end
+
+ it 'can re-start the environment' do
+ redeploy_environment(stopped_env)
+
+ expect(page).not_to have_content(get_env_name(stopped_env))
+ end
+ end
+
+ describe 'pagination' do
+ # rubocop:disable RSpec/FactoryBot/ExcessiveCreateList -- need >20 items to test pagination
+ let!(:envs) { create_list(:environment, 25, :with_folders, project: project, folder: folder_name) }
+
+ # rubocop:enable RSpec/FactoryBot/ExcessiveCreateList
+ it 'shows pagination' do
+ pagination = find('.pagination')
+
+ expect(pagination).to have_content('2')
+ end
+
+ it 'can navigate to the next page and updates the url' do
+ pagination = find('.pagination')
+ pagination.scroll_to(:bottom)
+ within(pagination) do
+ click_link 'Next'
+ end
+
+ wait_for_requests
+
+ expect(current_url).to include('page=2')
+ end
+ end
+ end
+
+ describe 'legacy folders page' do
+ before do
+ stub_feature_flags(environments_folder_new_look: false)
+ sign_in(user)
+ visit folder_project_environments_path(project, folder_name)
+ wait_for_requests
+ end
+
+ it 'user opens folder view' do
+ expect(page).to have_content("Environments / #{folder_name}")
+ expect(page).not_to have_content('production')
+ envs.each { |env| expect(page).to have_content(get_env_name(env)) }
+ end
+ end
+end
diff --git a/spec/features/explore/catalog/catalog_settings_spec.rb b/spec/features/explore/catalog/catalog_settings_spec.rb
new file mode 100644
index 00000000000..bf324eafd7f
--- /dev/null
+++ b/spec/features/explore/catalog/catalog_settings_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CI/CD Catalog settings', :js, feature_category: :pipeline_composition do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:namespace) { create(:group) }
+ let_it_be_with_reload(:new_project) { create(:project, :repository, namespace: namespace) }
+
+ context 'when user is not the owner' do
+ before do
+ sign_in(user)
+ visit edit_project_path(new_project)
+ wait_for_requests
+ end
+
+ it 'does not show the CI/CD toggle settings' do
+ expect(page).not_to have_content('CI/CD Catalog resource')
+ end
+ end
+
+ context 'when user is the owner' do
+ before_all do
+ namespace.add_owner(user)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ it 'shows the CI/CD toggle settings' do
+ visit edit_project_path(new_project)
+ wait_for_requests
+
+ expect(page).to have_content('CI/CD Catalog resource')
+ end
+
+ describe 'when setting a project as a Catalog resource' do
+ before do
+ visit project_path(new_project)
+ wait_for_requests
+ end
+
+ it 'adds the project to the CI/CD Catalog' do
+ expect(page).not_to have_content('CI/CD catalog resource')
+
+ visit edit_project_path(new_project)
+
+ find('[data-testid="catalog-resource-toggle"] button').click
+
+ visit project_path(new_project)
+
+ expect(page).to have_content('CI/CD catalog resource')
+ end
+ end
+
+ describe 'when unlisting a project from the CI/CD Catalog' do
+ before do
+ create(:ci_catalog_resource, project: new_project, state: :published)
+ visit project_path(new_project)
+ wait_for_requests
+ end
+
+ it 'removes the project from the CI/CD Catalog' do
+ expect(page).to have_content('CI/CD catalog resource')
+
+ visit edit_project_path(new_project)
+
+ find('[data-testid="catalog-resource-toggle"] button').click
+ click_button 'Remove from the CI/CD catalog'
+
+ visit project_path(new_project)
+
+ expect(page).not_to have_content('CI/CD catalog resource')
+ end
+ end
+ end
+end
diff --git a/spec/features/explore/catalog/catalog_spec.rb b/spec/features/explore/catalog/catalog_spec.rb
new file mode 100644
index 00000000000..00bbb02ebbf
--- /dev/null
+++ b/spec/features/explore/catalog/catalog_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CI/CD Catalog', :js, feature_category: :pipeline_composition do
+ let_it_be(:namespace) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ before_all do
+ namespace.add_developer(user)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET explore/catalog' do
+ let_it_be(:project) { create(:project, :repository, namespace: namespace) }
+
+ let_it_be(:ci_resource_projects) do
+ create_list(
+ :project,
+ 3,
+ :repository,
+ description: 'A simple component',
+ namespace: namespace
+ )
+ end
+
+ let_it_be(:ci_catalog_resources) do
+ ci_resource_projects.map do |current_project|
+ create(:ci_catalog_resource, :published, project: current_project)
+ end
+ end
+
+ before do
+ visit explore_catalog_index_path
+ wait_for_requests
+ end
+
+ it 'shows CI Catalog title and description', :aggregate_failures do
+ expect(page).to have_content('CI/CD Catalog')
+ expect(page).to have_content(
+ 'Discover CI/CD components that can improve your pipeline with additional functionality'
+ )
+ end
+
+ it 'renders CI Catalog resources list' do
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(3)
+ end
+
+ context 'when searching for a resource' do
+ let(:project_name) { ci_resource_projects[0].name }
+
+ before do
+ find('input[data-testid="catalog-search-bar"]').send_keys project_name
+ find('input[data-testid="catalog-search-bar"]').send_keys :enter
+ wait_for_requests
+ end
+
+ it 'renders only a subset of items' do
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(1)
+ within_testid('catalog-resource-item', match: :first) do
+ expect(page).to have_content(project_name)
+ end
+ end
+ end
+
+ context 'when sorting' do
+ context 'with the creation date option' do
+ it 'sorts resources from last to first by default' do
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(3)
+ expect(find_all('[data-testid="catalog-resource-item"]')[0]).to have_content(ci_resource_projects[2].name)
+ expect(find_all('[data-testid="catalog-resource-item"]')[2]).to have_content(ci_resource_projects[0].name)
+ end
+
+ context 'when changing the sort direction' do
+ before do
+ find('.sorting-direction-button').click
+ wait_for_requests
+ end
+
+ it 'sorts resources from first to last' do
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(3)
+ expect(find_all('[data-testid="catalog-resource-item"]')[0]).to have_content(ci_resource_projects[0].name)
+ expect(find_all('[data-testid="catalog-resource-item"]')[2]).to have_content(ci_resource_projects[2].name)
+ end
+ end
+ end
+ end
+
+ context 'for a single CI/CD catalog resource' do
+ it 'renders resource details', :aggregate_failures do
+ within_testid('catalog-resource-item', match: :first) do
+ expect(page).to have_content(ci_resource_projects[2].name)
+ expect(page).to have_content(ci_resource_projects[2].description)
+ expect(page).to have_content(namespace.name)
+ end
+ end
+
+ context 'when clicked' do
+ before do
+ find_by_testid('ci-resource-link', match: :first).click
+ end
+
+ it 'navigates to the details page' do
+ expect(page).to have_content('Go to the project')
+ end
+ end
+ end
+ end
+
+ describe 'GET explore/catalog/:id' do
+ let_it_be(:project) { create(:project, :repository, namespace: namespace) }
+
+ before do
+ visit explore_catalog_path(new_ci_resource)
+ end
+
+ context 'when the resource is published' do
+ let(:new_ci_resource) { create(:ci_catalog_resource, :published, project: project) }
+
+ it 'navigates to the details page' do
+ expect(page).to have_content('Go to the project')
+ end
+ end
+
+ context 'when the resource is not published' do
+ let(:new_ci_resource) { create(:ci_catalog_resource, project: project, state: :draft) }
+
+ it 'returns a 404' do
+ expect(page).to have_title('Not Found')
+ expect(page).to have_content('Page Not Found')
+ end
+ end
+ end
+end
diff --git a/spec/features/explore/catalog_spec.rb b/spec/features/explore/catalog_spec.rb
deleted file mode 100644
index 52ce52e43fe..00000000000
--- a/spec/features/explore/catalog_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Global Catalog', :js, feature_category: :pipeline_composition do
- let_it_be(:namespace) { create(:group) }
- let_it_be(:user) { create(:user) }
-
- before_all do
- namespace.add_developer(user)
- end
-
- before do
- sign_in(user)
- end
-
- describe 'GET explore/catalog' do
- let_it_be(:project) { create(:project, :repository, namespace: namespace) }
- let_it_be(:ci_resource_projects) do
- create_list(
- :project,
- 3,
- :repository,
- description: 'A simple component',
- namespace: namespace
- )
- end
-
- before do
- ci_resource_projects.each do |current_project|
- create(:ci_catalog_resource, project: current_project)
- end
-
- visit explore_catalog_index_path
- wait_for_requests
- end
-
- it 'shows CI Catalog title and description', :aggregate_failures do
- expect(page).to have_content('CI/CD Catalog')
- expect(page).to have_content('Discover CI configuration resources for a seamless CI/CD experience.')
- end
-
- it 'renders CI Catalog resources list' do
- expect(find_all('[data-testid="catalog-resource-item"]').length).to be(3)
- end
-
- context 'for a single CI/CD catalog resource' do
- it 'renders resource details', :aggregate_failures do
- within_testid('catalog-resource-item', match: :first) do
- expect(page).to have_content(ci_resource_projects[2].name)
- expect(page).to have_content(ci_resource_projects[2].description)
- expect(page).to have_content(namespace.name)
- end
- end
-
- context 'when clicked' do
- before do
- find_by_testid('ci-resource-link', match: :first).click
- end
-
- it 'navigate to the details page' do
- expect(page).to have_content('Go to the project')
- end
- end
- end
- end
-
- describe 'GET explore/catalog/:id' do
- let_it_be(:project) { create(:project, :repository, namespace: namespace) }
- let_it_be(:new_ci_resource) { create(:ci_catalog_resource, project: project) }
-
- before do
- visit explore_catalog_path(id: new_ci_resource["id"])
- end
-
- it 'navigates to the details page' do
- expect(page).to have_content('Go to the project')
- end
- end
-end
diff --git a/spec/features/explore/navbar_spec.rb b/spec/features/explore/navbar_spec.rb
index c172760eb2c..f8fe9bc9af3 100644
--- a/spec/features/explore/navbar_spec.rb
+++ b/spec/features/explore/navbar_spec.rb
@@ -7,19 +7,7 @@ RSpec.describe '"Explore" navbar', :js, feature_category: :navigation do
it_behaves_like 'verified navigation bar' do
before do
- stub_feature_flags(global_ci_catalog: false)
visit explore_projects_path
end
end
-
- context "with 'global_ci_catalog' enabled" do
- include_context '"Explore" navbar structure with global_ci_catalog FF'
-
- it_behaves_like 'verified navigation bar', global_ci_catalog: true do
- before do
- stub_feature_flags(global_ci_catalog: true)
- visit explore_projects_path
- end
- end
- end
end
diff --git a/spec/features/explore/user_explores_projects_spec.rb b/spec/features/explore/user_explores_projects_spec.rb
index e1341824bfd..369297ff04a 100644
--- a/spec/features/explore/user_explores_projects_spec.rb
+++ b/spec/features/explore/user_explores_projects_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'User explores projects', feature_category: :user_profile do
describe 'breadcrumbs' do
it 'has "Explore" as its root breadcrumb' do
- within '.breadcrumbs-list li:first' do
+ within '.gl-breadcrumb-list li:first' do
expect(page).to have_link('Explore', href: explore_root_path)
end
end
diff --git a/spec/features/file_uploads/graphql_add_design_spec.rb b/spec/features/file_uploads/graphql_add_design_spec.rb
index 0b61c952b55..714a2a29ee2 100644
--- a/spec/features/file_uploads/graphql_add_design_spec.rb
+++ b/spec/features/file_uploads/graphql_add_design_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe 'Upload a design through graphQL', :js, feature_category: :design
RSpec.shared_examples 'for a design upload through graphQL' do
it 'creates proper objects' do
expect { subject }
- .to change { ::DesignManagement::Design.count }.by(1)
+ .to change { DesignManagement::Design.count }.by(1)
.and change { ::LfsObject.count }.by(1)
end
diff --git a/spec/features/frequently_visited_projects_and_groups_spec.rb b/spec/features/frequently_visited_projects_and_groups_spec.rb
index 514b642a2d4..764e88882a8 100644
--- a/spec/features/frequently_visited_projects_and_groups_spec.rb
+++ b/spec/features/frequently_visited_projects_and_groups_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Frequently visited items', :js, feature_category: :shared do
- include Features::TopNavSpecHelpers
-
let_it_be(:user) { create(:user) }
before do
diff --git a/spec/features/groups/board_sidebar_spec.rb b/spec/features/groups/board_sidebar_spec.rb
index 3fe520ea2ea..51d2e85f779 100644
--- a/spec/features/groups/board_sidebar_spec.rb
+++ b/spec/features/groups/board_sidebar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group Issue Boards', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Group Issue Boards', :js, feature_category: :team_planning do
include BoardHelpers
let(:group) { create(:group) }
@@ -34,7 +34,7 @@ RSpec.describe 'Group Issue Boards', :js, feature_category: :groups_and_projects
wait_for_requests
- page.within('[data-testid="dropdown-content"]') do
+ within_testid('dropdown-content') do
expect(page).to have_content(project_1_label.title)
expect(page).to have_content(group_label.title)
expect(page).not_to have_content(project_2_label.title)
diff --git a/spec/features/groups/board_spec.rb b/spec/features/groups/board_spec.rb
index e6dc6055e27..1a9a53a7421 100644
--- a/spec/features/groups/board_spec.rb
+++ b/spec/features/groups/board_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe 'Group Boards', feature_category: :team_planning do
fill_in 'issue_title', with: issue_title
- page.within("[data-testid='project-select-dropdown']") do
+ within_testid('project-select-dropdown') do
find('button.gl-new-dropdown-toggle').click
find('.gl-new-dropdown-item').click
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index d876a5804bd..9089fba1886 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User Cluster', :js, feature_category: :user_profile do
+RSpec.describe 'User Cluster', :js, feature_category: :environment_management do
include GoogleApi::CloudPlatformHelpers
let(:group) { create(:group) }
@@ -112,14 +112,33 @@ RSpec.describe 'User Cluster', :js, feature_category: :user_profile do
context 'when user destroys the cluster' do
before do
click_link 'Advanced Settings'
- find('[data-testid="remove-integration-button"]').click
+ find_by_testid('remove-integration-button').click
fill_in 'confirm_cluster_name_input', with: cluster.name
- find('[data-testid="remove-integration-modal-button"]').click
+ find_by_testid('remove-integration-modal-button').click
end
it 'user sees creation form with the successful message' do
expect(page).to have_content('Kubernetes cluster integration was successfully removed.')
end
end
+
+ context 'when signed in user is an admin in admin_mode' do
+ let(:admin) { create(:admin) }
+
+ before do
+ # signs out the user with `maintainer` role in the project
+ gitlab_sign_out
+
+ gitlab_sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+
+ visit group_clusters_path(group)
+ end
+
+ it 'can visit the clusters index page', :aggregate_failures do
+ expect(page).to have_title("Kubernetes Clusters · #{group.name} · #{_('GitLab')}")
+ expect(page).to have_content('Connect a cluster')
+ end
+ end
end
end
diff --git a/spec/features/groups/dependency_proxy_spec.rb b/spec/features/groups/dependency_proxy_spec.rb
index 12c480a46b0..136c1ff0335 100644
--- a/spec/features/groups/dependency_proxy_spec.rb
+++ b/spec/features/groups/dependency_proxy_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe 'Group Dependency Proxy', feature_category: :dependency_proxy do
visit settings_path
wait_for_requests
- proxy_toggle = find('[data-testid="dependency-proxy-setting-toggle"]')
+ proxy_toggle = find_by_testid('dependency-proxy-setting-toggle')
proxy_toggle_button = proxy_toggle.find('button')
expect(proxy_toggle).to have_css("button.is-checked")
diff --git a/spec/features/groups/group_runners_spec.rb b/spec/features/groups/group_runners_spec.rb
index 4e5d7c6f8e8..b9a68b75962 100644
--- a/spec/features/groups/group_runners_spec.rb
+++ b/spec/features/groups/group_runners_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe "Group Runners", feature_category: :runner_fleet do
+RSpec.describe "Group Runners", feature_category: :fleet_visibility do
include Features::RunnersHelpers
include Spec::Support::Helpers::ModalHelpers
@@ -11,7 +11,7 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
- before do
+ before_all do
group.add_owner(group_owner)
group.add_maintainer(group_maintainer)
end
@@ -45,11 +45,7 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
visit group_runners_path(group)
end
- it_behaves_like 'shows runner in list' do
- let(:runner) { group_runner }
- end
-
- it_behaves_like 'shows runner details from list' do
+ it_behaves_like 'shows runner summary and navigates to details' do
let(:runner) { group_runner }
let(:runner_page_path) { group_runner_path(group, group_runner) }
end
@@ -66,10 +62,6 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
end
it_behaves_like 'shows no runners found'
-
- it 'shows no runner' do
- expect(page).not_to have_content 'runner-foo'
- end
end
end
@@ -82,11 +74,7 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
visit group_runners_path(group)
end
- it_behaves_like 'shows runner in list' do
- let(:runner) { project_runner }
- end
-
- it_behaves_like 'shows runner details from list' do
+ it_behaves_like 'shows runner summary and navigates to details' do
let(:runner) { project_runner }
let(:runner_page_path) { group_runner_path(group, project_runner) }
end
@@ -110,15 +98,9 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
context "when selecting 'Show only inherited'" do
before do
find("[data-testid='runner-membership-toggle'] button").click
-
- wait_for_requests
- end
-
- it_behaves_like 'shows runner in list' do
- let(:runner) { instance_runner }
end
- it_behaves_like 'shows runner details from list' do
+ it_behaves_like 'shows runner summary and navigates to details' do
let(:runner) { instance_runner }
let(:runner_page_path) { group_runner_path(group, instance_runner) }
end
@@ -287,7 +269,6 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
before do
visit edit_group_runner_path(group, group_runner)
- wait_for_requests
end
it_behaves_like 'submits edit runner form' do
@@ -301,7 +282,6 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
before do
visit edit_group_runner_path(group, project_runner)
- wait_for_requests
end
it_behaves_like 'submits edit runner form' do
diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb
index 0437e5df6e9..a7710ea04ab 100644
--- a/spec/features/groups/group_settings_spec.rb
+++ b/spec/features/groups/group_settings_spec.rb
@@ -141,8 +141,8 @@ RSpec.describe 'Edit group settings', feature_category: :groups_and_projects do
end
describe 'transfer group', :js do
- let(:namespace_select) { page.find('[data-testid="transfer-group-namespace-select"]') }
- let(:confirm_modal) { page.find('[data-testid="confirm-danger-modal"]') }
+ let(:namespace_select) { find_by_testid('transfer-group-namespace-select') }
+ let(:confirm_modal) { find_by_testid('confirm-danger-modal') }
shared_examples 'can transfer the group' do
before do
@@ -154,7 +154,7 @@ RSpec.describe 'Edit group settings', feature_category: :groups_and_projects do
visit edit_group_path(selected_group)
- page.within('[data-testid="transfer-locations-dropdown"]') do
+ within_testid('transfer-locations-dropdown') do
click_button _('Select parent group')
fill_in _('Search'), with: target_group&.name || ''
wait_for_requests
@@ -170,7 +170,7 @@ RSpec.describe 'Edit group settings', feature_category: :groups_and_projects do
click_button 'Confirm'
end
- within('[data-testid="breadcrumb-links"]') do
+ within_testid('breadcrumb-links') do
expect(page).to have_content(target_group.name) if target_group
expect(page).to have_content(selected_group.name)
end
diff --git a/spec/features/groups/import_export/connect_instance_spec.rb b/spec/features/groups/import_export/connect_instance_spec.rb
index f6548c035f0..f3c89cce633 100644
--- a/spec/features/groups/import_export/connect_instance_spec.rb
+++ b/spec/features/groups/import_export/connect_instance_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js, feature_categ
end
it 'renders fields and button disabled' do
- expect(page).to have_field('GitLab source instance URL', disabled: true)
+ expect(page).to have_field('GitLab source instance base URL', disabled: true)
expect(page).to have_field('Personal access token', disabled: true)
expect(page).to have_button('Connect instance', disabled: true)
end
diff --git a/spec/features/groups/import_export/migration_history_spec.rb b/spec/features/groups/import_export/migration_history_spec.rb
index 9fc9c7898d1..87b38221ad6 100644
--- a/spec/features/groups/import_export/migration_history_spec.rb
+++ b/spec/features/groups/import_export/migration_history_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'Import/Export - GitLab migration history', :js, feature_category
wait_for_requests
- expect(page).to have_content 'GitLab Migration history'
+ expect(page).to have_content 'Direct transfer history'
expect(page.find('tbody')).to have_css('tr', count: 2)
end
end
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index 0d7e9df031c..e4f9fe34823 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Group issues page', feature_category: :groups_and_projects do
+ include Features::SortingHelpers
include FilteredSearchHelpers
include DragTo
@@ -180,8 +181,7 @@ RSpec.describe 'Group issues page', feature_category: :groups_and_projects do
end
def select_manual_sort
- click_button 'Created date'
- click_button 'Manual'
+ pajamas_sort_by 'Manual', from: 'Created date'
wait_for_requests
end
diff --git a/spec/features/groups/members/leave_group_spec.rb b/spec/features/groups/members/leave_group_spec.rb
index d864852e0d4..632155cd7e4 100644
--- a/spec/features/groups/members/leave_group_spec.rb
+++ b/spec/features/groups/members/leave_group_spec.rb
@@ -9,17 +9,22 @@ RSpec.describe 'Groups > Members > Leave group', feature_category: :groups_and_p
let(:user) { create(:user) }
let(:other_user) { create(:user) }
let(:group) { create(:group) }
+ let(:more_actions_dropdown) do
+ find('[data-testid="groups-projects-more-actions-dropdown"] .gl-new-dropdown-custom-toggle')
+ end
before do
sign_in(user)
end
- it 'guest leaves the group' do
+ it 'guest leaves the group', :js do
group.add_guest(user)
group.add_owner(other_user)
visit group_path(group)
+ more_actions_dropdown.click
click_link 'Leave group'
+ accept_gl_confirm(button_text: 'Leave group')
expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
expect(page).to have_content left_group_message(group)
@@ -31,31 +36,33 @@ RSpec.describe 'Groups > Members > Leave group', feature_category: :groups_and_p
group.add_owner(other_user)
visit group_path(group, leave: 1)
-
accept_gl_confirm(button_text: 'Leave group')
- wait_for_all_requests
expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
expect(group.users).not_to include(user)
end
- it 'guest leaves the group as last member' do
+ it 'guest leaves the group as last member', :js do
group.add_guest(user)
visit group_path(group)
+ more_actions_dropdown.click
click_link 'Leave group'
+ accept_gl_confirm(button_text: 'Leave group')
expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
expect(page).to have_content left_group_message(group)
expect(group.users).not_to include(user)
end
- it 'owner leaves the group if they are not the last owner' do
+ it 'owner leaves the group if they are not the last owner', :js do
group.add_owner(user)
group.add_owner(other_user)
visit group_path(group)
+ more_actions_dropdown.click
click_link 'Leave group'
+ accept_gl_confirm(button_text: 'Leave group')
expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
expect(page).to have_content left_group_message(group)
@@ -66,6 +73,7 @@ RSpec.describe 'Groups > Members > Leave group', feature_category: :groups_and_p
group.add_owner(user)
visit group_path(group)
+ more_actions_dropdown.click
expect(page).not_to have_content 'Leave group'
@@ -79,7 +87,7 @@ RSpec.describe 'Groups > Members > Leave group', feature_category: :groups_and_p
visit group_path(group, leave: 1)
- expect(find('[data-testid="alert-danger"]')).to have_content 'You do not have permission to leave this group'
+ expect(find_by_testid('alert-danger')).to have_content 'You do not have permission to leave this group'
end
def left_group_message(group)
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index 87de0e2e46b..9531ebd3c35 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe 'Groups > Members > Manage groups', :js, feature_category: :group
page.within first_row do
expect(page).to have_field('Expiration date', with: expiration_date)
- find('[data-testid="clear-button"]').click
+ find_by_testid('clear-button').click
wait_for_requests
diff --git a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
index c2eedfb4063..38eb226690c 100644
--- a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js,
page.within second_row do
expect(page).to have_field('Expiration date', with: expiration_date)
- find('[data-testid="clear-button"]').click
+ find_by_testid('clear-button').click
wait_for_requests
diff --git a/spec/features/groups/members/request_access_spec.rb b/spec/features/groups/members/request_access_spec.rb
index 2d0b2e483c5..e6da2ce7cec 100644
--- a/spec/features/groups/members/request_access_spec.rb
+++ b/spec/features/groups/members/request_access_spec.rb
@@ -3,10 +3,15 @@
require 'spec_helper'
RSpec.describe 'Groups > Members > Request access', feature_category: :groups_and_projects do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:user) { create(:user) }
let(:owner) { create(:user) }
let(:group) { create(:group, :public) }
let!(:project) { create(:project, :private, namespace: group) }
+ let(:more_actions_dropdown) do
+ find('[data-testid="groups-projects-more-actions-dropdown"] .gl-new-dropdown-custom-toggle')
+ end
before do
group.add_owner(owner)
@@ -14,15 +19,19 @@ RSpec.describe 'Groups > Members > Request access', feature_category: :groups_an
visit group_path(group)
end
- it 'request access feature is disabled' do
+ it 'request access feature is disabled', :js do
group.update!(request_access_enabled: false)
visit group_path(group)
+ more_actions_dropdown.click
expect(page).not_to have_content 'Request Access'
end
- it 'user can request access to a group' do
- perform_enqueued_jobs { click_link 'Request Access' }
+ it 'user can request access to a group', :js do
+ perform_enqueued_jobs do
+ more_actions_dropdown.click
+ click_link 'Request Access'
+ end
expect(ActionMailer::Base.deliveries.last.to).to eq [owner.notification_email_or_default]
expect(ActionMailer::Base.deliveries.last.subject).to match "Request to join the #{group.name} group"
@@ -30,18 +39,26 @@ RSpec.describe 'Groups > Members > Request access', feature_category: :groups_an
expect(group.requesters.exists?(user_id: user)).to be_truthy
expect(page).to have_content 'Your request for access has been queued for review.'
+ more_actions_dropdown.click
+
expect(page).to have_content 'Withdraw Access Request'
expect(page).not_to have_content 'Leave group'
end
- it 'user does not see private projects' do
- perform_enqueued_jobs { click_link 'Request Access' }
+ it 'user does not see private projects', :js do
+ perform_enqueued_jobs do
+ more_actions_dropdown.click
+ click_link 'Request Access'
+ end
expect(page).not_to have_content project.name
end
- it 'user does not see group in the Dashboard > Groups page' do
- perform_enqueued_jobs { click_link 'Request Access' }
+ it 'user does not see group in the Dashboard > Groups page', :js do
+ perform_enqueued_jobs do
+ more_actions_dropdown.click
+ click_link 'Request Access'
+ end
visit dashboard_groups_path
@@ -49,6 +66,7 @@ RSpec.describe 'Groups > Members > Request access', feature_category: :groups_an
end
it 'user is not listed in the group members page', :js do
+ more_actions_dropdown.click
click_link 'Request Access'
expect(group.requesters.exists?(user_id: user)).to be_truthy
@@ -63,20 +81,24 @@ RSpec.describe 'Groups > Members > Request access', feature_category: :groups_an
end
end
- it 'user can withdraw its request for access' do
+ it 'user can withdraw its request for access', :js do
+ more_actions_dropdown.click
click_link 'Request Access'
expect(group.requesters.exists?(user_id: user)).to be_truthy
+ more_actions_dropdown.click
click_link 'Withdraw Access Request'
+ accept_gl_confirm
- expect(group.requesters.exists?(user_id: user)).to be_falsey
expect(page).to have_content 'Your access request to the group has been withdrawn.'
+ expect(group.requesters.exists?(user_id: user)).to be_falsey
end
- it 'member does not see the request access button' do
+ it 'member does not see the request access button', :js do
group.add_owner(user)
visit group_path(group)
+ more_actions_dropdown.click
expect(page).not_to have_content 'Request Access'
end
diff --git a/spec/features/groups/members/search_members_spec.rb b/spec/features/groups/members/search_members_spec.rb
index ed2e0cd7b09..5f4ff8023c6 100644
--- a/spec/features/groups/members/search_members_spec.rb
+++ b/spec/features/groups/members/search_members_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Search group member', :js, feature_category: :groups_and_project
end
it 'renders member users' do
- page.within '[data-testid="members-filtered-search-bar"]' do
+ within_testid('members-filtered-search-bar') do
find_field('Filter members').click
find('input').native.send_keys(member.name)
click_button 'Search'
diff --git a/spec/features/groups/members/sort_members_spec.rb b/spec/features/groups/members/sort_members_spec.rb
index ea6f3ae1966..1cc9862ff3b 100644
--- a/spec/features/groups/members/sort_members_spec.rb
+++ b/spec/features/groups/members/sort_members_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Groups > Members > Sort members', :js, feature_category: :groups
end
def expect_sort_by(text, sort_direction)
- within('[data-testid="members-sort-dropdown"]') do
+ within_testid('members-sort-dropdown') do
expect(page).to have_css('button[aria-haspopup="menu"]', text: text)
expect(page).to have_button("Sort direction: #{sort_direction == :asc ? 'Ascending' : 'Descending'}")
end
diff --git a/spec/features/groups/members/tabs_spec.rb b/spec/features/groups/members/tabs_spec.rb
index cc97b367313..b96aa9293f2 100644
--- a/spec/features/groups/members/tabs_spec.rb
+++ b/spec/features/groups/members/tabs_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe 'Groups > Members > Tabs', :js, feature_category: :groups_and_pro
click_link 'Invited'
- page.within '[data-testid="members-filtered-search-bar"]' do
+ within_testid('members-filtered-search-bar') do
find_field('Search invited').click
find('input').native.send_keys('email')
click_button 'Search'
@@ -75,7 +75,7 @@ RSpec.describe 'Groups > Members > Tabs', :js, feature_category: :groups_and_pro
before do
click_link 'Members'
- page.within '[data-testid="members-filtered-search-bar"]' do
+ within_testid 'members-filtered-search-bar' do
find_field('Filter members').click
find('input').native.send_keys('test')
click_button 'Search'
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 0a830e6715c..84a6bc96df0 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe 'Group merge requests page', feature_category: :code_review_workf
it 'shows projects only with merge requests feature enabled', :js do
click_button 'Select project to create merge request'
- page.within('[data-testid="new-resource-dropdown"]') do
+ within_testid('new-resource-dropdown') do
expect(page).to have_content(project.name_with_namespace)
expect(page).not_to have_content(project_with_merge_requests_disabled.name_with_namespace)
end
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index bb7cc3db452..3f03f62604d 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group milestones', feature_category: :groups_and_projects do
+RSpec.describe 'Group milestones', feature_category: :team_planning do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project_empty_repo, group: group) }
let_it_be(:user) { create(:group_member, :maintainer, user: create(:user), group: group).user }
@@ -98,7 +98,7 @@ RSpec.describe 'Group milestones', feature_category: :groups_and_projects do
end
it 'counts milestones correctly' do
- page.within '[data-testid="milestones-filter"]' do
+ within_testid 'milestones-filter' do
expect(page).to have_content('Open 3')
expect(page).to have_content('Closed 3')
expect(page).to have_content('All 6')
diff --git a/spec/features/groups/packages_spec.rb b/spec/features/groups/packages_spec.rb
index 7819b1f0ab6..6b2a15daf72 100644
--- a/spec/features/groups/packages_spec.rb
+++ b/spec/features/groups/packages_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe 'Group Packages', feature_category: :package_registry do
it_behaves_like 'package details link'
it 'allows you to navigate to the project page' do
- find('[data-testid="root-link"]', text: project.name).click
+ find_by_testid('root-link', text: project.name).click
expect(page).to have_current_path(project_path(project))
expect(page).to have_content(project.name)
diff --git a/spec/features/groups/settings/ci_cd_spec.rb b/spec/features/groups/settings/ci_cd_spec.rb
index a1acb73178b..bf056f535f2 100644
--- a/spec/features/groups/settings/ci_cd_spec.rb
+++ b/spec/features/groups/settings/ci_cd_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Group CI/CD settings', feature_category: :continuous_integration
end
describe 'Runners section' do
- let(:shared_runners_toggle) { page.find('[data-testid="shared-runners-toggle"]') }
+ let(:shared_runners_toggle) { find_by_testid('shared-runners-toggle') }
before do
visit group_settings_ci_cd_path(group)
diff --git a/spec/features/groups/settings/group_badges_spec.rb b/spec/features/groups/settings/group_badges_spec.rb
index 1f16a288882..1ae61853748 100644
--- a/spec/features/groups/settings/group_badges_spec.rb
+++ b/spec/features/groups/settings/group_badges_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
+RSpec.describe 'Group Badges', feature_category: :groups_and_projects, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/430713' do
include WaitForRequests
let(:user) { create(:user) }
diff --git a/spec/features/groups/settings/packages_and_registries_spec.rb b/spec/features/groups/settings/packages_and_registries_spec.rb
index cbd26441e2b..934234aa171 100644
--- a/spec/features/groups/settings/packages_and_registries_spec.rb
+++ b/spec/features/groups/settings/packages_and_registries_spec.rb
@@ -65,8 +65,10 @@ RSpec.describe 'Group Package and registry settings', feature_category: :package
wait_for_requests
+ # rubocop:disable Capybara/TestidFinders -- Helper within_testid doesn't cover use case
expect(page).to be_axe_clean.within('[data-testid="packages-and-registries-group-settings"]')
.skipping :'link-in-text-block', :'heading-order'
+ # rubocop:enable Capybara/TestidFinders
end
it 'has a Duplicate packages section', :js do
@@ -81,7 +83,7 @@ RSpec.describe 'Group Package and registry settings', feature_category: :package
visit_settings_page
wait_for_requests
- within '[data-testid="maven-settings"]' do
+ within_testid 'maven-settings' do
expect(page).to have_field _('Exceptions'), disabled: true
click_button class: 'gl-toggle'
@@ -98,7 +100,7 @@ RSpec.describe 'Group Package and registry settings', feature_category: :package
visit_settings_page
wait_for_requests
- within '[data-testid="maven-settings"]' do
+ within_testid 'maven-settings' do
click_button class: 'gl-toggle'
fill_in _('Exceptions'), with: ')'
@@ -115,7 +117,7 @@ RSpec.describe 'Group Package and registry settings', feature_category: :package
visit_sub_group_settings_page
wait_for_requests
- within '[data-testid="maven-settings"]' do
+ within_testid 'maven-settings' do
expect(page).to have_content('Allow duplicates')
expect(page).to have_field _('Exceptions'), disabled: true
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index cf18f3cb4e5..c2ab5edf79c 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'Group show page', feature_category: :groups_and_projects do
expect(page).to have_content('Collaborate with your team')
- page.within(find('[data-testid="invite-members-banner"]')) do
+ within_testid('invite-members-banner') do
click_button('Invite your colleagues')
end
@@ -48,8 +48,8 @@ RSpec.describe 'Group show page', feature_category: :groups_and_projects do
click_button('Cancel')
end
- page.within(find('[data-testid="invite-members-banner"]')) do
- find('[data-testid="close-icon"]').click
+ within_testid('invite-members-banner') do
+ find_by_testid('close-icon').click
end
expect(page).not_to have_content('Collaborate with your team')
@@ -119,7 +119,7 @@ RSpec.describe 'Group show page', feature_category: :groups_and_projects do
wait_for_requests
- page.within("[data-testid=\"group-overview-item-#{public_project.id}\"]") do
+ within_testid("group-overview-item-#{public_project.id}") do
click_button _('Less restrictive visibility')
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 578f39181d1..c0aaa7f818a 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -485,7 +485,7 @@ RSpec.describe 'Group', feature_category: :groups_and_projects do
it 'only displays "New subgroup" button' do
visit group_path(group)
- page.within '[data-testid="group-buttons"]' do
+ within_testid 'group-buttons' do
expect(page).to have_link('New subgroup')
expect(page).not_to have_link('New project')
end
@@ -502,7 +502,7 @@ RSpec.describe 'Group', feature_category: :groups_and_projects do
sign_in(user)
visit group_path(group)
- page.within '[data-testid="group-buttons"]' do
+ within_testid 'group-buttons' do
expect(page).to have_link('New project')
expect(page).not_to have_link('New subgroup')
end
@@ -515,7 +515,7 @@ RSpec.describe 'Group', feature_category: :groups_and_projects do
visit group_path(group)
- page.within '[data-testid="group-buttons"]' do
+ within_testid 'group-buttons' do
expect(page).to have_link('New subgroup')
expect(page).to have_link('New project')
end
@@ -542,7 +542,7 @@ RSpec.describe 'Group', feature_category: :groups_and_projects do
it 'does not display the "New project" button' do
visit group_path(group)
- page.within '[data-testid="group-buttons"]' do
+ within_testid 'group-buttons' do
expect(page).not_to have_link('New project')
end
end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
index 693f2599de5..bc0674c4886 100644
--- a/spec/features/incidents/incident_details_spec.rb
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
# shows the incident tabs
page.within('.issuable-details') do
- incident_tabs = find('[data-testid="incident-tabs"]')
+ incident_tabs = find_by_testid('incident-tabs')
expect(find('h1')).to have_content(incident.title)
expect(incident_tabs).to have_content('Summary')
@@ -66,14 +66,21 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
describe 'escalation status' do
let(:sidebar) { page.find('.right-sidebar') }
- let(:widget) { sidebar.find('[data-testid="escalation_status_container"]') }
+ let(:widget) do
+ within sidebar do
+ find_by_testid('escalation_status_container')
+ end
+ end
+
let(:expected_dropdown_options) { escalation_status.class::STATUSES.keys.take(3).map { |key| key.to_s.titleize } }
it 'has an interactable escalation status widget', :aggregate_failures do
expect(current_status).to have_text(escalation_status.status_name.to_s.titleize)
# list the available statuses
- widget.find('[data-testid="edit-button"]').click
+ within widget do
+ find_by_testid('edit-button').click
+ end
expect(dropdown_options.map(&:text)).to eq(expected_dropdown_options)
expect(widget).not_to have_selector('#escalation-status-help')
@@ -95,7 +102,9 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
end
def current_status
- widget.find('[data-testid="collapsed-content"]')
+ within widget do
+ find_by_testid('collapsed-content')
+ end
end
end
end
@@ -108,9 +117,9 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
click_button 'Edit title and description'
wait_for_requests
- page.within('[data-testid="issuable-form"]') do
+ within_testid('issuable-form') do
click_button 'Issue'
- find('[data-testid="issue-type-list-item"]', text: 'Incident').click
+ find_by_testid('issue-type-list-item', text: 'Incident').click
click_button 'Save changes'
end
@@ -130,9 +139,9 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
click_button 'Edit title and description'
wait_for_requests
- page.within('[data-testid="issuable-form"]') do
+ within_testid('issuable-form') do
click_button 'Incident'
- find('[data-testid="issue-type-list-item"]', text: 'Issue').click
+ find_by_testid('issue-type-list-item', text: 'Issue').click
click_button 'Save changes'
end
diff --git a/spec/features/incidents/incident_timeline_events_spec.rb b/spec/features/incidents/incident_timeline_events_spec.rb
index bd3658ab60f..4ceae0f8780 100644
--- a/spec/features/incidents/incident_timeline_events_spec.rb
+++ b/spec/features/incidents/incident_timeline_events_spec.rb
@@ -86,8 +86,8 @@ RSpec.describe 'Incident timeline events', :js, feature_category: :incident_mana
def trigger_dropdown_action(text)
click_button _('More actions')
- page.within '[data-testid="disclosure-content"]' do
- page.find('[data-testid="disclosure-dropdown-item"]', text: text).click
+ within_testid 'disclosure-content' do
+ find_by_testid('disclosure-dropdown-item', text: text).click
end
end
end
diff --git a/spec/features/integrations_settings_spec.rb b/spec/features/integrations_settings_spec.rb
index 70ce2f55161..f460e051516 100644
--- a/spec/features/integrations_settings_spec.rb
+++ b/spec/features/integrations_settings_spec.rb
@@ -11,19 +11,17 @@ RSpec.describe 'Integration settings', feature_category: :integrations do
sign_in(user)
end
- context 'with Shimo Zentao integration records' do
+ context 'with Zentao integration records' do
before do
- create(:integration, project: project, type_new: 'Integrations::Shimo', category: 'issue_tracker')
create(:integration, project: project, type_new: 'Integrations::Zentao', category: 'issue_tracker')
end
- it 'shows settings without Shimo Zentao', :js do
+ it 'shows settings without Zentao', :js do
visit namespace_project_settings_integrations_path(namespace_id: project.namespace.full_path,
project_id: project.path)
expect(page).to have_content('Add an integration')
expect(page).not_to have_content('ZenTao')
- expect(page).not_to have_content('Shimo')
end
end
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index bc6efb63f6f..04ac94685cf 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_category: :acquisition do
let_it_be(:owner) { create(:user, name: 'John Doe') }
- # private will ensure we really have access to the group when we land on the activity page
+ # private will ensure we really have access to the group when we land on the group page
let_it_be(:group) { create(:group, :private, name: 'Owned') }
let_it_be(:project) { create(:project, :repository, namespace: group) }
@@ -17,17 +17,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
group.add_owner(owner)
end
- def confirm_email(new_user)
- new_user_token = User.find_by_email(new_user.email).confirmation_token
-
- visit user_confirmation_path(confirmation_token: new_user_token)
- end
-
- def fill_in_welcome_form
- select 'Software Developer', from: 'user_role'
- click_button 'Get started!'
- end
-
context 'when inviting a registered user' do
let(:invite_email) { 'user@example.com' }
@@ -53,8 +42,8 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
expect(find_field('Username or primary email').value).to eq(group_invite.invite_email)
end
- it 'pre-fills the Email field on the sign up box with the invite_email from the invite' do
- expect(find_field('Email').value).to eq(group_invite.invite_email)
+ it 'shows the Email to be the invite_email from the invite' do
+ expect(find_by_testid('invite-email').text).to eq(group_invite.invite_email)
end
end
@@ -66,12 +55,12 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
visit invite_path(group_invite.raw_invite_token, invite_type: Emails::Members::INITIAL_INVITE)
end
- it 'sign in, grants access and redirects to group activity page' do
+ it 'sign in, grants access and redirects to group page' do
click_link 'Sign in'
gitlab_sign_in(user, remember: true, visit: false)
- expect_to_be_on_group_activity_page(group)
+ expect_to_be_on_group_page(group)
end
end
@@ -132,8 +121,8 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
end
end
- def expect_to_be_on_group_activity_page(group)
- expect(page).to have_current_path(activity_group_path(group))
+ def expect_to_be_on_group_page(group)
+ expect(page).to have_current_path(group_path(group))
end
end
end
@@ -160,7 +149,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
end
it 'does not sign the user in' do
- fill_in_sign_up_form(new_user)
+ fill_in_sign_up_form(new_user, invite: true)
expect(page).to have_current_path(new_user_session_path, ignore_query: true)
sign_up_message = 'You have signed up successfully. However, we could not sign you in because your account ' \
@@ -175,23 +164,13 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
end
context 'when the user signs up for an account with the invitation email address' do
- it 'redirects to the most recent membership activity page with all invitations automatically accepted' do
- fill_in_sign_up_form(new_user)
+ it 'redirects to the most recent membership group page with all invitations automatically accepted' do
+ fill_in_sign_up_form(new_user, invite: true)
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(page).to have_current_path(group_path(group), ignore_query: true)
expect(page).to have_content('You have been granted Owner access to group Owned.')
end
end
-
- context 'when the user sign-up using a different email address' do
- let(:invite_email) { build_stubbed(:user).email }
-
- it 'signs up and redirects to the projects dashboard' do
- fill_in_sign_up_form(new_user)
-
- expect_to_be_on_projects_dashboard_with_zero_authorized_projects
- end
- end
end
context 'with email confirmation enabled' do
@@ -199,16 +178,16 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
let(:new_user) { build_stubbed(:user, password: '11111111') }
it 'fails sign up and redirects back to sign up', :aggregate_failures do
- expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
+ expect { fill_in_sign_up_form(new_user, invite: true) }.not_to change { User.count }
expect(page).to have_content('prohibited this user from being saved')
expect(page).to have_current_path(user_registration_path, ignore_query: true)
- expect(find_field('Email').value).to eq(group_invite.invite_email)
+ expect(find_by_testid('invite-email').text).to eq(group_invite.invite_email)
end
end
context 'with invite email acceptance', :snowplow do
it 'tracks the accepted invite' do
- fill_in_sign_up_form(new_user)
+ fill_in_sign_up_form(new_user, invite: true)
expect_snowplow_event(
category: 'RegistrationsController',
@@ -221,51 +200,25 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
end
context 'when the user signs up for an account with the invitation email address' do
- it 'redirects to the most recent membership activity page with all invitations automatically accepted' do
- fill_in_sign_up_form(new_user)
+ it 'redirects to the most recent membership group page with all invitations automatically accepted' do
+ fill_in_sign_up_form(new_user, invite: true)
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(page).to have_current_path(group_path(group), ignore_query: true)
end
end
+ end
- context 'when the user signs up using a different email address' do
- let(:invite_email) { build_stubbed(:user).email }
-
- context 'when email confirmation is not set to `soft`' do
- before do
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
- stub_feature_flags(identity_verification: false)
- end
-
- it 'signs up and redirects to the projects dashboard' do
- fill_in_sign_up_form(new_user)
- confirm_email(new_user)
- gitlab_sign_in(new_user, remember: true, visit: false)
-
- expect_to_be_on_projects_dashboard_with_zero_authorized_projects
- end
- end
-
- context 'when email confirmation setting is set to `soft`' do
- before do
- stub_application_setting_enum('email_confirmation_setting', 'soft')
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
- end
-
- it 'signs up and redirects to the projects dashboard' do
- fill_in_sign_up_form(new_user)
-
- expect_to_be_on_projects_dashboard_with_zero_authorized_projects
- end
- end
+ context 'when the email is already taken by a registered user' do
+ before do
+ create(:user, email: invite_email)
end
- end
- def expect_to_be_on_projects_dashboard_with_zero_authorized_projects
- expect(page).to have_current_path(dashboard_projects_path)
+ it 'shows error state of email already used' do
+ fill_in_sign_up_form(new_user, invite: true)
- expect(page).to have_content _('Welcome to GitLab')
- expect(page).to have_content _('Faster releases. Better code. Less pain.')
+ expect(page).to have_content('Email has already been taken')
+ expect(find_by_testid('invite-email').text).to eq(group_invite.invite_email)
+ end
end
end
@@ -275,9 +228,9 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
expect(page).to have_current_path(new_user_registration_path, ignore_query: true)
- fill_in_sign_up_form(new_user, 'Register')
+ fill_in_sign_up_form(new_user, 'Register', invite: true)
- expect(page).to have_current_path(activity_group_path(group))
+ expect(page).to have_current_path(group_path(group))
expect(page).to have_content('You have been granted Owner access to group Owned.')
end
end
@@ -301,16 +254,16 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
gitlab_sign_in(user)
end
- it 'does not accept the pending invitation and does not redirect to the groups activity path' do
- expect(page).not_to have_current_path(activity_group_path(group), ignore_query: true)
+ it 'does not accept the pending invitation and does not redirect to the group path' do
+ expect(page).not_to have_current_path(group_path(group), ignore_query: true)
expect(group.reload.users).not_to include(user)
end
context 'when the secondary email address is confirmed' do
let(:secondary_email) { create(:email, :confirmed, user: user) }
- it 'accepts the pending invitation and redirects to the groups activity path' do
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ it 'accepts the pending invitation and redirects to the group path' do
+ expect(page).to have_current_path(group_path(group), ignore_query: true)
expect(group.reload.users).to include(user)
end
end
diff --git a/spec/features/issuables/shortcuts_issuable_spec.rb b/spec/features/issuables/shortcuts_issuable_spec.rb
index 6bb453c34e6..13bec61e4da 100644
--- a/spec/features/issuables/shortcuts_issuable_spec.rb
+++ b/spec/features/issuables/shortcuts_issuable_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe 'Blob shortcuts', :js, feature_category: :team_planning do
it "opens milestones dropdown for editing" do
find('body').native.send_key('m')
- expect(find('[data-testid="milestone-edit"]')).to have_selector('.gl-dropdown-inner')
+ expect(find_by_testid('milestone-edit')).to have_selector('.gl-dropdown-inner')
end
end
diff --git a/spec/features/issuables/sorting_list_spec.rb b/spec/features/issuables/sorting_list_spec.rb
index 9045124cc8c..d960081c517 100644
--- a/spec/features/issuables/sorting_list_spec.rb
+++ b/spec/features/issuables/sorting_list_spec.rb
@@ -2,6 +2,7 @@
require 'spec_helper'
RSpec.describe 'Sort Issuable List', feature_category: :team_planning do
+ include Features::SortingHelpers
include ListboxHelpers
let(:project) { create(:project, :public) }
@@ -195,8 +196,7 @@ RSpec.describe 'Sort Issuable List', feature_category: :team_planning do
it 'supports sorting in asc and desc order' do
visit_issues_with_state(project, 'opened')
- click_button('Created date')
- click_on('Updated date')
+ pajamas_sort_by 'Updated date', from: 'Created date'
expect(page).to have_css('.issue:first-child', text: last_updated_issuable.title)
expect(page).to have_css('.issue:last-child', text: first_updated_issuable.title)
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index 4c4a5624d00..a671edb3ceb 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
end
end
- context 'creating an issue for threads' do
+ context 'creating an issue for threads', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/420845' do
before do
find('.discussions-counter .gl-new-dropdown-toggle').click
find(resolve_all_discussions_link_selector).click
@@ -79,7 +79,9 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
end
it 'shows a warning that the merge request contains unresolved threads' do
- expect(page).to have_content 'all threads must be resolved'
+ click_button 'Expand merge checks'
+
+ expect(page).to have_content 'Unresolved discussions must be resolved'
end
end
end
diff --git a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
index 73a920421a3..52a8f0b105a 100644
--- a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
@@ -57,6 +57,8 @@ RSpec.describe 'Resolve an open thread in a merge request by creating an issue',
context 'creating the issue' do
before do
+ allow(Gitlab::QueryLimiting::Transaction).to receive(:threshold).and_return(105)
+
find(resolve_discussion_selector, match: :first).click
end
diff --git a/spec/features/issues/discussion_lock_spec.rb b/spec/features/issues/discussion_lock_spec.rb
index 04d59854ddc..2ef912061e6 100644
--- a/spec/features/issues/discussion_lock_spec.rb
+++ b/spec/features/issues/discussion_lock_spec.rb
@@ -6,104 +6,240 @@ RSpec.describe 'Discussion Lock', :js, feature_category: :team_planning do
let(:user) { create(:user) }
let(:issue) { create(:issue, project: project, author: user) }
let(:project) { create(:project, :public) }
+ let(:more_dropdown) { find_by_testid('desktop-dropdown') }
+ let(:issuable_lock) { find_by_testid('issuable-lock') }
+ let(:locked_badge) { '[data-testid="locked-badge"]' }
+ let(:issuable_note_warning) { '[data-testid="issuable-note-warning"]' }
- before do
- sign_in(user)
- stub_feature_flags(moved_mr_sidebar: false)
- end
-
- context 'when a user is a team member' do
+ context 'when feature flag is disabled' do
before do
- project.add_developer(user)
+ sign_in(user)
+ stub_feature_flags(moved_mr_sidebar: false)
end
- context 'when the discussion is unlocked' do
- it 'the user can lock the issue' do
- visit project_issue_path(project, issue)
+ context 'when a user is a team member' do
+ before do
+ project.add_developer(user)
+ end
- expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ context 'when the discussion is unlocked' do
+ it 'the user can lock the issue' do
+ visit project_issue_path(project, issue)
- page.within('.issuable-sidebar') do
- find('.lock-edit').click
- click_button('Lock')
- end
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Lock')
+ end
- expect(find('#notes')).to have_content('locked the discussion in this issue')
+ expect(find('#notes')).to have_content('locked the discussion in this issue')
+ end
end
- end
- context 'when the discussion is locked' do
- before do
- issue.update_attribute(:discussion_locked, true)
- visit project_issue_path(project, issue)
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
+ end
+
+ it 'the user can unlock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Unlock')
+ end
+
+ expect(find('#notes')).to have_content('unlocked the discussion in this issue')
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ end
+
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
end
+ end
- it 'the user can unlock the issue' do
- expect(find('.issuable-sidebar')).to have_content('Locked')
+ context 'when a user is not a team member' do
+ context 'when the discussion is unlocked' do
+ before do
+ visit project_issue_path(project, issue)
+ end
- page.within('.issuable-sidebar') do
- find('.lock-edit').click
- click_button('Unlock')
+ it 'the user can not lock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
end
- expect(find('#notes')).to have_content('unlocked the discussion in this issue')
- expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
end
- it 'the user can create a comment' do
- page.within('#notes .js-main-target-form') do
- fill_in 'note[note]', with: 'Some new comment'
- click_button 'Comment'
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
end
- wait_for_requests
+ it 'the user can not unlock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ end
- expect(find('div#notes')).to have_content('Some new comment')
+ it 'the user can not create a comment' do
+ page.within('#notes') do
+ expect(page).not_to have_selector('.js-main-target-form')
+ expect(find_by_testid('disabled-comments'))
+ .to have_content('The discussion in this issue is locked. Only project members can comment.')
+ end
+ end
end
end
- end
- context 'when a user is not a team member' do
- context 'when the discussion is unlocked' do
+ context 'for axe automated accessibility testing' do
before do
+ project.add_developer(user)
+ issue.update_attribute(:discussion_locked, true)
visit project_issue_path(project, issue)
end
- it 'the user can not lock the issue' do
- expect(find('.issuable-sidebar')).to have_content('Unlocked')
- expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ it 'passes tests' do
+ # rubocop:disable Capybara/TestidFinders -- within_testid does not work here
+ expect(page).to be_axe_clean.within(locked_badge)
+ expect(page).to be_axe_clean.within(issuable_note_warning)
+ # rubocop:enable Capybara/TestidFinders
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ expect(page).to be_axe_clean.within('.lock-edit')
+ end
end
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ before do
+ sign_in(user)
+ stub_feature_flags(moved_mr_sidebar: true)
+ end
+
+ context 'when a user is a team member' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when the discussion is unlocked' do
+ it 'the user can lock the issue' do
+ visit project_issue_path(project, issue)
+
+ more_dropdown.click
+ expect(issuable_lock).to have_content('Lock discussion')
- it 'the user can create a comment' do
- page.within('#notes .js-main-target-form') do
- fill_in 'note[note]', with: 'Some new comment'
- click_button 'Comment'
+ issuable_lock.click
+ expect(find('#notes')).to have_content('locked the discussion in this issue')
+ end
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
end
- wait_for_requests
+ it 'the user can unlock the issue' do
+ more_dropdown.click
+ expect(issuable_lock).to have_content('Unlock discussion')
+
+ issuable_lock.click
+ expect(find('#notes')).to have_content('unlocked the discussion in this issue')
+ expect(issuable_lock).to have_content('Lock discussion')
+ end
- expect(find('div#notes')).to have_content('Some new comment')
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
end
end
- context 'when the discussion is locked' do
- before do
- issue.update_attribute(:discussion_locked, true)
- visit project_issue_path(project, issue)
- end
+ context 'when a user is not a team member' do
+ context 'when the discussion is unlocked' do
+ before do
+ visit project_issue_path(project, issue)
+ end
- it 'the user can not unlock the issue' do
- expect(find('.issuable-sidebar')).to have_content('Locked')
- expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ it 'the user can not lock the issue' do
+ more_dropdown.click
+ expect(issuable_lock).to have_content('Lock discussion')
+ end
+
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
end
- it 'the user can not create a comment' do
- page.within('#notes') do
- expect(page).not_to have_selector('js-main-target-form')
- expect(page.find('.disabled-comments'))
- .to have_content('The discussion in this issue is locked. Only project members can comment.')
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
+ end
+
+ it 'the user can not unlock the issue' do
+ more_dropdown.click
+ expect(issuable_lock).to have_content('Unlock discussion')
+ end
+
+ it 'the user can not create a comment' do
+ page.within('#notes') do
+ expect(page).not_to have_selector('js-main-target-form')
+ expect(find_by_testid('disabled-comments'))
+ .to have_content('The discussion in this issue is locked. Only project members can comment.')
+ end
end
end
end
+
+ it 'passes axe automated accessibility testing' do
+ project.add_developer(user)
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
+ wait_for_all_requests
+
+ # rubocop:disable Capybara/TestidFinders -- within_testid does not work here
+ expect(page).to be_axe_clean.within(locked_badge)
+ expect(page).to be_axe_clean.within(issuable_note_warning)
+
+ more_dropdown.click
+ expect(page).to be_axe_clean.within('[data-testid="lock-issue-toggle"] button')
+ # rubocop:enable Capybara/TestidFinders
+ end
end
end
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index 2fb30469691..896d86e4004 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -148,12 +148,12 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
click_button _('Select label')
wait_for_all_requests
- page.within '[data-testid="sidebar-labels"]' do
+ within_testid('sidebar-labels') do
click_button label.title
click_button label2.title
click_button _('Close')
wait_for_requests
- page.within('[data-testid="embedded-labels-list"]') do
+ within_testid('embedded-labels-list') do
expect(page).to have_content(label.title)
expect(page).to have_content(label2.title)
end
@@ -188,13 +188,13 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
wait_for_all_requests
- page.within '[data-testid="sidebar-labels"]' do
+ within_testid 'sidebar-labels' do
click_button label.title
click_button _('Close')
wait_for_requests
- page.within('[data-testid="embedded-labels-list"]') do
+ within_testid('embedded-labels-list') do
expect(page).to have_content(label.title)
end
@@ -205,7 +205,7 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
wait_for_all_requests
- page.within '[data-testid="sidebar-labels"]' do
+ within_testid 'sidebar-labels' do
click_button label.title, class: 'dropdown-item'
click_button _('Close')
@@ -221,7 +221,7 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
wait_for_all_requests
- page.within '[data-testid="sidebar-labels"]' do
+ within_testid 'sidebar-labels' do
search_field = find('input[type="search"]')
search_field.native.send_keys(label.title)
@@ -238,7 +238,7 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
it 'displays an error message when submitting an invalid form' do
click_button 'Create issue'
- page.within('[data-testid="issue-title-input-field"]') do
+ within_testid('issue-title-input-field') do
expect(page).to have_text(_('This field is required.'))
end
end
@@ -463,14 +463,14 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
wait_for_all_requests
- page.within '[data-testid="sidebar-labels"]' do
+ within_testid 'sidebar-labels' do
click_button label.title
click_button label2.title
click_button _('Close')
wait_for_requests
- page.within('[data-testid="embedded-labels-list"]') do
+ within_testid('embedded-labels-list') do
expect(page).to have_content(label.title)
expect(page).to have_content(label2.title)
end
@@ -580,14 +580,14 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
wait_for_all_requests
- page.within '[data-testid="sidebar-labels"]' do
+ within_testid 'sidebar-labels' do
click_button _('Create project label')
wait_for_requests
end
page.within '.js-labels-create' do
- find('[data-testid="label-title-input"]').fill_in with: 'test label'
+ find_by_testid('label-title-input').fill_in with: 'test label'
first('.suggest-colors-dropdown a').click
click_button 'Create'
diff --git a/spec/features/issues/incident_issue_spec.rb b/spec/features/issues/incident_issue_spec.rb
index 5197f5d1e33..9225ba03003 100644
--- a/spec/features/issues/incident_issue_spec.rb
+++ b/spec/features/issues/incident_issue_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe 'Incident Detail', :js, feature_category: :team_planning do
it 'shows incident and alert data' do
page.within('.issuable-details') do
- incident_tabs = find('[data-testid="incident-tabs"]')
+ incident_tabs = find_by_testid('incident-tabs')
aggregate_failures 'shows title and Summary tab' do
expect(find('h1')).to have_content(incident.title)
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index c8802a9cc71..15cb1678c55 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -138,7 +138,7 @@ RSpec.describe 'Issue Detail', :js, feature_category: :team_planning do
it 'cannot see Incident option' do
open_issue_edit_form
- page.within('[data-testid="issuable-form"]') do
+ within_testid('issuable-form') do
expect(page).to have_content('Issue')
expect(page).not_to have_content('Incident')
end
@@ -151,7 +151,7 @@ RSpec.describe 'Issue Detail', :js, feature_category: :team_planning do
it 'routes the user to the incident details page when the `issue_type` is set to incident' do
open_issue_edit_form
- page.within('[data-testid="issuable-form"]') do
+ within_testid('issuable-form') do
update_type_select('Issue', 'Incident')
expect(page).to have_current_path(incident_project_issues_path(project, issue))
@@ -181,7 +181,7 @@ RSpec.describe 'Issue Detail', :js, feature_category: :team_planning do
it 'routes the user to the issue details page when the `issue_type` is set to issue' do
open_issue_edit_form
- page.within('[data-testid="issuable-form"]') do
+ within_testid('issuable-form') do
update_type_select('Incident', 'Issue')
expect(page).to have_current_path(project_issue_path(project, incident))
@@ -193,7 +193,7 @@ RSpec.describe 'Issue Detail', :js, feature_category: :team_planning do
def update_type_select(from, to)
click_button from
- find('[data-testid="issue-type-list-item"]', text: to).click
+ find_by_testid('issue-type-list-item', text: to).click
click_button 'Save changes'
wait_for_requests
diff --git a/spec/features/issues/service_desk_spec.rb b/spec/features/issues/service_desk_spec.rb
index 8662f0f98f5..3a2074b9b13 100644
--- a/spec/features/issues/service_desk_spec.rb
+++ b/spec/features/issues/service_desk_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_des
aggregate_failures do
expect(page).to have_css('.empty-state')
expect(page).to have_text('Use Service Desk to connect with your users')
- expect(page).to have_link('Learn more.', href: help_page_path('user/project/service_desk'))
+ expect(page).to have_link('Learn more.', href: help_page_path('user/project/service_desk/index'))
expect(page).not_to have_link('Enable Service Desk')
expect(page).to have_content(project.service_desk_address)
end
@@ -71,7 +71,7 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_des
aggregate_failures do
expect(page).to have_css('.empty-state')
expect(page).to have_text('Use Service Desk to connect with your users')
- expect(page).to have_link('Learn more.', href: help_page_path('user/project/service_desk'))
+ expect(page).to have_link('Learn more.', href: help_page_path('user/project/service_desk/index'))
expect(page).not_to have_link('Enable Service Desk')
expect(page).not_to have_content(project.service_desk_address)
end
@@ -93,7 +93,7 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_des
it 'displays the small info box, documentation, a button to configure service desk, and the address' do
aggregate_failures do
- expect(page).to have_link('Learn more.', href: help_page_path('user/project/service_desk'))
+ expect(page).to have_link('Learn more.', href: help_page_path('user/project/service_desk/index'))
expect(page).not_to have_link('Enable Service Desk')
expect(page).to have_content(project.service_desk_address)
end
diff --git a/spec/features/issues/user_interacts_with_awards_spec.rb b/spec/features/issues/user_interacts_with_awards_spec.rb
index 813fdeea0a1..d47b6535688 100644
--- a/spec/features/issues/user_interacts_with_awards_spec.rb
+++ b/spec/features/issues/user_interacts_with_awards_spec.rb
@@ -306,18 +306,6 @@ RSpec.describe 'User interacts with awards', feature_category: :team_planning do
expect(page).not_to have_selector(emoji_counter)
end
end
-
- context 'execute /award quick action' do
- xit 'toggles the emoji award on noteable', :js do
- execute_quick_action('/award :100:')
-
- expect(find(noteable_award_counter)).to have_text("1")
-
- execute_quick_action('/award :100:')
-
- expect(page).not_to have_selector(noteable_award_counter)
- end
- end
end
end
diff --git a/spec/features/issues/user_sorts_issues_spec.rb b/spec/features/issues/user_sorts_issues_spec.rb
index 206544b32a4..5c6198785d0 100644
--- a/spec/features/issues/user_sorts_issues_spec.rb
+++ b/spec/features/issues/user_sorts_issues_spec.rb
@@ -3,6 +3,7 @@
require "spec_helper"
RSpec.describe "User sorts issues", feature_category: :team_planning do
+ include Features::SortingHelpers
include SortingHelper
include IssueHelpers
@@ -46,8 +47,7 @@ RSpec.describe "User sorts issues", feature_category: :team_planning do
it 'sorts by popularity', :js do
visit(project_issues_path(project))
- click_button 'Created date'
- click_on 'Popularity'
+ pajamas_sort_by 'Popularity', from: 'Created date'
page.within(".issues-list") do
page.within("li.issue:nth-child(1)") do
diff --git a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
index 78814e36cfe..1a7b72e6809 100644
--- a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
@@ -62,7 +62,10 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea
wait_for_requests
expect(page).not_to have_button('Merge', exact: true)
- expect(page).to have_content('Merge blocked: pipeline must succeed. Push a commit that fixes the failure or learn about other solutions.')
+
+ click_button 'Expand merge checks'
+
+ expect(page).to have_content('Pipeline must succeed.')
end
end
@@ -75,7 +78,10 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea
wait_for_requests
expect(page).not_to have_button('Merge', exact: true)
- expect(page).to have_content('Merge blocked: pipeline must succeed. Push a commit that fixes the failure or learn about other solutions.')
+
+ click_button 'Expand merge checks'
+
+ expect(page).to have_content('Pipeline must succeed.')
end
end
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index f13c68a60ee..e5eabd7e510 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -15,8 +15,6 @@ RSpec.describe 'Merge request > User posts diff notes', :js, feature_category: :
let(:test_note_comment) { 'this is a test note!' }
before do
- set_cookie('sidebar_collapsed', 'true')
-
project.add_developer(user)
sign_in(user)
end
diff --git a/spec/features/merge_request/user_rebases_merge_request_spec.rb b/spec/features/merge_request/user_rebases_merge_request_spec.rb
index 1b46b4e1d7f..1dc785724d3 100644
--- a/spec/features/merge_request/user_rebases_merge_request_spec.rb
+++ b/spec/features/merge_request/user_rebases_merge_request_spec.rb
@@ -14,11 +14,15 @@ RSpec.describe "User rebases a merge request", :js, feature_category: :code_revi
it "rebases" do
visit(merge_request_path(merge_request))
+ wait_for_requests
+
+ click_button 'Expand merge checks'
+
expect(page).to have_button("Rebase")
click_button("Rebase")
- expect(page).to have_content("Rebase in progress")
+ expect(find_by_testid('standard-rebase-button')).to have_selector(".gl-spinner")
end
end
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index 0f283f1194f..2fdd0a7dd51 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -96,6 +96,8 @@ RSpec.describe 'Merge request > User resolves conflicts', :js, feature_category:
before do
visit project_merge_request_path(project, merge_request)
+
+ click_button 'Expand merge checks'
end
it 'shows a link to the conflict resolution page' do
@@ -127,6 +129,9 @@ RSpec.describe 'Merge request > User resolves conflicts', :js, feature_category:
before do
visit project_merge_request_path(project, merge_request)
+
+ click_button 'Expand merge checks'
+
click_link('conflicts', href: %r{/conflicts\Z})
end
@@ -169,6 +174,9 @@ RSpec.describe 'Merge request > User resolves conflicts', :js, feature_category:
before do
visit project_merge_request_path(project, merge_request)
+
+ click_button 'Expand merge checks'
+
click_link('conflicts', href: %r{/conflicts\Z})
end
@@ -193,6 +201,8 @@ RSpec.describe 'Merge request > User resolves conflicts', :js, feature_category:
project.add_developer(user)
sign_in(user)
visit project_merge_request_path(project, merge_request)
+
+ click_button 'Expand merge checks'
end
it 'does not show a link to the conflict resolution page' do
diff --git a/spec/features/merge_request/user_resolves_wip_mr_spec.rb b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
index 15a7755429b..dd1e73cf57f 100644
--- a/spec/features/merge_request/user_resolves_wip_mr_spec.rb
+++ b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
@@ -32,6 +32,9 @@ RSpec.describe 'Merge request > User resolves Draft', :js, feature_category: :co
context 'when there is active pipeline for merge request' do
before do
create(:ci_build, pipeline: pipeline)
+
+ stub_feature_flags(merge_blocked_component: false)
+
sign_in(user)
visit project_merge_request_path(project, merge_request)
wait_for_requests
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 98ea72b9569..7c73eed5c1f 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -29,8 +29,6 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js, feature_c
before do
sign_in user
-
- set_cookie('sidebar_collapsed', 'true')
end
context 'discussion tab' do
diff --git a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
index 9955c13b769..5843585a95c 100644
--- a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
@@ -22,7 +22,10 @@ RSpec.describe 'Merge request > User sees merge button depending on unresolved t
context 'with unresolved threads' do
it 'does not allow to merge' do
expect(page).not_to have_button('Merge', exact: true)
- expect(page).to have_content('all threads must be resolved')
+
+ click_button 'Expand merge checks'
+
+ expect(page).to have_content('Unresolved discussions must be resolved')
end
end
diff --git a/spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb b/spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb
index 8caa13c6297..8a039bf9840 100644
--- a/spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb
@@ -62,6 +62,11 @@ RSpec.describe 'Merge request > User sees merge request file tree sidebar', :js,
context 'when navigating to the next file' do
before do
click_link 'Next'
+ wait_for_requests
+ # when we click the Next button the viewport will be scrolled a bit into the diffs view
+ # this will cause for the file tree sidebar height to be recalculated
+ # because this logic is async and debounced twice we have to wait for the layout to stabilize
+ sleep(1)
end
it_behaves_like 'last entry clickable'
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 5e683ddf7ba..b5c0c163f98 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -371,7 +371,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
sha: merge_request.diff_head_sha,
user: user,
merge_request: merge_request,
- status: :running)
+ status: :pending)
merge_request.update_head_pipeline
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index c18b2c97f96..4327a08efb9 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -173,9 +173,10 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
end
it 'shows information about blocked pipeline' do
+ click_button 'Expand merge checks'
+
expect(page).to have_content("Merge blocked")
- expect(page).to have_content(
- "pipeline must succeed. It's waiting for a manual action to continue.")
+ expect(page).to have_content("Pipeline must succeed.")
expect(page).to have_css('[data-testid="status_manual_borderless-icon"]')
end
end
@@ -399,7 +400,9 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
- expect(page).to have_content('Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.')
+ click_button 'Expand merge checks'
+
+ expect(page).to have_content('Merge request must be rebased, because a fast-forward merge is not possible.')
end
end
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index 715cc2f73be..2df9d91c8ee 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -56,6 +56,10 @@ RSpec.describe 'Merge request > User sees versions', :js, feature_category: :cod
expect(page).to have_content '8 files'
end
+ it 'has the correct count for changes' do
+ expect(page).to have_content 'Changes 8'
+ end
+
it_behaves_like 'allows commenting',
file_name: '.gitmodules',
line_text: '[submodule "six"]',
@@ -123,6 +127,10 @@ RSpec.describe 'Merge request > User sees versions', :js, feature_category: :cod
end
end
+ it 'has the correct count for changes' do
+ expect(page).to have_content 'Changes 4'
+ end
+
it 'has a path with comparison context and shows comments that were last relevant at that version' do
expect(page).to have_current_path diffs_project_merge_request_path(
project,
diff --git a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
index 5ccc24ebca1..3c2e1c4b37e 100644
--- a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'User sorts merge requests', :js, feature_category: :code_review_
end
it 'keeps the sort option' do
- pajamas_sort_by(s_('SortOptions|Milestone'))
+ pajamas_sort_by(s_('SortOptions|Milestone'), from: s_('SortOptions|Created date'))
visit(merge_requests_dashboard_path(assignee_username: user.username))
@@ -49,7 +49,7 @@ RSpec.describe 'User sorts merge requests', :js, feature_category: :code_review_
it 'separates remember sorting with issues', :js do
create(:issue, project: project)
- pajamas_sort_by(s_('SortOptions|Milestone'))
+ pajamas_sort_by(s_('SortOptions|Milestone'), from: s_('SortOptions|Created date'))
visit(project_issues_path(project))
@@ -66,7 +66,7 @@ RSpec.describe 'User sorts merge requests', :js, feature_category: :code_review_
end
it 'sorts by popularity' do
- pajamas_sort_by(s_('SortOptions|Popularity'))
+ pajamas_sort_by(s_('SortOptions|Popularity'), from: s_('SortOptions|Created date'))
page.within('.mr-list') do
page.within('li.merge-request:nth-child(1)') do
diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb
index b6c96555767..6c6deef5b74 100644
--- a/spec/features/profile_spec.rb
+++ b/spec/features/profile_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe 'Profile account page', :js, feature_category: :user_profile do
end
it 'allows resetting of feed token' do
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
previous_token = ''
@@ -88,7 +88,7 @@ RSpec.describe 'Profile account page', :js, feature_category: :user_profile do
it 'allows resetting of incoming email token' do
allow(Gitlab.config.incoming_email).to receive(:enabled).and_return(true)
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
previous_token = ''
diff --git a/spec/features/profiles/keys_spec.rb b/spec/features/profiles/keys_spec.rb
index cb270b669d3..65b65c356fa 100644
--- a/spec/features/profiles/keys_spec.rb
+++ b/spec/features/profiles/keys_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Profile > SSH Keys', feature_category: :user_profile do
+RSpec.describe 'Profile > SSH Keys', feature_category: :source_code_management do
let(:user) { create(:user) }
before do
diff --git a/spec/features/profiles/two_factor_auths_spec.rb b/spec/features/profiles/two_factor_auths_spec.rb
index 15ab79684d9..cea3172898f 100644
--- a/spec/features/profiles/two_factor_auths_spec.rb
+++ b/spec/features/profiles/two_factor_auths_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Two factor auths', feature_category: :user_profile do
+RSpec.describe 'Two factor auths', feature_category: :system_access do
include Spec::Support::Helpers::ModalHelpers
context 'when signed in' do
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index 439839cfad5..e60589a161b 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -188,7 +188,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
end
context 'user status', :js do
- def select_emoji(emoji_name, is_modal = false)
+ def select_emoji(emoji_name)
toggle_button = find('.emoji-menu-toggle-button')
toggle_button.click
emoji_button = find("gl-emoji[data-name=\"#{emoji_name}\"]")
@@ -330,10 +330,12 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
find_by_testid('user-dropdown').click
within_testid('user-dropdown') do
- find('.js-set-status-modal-trigger.ready')
+ expect(page).to have_button(text: button_text, visible: :visible)
click_button button_text
end
+
+ expect(page.find('#set-user-status-modal')).to be_visible
end
def open_user_status_modal
@@ -386,7 +388,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
it 'adds emoji to user status' do
emoji = 'grinning'
open_user_status_modal
- select_emoji(emoji, true)
+ select_emoji(emoji)
set_user_status_in_modal
visit_user
@@ -415,7 +417,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
it 'opens the emoji modal again after closing it' do
open_user_status_modal
- select_emoji('grinning', true)
+ select_emoji('grinning')
find('.emoji-menu-toggle-button').click
@@ -428,7 +430,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
emoji = 'grinning'
open_user_status_modal
- select_emoji(emoji, true)
+ select_emoji(emoji)
expect(page.all('.award-control .js-counter')).to all(have_content('0'))
end
@@ -451,7 +453,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
emoji = 'grinning'
message = 'Playing outside'
open_user_status_modal
- select_emoji(emoji, true)
+ select_emoji(emoji)
find_field(s_("SetStatusModal|What's your status?")).native.send_keys(message)
set_user_status_in_modal
diff --git a/spec/features/profiles/user_manages_applications_spec.rb b/spec/features/profiles/user_manages_applications_spec.rb
index e3c4a797431..b4010cccbbc 100644
--- a/spec/features/profiles/user_manages_applications_spec.rb
+++ b/spec/features/profiles/user_manages_applications_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'User manages applications', feature_category: :user_profile do
let_it_be(:user) { create(:user) }
- let_it_be(:new_application_path) { applications_profile_path }
+ let_it_be(:new_application_path) { user_settings_applications_path }
let_it_be(:index_path) { oauth_applications_path }
before do
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 36665f2b77d..2089c9df145 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'File blob', :js, feature_category: :groups_and_projects do
+RSpec.describe 'File blob', :js, feature_category: :source_code_management do
include MobileHelpers
let(:project) { create(:project, :public, :repository) }
@@ -42,9 +42,6 @@ RSpec.describe 'File blob', :js, feature_category: :groups_and_projects do
expect(page).to have_css(".js-syntax-highlight")
expect(page).to have_content("require 'fileutils'")
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
@@ -299,9 +296,6 @@ RSpec.describe 'File blob', :js, feature_category: :groups_and_projects do
# shows text
expect(page).to have_content('size 1575078')
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
@@ -430,9 +424,6 @@ RSpec.describe 'File blob', :js, feature_category: :groups_and_projects do
# shows text
expect(page).to have_content('size 1575078')
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
# shows an enabled copy button
expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 9c4f70a68b8..e70e155bd01 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Editing file blob', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Editing file blob', :js, feature_category: :source_code_management do
include Features::SourceEditorSpecHelpers
include TreeHelper
include Features::BlobSpecHelpers
diff --git a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
index cd7601aa94e..cc819ccb5b9 100644
--- a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
+++ b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled', :js, feature_category: :groups_and_projects do
+RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled', :js, feature_category: :source_code_management do
include CookieHelper
let(:project) { create(:project, :empty_repo) }
diff --git a/spec/features/projects/branches/download_buttons_spec.rb b/spec/features/projects/branches/download_buttons_spec.rb
index a888b5b977d..89d9cfff1e0 100644
--- a/spec/features/projects/branches/download_buttons_spec.rb
+++ b/spec/features/projects/branches/download_buttons_spec.rb
@@ -40,15 +40,14 @@ RSpec.describe 'Download buttons in branches page', feature_category: :groups_an
let(:path_to_visit) { project_branches_filtered_path(project, state: 'all', search: ref) }
end
- context 'with artifacts' do
+ context 'with download source code button' do
before do
visit project_branches_filtered_path(project, state: 'all', search: 'binary-encoding')
end
- it 'shows download artifacts button' do
- href = latest_succeeded_project_artifacts_path(project, 'binary-encoding/download', job: 'build')
-
- expect(page).to have_link build.name, href: href
+ it 'passes axe automated accessibility testing', :js do
+ find_by_testid('download-source-code-button').click
+ expect(page).to be_axe_clean.within('.project-action-button')
end
end
end
diff --git a/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb b/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
index 0badde99bdb..c1b468a3746 100644
--- a/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
+++ b/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'New Branch Ref Dropdown', :js, feature_category: :groups_and_projects do
+RSpec.describe 'New Branch Ref Dropdown', :js, feature_category: :source_code_management do
include ListboxHelpers
let(:user) { create(:user) }
diff --git a/spec/features/projects/branches/user_creates_branch_spec.rb b/spec/features/projects/branches/user_creates_branch_spec.rb
index 8d636dacb75..70a387e3ceb 100644
--- a/spec/features/projects/branches/user_creates_branch_spec.rb
+++ b/spec/features/projects/branches/user_creates_branch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User creates branch', :js, feature_category: :groups_and_projects do
+RSpec.describe 'User creates branch', :js, feature_category: :source_code_management do
include Features::BranchesHelpers
let_it_be(:group) { create(:group, :public) }
diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb
index 7e7ab4b2a47..d468d3ec7e7 100644
--- a/spec/features/projects/branches/user_deletes_branch_spec.rb
+++ b/spec/features/projects/branches/user_deletes_branch_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User deletes branch", :js, feature_category: :groups_and_projects do
+RSpec.describe "User deletes branch", :js, feature_category: :source_code_management do
include Spec::Support::Helpers::ModalHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/features/projects/branches/user_views_branches_spec.rb b/spec/features/projects/branches/user_views_branches_spec.rb
index e396455b371..7cb27abc260 100644
--- a/spec/features/projects/branches/user_views_branches_spec.rb
+++ b/spec/features/projects/branches/user_views_branches_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User views branches", :js, feature_category: :groups_and_projects do
+RSpec.describe "User views branches", :js, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index 7915f446ee0..a29d643b15b 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Branches', feature_category: :groups_and_projects do
+RSpec.describe 'Branches', feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
let(:repository) { project.repository }
diff --git a/spec/features/projects/cluster_agents_spec.rb b/spec/features/projects/cluster_agents_spec.rb
index dcda24eb2e4..ab566b3b433 100644
--- a/spec/features/projects/cluster_agents_spec.rb
+++ b/spec/features/projects/cluster_agents_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'ClusterAgents', :js, feature_category: :groups_and_projects do
+RSpec.describe 'ClusterAgents', :js, feature_category: :environment_management do
let_it_be(:token) { create(:cluster_agent_token, description: 'feature test token') }
let(:agent) { token.agent }
@@ -12,7 +12,7 @@ RSpec.describe 'ClusterAgents', :js, feature_category: :groups_and_projects do
before do
allow(Gitlab::Kas).to receive(:enabled?).and_return(true)
allow_next_instance_of(Gitlab::Kas::Client) do |client|
- allow(client).to receive(:get_connected_agents).and_return([])
+ allow(client).to receive(:get_connected_agents_by_agent_ids).and_return([])
end
gitlab_sign_in(user)
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index e256b44c4dc..067963e06e0 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -111,4 +111,23 @@ RSpec.describe 'User Cluster', :js, feature_category: :deployment_management do
end
end
end
+
+ context 'when signed in user is an admin in admin_mode' do
+ let(:admin) { create(:admin) }
+
+ before do
+ # signs out the user with `maintainer` role in the project
+ gitlab_sign_out
+
+ gitlab_sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+
+ visit project_clusters_path(project)
+ end
+
+ it 'can visit the clusters index page', :aggregate_failures do
+ expect(page).to have_title("Kubernetes Clusters · #{project.full_name} · #{_('GitLab')}")
+ expect(page).to have_content('Connect a cluster')
+ end
+ end
end
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index d799fbc49ef..241d18d5223 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Clusters', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Clusters', :js, feature_category: :environment_management do
include GoogleApi::CloudPlatformHelpers
let(:project) { create(:project) }
diff --git a/spec/features/projects/commits/user_browses_commits_spec.rb b/spec/features/projects/commits/user_browses_commits_spec.rb
index 3513e249b63..5a0b70532aa 100644
--- a/spec/features/projects/commits/user_browses_commits_spec.rb
+++ b/spec/features/projects/commits/user_browses_commits_spec.rb
@@ -42,8 +42,8 @@ RSpec.describe 'User browses commits', feature_category: :source_code_management
it 'renders breadcrumbs on specific commit path' do
visit project_commits_path(project, project.repository.root_ref + '/files/ruby/regex.rb', limit: 5)
- expect(page).to have_selector('ul.breadcrumb')
- .and have_selector('ul.breadcrumb a', count: 4)
+ expect(page).to have_selector('#content-body ul.breadcrumb')
+ .and have_selector('#content-body ul.breadcrumb a', count: 4)
end
it 'renders diff links to both the previous and current image', :js do
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index fdedaa3f469..de4ce3a23b6 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Environment', feature_category: :groups_and_projects do
+RSpec.describe 'Environment', feature_category: :environment_management do
let_it_be(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:role) { :developer }
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 0a54f5923f2..c7e0fdb3fc8 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -461,22 +461,6 @@ RSpec.describe 'Environments page', :js, feature_category: :continuous_delivery
end
end
- describe 'environments folders view' do
- before do
- create(:environment, project: project, name: 'staging.review/review-1', state: :available)
- create(:environment, project: project, name: 'staging.review/review-2', state: :available)
- end
-
- it 'user opens folder view' do
- visit folder_project_environments_path(project, 'staging.review')
- wait_for_requests
-
- expect(page).to have_content('Environments / staging.review')
- expect(page).to have_content('review-1')
- expect(page).to have_content('review-2')
- end
- end
-
def have_terminal_button
have_link(_('Terminal'), href: terminal_project_environment_path(project, environment))
end
diff --git a/spec/features/projects/files/download_buttons_spec.rb b/spec/features/projects/files/download_buttons_spec.rb
index 81bd0523c70..c731cdab1ad 100644
--- a/spec/features/projects/files/download_buttons_spec.rb
+++ b/spec/features/projects/files/download_buttons_spec.rb
@@ -22,16 +22,4 @@ RSpec.describe 'Projects > Files > Download buttons in files tree', feature_cate
it_behaves_like 'archive download buttons' do
let(:path_to_visit) { project_tree_path(project, project.default_branch) }
end
-
- context 'with artifacts' do
- before do
- visit project_tree_path(project, project.default_branch)
- end
-
- it 'shows download artifacts button' do
- href = latest_succeeded_project_artifacts_path(project, "#{project.default_branch}/download", job: 'build')
-
- expect(page).to have_link build.name, href: href
- end
- end
end
diff --git a/spec/features/projects/files/find_file_keyboard_spec.rb b/spec/features/projects/files/find_file_keyboard_spec.rb
index 85ecd1c2d96..ec2fe146b3f 100644
--- a/spec/features/projects/files/find_file_keyboard_spec.rb
+++ b/spec/features/projects/files/find_file_keyboard_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > Find file keyboard shortcuts', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > Find file keyboard shortcuts', :js, feature_category: :source_code_management do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
index 18041bbb00a..2e6b470dd7d 100644
--- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb
+++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > Project owner creates a license file', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > Project owner creates a license file', :js, feature_category: :source_code_management do
let_it_be(:project_maintainer) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: project_maintainer.namespace) }
diff --git a/spec/features/projects/files/template_selector_menu_spec.rb b/spec/features/projects/files/template_selector_menu_spec.rb
index 920da6e72ce..e8aa59a262c 100644
--- a/spec/features/projects/files/template_selector_menu_spec.rb
+++ b/spec/features/projects/files/template_selector_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Template selector menu', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Template selector menu', :js, feature_category: :team_planning do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index e93c9427c91..7c7e5811ad9 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User browses files", :js, feature_category: :groups_and_projects do
+RSpec.describe "User browses files", :js, feature_category: :source_code_management do
include RepoHelpers
include ListboxHelpers
diff --git a/spec/features/projects/files/user_browses_lfs_files_spec.rb b/spec/features/projects/files/user_browses_lfs_files_spec.rb
index d8c1c8e4f2a..12acb98684c 100644
--- a/spec/features/projects/files/user_browses_lfs_files_spec.rb
+++ b/spec/features/projects/files/user_browses_lfs_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User browses LFS files', feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > User browses LFS files', feature_category: :source_code_management do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/files/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb
index d824b3b1759..2c3b6e6d2a5 100644
--- a/spec/features/projects/files/user_creates_directory_spec.rb
+++ b/spec/features/projects/files/user_creates_directory_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User creates a directory', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > User creates a directory', :js, feature_category: :source_code_management do
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb
index de82f3062a2..edc504240a7 100644
--- a/spec/features/projects/files/user_creates_files_spec.rb
+++ b/spec/features/projects/files/user_creates_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User creates files', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > User creates files', :js, feature_category: :source_code_management do
include Features::SourceEditorSpecHelpers
include Features::BlobSpecHelpers
@@ -65,6 +65,11 @@ RSpec.describe 'Projects > Files > User creates files', :js, feature_category: :
expect(page).to have_selector('.file-editor')
end
+ it 'shows full path instead of ref when creating a file' do
+ expect(page).to have_selector('#editor_path')
+ expect(page).not_to have_selector('#editor_ref')
+ end
+
def submit_new_file(options)
file_name = find('#file_name')
file_name.set options[:file_name] || 'README.md'
diff --git a/spec/features/projects/files/user_deletes_files_spec.rb b/spec/features/projects/files/user_deletes_files_spec.rb
index c526084b35d..68a996451f6 100644
--- a/spec/features/projects/files/user_deletes_files_spec.rb
+++ b/spec/features/projects/files/user_deletes_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User deletes files', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > User deletes files', :js, feature_category: :source_code_management do
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb
index 5612f6a53b2..b7fe8b3e5bb 100644
--- a/spec/features/projects/files/user_edits_files_spec.rb
+++ b/spec/features/projects/files/user_edits_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :source_code_management do
include Features::SourceEditorSpecHelpers
include ProjectForksHelper
include Features::BlobSpecHelpers
@@ -53,6 +53,14 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :gr
expect(find('.monaco-editor')).to have_content('*.rbca')
end
+ it 'shows ref instead of full path when editing a file' do
+ click_link('.gitignore')
+ edit_in_single_file_editor
+
+ expect(page).not_to have_selector('#editor_path')
+ expect(page).to have_selector('#editor_ref')
+ end
+
it 'does not show the edit link if a file is binary' do
binary_file = File.join(project.repository.root_ref, 'files/images/logo-black.png')
visit(project_blob_path(project, binary_file))
diff --git a/spec/features/projects/files/user_find_file_spec.rb b/spec/features/projects/files/user_find_file_spec.rb
index b6e739e8082..bf6680cd57a 100644
--- a/spec/features/projects/files/user_find_file_spec.rb
+++ b/spec/features/projects/files/user_find_file_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User find project file', feature_category: :groups_and_projects do
+RSpec.describe 'User find project file', feature_category: :source_code_management do
include ListboxHelpers
let(:user) { create :user }
diff --git a/spec/features/projects/files/user_reads_pipeline_status_spec.rb b/spec/features/projects/files/user_reads_pipeline_status_spec.rb
index 24dd673501c..610c583f060 100644
--- a/spec/features/projects/files/user_reads_pipeline_status_spec.rb
+++ b/spec/features/projects/files/user_reads_pipeline_status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'user reads pipeline status', :js, feature_category: :groups_and_projects do
+RSpec.describe 'user reads pipeline status', :js, feature_category: :continuous_integration do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:v110_pipeline) { create_pipeline('v1.1.0', 'success') }
diff --git a/spec/features/projects/files/user_replaces_files_spec.rb b/spec/features/projects/files/user_replaces_files_spec.rb
index bd951b974d1..a5d80c387d5 100644
--- a/spec/features/projects/files/user_replaces_files_spec.rb
+++ b/spec/features/projects/files/user_replaces_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User replaces files', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > User replaces files', :js, feature_category: :source_code_management do
include DropzoneHelper
let(:fork_message) do
diff --git a/spec/features/projects/files/user_searches_for_files_spec.rb b/spec/features/projects/files/user_searches_for_files_spec.rb
index 030d5a8ec40..587e87a21dd 100644
--- a/spec/features/projects/files/user_searches_for_files_spec.rb
+++ b/spec/features/projects/files/user_searches_for_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User searches for files', feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Files > User searches for files', feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index c159b40003c..e849f110014 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project fork', feature_category: :groups_and_projects do
+RSpec.describe 'Project fork', feature_category: :source_code_management do
include ListboxHelpers
include ProjectForksHelper
diff --git a/spec/features/projects/gfm_autocomplete_load_spec.rb b/spec/features/projects/gfm_autocomplete_load_spec.rb
index 35ab0094d51..9e23c05a739 100644
--- a/spec/features/projects/gfm_autocomplete_load_spec.rb
+++ b/spec/features/projects/gfm_autocomplete_load_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'GFM autocomplete loading', :js, feature_category: :groups_and_projects do
+RSpec.describe 'GFM autocomplete loading', :js, feature_category: :team_planning do
let(:project) { create(:project) }
before do
diff --git a/spec/features/projects/graph_spec.rb b/spec/features/projects/graph_spec.rb
index e9c05fd7f7f..effe743ac81 100644
--- a/spec/features/projects/graph_spec.rb
+++ b/spec/features/projects/graph_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Graph', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Project Graph', :js, feature_category: :source_code_management do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, namespace: user.namespace) }
let(:branch_name) { 'master' }
diff --git a/spec/features/projects/infrastructure_registry_spec.rb b/spec/features/projects/infrastructure_registry_spec.rb
index a648a4fc1ce..10ac61973fb 100644
--- a/spec/features/projects/infrastructure_registry_spec.rb
+++ b/spec/features/projects/infrastructure_registry_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Infrastructure Registry', feature_category: :groups_and_projects do
+RSpec.describe 'Infrastructure Registry', feature_category: :package_registry do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
diff --git a/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb b/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
index 944a2c164d5..02cec948127 100644
--- a/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
+++ b/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
@@ -10,10 +10,10 @@ RSpec.describe 'User activates issue tracker', :js, feature_category: :integrati
def fill_form(disable: false, skip_new_issue_url: false)
click_active_checkbox if disable
- fill_in 'service_project_url', with: url
- fill_in 'service_issues_url', with: "#{url}/:id"
+ fill_in 'service-project_url', with: url
+ fill_in 'service-issues_url', with: "#{url}/:id"
- fill_in 'service_new_issue_url', with: url unless skip_new_issue_url
+ fill_in 'service-new_issue_url', with: url unless skip_new_issue_url
end
shared_examples 'external issue tracker activation' do |tracker:, skip_new_issue_url: false, skip_test: false|
diff --git a/spec/features/projects/integrations/user_activates_jira_spec.rb b/spec/features/projects/integrations/user_activates_jira_spec.rb
index cc0d4c6f564..704e959ff0e 100644
--- a/spec/features/projects/integrations/user_activates_jira_spec.rb
+++ b/spec/features/projects/integrations/user_activates_jira_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'User activates Jira', :js, feature_category: :integrations do
it 'shows errors when some required fields are not filled in' do
visit_project_integration('Jira')
- fill_in 'service_password', with: 'password'
+ fill_in 'service-password', with: 'password'
click_test_integration
page.within('[data-testid="integration-settings-form"]') do
diff --git a/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb b/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
index e490e32149e..14a94152a94 100644
--- a/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
+++ b/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
@@ -143,7 +143,7 @@ RSpec.describe 'Set up Mattermost slash commands', :js, feature_category: :integ
end
it 'shows a token placeholder' do
- token_placeholder = find_field('service_token')['placeholder']
+ token_placeholder = find_field('service-token')['placeholder']
expect(token_placeholder).to eq('')
end
diff --git a/spec/features/projects/issuable_templates_spec.rb b/spec/features/projects/issuable_templates_spec.rb
index bc67cdbfad1..59fea7d7265 100644
--- a/spec/features/projects/issuable_templates_spec.rb
+++ b/spec/features/projects/issuable_templates_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects do
+RSpec.describe 'issuable templates', :js, feature_category: :team_planning do
include ProjectForksHelper
let(:user) { create(:user) }
diff --git a/spec/features/projects/jobs/permissions_spec.rb b/spec/features/projects/jobs/permissions_spec.rb
index 73635480b95..7759875c2a5 100644
--- a/spec/features/projects/jobs/permissions_spec.rb
+++ b/spec/features/projects/jobs/permissions_spec.rb
@@ -89,7 +89,10 @@ RSpec.describe 'Project Jobs Permissions', feature_category: :groups_and_project
it_behaves_like 'recent job page details responds with status', 200 do
it 'renders job details', :js do
expect(page).to have_content(job.name)
- expect(page).to have_css '.log-line'
+
+ within_testid('job-log-content') do
+ expect(page).to have_content('Job succeeded')
+ end
end
end
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index 448db8b6d89..1be1a58d212 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User browses a job', :js, feature_category: :groups_and_projects do
+RSpec.describe 'User browses a job', :js, feature_category: :continuous_integration do
include Spec::Support::Helpers::ModalHelpers
let(:user) { create(:user) }
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index 115b3dda5b2..5c1dc36a31c 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -8,7 +8,7 @@ def visit_jobs_page
wait_for_requests
end
-RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
+RSpec.describe 'User browses jobs', feature_category: :continuous_integration do
describe 'Jobs', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb b/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
index 41a21b1155b..21274572352 100644
--- a/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
+++ b/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User triggers manual job with variables', :js, feature_category: :groups_and_projects do
+RSpec.describe 'User triggers manual job with variables', :js, feature_category: :continuous_integration do
let(:user) { create(:user) }
let(:user_access_level) { :developer }
let(:project) { create(:project, :repository, namespace: user.namespace) }
diff --git a/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb b/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
index 47cd0d612b5..08de3d95060 100644
--- a/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
+++ b/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
@@ -8,16 +8,23 @@ RSpec.describe 'Projects > Members > Group requester cannot request access to pr
let(:owner) { create(:user) }
let(:group) { create(:group, :public) }
let(:project) { create(:project, :public, namespace: group) }
+ let(:more_actions_dropdown) do
+ find('[data-testid="groups-projects-more-actions-dropdown"] .gl-new-dropdown-custom-toggle')
+ end
before do
group.add_owner(owner)
sign_in(user)
visit group_path(group)
- perform_enqueued_jobs { click_link 'Request Access' }
+ perform_enqueued_jobs do
+ more_actions_dropdown.click
+ click_link 'Request Access'
+ end
visit project_path(project)
end
it 'group requester does not see the request access / withdraw access request button' do
+ expect(page).not_to have_css '[data-testid="groups-projects-more-actions-dropdown"]'
expect(page).not_to have_content 'Request Access'
expect(page).not_to have_content 'Withdraw Access Request'
end
diff --git a/spec/features/projects/members/manage_groups_spec.rb b/spec/features/projects/members/manage_groups_spec.rb
index 63ff1ba8455..7bd7c822a5d 100644
--- a/spec/features/projects/members/manage_groups_spec.rb
+++ b/spec/features/projects/members/manage_groups_spec.rb
@@ -228,6 +228,6 @@ RSpec.describe 'Project > Members > Manage groups', :js, feature_category: :grou
end
def invite_group_selector
- 'button[data-test-id="invite-group-button"]'
+ 'button[data-testid="invite-group-button"]'
end
end
diff --git a/spec/features/projects/members/manage_members_spec.rb b/spec/features/projects/members/manage_members_spec.rb
index 76b2a73e170..3423c636c2b 100644
--- a/spec/features/projects/members/manage_members_spec.rb
+++ b/spec/features/projects/members/manage_members_spec.rb
@@ -108,7 +108,10 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
let(:current_user) { project_owner }
it 'shows Owner in the dropdown' do
- expect(page).to have_select('Select a role', options: %w[Guest Reporter Developer Maintainer Owner])
+ within_modal do
+ toggle_listbox
+ expect_listbox_items(%w[Guest Reporter Developer Maintainer Owner])
+ end
end
end
@@ -116,8 +119,10 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
let(:current_user) { project_maintainer }
it 'does not show the Owner option' do
- expect(page).to have_select('Select a role', options: %w[Guest Reporter Developer Maintainer])
- expect(page).not_to have_select('Select a role', options: %w[Owner])
+ within_modal do
+ toggle_listbox
+ expect_listbox_items(%w[Guest Reporter Developer Maintainer])
+ end
end
end
end
diff --git a/spec/features/projects/members/member_leaves_project_spec.rb b/spec/features/projects/members/member_leaves_project_spec.rb
index e0c64547e9f..43c6ba48906 100644
--- a/spec/features/projects/members/member_leaves_project_spec.rb
+++ b/spec/features/projects/members/member_leaves_project_spec.rb
@@ -8,16 +8,21 @@ RSpec.describe 'Projects > Members > Member leaves project', feature_category: :
let(:user) { create(:user) }
let(:project) { create(:project, :repository, :with_namespace_settings) }
+ let(:more_actions_dropdown) do
+ find('[data-testid="groups-projects-more-actions-dropdown"] .gl-new-dropdown-custom-toggle')
+ end
before do
project.add_developer(user)
sign_in(user)
end
- it 'user leaves project' do
+ it 'user leaves project', :js do
visit project_path(project)
+ more_actions_dropdown.click
click_link 'Leave project'
+ accept_gl_confirm(button_text: 'Leave project')
expect(page).to have_current_path(dashboard_projects_path, ignore_query: true)
expect(project.users.exists?(user.id)).to be_falsey
diff --git a/spec/features/projects/members/user_requests_access_spec.rb b/spec/features/projects/members/user_requests_access_spec.rb
index e7f99a4048c..14df7e7bafe 100644
--- a/spec/features/projects/members/user_requests_access_spec.rb
+++ b/spec/features/projects/members/user_requests_access_spec.rb
@@ -10,6 +10,9 @@ RSpec.describe 'Projects > Members > User requests access', :js, feature_categor
let_it_be(:project) { create(:project, :public, :repository) }
let(:owner) { project.first_owner }
+ let(:more_actions_dropdown) do
+ find('[data-testid="groups-projects-more-actions-dropdown"] .gl-new-dropdown-custom-toggle')
+ end
before do
sign_in(user)
@@ -17,39 +20,46 @@ RSpec.describe 'Projects > Members > User requests access', :js, feature_categor
visit project_path(project)
end
- it 'request access feature is disabled' do
+ it 'request access feature is disabled', :js do
project.update!(request_access_enabled: false)
visit project_path(project)
+ more_actions_dropdown.click
expect(page).not_to have_content 'Request Access'
end
- it 'user can request access to a project' do
- perform_enqueued_jobs { click_link 'Request Access' }
+ it 'user can request access to a project', :js do
+ perform_enqueued_jobs do
+ more_actions_dropdown.click
+ click_link 'Request Access'
+ end
expect(ActionMailer::Base.deliveries.map(&:to)).to match_array([[owner.notification_email_or_default], [maintainer.notification_email_or_default]])
expect(ActionMailer::Base.deliveries.last.subject).to eq "Request to join the #{project.full_name} project"
expect(project.requesters.exists?(user_id: user)).to be_truthy
+ more_actions_dropdown.click
expect(page).to have_content 'Withdraw Access Request'
expect(page).not_to have_content 'Leave Project'
end
context 'code access is restricted' do
- it 'user can request access' do
+ it 'user can request access', :js do
project.project_feature.update!(
repository_access_level: ProjectFeature::PRIVATE,
builds_access_level: ProjectFeature::PRIVATE,
merge_requests_access_level: ProjectFeature::PRIVATE
)
visit project_path(project)
+ more_actions_dropdown.click
expect(page).to have_content 'Request Access'
end
end
- it 'user is not listed in the project members page' do
+ it 'user is not listed in the project members page', :js do
+ more_actions_dropdown.click
click_link 'Request Access'
expect(project.requesters.exists?(user_id: user)).to be_truthy
@@ -64,13 +74,16 @@ RSpec.describe 'Projects > Members > User requests access', :js, feature_categor
end
end
- it 'user can withdraw its request for access' do
+ it 'user can withdraw its request for access', :js do
+ more_actions_dropdown.click
click_link 'Request Access'
expect(project.requesters.exists?(user_id: user)).to be_truthy
+ more_actions_dropdown.click
accept_gl_confirm { click_link 'Withdraw Access Request' }
+ more_actions_dropdown.click
expect(page).not_to have_content 'Withdraw Access Request'
expect(page).to have_content 'Request Access'
end
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index a3cbb86da2c..a0df18ea6f3 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
- include Features::TopNavSpecHelpers
-
before do
stub_application_setting(import_sources: Gitlab::ImportSources.values)
end
@@ -81,21 +79,13 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
expect(page).to have_content 'Other visibility settings have been disabled by the administrator.'
end
- context 'with prevent_visibility_restriction feature flag off' do
- before do
- stub_feature_flags(prevent_visibility_restriction: false)
- end
+ it 'shows a message if all levels are restricted' do
+ stub_application_setting(restricted_visibility_levels: Gitlab::VisibilityLevel.values)
- it 'shows a message if all levels are restricted' do
- Gitlab::CurrentSettings.update!(
- restricted_visibility_levels: Gitlab::VisibilityLevel.values
- )
-
- visit new_project_path
- click_link 'Create blank project'
+ visit new_project_path
+ click_link 'Create blank project'
- expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
- end
+ expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
end
end
diff --git a/spec/features/projects/packages_spec.rb b/spec/features/projects/packages_spec.rb
index 5073c147b6c..4e222a67b87 100644
--- a/spec/features/projects/packages_spec.rb
+++ b/spec/features/projects/packages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Packages', feature_category: :groups_and_projects do
+RSpec.describe 'Packages', feature_category: :package_registry do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index 5bcd0d28fd9..d481d90792d 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :continuous_integrat
page.within('[data-testid="pipeline-schedule-table-row"]') do
expect(page).to have_content('pipeline schedule')
expect(find('[data-testid="next-run-cell"] time')['title'])
- .to include(pipeline_schedule.real_next_run.strftime('%b %-d, %Y'))
+ .to include(pipeline_schedule.real_next_run.strftime('%B %-d, %Y'))
expect(page).to have_link('master')
expect(find("[data-testid='last-pipeline-status'] a")['href']).to include(pipeline.id.to_s)
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 30d3303dfbb..d8bb4893831 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -803,15 +803,73 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
end
describe 'Empty State' do
- let(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:project) { create(:project, :repository) }
before do
visit project_pipelines_path(project)
+
+ wait_for_requests
end
it 'renders empty state' do
expect(page).to have_content 'Try test template'
end
+
+ it 'does not show Jenkins Migration Prompt' do
+ expect(page).not_to have_content _('Migrate to GitLab CI/CD from Jenkins')
+ end
+ end
+
+ describe 'Jenkins migration prompt' do
+ let_it_be_with_reload(:project) { create(:project, :repository) }
+
+ before do
+ allow_next_instance_of(Repository) do |instance|
+ allow(instance).to receive(:jenkinsfile?).and_return(true)
+ end
+ end
+
+ context 'when jenkinsfile is present' do
+ it 'shows Jenkins Migration Prompt' do
+ visit project_pipelines_path(project)
+
+ wait_for_requests
+
+ expect(page).to have_content _('Migrate to GitLab CI/CD from Jenkins')
+ expect(page).to have_content _('Start with a migration plan')
+ end
+ end
+
+ context 'when gitlab ci file is present' do
+ before do
+ allow_next_instance_of(Repository) do |instance|
+ allow(instance).to receive(:gitlab_ci_yml).and_return(true)
+ end
+ end
+
+ it 'does not show migration prompt' do
+ expect_not_to_show_prompt(project)
+ end
+ end
+
+ context 'when AutoDevops is enabled' do
+ before do
+ project.update!(auto_devops_attributes: { enabled: true })
+ end
+
+ it 'does not show migration prompt' do
+ expect_not_to_show_prompt(project)
+ end
+ end
+
+ def expect_not_to_show_prompt(project)
+ visit project_pipelines_path(project)
+
+ wait_for_requests
+
+ expect(page).not_to have_content _('Migrate to GitLab CI/CD from Jenkins')
+ expect(page).not_to have_content _('Start with a migration plan')
+ end
end
end
diff --git a/spec/features/projects/settings/branch_names_settings_spec.rb b/spec/features/projects/settings/branch_names_settings_spec.rb
index cc7396513cb..1b82e8edb00 100644
--- a/spec/features/projects/settings/branch_names_settings_spec.rb
+++ b/spec/features/projects/settings/branch_names_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project settings > repositories > Branch names', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Project settings > repositories > Branch names', :js, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :public) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/forked_project_settings_spec.rb b/spec/features/projects/settings/forked_project_settings_spec.rb
index c2a5b36a81b..e8e22247f04 100644
--- a/spec/features/projects/settings/forked_project_settings_spec.rb
+++ b/spec/features/projects/settings/forked_project_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :source_code_management do
include ProjectForksHelper
let(:user) { create(:user) }
let(:original_project) { create(:project) }
diff --git a/spec/features/projects/settings/merge_requests_settings_spec.rb b/spec/features/projects/settings/merge_requests_settings_spec.rb
index 8b5f9b67890..c1b4caae014 100644
--- a/spec/features/projects/settings/merge_requests_settings_spec.rb
+++ b/spec/features/projects/settings/merge_requests_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Merge requests', feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Settings > Merge requests', feature_category: :code_review_workflow do
include ProjectForksHelper
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/monitor_settings_spec.rb b/spec/features/projects/settings/monitor_settings_spec.rb
index fca10d9c0b0..4baa169238b 100644
--- a/spec/features/projects/settings/monitor_settings_spec.rb
+++ b/spec/features/projects/settings/monitor_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :incident_management do
include ListboxHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
index 1b53a6222e6..96ccc061491 100644
--- a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
+++ b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Project > Settings > Packages and registries > Container registry tag expiration policy',
- feature_category: :groups_and_projects do
+ feature_category: :container_registry do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 838ac67ee3d..8576518666c 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Repository settings', feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Settings > Repository settings', feature_category: :source_code_management do
include Features::MirroringHelpers
let(:project) { create(:project_empty_repo) }
diff --git a/spec/features/projects/settings/secure_files_spec.rb b/spec/features/projects/settings/secure_files_spec.rb
index 5f94e215a5f..c456e760709 100644
--- a/spec/features/projects/settings/secure_files_spec.rb
+++ b/spec/features/projects/settings/secure_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Secure Files', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Secure Files', :js, feature_category: :source_code_management do
let(:project) { create(:project) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/service_desk_setting_spec.rb b/spec/features/projects/settings/service_desk_setting_spec.rb
index 2cd4d793f9c..540c4691465 100644
--- a/spec/features/projects/settings/service_desk_setting_spec.rb
+++ b/spec/features/projects/settings/service_desk_setting_spec.rb
@@ -102,12 +102,6 @@ RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache, feature_c
end
end
- it 'pushes service_desk_custom_email feature flag to frontend' do
- visit edit_project_path(project)
-
- expect(page).to have_pushed_frontend_feature_flags(serviceDeskCustomEmail: true)
- end
-
it 'pushes issue_email_participants feature flag to frontend' do
visit edit_project_path(project)
diff --git a/spec/features/projects/settings/user_changes_default_branch_spec.rb b/spec/features/projects/settings/user_changes_default_branch_spec.rb
index 5886699a192..a448daf0e7a 100644
--- a/spec/features/projects/settings/user_changes_default_branch_spec.rb
+++ b/spec/features/projects/settings/user_changes_default_branch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User changes default branch', feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Settings > User changes default branch', feature_category: :source_code_management do
include ListboxHelpers
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
index 4e8f42ae792..9305467cbe4 100644
--- a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
+++ b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User interacts with deploy keys", :js, feature_category: :groups_and_projects do
+RSpec.describe "User interacts with deploy keys", :js, feature_category: :continuous_delivery do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
index 1189f5590f8..6fe8a2c3f10 100644
--- a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
+++ b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User manages merge request settings', feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Settings > User manages merge request settings', feature_category: :code_review_workflow do
include ProjectForksHelper
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/webhooks_settings_spec.rb b/spec/features/projects/settings/webhooks_settings_spec.rb
index af7c790c692..506f7666286 100644
--- a/spec/features/projects/settings/webhooks_settings_spec.rb
+++ b/spec/features/projects/settings/webhooks_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Webhook Settings', feature_category: :groups_and_projects do
+RSpec.describe 'Projects > Settings > Webhook Settings', feature_category: :webhooks do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:webhooks_path) { project_hooks_path(project) }
diff --git a/spec/features/projects/show/download_buttons_spec.rb b/spec/features/projects/show/download_buttons_spec.rb
index 8e27b4b2ede..2616d474991 100644
--- a/spec/features/projects/show/download_buttons_spec.rb
+++ b/spec/features/projects/show/download_buttons_spec.rb
@@ -36,23 +36,5 @@ RSpec.describe 'Projects > Show > Download buttons', feature_category: :groups_a
describe 'when checking project main page' do
it_behaves_like 'archive download buttons'
-
- context 'with artifacts' do
- before do
- visit project_path(project)
- end
-
- it 'shows download artifacts button' do
- href = latest_succeeded_project_artifacts_path(project, "#{project.default_branch}/download", job: 'build')
-
- expect(page).to have_link build.name, href: href
- end
-
- it 'download links have download attribute' do
- page.all('a', text: 'Download').each do |link|
- expect(link[:download]).to eq ''
- end
- end
- end
end
end
diff --git a/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb b/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
index 98714da34f2..52438340312 100644
--- a/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
+++ b/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Project > Show > User interacts with auto devops implicitly enabled banner',
- feature_category: :groups_and_projects do
+ feature_category: :auto_devops do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/show/user_sees_git_instructions_spec.rb b/spec/features/projects/show/user_sees_git_instructions_spec.rb
index 5e6857843a6..4933b3f239c 100644
--- a/spec/features/projects/show/user_sees_git_instructions_spec.rb
+++ b/spec/features/projects/show/user_sees_git_instructions_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'Projects > Show > User sees Git instructions', feature_category:
# validation failure on NotificationSetting.
# See https://gitlab.com/gitlab-org/gitlab/-/issues/299822#note_492817174
user.notification_settings.reset
+ stub_feature_flags(project_overview_reorg: false)
end
shared_examples_for 'redirects to the sign in page' do
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 41eab966895..674c7db83f1 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -17,8 +17,8 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons', feature_cat
describe 'as a normal user' do
before do
+ stub_feature_flags(project_overview_reorg: false)
sign_in(user)
-
visit project_path(project)
end
@@ -40,6 +40,7 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons', feature_cat
describe 'as a maintainer' do
before do
+ stub_feature_flags(project_overview_reorg: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/projects/tags/download_buttons_spec.rb b/spec/features/projects/tags/download_buttons_spec.rb
index 275d364f267..f025ffba85a 100644
--- a/spec/features/projects/tags/download_buttons_spec.rb
+++ b/spec/features/projects/tags/download_buttons_spec.rb
@@ -38,17 +38,5 @@ RSpec.describe 'Download buttons in tags page', feature_category: :source_code_m
let(:path_to_visit) { project_tags_path(project) }
let(:ref) { tag }
end
-
- context 'with artifacts' do
- before do
- visit project_tags_path(project)
- end
-
- it 'shows download artifacts button' do
- href = latest_succeeded_project_artifacts_path(project, "#{tag}/download", job: 'build')
-
- expect(page).to have_link build.name, href: href
- end
- end
end
end
diff --git a/spec/features/projects/tags/user_views_tags_spec.rb b/spec/features/projects/tags/user_views_tags_spec.rb
index 26f2e81e3df..28a9ced69e6 100644
--- a/spec/features/projects/tags/user_views_tags_spec.rb
+++ b/spec/features/projects/tags/user_views_tags_spec.rb
@@ -38,6 +38,9 @@ RSpec.describe 'User views tags', :feature, feature_category: :source_code_manag
it_behaves_like "it has an RSS button with current_user's feed token"
it_behaves_like "an autodiscoverable RSS feed with current_user's feed token"
it_behaves_like 'has access to the tags RSS feed'
+ it 'passes axe automated accessibility testing', :js do
+ expect(page).to be_axe_clean.within('#content-body')
+ end
end
context 'when user signed out' do
diff --git a/spec/features/projects/terraform_spec.rb b/spec/features/projects/terraform_spec.rb
index 518fa79f003..aefc7be7ced 100644
--- a/spec/features/projects/terraform_spec.rb
+++ b/spec/features/projects/terraform_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Terraform', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Terraform', :js, feature_category: :package_registry do
let_it_be(:project) { create(:project) }
let_it_be(:terraform_state) { create(:terraform_state, :locked, :with_version, project: project) }
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index 484808dcfd1..a590d637801 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -55,6 +55,45 @@ RSpec.describe 'User creates a project', :js, feature_category: :groups_and_proj
expect(page).to have_content('README.md Initial commit')
end
+ context 'when creating a project with SHA256 repository' do
+ let(:sha256_field) { 'Use SHA-256 as the repository hashing algorithm' }
+
+ it 'creates a new project' do
+ visit(new_project_path)
+
+ click_link 'Create blank project'
+ fill_in(:project_name, with: 'With initial commits')
+
+ expect(page).to have_checked_field 'Initialize repository with a README'
+ expect(page).to have_unchecked_field sha256_field
+
+ check sha256_field
+
+ page.within('#content-body') do
+ click_button('Create project')
+ end
+
+ project = Project.last
+
+ expect(page).to have_current_path(project_path(project), ignore_query: true)
+ expect(page).to have_content('With initial commits')
+ end
+
+ context 'when "support_sha256_repositories" feature flag is disabled' do
+ before do
+ stub_feature_flags(support_sha256_repositories: false)
+ end
+
+ it 'does not display a SHA256 option' do
+ visit(new_project_path)
+
+ click_link 'Create blank project'
+
+ expect(page).not_to have_content(sha256_field)
+ end
+ end
+ end
+
context 'in a subgroup they do not own' do
let(:parent) { create(:group) }
let!(:subgroup) { create(:group, parent: parent) }
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index 61225b45760..88c16ab4c9f 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -207,7 +207,7 @@ RSpec.describe 'Projects > User sees sidebar', :js, feature_category: :groups_an
visit project_path(project)
within('.project-repo-buttons') do
- expect(page).not_to have_selector '.project-clone-holder'
+ expect(page).not_to have_selector '.project-code-holder'
end
end
diff --git a/spec/features/projects/work_items/work_item_children_spec.rb b/spec/features/projects/work_items/work_item_children_spec.rb
index 0970752157d..28f7ee2db10 100644
--- a/spec/features/projects/work_items/work_item_children_spec.rb
+++ b/spec/features/projects/work_items/work_item_children_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Work item children', :js, feature_category: :team_planning do
it 'are not displayed when issue does not have work item children', :aggregate_failures do
page.within('[data-testid="work-item-links"]') do
- expect(find('[data-testid="links-empty"]')).to have_content(_('No tasks are currently assigned.'))
+ expect(find('[data-testid="links-empty"]')).to have_content(_('No child items are currently assigned.'))
expect(page).not_to have_selector('[data-testid="add-links-form"]')
expect(page).not_to have_selector('[data-testid="links-child"]')
end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index c6966e47f0a..1fb1e21ac67 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project', feature_category: :groups_and_projects do
+RSpec.describe 'Project', feature_category: :source_code_management do
include ProjectForksHelper
include MobileHelpers
@@ -12,6 +12,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
before do
sign_in user
visit new_project_path
+ stub_feature_flags(project_overview_reorg: false)
end
shared_examples 'creates from template' do |template, sub_template_tab = nil|
@@ -99,6 +100,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
before do
sign_in(project.first_owner)
+ stub_feature_flags(project_overview_reorg: false)
end
it 'parses Markdown' do
@@ -164,6 +166,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
before do
sign_in(project.first_owner)
visit path
+ stub_feature_flags(project_overview_reorg: false)
end
it 'shows project topics' do
@@ -195,6 +198,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
before do
sign_in(project.first_owner)
visit path
+ stub_feature_flags(project_overview_reorg: false)
end
context 'desktop component' do
@@ -427,6 +431,10 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
+ before do
+ stub_feature_flags(project_overview_reorg: false)
+ end
+
it 'does not contain default branch information in its content', :js do
default_branch = 'merge-commit-analyze-side-branch'
diff --git a/spec/features/registrations/oauth_registration_spec.rb b/spec/features/registrations/oauth_registration_spec.rb
index 98300cbeaaa..eb21d285bd0 100644
--- a/spec/features/registrations/oauth_registration_spec.rb
+++ b/spec/features/registrations/oauth_registration_spec.rb
@@ -105,12 +105,12 @@ RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection, feature_cat
mock_auth_hash(provider, uid, invite_email, additional_info: additional_info)
end
- it 'redirects to the activity page with all the projects/groups invitations accepted' do
+ it 'redirects to the group page with all the projects/groups invitations accepted' do
visit invite_path(group_invite.raw_invite_token, extra_params)
click_link_or_button "oauth-login-#{provider}"
expect(page).to have_content('You have been granted Owner access to group Owned.')
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(page).to have_current_path(group_path(group), ignore_query: true)
end
end
end
diff --git a/spec/features/registrations/registration_spec.rb b/spec/features/registrations/registration_spec.rb
index 7a409b3934e..b408ef268f2 100644
--- a/spec/features/registrations/registration_spec.rb
+++ b/spec/features/registrations/registration_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe 'Registrations', feature_category: :system_access do
- context 'when the user visits the registration page when already signed in', :clean_gitlab_redis_sessions do
- let_it_be(:current_user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ context 'when the user visits the registration page when already signed in', :clean_gitlab_redis_sessions do
before do
- sign_in(current_user)
+ sign_in(user)
end
it 'does not show an "You are already signed in" error message' do
@@ -18,4 +18,27 @@ RSpec.describe 'Registrations', feature_category: :system_access do
expect(page).not_to have_content(I18n.t('devise.failure.already_authenticated'))
end
end
+
+ context 'when the user registers having an invitation', :js do
+ let(:group) { create(:group, :private) }
+ let(:new_user) { build(:user) }
+
+ before do
+ stub_application_setting_enum('email_confirmation_setting', 'soft')
+ stub_application_setting(require_admin_approval_after_user_signup: false)
+ end
+
+ it 'becomes a member after confirmation' do
+ create(:group_member, :invited, :developer, source: group, invite_email: new_user.email)
+
+ visit new_user_registration_path
+ fill_in_sign_up_form(new_user)
+
+ confirm_email(new_user)
+ visit polymorphic_path(group)
+
+ expect(page).to have_content(group.name)
+ expect(page).not_to have_content('Page Not Found')
+ end
+ end
end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 091c318459b..e7047610180 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Runners', feature_category: :runner_fleet do
+RSpec.describe 'Runners', feature_category: :fleet_visibility do
let_it_be(:user) { create(:user) }
before do
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index 976324a5032..9329b1f2a5e 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
wait_for_requests
page.within('[data-testid="project-filter"]') do
- click_on(project.name)
+ select_listbox_item(project.name)
end
end
@@ -53,8 +53,8 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
let(:expected_result) { 'Update capybara, rspec-rails, poltergeist to recent versions' }
before do
- fill_in('dashboard_search', with: 'rspec')
- find('.gl-search-box-by-click-search-button').click
+ submit_dashboard_search('rspec')
+ select_search_scope('Code')
end
it 'finds code and links to blob' do
@@ -81,8 +81,8 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
search = 'for naming files'
ref_selector = 'v1.0.0'
- fill_in('dashboard_search', with: search)
- find('.gl-search-box-by-click-search-button').click
+ submit_dashboard_search(search)
+ select_search_scope('Code')
expect(page).to have_selector('.results', text: expected_result)
@@ -99,51 +99,6 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
end
end
- context 'when header search' do
- context 'search code within refs' do
- let(:ref_name) { 'v1.0.0' }
-
- before do
- visit(project_tree_path(project, ref_name))
-
- submit_search('gitlab-grack')
- select_search_scope('Code')
- end
-
- it 'shows ref switcher in code result summary' do
- expect(find('.ref-selector')).to have_text(ref_name)
- end
-
- it 'persists branch name across search' do
- find('.gl-search-box-by-click-search-button').click
- expect(find('.ref-selector')).to have_text(ref_name)
- end
-
- # this example is use to test the design that the refs is not
- # only represent the branch as well as the tags.
- it 'ref switcher list all the branches and tags' do
- find('.ref-selector').click
- wait_for_requests
-
- page.within('.ref-selector') do
- expect(page).to have_selector('li', text: 'add-ipython-files')
- expect(page).to have_selector('li', text: 'v1.0.0')
- end
- end
-
- it 'search result changes when refs switched' do
- expect(find('.results')).not_to have_content('path = gitlab-grack')
-
- find('.ref-selector').click
- wait_for_requests
-
- select_listbox_item('add-ipython-files')
-
- expect(page).to have_selector('.results', text: 'path = gitlab-grack')
- end
- end
- end
-
it 'no ref switcher shown in issue result summary' do
issue = create(:issue, title: 'test', project: project)
visit(project_tree_path(project))
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index 9451e337db1..caddf8b698e 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'User searches for issues', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
+ include ListboxHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
@@ -10,8 +11,7 @@ RSpec.describe 'User searches for issues', :js, :clean_gitlab_redis_rate_limitin
let!(:issue2) { create(:issue, :closed, :confidential, title: 'issue Bar', project: project) }
def search_for_issue(search)
- fill_in('dashboard_search', with: search)
- find('.gl-search-box-by-click-search-button').click
+ submit_dashboard_search(search)
select_search_scope('Issues')
end
@@ -92,7 +92,7 @@ RSpec.describe 'User searches for issues', :js, :clean_gitlab_redis_rate_limitin
wait_for_requests
page.within('[data-testid="project-filter"]') do
- click_on(project.name)
+ select_listbox_item project.name
end
search_for_issue(issue1.title)
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index d7b52d9e07a..7819e036f21 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -3,14 +3,14 @@
require 'spec_helper'
RSpec.describe 'User searches for merge requests', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
+ include ListboxHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:merge_request1) { create(:merge_request, title: 'Merge Request Foo', source_project: project, target_project: project, created_at: 1.hour.ago) }
let_it_be(:merge_request2) { create(:merge_request, :simple, title: 'Merge Request Bar', source_project: project, target_project: project) }
def search_for_mr(search)
- fill_in('dashboard_search', with: search)
- find('.gl-search-box-by-click-search-button').click
+ submit_dashboard_search(search)
select_search_scope('Merge requests')
end
@@ -60,7 +60,7 @@ RSpec.describe 'User searches for merge requests', :js, :clean_gitlab_redis_rate
wait_for_requests
page.within('[data-testid="project-filter"]') do
- click_on(project.name)
+ select_listbox_item project.name
end
search_for_mr(merge_request1.title)
diff --git a/spec/features/search/user_searches_for_milestones_spec.rb b/spec/features/search/user_searches_for_milestones_spec.rb
index 7ca7958f61b..334fe6f0170 100644
--- a/spec/features/search/user_searches_for_milestones_spec.rb
+++ b/spec/features/search/user_searches_for_milestones_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'User searches for milestones', :js, :clean_gitlab_redis_rate_limiting,
feature_category: :global_search do
+ include ListboxHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:milestone1) { create(:milestone, title: 'Foo', project: project) }
@@ -20,8 +21,7 @@ RSpec.describe 'User searches for milestones', :js, :clean_gitlab_redis_rate_lim
include_examples 'search timeouts', 'milestones'
it 'finds a milestone' do
- fill_in('dashboard_search', with: milestone1.title)
- find('.gl-search-box-by-click-search-button').click
+ submit_dashboard_search(milestone1.title)
select_search_scope('Milestones')
page.within('.results') do
@@ -37,11 +37,10 @@ RSpec.describe 'User searches for milestones', :js, :clean_gitlab_redis_rate_lim
wait_for_requests
page.within('[data-testid="project-filter"]') do
- click_on(project.name)
+ select_listbox_item project.name
end
- fill_in('dashboard_search', with: milestone1.title)
- find('.gl-search-box-by-click-search-button').click
+ submit_dashboard_search(milestone1.title)
select_search_scope('Milestones')
page.within('.results') do
diff --git a/spec/features/search/user_searches_for_projects_spec.rb b/spec/features/search/user_searches_for_projects_spec.rb
index 48a94161927..ee5a3ec9806 100644
--- a/spec/features/search/user_searches_for_projects_spec.rb
+++ b/spec/features/search/user_searches_for_projects_spec.rb
@@ -16,9 +16,7 @@ RSpec.describe 'User searches for projects', :js, :disable_rate_limiter, feature
it 'finds a project' do
visit(search_path)
-
- fill_in('dashboard_search', with: project.name[0..3])
- click_button('Search')
+ submit_dashboard_search(project.name[0..3])
expect(page).to have_link(project.name)
end
@@ -26,7 +24,7 @@ RSpec.describe 'User searches for projects', :js, :disable_rate_limiter, feature
it 'preserves the group being searched in' do
visit(search_path(group_id: project.namespace.id))
- submit_search('foo')
+ submit_dashboard_search('foo')
expect(find('#group_id', visible: false).value).to eq(project.namespace.id.to_s)
end
@@ -34,7 +32,7 @@ RSpec.describe 'User searches for projects', :js, :disable_rate_limiter, feature
it 'preserves the project being searched in' do
visit(search_path(project_id: project.id))
- submit_search('foo')
+ submit_dashboard_search('foo')
expect(find('#project_id', visible: false).value).to eq(project.id.to_s)
end
diff --git a/spec/features/search/user_searches_for_wiki_pages_spec.rb b/spec/features/search/user_searches_for_wiki_pages_spec.rb
index 65f262075f9..4de28a99c21 100644
--- a/spec/features/search/user_searches_for_wiki_pages_spec.rb
+++ b/spec/features/search/user_searches_for_wiki_pages_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'User searches for wiki pages', :js, :clean_gitlab_redis_rate_limiting,
feature_category: :global_search do
+ include ListboxHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
let_it_be(:wiki_page) do
@@ -29,11 +30,10 @@ RSpec.describe 'User searches for wiki pages', :js, :clean_gitlab_redis_rate_lim
wait_for_requests
page.within('[data-testid="project-filter"]') do
- click_on(project.name)
+ select_listbox_item project.name
end
- fill_in('dashboard_search', with: search_term)
- find('.gl-search-box-by-click-search-button').click
+ submit_dashboard_search(search_term)
select_search_scope('Wiki')
page.within('.results') do
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 1ab47f6fd59..e36b04636ce 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe 'User uses header search field', :js, :disable_rate_limiter, feat
end
end
- context 'when clicking merge requests', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332317' do
+ context 'when clicking merge requests' do
let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignees: [user]) }
it 'shows assigned merge requests' do
diff --git a/spec/features/search/user_uses_search_filters_spec.rb b/spec/features/search/user_uses_search_filters_spec.rb
index 5e553cb0869..b95421fab59 100644
--- a/spec/features/search/user_uses_search_filters_spec.rb
+++ b/spec/features/search/user_uses_search_filters_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'User uses search filters', :js, feature_category: :global_search do
+ include ListboxHelpers
let(:group) { create(:group) }
let!(:group_project) { create(:project, group: group) }
let(:project) { create(:project, namespace: user.namespace) }
@@ -23,7 +24,7 @@ RSpec.describe 'User uses search filters', :js, feature_category: :global_search
wait_for_requests
page.within('[data-testid="group-filter"]') do
- click_on(group.name)
+ select_listbox_item group.name
end
expect(find('[data-testid="group-filter"]')).to have_content(group.name)
@@ -33,7 +34,7 @@ RSpec.describe 'User uses search filters', :js, feature_category: :global_search
wait_for_requests
page.within('[data-testid="project-filter"]') do
- click_on(group_project.name)
+ select_listbox_item group_project.name
end
expect(find('[data-testid="project-filter"]')).to have_content(group_project.name)
@@ -46,12 +47,17 @@ RSpec.describe 'User uses search filters', :js, feature_category: :global_search
describe 'clear filter button' do
it 'removes Group and Project filters' do
- find('[data-testid="group-filter"] [data-testid="clear-icon"]').click
+ page.within '[data-testid="group-filter"]' do
+ toggle_listbox
+ wait_for_requests
- wait_for_requests
+ find('[data-testid="listbox-reset-button"]').click
- expect(page).to have_current_path(search_path, ignore_query: true) do |uri|
- uri.normalized_query(:sorted) == "scope=blobs&search=test"
+ wait_for_requests
+
+ expect(page).to have_current_path(search_path, ignore_query: true) do |uri|
+ uri.normalized_query(:sorted) == "scope=blobs&search=test"
+ end
end
end
end
@@ -67,7 +73,7 @@ RSpec.describe 'User uses search filters', :js, feature_category: :global_search
wait_for_requests
page.within('[data-testid="project-filter"]') do
- click_on(project.name)
+ select_listbox_item project.name
end
expect(find('[data-testid="project-filter"]')).to have_content(project.name)
@@ -82,11 +88,17 @@ RSpec.describe 'User uses search filters', :js, feature_category: :global_search
describe 'clear filter button' do
it 'removes Project filters' do
- find('[data-testid="project-filter"] [data-testid="clear-icon"]').click
- wait_for_requests
+ page.within '[data-testid="project-filter"]' do
+ toggle_listbox
+ wait_for_requests
+
+ find('[data-testid="listbox-reset-button"]').click
+
+ wait_for_requests
- expect(page).to have_current_path(search_path, ignore_query: true) do |uri|
- uri.normalized_query(:sorted) == "scope=blobs&search=test"
+ expect(page).to have_current_path(search_path, ignore_query: true) do |uri|
+ uri.normalized_query(:sorted) == "scope=blobs&search=test"
+ end
end
end
end
diff --git a/spec/features/uploads/user_uploads_avatar_to_group_spec.rb b/spec/features/uploads/user_uploads_avatar_to_group_spec.rb
index 2872446ed6b..b8fb509c887 100644
--- a/spec/features/uploads/user_uploads_avatar_to_group_spec.rb
+++ b/spec/features/uploads/user_uploads_avatar_to_group_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'User uploads avatar to group', feature_category: :user_profile d
visit group_path(group)
- expect(page).to have_selector(%(img[data-src$="/uploads/-/system/group/avatar/#{group.id}/dk.png"]))
+ expect(page).to have_selector(%(img[src$="/uploads/-/system/group/avatar/#{group.id}/dk.png?width=16"]))
# Cheating here to verify something that isn't user-facing, but is important
expect(group.reload.avatar.file).to exist
diff --git a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
index 83eb7cb989e..5d121d9eeba 100644
--- a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
+++ b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
@@ -18,8 +18,10 @@ RSpec.describe 'User uploads avatar to profile', feature_category: :user_profile
wait_for_all_requests
- data_uri = find('.avatar-image .gl-avatar')['src']
- within_testid('user-dropdown') { expect(find('.gl-avatar')['src']).to eq data_uri }
+ within_testid('user-dropdown') do
+ # We are setting a blob URL
+ expect(find('.gl-avatar')['src']).to start_with 'blob:'
+ end
visit profile_path
diff --git a/spec/features/user_sees_active_nav_items_spec.rb b/spec/features/user_sees_active_nav_items_spec.rb
index 966b8491374..1e6b2b8f189 100644
--- a/spec/features/user_sees_active_nav_items_spec.rb
+++ b/spec/features/user_sees_active_nav_items_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe 'User sees correct active nav items in the super sidebar', :js, f
context 'when visiting authentication logs' do
before do
- visit audit_log_profile_path
+ visit user_settings_authentication_log_path
end
it 'renders the side navigation with the correct submenu set as active' do
diff --git a/spec/features/profiles/active_sessions_spec.rb b/spec/features/user_settings/active_sessions_spec.rb
index 2e800ae88b6..5d1d4bc6490 100644
--- a/spec/features/profiles/active_sessions_spec.rb
+++ b/spec/features/user_settings/active_sessions_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state, fe
end
using_session :session1 do
- visit profile_active_sessions_path
+ visit user_settings_active_sessions_path
expect(page).to(have_selector('ul.list-group li.list-group-item', text: 'Signed in on', count: 2))
@@ -93,7 +93,7 @@ RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state, fe
using_session :session1 do
gitlab_sign_in(user)
- visit profile_active_sessions_path
+ visit user_settings_active_sessions_path
expect(page).to have_link('Revoke', count: 1)
@@ -105,7 +105,7 @@ RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state, fe
end
using_session :session2 do
- visit profile_active_sessions_path
+ visit user_settings_active_sessions_path
expect(page).to have_content('You need to sign in or sign up before continuing.')
end
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/user_settings/password_spec.rb
index 05e492e7021..76e3f85e021 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/user_settings/password_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Profile > Password', feature_category: :user_profile do
+RSpec.describe 'User Settings > Password', feature_category: :user_profile do
let(:user) { create(:user) }
def fill_passwords(password, confirmation)
@@ -12,16 +12,16 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
click_button 'Save password'
end
- context 'Password authentication enabled' do
+ context 'when password authentication enabled' do
let(:new_password) { User.random_password }
let(:user) { create(:user, password_automatically_set: true) }
before do
sign_in(user)
- visit edit_profile_password_path
+ visit edit_user_settings_password_path
end
- context 'User with password automatically set' do
+ context 'when User with password automatically set' do
describe 'User puts different passwords in the field and in the confirmation' do
it 'shows an error message' do
fill_passwords(new_password, "#{new_password}2")
@@ -42,7 +42,7 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
it 'shows a success message' do
fill_passwords(new_password, new_password)
- page.within('[data-testid="alert-info"]') do
+ within_testid('alert-info') do
expect(page).to have_content('Password was successfully updated. Please sign in again.')
end
end
@@ -50,8 +50,8 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
end
end
- context 'Password authentication unavailable' do
- context 'Regular user' do
+ context 'when password authentication unavailable' do
+ context 'with Regular user' do
before do
gitlab_sign_in(user)
end
@@ -62,13 +62,13 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
stub_application_setting(password_authentication_enabled_for_web: false)
stub_application_setting(password_authentication_enabled_for_git: false)
- visit edit_profile_password_path
+ visit edit_user_settings_password_path
expect(page).to have_gitlab_http_status(:not_found)
end
end
- context 'LDAP user' do
+ context 'with LDAP user' do
include LdapHelpers
let(:ldap_settings) { { enabled: true } }
@@ -87,7 +87,7 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
end
it 'renders 404', :js do
- visit edit_profile_password_path
+ visit edit_user_settings_password_path
expect(page).to have_title('Not Found')
expect(page).to have_content('Page Not Found')
@@ -95,12 +95,12 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
end
end
- context 'Change password' do
+ context 'when changing password' do
let(:new_password) { User.random_password }
before do
sign_in(user)
- visit(edit_profile_password_path)
+ visit(edit_user_settings_password_path)
end
shared_examples 'user enters an incorrect current password' do
@@ -113,7 +113,8 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
it 'handles the invalid password attempt, and prompts the user to try again', :aggregate_failures do
expect(Gitlab::AppLogger).to receive(:info)
- .with(message: 'Invalid current password when attempting to update user password', username: user.username, ip: user.current_sign_in_ip)
+ .with(message: 'Invalid current password when attempting to update user password', username: user.username,
+ ip: user.current_sign_in_ip)
subject
@@ -121,7 +122,7 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
expect(user.failed_attempts).to eq(1)
expect(user.valid_password?(new_password)).to eq(false)
- expect(page).to have_current_path(edit_profile_password_path, ignore_query: true)
+ expect(page).to have_current_path(edit_user_settings_password_path, ignore_query: true)
page.within '.flash-container' do
expect(page).to have_content('You must provide a valid current password')
@@ -174,7 +175,7 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
expect_snowplow_event(
category: 'Gitlab::Tracking::Helpers::WeakPasswordErrorEvent',
action: 'track_weak_password_error',
- controller: 'Profiles::PasswordsController',
+ controller: 'UserSettings::PasswordsController',
method: 'update'
)
end
@@ -207,13 +208,12 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
user.update!(password_expires_at: 1.hour.ago)
user.identities.delete
- expect(user.ldap_user?).to eq false
end
it 'needs change user password' do
- visit edit_profile_password_path
+ visit edit_user_settings_password_path
- expect(page).to have_current_path new_profile_password_path, ignore_query: true
+ expect(page).to have_current_path new_user_settings_password_path, ignore_query: true
fill_in :user_password, with: user.password
fill_in :user_new_password, with: new_password
@@ -224,9 +224,9 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
end
it 'tracks weak password error' do
- visit edit_profile_password_path
+ visit edit_user_settings_password_path
- expect(page).to have_current_path new_profile_password_path, ignore_query: true
+ expect(page).to have_current_path new_user_settings_password_path, ignore_query: true
fill_in :user_password, with: user.password
fill_in :user_new_password, with: "password"
@@ -235,7 +235,7 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
expect_snowplow_event(
category: 'Gitlab::Tracking::Helpers::WeakPasswordErrorEvent',
action: 'track_weak_password_error',
- controller: 'Profiles::PasswordsController',
+ controller: 'UserSettings::PasswordsController',
method: 'create'
)
end
@@ -246,7 +246,7 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
visit profile_path
- expect(page).to have_current_path new_profile_password_path, ignore_query: true
+ expect(page).to have_current_path new_user_settings_password_path, ignore_query: true
end
end
end
diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/user_settings/personal_access_tokens_spec.rb
index 094855393be..55f1edfd26a 100644
--- a/spec/features/profiles/personal_access_tokens_spec.rb
+++ b/spec/features/user_settings/personal_access_tokens_spec.rb
@@ -2,12 +2,15 @@
require 'spec_helper'
-RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_profile do
+RSpec.describe 'User Settings > Personal Access Tokens', :js, feature_category: :system_access do
include Spec::Support::Helpers::ModalHelpers
include Features::AccessTokenHelpers
let(:user) { create(:user) }
- let(:pat_create_service) { double('PersonalAccessTokens::CreateService', execute: ServiceResponse.error(message: 'error', payload: { personal_access_token: PersonalAccessToken.new })) }
+ let(:pat_create_service) do
+ instance_double('PersonalAccessTokens::CreateService',
+ execute: ServiceResponse.error(message: 'error', payload: { personal_access_token: PersonalAccessToken.new }))
+ end
before do
sign_in(user)
@@ -17,7 +20,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
it "allows creation of a personal access token" do
name = 'My PAT'
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
click_button 'Add new token'
fill_in "Token name", with: name
@@ -44,7 +47,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
context "when creation fails" do
it "displays an error message" do
number_tokens_before = PersonalAccessToken.count
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
click_button 'Add new token'
fill_in "Token name", with: 'My PAT'
@@ -64,7 +67,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
let!(:personal_access_token) { create(:personal_access_token, user: user) }
it 'only shows personal access tokens' do
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
expect(active_access_tokens).to have_text(personal_access_token.name)
expect(active_access_tokens).not_to have_text(impersonation_token.name)
@@ -76,7 +79,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
end
it 'shows absolute times for expires_at' do
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
expect(active_access_tokens).to have_text(PersonalAccessToken.last.expires_at.strftime('%b %-d'))
end
@@ -87,7 +90,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
let!(:personal_access_token) { create(:personal_access_token, user: user) }
it "allows revocation of an active token" do
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
accept_gl_confirm(button_text: 'Revoke') { click_on "Revoke" }
expect(active_access_tokens).to have_text("This user has no active personal access tokens.")
@@ -95,7 +98,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
it "removes expired tokens from 'active' section" do
personal_access_token.update!(expires_at: 5.days.ago)
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
expect(active_access_tokens).to have_text("This user has no active personal access tokens.")
end
@@ -105,7 +108,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
allow_next_instance_of(PersonalAccessTokens::RevokeService) do |instance|
allow(instance).to receive(:revocation_permitted?).and_return(false)
end
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
accept_gl_confirm(button_text: "Revoke") { click_on "Revoke" }
expect(active_access_tokens).to have_text(personal_access_token.name)
@@ -115,15 +118,16 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
describe "feed token" do
def feed_token_description
- "Your feed token authenticates you when your RSS reader loads a personalized RSS feed or when your calendar application loads a personalized calendar. It is visible in those feed URLs."
+ "Your feed token authenticates you when your RSS reader loads a personalized RSS feed or when your calendar\
+ application loads a personalized calendar. It is visible in those feed URLs."
end
context "when enabled" do
it "displays feed token" do
allow(Gitlab::CurrentSettings).to receive(:disable_feed_token).and_return(false)
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
- within('[data-testid="feed-token-container"]') do
+ within_testid('feed-token-container') do
click_button('Click to reveal')
expect(page).to have_field('Feed token', with: user.feed_token)
@@ -135,7 +139,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
context "when disabled" do
it "does not display feed token" do
allow(Gitlab::CurrentSettings).to receive(:disable_feed_token).and_return(true)
- visit profile_personal_access_tokens_path
+ visit user_settings_personal_access_tokens_path
expect(page).not_to have_content(feed_token_description)
expect(page).not_to have_field('Feed token')
@@ -147,7 +151,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_
name = 'My PAT'
scopes = 'api,read_user'
- visit profile_personal_access_tokens_path({ name: name, scopes: scopes })
+ visit user_settings_personal_access_tokens_path({ name: name, scopes: scopes })
click_button 'Add new token'
expect(page).to have_field("Token name", with: name)
diff --git a/spec/features/user_sorts_things_spec.rb b/spec/features/user_sorts_things_spec.rb
index bc377fb1f8f..c53ad961d84 100644
--- a/spec/features/user_sorts_things_spec.rb
+++ b/spec/features/user_sorts_things_spec.rb
@@ -11,13 +11,13 @@ RSpec.describe "User sorts things", :js do
include DashboardHelper
let_it_be(:project) { create(:project_empty_repo, :public) }
- let_it_be(:current_user) { create(:user) } # Using `current_user` instead of just `user` because of the hardoced call in `assigned_mrs_dashboard_path` which is used below.
- let_it_be(:issue) { create(:issue, project: project, author: current_user) }
- let_it_be(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: current_user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue, project: project, author: user) }
+ let_it_be(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: user) }
before do
- project.add_developer(current_user)
- sign_in(current_user)
+ project.add_developer(user)
+ sign_in(user)
end
it "issues -> project home page -> issues", feature_category: :team_planning do
@@ -25,8 +25,7 @@ RSpec.describe "User sorts things", :js do
visit(project_issues_path(project))
- click_button s_('SortOptions|Created date')
- click_button sort_option
+ pajamas_sort_by sort_option, from: s_('SortOptions|Created date')
visit(project_path(project))
visit(project_issues_path(project))
@@ -39,9 +38,9 @@ RSpec.describe "User sorts things", :js do
visit(project_merge_requests_path(project))
- pajamas_sort_by(sort_option)
+ pajamas_sort_by sort_option, from: s_('SortOptions|Created date')
- visit(assigned_mrs_dashboard_path)
+ visit(merge_requests_dashboard_path(assignee_username: user.username))
expect(find(".issues-filters")).to have_content(sort_option)
end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 0f086af227c..87ed4ced684 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -578,7 +578,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
fill_in 'user_password', with: user.password
click_button 'Sign in'
- expect(page).to have_current_path(new_profile_password_path, ignore_query: true)
+ expect(page).to have_current_path(new_user_settings_password_path, ignore_query: true)
end
end
end
@@ -785,6 +785,13 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
ensure_no_tabs
end
+ it 'renders logo', :js do
+ visit new_user_session_path
+
+ image = find('img.js-portrait-logo-detection')
+ expect(image['class']).to include('gl-h-9')
+ end
+
it 'renders link to sign up path' do
visit new_user_session_path
@@ -1048,7 +1055,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
expect_to_be_on_terms_page
click_button 'Accept terms'
- expect(page).to have_current_path(new_profile_password_path, ignore_query: true)
+ expect(page).to have_current_path(new_user_settings_password_path, ignore_query: true)
new_password = User.random_password
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index 2821e8286a4..c56b261fe28 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -410,7 +410,7 @@ RSpec.describe 'User page', feature_category: :user_profile do
subject
- page.within '.navbar-gitlab' do
+ within_testid('navbar') do
expect(page).to have_link('Sign in')
expect(page).not_to have_link('Register')
end
@@ -423,7 +423,7 @@ RSpec.describe 'User page', feature_category: :user_profile do
subject
- page.within '.navbar-gitlab' do
+ within_testid('navbar') do
expect(page).to have_link(_('Sign in'), exact: true)
expect(page).to have_link(_('Register'), exact: true)
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index d873c4846fd..bb08ed7d07d 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -62,17 +62,11 @@ RSpec.describe 'Signup', :js, feature_category: :user_management do
let(:terms_text) do
<<~TEXT.squish
By clicking Register or registering through a third party you accept the
- Terms of Use and acknowledge the Privacy Policy and Cookie Policy
+ Terms of Use and acknowledge the Privacy Statement and Cookie Policy
TEXT
end
shared_examples 'signup process' do
- def confirm_email
- new_user_token = User.find_by_email(new_user.email).confirmation_token
-
- visit user_confirmation_path(confirmation_token: new_user_token)
- end
-
before do
stub_feature_flags(arkose_labs_signup_challenge: false)
stub_application_setting(require_admin_approval_after_user_signup: false)
@@ -220,7 +214,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_management do
expect(page).to have_current_path users_almost_there_path, ignore_query: true
expect(page).to have_content("Please check your email (#{new_user.email}) to confirm your account")
- confirm_email
+ confirm_email(new_user)
expect(find_field('Username or primary email').value).to eq(new_user.email)
end
@@ -345,11 +339,24 @@ RSpec.describe 'Signup', :js, feature_category: :user_management do
end
context 'when invisible captcha detects malicious behaviour' do
- it 'prevents from signing up' do
- visit new_user_registration_path
+ context 'with form submitted quicker than timestamp_threshold', :freeze_time do
+ it 'prevents from signing up' do
+ visit new_user_registration_path
- expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
- expect(page).to have_content('That was a bit too quick! Please resubmit.')
+ expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
+ expect(page).to have_content('That was a bit too quick! Please resubmit.')
+ end
+ end
+
+ context 'with honeypot field is filled' do
+ it 'prevents from signing up' do
+ visit new_user_registration_path
+
+ find_field('If you are human, please ignore this field.',
+ visible: false).execute_script("this.value = 'bot'")
+
+ expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
+ end
end
end
end
@@ -389,7 +396,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_management do
let(:terms_text) do
<<~TEXT.squish
By clicking Register, I agree that I have read and accepted the Terms of
- Use and Privacy Policy
+ Use and Privacy Statement
TEXT
end
diff --git a/spec/finders/branches_finder_spec.rb b/spec/finders/branches_finder_spec.rb
index 004629eda95..3d80ed19eb6 100644
--- a/spec/finders/branches_finder_spec.rb
+++ b/spec/finders/branches_finder_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe BranchesFinder, feature_category: :source_code_management do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
let(:branch_finder) { described_class.new(repository, params) }
@@ -344,6 +345,60 @@ RSpec.describe BranchesFinder, feature_category: :source_code_management do
end
end
+ describe '#next_cursor' do
+ subject { branch_finder.next_cursor }
+
+ it 'always nil before #execute call' do
+ is_expected.to be_nil
+ end
+
+ context 'after #execute' do
+ context 'with gitaly pagination' do
+ before do
+ branch_finder.execute(gitaly_pagination: true)
+ end
+
+ context 'without pagination params' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with pagination params' do
+ let(:params) { { per_page: 5 } }
+
+ it { is_expected.to be_present }
+
+ context 'when all objects can be returned on the same page' do
+ let(:params) { { per_page: 100 } }
+
+ it { is_expected.to be_present }
+ end
+ end
+ end
+
+ context 'without gitaly pagination' do
+ before do
+ branch_finder.execute(gitaly_pagination: false)
+ end
+
+ context 'without pagination params' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with pagination params' do
+ let(:params) { { per_page: 5 } }
+
+ it { is_expected.to be_nil }
+
+ context 'when all objects can be returned on the same page' do
+ let(:params) { { per_page: 100 } }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+ end
+
describe '#total' do
subject { branch_finder.total }
diff --git a/spec/finders/ci/catalog/resources/versions_finder_spec.rb b/spec/finders/ci/catalog/resources/versions_finder_spec.rb
index b2418aa45dd..b541b84f198 100644
--- a/spec/finders/ci/catalog/resources/versions_finder_spec.rb
+++ b/spec/finders/ci/catalog/resources/versions_finder_spec.rb
@@ -22,13 +22,13 @@ RSpec.describe Ci::Catalog::Resources::VersionsFinder, feature_category: :pipeli
end.not_to exceed_query_limit(control_count)
end
- context 'when the user is not authorized for any catalog resource' do
+ context 'when the user is not authorized' do
it 'returns empty response' do
is_expected.to be_empty
end
end
- describe 'versions' do
+ context 'when the user is authorized' do
before_all do
resource1.project.add_guest(current_user)
end
@@ -74,7 +74,7 @@ RSpec.describe Ci::Catalog::Resources::VersionsFinder, feature_category: :pipeli
end
end
- describe 'latest versions' do
+ context 'when `latest` parameter is true' do
before_all do
resource1.project.add_guest(current_user)
resource2.project.add_guest(current_user)
@@ -85,22 +85,5 @@ RSpec.describe Ci::Catalog::Resources::VersionsFinder, feature_category: :pipeli
it 'returns the latest version for each authorized catalog resource' do
expect(execute).to match_array([v1_1, v2_1])
end
-
- context 'when one catalog resource does not have versions' do
- it 'returns the latest version of only the catalog resource with versions' do
- resource1.versions.delete_all(:delete_all)
-
- is_expected.to match_array([v2_1])
- end
- end
-
- context 'when no catalog resource has versions' do
- it 'returns empty response' do
- resource1.versions.delete_all(:delete_all)
- resource2.versions.delete_all(:delete_all)
-
- is_expected.to be_empty
- end
- end
end
end
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index 7f680f50297..fbe44244dba 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
+RSpec.describe Ci::RunnersFinder, feature_category: :fleet_visibility do
context 'admin' do
let_it_be(:admin) { create(:user, :admin) }
diff --git a/spec/finders/concerns/packages/finder_helper_spec.rb b/spec/finders/concerns/packages/finder_helper_spec.rb
index f81e940c7ed..4145e1e2a54 100644
--- a/spec/finders/concerns/packages/finder_helper_spec.rb
+++ b/spec/finders/concerns/packages/finder_helper_spec.rb
@@ -27,6 +27,115 @@ RSpec.describe ::Packages::FinderHelper, feature_category: :package_registry do
it { is_expected.to eq [package1] }
end
+ describe '#packages_for' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:subgroup) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:project2) { create(:project, namespace: subgroup) }
+ let_it_be(:package1) { create(:package, project: project) }
+ let_it_be(:package2) { create(:package, project: project2) }
+ let_it_be(:package3) { create(:package, :error, project: project2) }
+
+ let(:finder_class) do
+ Class.new do
+ include ::Packages::FinderHelper
+
+ def initialize(user)
+ @current_user = user
+ end
+
+ def execute(group)
+ packages_for(@current_user, within_group: group)
+ end
+ end
+ end
+
+ let(:finder) { finder_class.new(user) }
+
+ subject { finder.execute(group) }
+
+ shared_examples 'returning both packages' do
+ it { is_expected.to contain_exactly(package1, package2) }
+ end
+
+ shared_examples 'returning no packages' do
+ it { is_expected.to be_empty }
+ end
+
+ shared_examples 'returning package2' do
+ it { is_expected.to contain_exactly(package2) }
+ end
+
+ context 'with an user' do
+ let_it_be(:user) { create(:user) }
+
+ where(:group_visibility, :subgroup_visibility, :shared_example_name) do
+ 'public' | 'public' | 'returning both packages'
+ # All packages are returned because of the parent group visibility set to `public`
+ # and all users will have `read_group` permission.
+ 'public' | 'private' | 'returning both packages'
+ # No packages are returned because of the parent group visibility set to `private`
+ # and non-members won't have `read_group` permission.
+ 'private' | 'private' | 'returning no packages'
+ end
+
+ with_them do
+ before do
+ subgroup.update!(visibility: subgroup_visibility)
+ group.update!(visibility: group_visibility)
+ end
+
+ it_behaves_like params[:shared_example_name]
+ end
+
+ context 'without a group' do
+ subject { finder.execute(nil) }
+
+ it_behaves_like 'returning no packages'
+ end
+
+ context 'with a subgroup' do
+ subject { finder.execute(subgroup) }
+
+ it_behaves_like 'returning package2'
+ end
+ end
+
+ context 'with a deploy token' do
+ let_it_be(:user) { create(:deploy_token, :group, read_package_registry: true) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ where(:group_visibility, :subgroup_visibility, :shared_example_name) do
+ 'public' | 'public' | 'returning both packages'
+ 'public' | 'private' | 'returning both packages'
+ 'private' | 'private' | 'returning both packages'
+ end
+
+ with_them do
+ before do
+ subgroup.update!(visibility: subgroup_visibility)
+ group.update!(visibility: group_visibility)
+ end
+
+ it_behaves_like params[:shared_example_name]
+ end
+
+ context 'without a group' do
+ subject { finder.execute(nil) }
+
+ it_behaves_like 'returning no packages'
+ end
+
+ context 'with a subgroup' do
+ subject { finder.execute(subgroup) }
+
+ it_behaves_like 'returning both packages'
+ end
+ end
+ end
+
describe '#packages_visible_to_user' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/finders/deploy_keys/deploy_keys_finder_spec.rb b/spec/finders/deploy_keys/deploy_keys_finder_spec.rb
new file mode 100644
index 00000000000..f0d3935cc95
--- /dev/null
+++ b/spec/finders/deploy_keys/deploy_keys_finder_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DeployKeys::DeployKeysFinder, feature_category: :continuous_delivery do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let_it_be(:accessible_project) { create(:project, :internal).tap { |p| p.add_developer(user) } }
+ let_it_be(:inaccessible_project) { create(:project, :internal) }
+ let_it_be(:project_private) { create(:project, :private) }
+
+ let_it_be(:deploy_key_for_target_project) do
+ create(:deploy_keys_project, project: project, deploy_key: create(:deploy_key))
+ end
+
+ let_it_be(:deploy_key_for_accessible_project) do
+ create(:deploy_keys_project, project: accessible_project, deploy_key: create(:deploy_key))
+ end
+
+ let_it_be(:deploy_key_for_inaccessible_project) do
+ create(:deploy_keys_project, project: inaccessible_project, deploy_key: create(:deploy_key))
+ end
+
+ let_it_be(:deploy_keys_project_private) do
+ create(:deploy_keys_project, project: project_private, deploy_key: create(:another_deploy_key))
+ end
+
+ let_it_be(:deploy_key_public) { create(:deploy_key, public: true) }
+
+ let(:params) { {} }
+
+ subject(:result) { described_class.new(project, user, params).execute }
+
+ context 'with access' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ context 'when filtering for enabled_keys' do
+ let(:params) { { filter: :enabled_keys } }
+
+ it 'returns the correct result' do
+ expect(result.map(&:id)).to match_array([deploy_key_for_target_project.deploy_key_id])
+ end
+ end
+
+ context 'when filtering for available project keys' do
+ let(:params) { { filter: :available_project_keys } }
+
+ it 'returns the correct result' do
+ expect(result.map(&:id)).to match_array([deploy_key_for_accessible_project.deploy_key_id])
+ end
+ end
+
+ context 'when filtering for available public keys' do
+ let(:params) { { filter: :available_public_keys } }
+
+ it 'returns the correct result' do
+ expect(result.map(&:id)).to match_array([deploy_key_public.id])
+ end
+ end
+
+ context 'when there are no set filters' do
+ it 'returns an empty collection' do
+ expect(result).to eq DeployKey.none
+ end
+ end
+ end
+
+ context 'without access' do
+ it 'returns an empty collection' do
+ expect(result).to eq DeployKey.none
+ end
+ end
+ end
+end
diff --git a/spec/finders/groups/custom_emoji_finder_spec.rb b/spec/finders/groups/custom_emoji_finder_spec.rb
new file mode 100644
index 00000000000..f1044997d4f
--- /dev/null
+++ b/spec/finders/groups/custom_emoji_finder_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::CustomEmojiFinder, feature_category: :code_review_workflow do
+ describe '#execute' do
+ let(:params) { {} }
+
+ subject(:execute) { described_class.new(group, params).execute }
+
+ context 'when inside a group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:custom_emoji) { create(:custom_emoji, group: group) }
+
+ it 'returns custom emoji from group' do
+ expect(execute).to contain_exactly(custom_emoji)
+ end
+ end
+
+ context 'when group is nil' do
+ let_it_be(:group) { nil }
+
+ it 'returns nil' do
+ expect(execute).to be_empty
+ end
+ end
+
+ context 'when group is a subgroup' do
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
+ let_it_be(:custom_emoji) { create(:custom_emoji, group: group) }
+
+ it 'returns custom emoji' do
+ expect(described_class.new(group, params).execute).to contain_exactly(custom_emoji)
+ end
+ end
+
+ describe 'when custom emoji is in parent group' do
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
+ let_it_be(:custom_emoji) { create(:custom_emoji, group: parent) }
+ let(:params) { { include_ancestor_groups: true } }
+
+ it 'returns custom emoji' do
+ expect(execute).to contain_exactly(custom_emoji)
+ end
+
+ context 'when params is empty' do
+ let(:params) { {} }
+
+ it 'returns empty record' do
+ expect(execute).to eq([])
+ end
+ end
+
+ context 'when include_ancestor_groups is false' do
+ let(:params) { { include_ancestor_groups: false } }
+
+ it 'returns empty record' do
+ expect(execute).to eq([])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index f20c03c9658..5d69988a761 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -274,6 +274,38 @@ RSpec.describe GroupsFinder, feature_category: :groups_and_projects do
end
end
+ context 'with organization' do
+ let_it_be(:organization_user) { create(:organization_user) }
+ let_it_be(:organization) { organization_user.organization }
+ let_it_be(:user) { organization_user.user }
+ let_it_be(:public_group) { create(:group, name: 'public-group', organization: organization) }
+ let_it_be(:outside_organization_group) { create(:group) }
+ let_it_be(:private_group) { create(:group, :private, name: 'private-group', organization: organization) }
+ let_it_be(:no_access_group_in_org) { create(:group, :private, name: 'no-access', organization: organization) }
+
+ let(:current_user) { user }
+ let(:params) { { organization: organization } }
+ let(:finder) { described_class.new(current_user, params) }
+
+ subject(:result) { finder.execute.to_a }
+
+ before_all do
+ private_group.add_developer(user)
+ public_group.add_developer(user)
+ outside_organization_group.add_developer(user)
+ end
+
+ context 'when user is only authorized to read the public group' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to contain_exactly(public_group) }
+ end
+
+ it 'returns all groups inside the organization' do
+ expect(result).to contain_exactly(public_group, private_group)
+ end
+ end
+
context 'with include_ancestors' do
let_it_be(:user) { create(:user) }
diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb
index 4df6197e3b0..e0fc494d033 100644
--- a/spec/finders/members_finder_spec.rb
+++ b/spec/finders/members_finder_spec.rb
@@ -161,6 +161,27 @@ RSpec.describe MembersFinder, feature_category: :groups_and_projects do
expect(result).to eq([member3, member2, member1])
end
+ it 'avoids N+1 database queries on accessing user records' do
+ project.add_maintainer(user2)
+
+ # warm up
+ # We need this warm up because there is 1 query being fired in one of the policies,
+ # and policy results are cached. Without a warm up, the control_count will be X queries
+ # but the test phase will only fire X-1 queries, due the fact that the
+ # result of the policy is already available in the cache.
+ described_class.new(project, user2).execute.map(&:user)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ described_class.new(project, user2).execute.map(&:user)
+ end
+
+ create_list(:project_member, 3, project: project)
+
+ expect do
+ described_class.new(project, user2).execute.map(&:user)
+ end.to issue_same_number_of_queries_as(control_count)
+ end
+
context 'with :shared_into_ancestors' do
let_it_be(:invited_group) do
create(:group).tap do |invited_group|
diff --git a/spec/finders/milestones_finder_spec.rb b/spec/finders/milestones_finder_spec.rb
index 118679a4911..dee73625cb8 100644
--- a/spec/finders/milestones_finder_spec.rb
+++ b/spec/finders/milestones_finder_spec.rb
@@ -101,6 +101,13 @@ RSpec.describe MilestonesFinder do
expect(result).to contain_exactly(milestone_1, milestone_2)
end
+ it 'filters by id or title' do
+ params[:ids] = [milestone_2.id]
+ params[:title] = [milestone_1.title]
+
+ expect(result).to contain_exactly(milestone_1, milestone_2)
+ end
+
it 'filters by active state' do
params[:state] = 'active'
@@ -182,9 +189,9 @@ RSpec.describe MilestonesFinder do
expect(result).to contain_exactly(milestone_2, milestone_3, milestone_4)
end
- context 'when include_parent_milestones is true' do
+ context 'when include_ancestors is true' do
it 'ignores the iid filter' do
- params[:include_parent_milestones] = true
+ params[:include_ancestors] = true
expect(result).to contain_exactly(milestone_1, milestone_2, milestone_3, milestone_4)
end
diff --git a/spec/finders/organizations/groups_finder_spec.rb b/spec/finders/organizations/groups_finder_spec.rb
deleted file mode 100644
index 08c5604149b..00000000000
--- a/spec/finders/organizations/groups_finder_spec.rb
+++ /dev/null
@@ -1,84 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Organizations::GroupsFinder, feature_category: :cell do
- let_it_be(:organization_user) { create(:organization_user) }
- let_it_be(:organization) { organization_user.organization }
- let_it_be(:user) { organization_user.user }
- let_it_be(:public_group) { create(:group, name: 'public-group', organization: organization) }
- let_it_be(:other_group) { create(:group, name: 'other-group', organization: organization) }
- let_it_be(:outside_organization_group) { create(:group) }
- let_it_be(:private_group) do
- create(:group, :private, name: 'private-group', organization: organization)
- end
-
- let_it_be(:no_access_group_in_org) do
- create(:group, :private, name: 'no-access', organization: organization)
- end
-
- let(:current_user) { user }
- let(:params) { {} }
- let(:finder) { described_class.new(organization: organization, current_user: current_user, params: params) }
-
- before_all do
- private_group.add_developer(user)
- public_group.add_developer(user)
- other_group.add_developer(user)
- outside_organization_group.add_developer(user)
- end
-
- subject(:result) { finder.execute.to_a }
-
- describe '#execute' do
- context 'when user is not authorized to read the organization' do
- let(:current_user) { create(:user) }
-
- it { is_expected.to be_empty }
- end
-
- context 'when organization is nil' do
- let(:organization) { nil }
-
- it { is_expected.to be_empty }
- end
-
- context 'when user is authorized to read the organization' do
- it 'return all accessible groups' do
- expect(result).to contain_exactly(public_group, private_group, other_group)
- end
-
- context 'when search param is passed' do
- let(:params) { { search: 'the' } }
-
- it 'filters the groups by search' do
- expect(result).to contain_exactly(other_group)
- end
- end
-
- context 'when sort param is not passed' do
- it 'return groups sorted by name in ascending order by default' do
- expect(result).to eq([other_group, private_group, public_group])
- end
- end
-
- context 'when sort param is passed' do
- using RSpec::Parameterized::TableSyntax
-
- where(:field, :direction, :sorted_groups) do
- 'name' | 'asc' | lazy { [other_group, private_group, public_group] }
- 'name' | 'desc' | lazy { [public_group, private_group, other_group] }
- 'path' | 'asc' | lazy { [other_group, private_group, public_group] }
- 'path' | 'desc' | lazy { [public_group, private_group, other_group] }
- end
-
- with_them do
- let(:params) { { sort: { field: field, direction: direction } } }
- it 'sorts the groups' do
- expect(result).to eq(sorted_groups)
- end
- end
- end
- end
- end
-end
diff --git a/spec/finders/packages/maven/package_finder_spec.rb b/spec/finders/packages/maven/package_finder_spec.rb
index f769471fcc7..e5ece42baaa 100644
--- a/spec/finders/packages/maven/package_finder_spec.rb
+++ b/spec/finders/packages/maven/package_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Packages::Maven::PackageFinder do
+RSpec.describe ::Packages::Maven::PackageFinder, feature_category: :package_registry do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
@@ -13,10 +13,6 @@ RSpec.describe ::Packages::Maven::PackageFinder do
let(:param_order_by_package_file) { false }
let(:finder) { described_class.new(user, project_or_group, path: param_path, order_by_package_file: param_order_by_package_file) }
- before do
- group.add_developer(user)
- end
-
describe '#execute' do
subject { finder.execute }
@@ -58,6 +54,24 @@ RSpec.describe ::Packages::Maven::PackageFinder do
let(:project_or_group) { group }
it_behaves_like 'handling valid and invalid paths'
+
+ context 'when the FF maven_remove_permissions_check_from_finder is disabled' do
+ before do
+ stub_feature_flags(maven_remove_permissions_check_from_finder: false)
+ end
+
+ it 'returns an empty array' do
+ is_expected.to be_empty
+ end
+
+ context 'when a user is assigned the developer role' do
+ before do
+ group.add_developer(user)
+ end
+
+ it_behaves_like 'handling valid and invalid paths'
+ end
+ end
end
context 'across all projects' do
diff --git a/spec/finders/packages/pypi/packages_finder_spec.rb b/spec/finders/packages/pypi/packages_finder_spec.rb
index 26cfaa29a0c..bf0f56c2fb2 100644
--- a/spec/finders/packages/pypi/packages_finder_spec.rb
+++ b/spec/finders/packages/pypi/packages_finder_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe Packages::Pypi::PackagesFinder do
context 'when package registry is disabled for one project' do
before do
- project2.project_feature.update!(package_registry_access_level: ProjectFeature::DISABLED)
+ project2.update!(package_registry_access_level: 'disabled', packages_enabled: false)
end
it 'filters the packages from the disabled project' do
diff --git a/spec/finders/projects/ml/model_finder_spec.rb b/spec/finders/projects/ml/model_finder_spec.rb
index a2c2836a63d..0395e387c8f 100644
--- a/spec/finders/projects/ml/model_finder_spec.rb
+++ b/spec/finders/projects/ml/model_finder_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::Ml::ModelFinder, feature_category: :mlops do
let_it_be(:project) { create(:project) }
let_it_be(:model1) { create(:ml_models, :with_versions, project: project) }
let_it_be(:model2) { create(:ml_models, :with_versions, project: project) }
- let_it_be(:model3) { create(:ml_models, name: "#{model1.name}_1", project: project) }
+ let_it_be(:model3) { create(:ml_models, name: "#{model1.name}_1", project: project, updated_at: 1.week.ago) }
let_it_be(:other_model) { create(:ml_models) }
let_it_be(:project_models) { [model1, model2, model3] }
@@ -52,6 +52,7 @@ RSpec.describe Projects::Ml::ModelFinder, feature_category: :mlops do
'by column' | 'name' | 'ASC' | [0, 2, 1]
'invalid sort' | nil | 'UP' | [2, 1, 0]
'invalid order by' | 'INVALID' | nil | [2, 1, 0]
+ 'order by updated_at' | 'updated_at' | nil | [1, 0, 2]
end
with_them do
let(:params) { { order_by: order_by, sort: direction } }
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index f7afd96fa09..e570b49e1da 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -482,6 +482,19 @@ RSpec.describe ProjectsFinder, feature_category: :groups_and_projects do
it { is_expected.to match_array([internal_project]) }
end
+ describe 'filter by organization_id' do
+ let_it_be(:organization) { create(:organization) }
+ let_it_be(:organization_project) { create(:project, organization: organization) }
+
+ let(:params) { { organization_id: organization.id } }
+
+ before do
+ organization_project.add_maintainer(current_user)
+ end
+
+ it { is_expected.to match_array([organization_project]) }
+ end
+
describe 'when with_issues_enabled is true' do
let(:params) { { with_issues_enabled: true } }
diff --git a/spec/finders/releases_finder_spec.rb b/spec/finders/releases_finder_spec.rb
index bee0ae0d5c1..2603a205c42 100644
--- a/spec/finders/releases_finder_spec.rb
+++ b/spec/finders/releases_finder_spec.rb
@@ -26,6 +26,18 @@ RSpec.describe ReleasesFinder, feature_category: :release_orchestration do
end
end
+ shared_examples_for 'when a release is tagless' do
+ # There shouldn't be tags in this state, but because some exist in production and cause page loading errors, this
+ # test exists. We can test empty string but not the nil value since there is a not null constraint at the database
+ # level.
+ it 'does not return the tagless release' do
+ empty_string_tag = create(:release, project: project, tag: 'v99.0.0')
+ empty_string_tag.update_column(:tag, '')
+
+ expect(subject).not_to include(empty_string_tag)
+ end
+ end
+
shared_examples_for 'preload' do
it 'preloads associations' do
expect(Release).to receive(:preloaded).once.and_call_original
@@ -89,6 +101,7 @@ RSpec.describe ReleasesFinder, feature_category: :release_orchestration do
it_behaves_like 'preload'
it_behaves_like 'when a tag parameter is passed'
+ it_behaves_like 'when a release is tagless'
end
end
@@ -132,6 +145,7 @@ RSpec.describe ReleasesFinder, feature_category: :release_orchestration do
it_behaves_like 'preload'
it_behaves_like 'when a tag parameter is passed'
+ it_behaves_like 'when a release is tagless'
context 'with sorting parameters' do
it 'sorted by released_at in descending order by default' do
@@ -223,6 +237,7 @@ RSpec.describe ReleasesFinder, feature_category: :release_orchestration do
end
it_behaves_like 'preload'
+ it_behaves_like 'when a release is tagless'
end
end
end
diff --git a/spec/finders/repositories/tree_finder_spec.rb b/spec/finders/repositories/tree_finder_spec.rb
index 42b4047c4e8..7c81572d13c 100644
--- a/spec/finders/repositories/tree_finder_spec.rb
+++ b/spec/finders/repositories/tree_finder_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Repositories::TreeFinder do
+RSpec.describe Repositories::TreeFinder, feature_category: :source_code_management do
include RepoHelpers
let_it_be(:user) { create(:user) }
@@ -61,6 +61,60 @@ RSpec.describe Repositories::TreeFinder do
end
end
+ describe '#next_cursor' do
+ subject { tree_finder.next_cursor }
+
+ it 'is always nil before #execute call' do
+ is_expected.to be_nil
+ end
+
+ context 'after #execute' do
+ context 'with gitaly pagination' do
+ before do
+ tree_finder.execute(gitaly_pagination: true)
+ end
+
+ context 'without pagination params' do
+ it { is_expected.to be_present }
+ end
+
+ context 'with pagination params' do
+ let(:params) { { per_page: 5 } }
+
+ it { is_expected.to be_present }
+
+ context 'when all objects can be returned on the same page' do
+ let(:params) { { per_page: 100 } }
+
+ it { is_expected.to eq('') }
+ end
+ end
+ end
+
+ context 'without gitaly pagination' do
+ before do
+ tree_finder.execute(gitaly_pagination: false)
+ end
+
+ context 'without pagination params' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with pagination params' do
+ let(:params) { { per_page: 5 } }
+
+ it { is_expected.to be_nil }
+
+ context 'when all objects can be returned on the same page' do
+ let(:params) { { per_page: 100 } }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+ end
+
describe "#total", :use_clean_rails_memory_store_caching do
subject { tree_finder.total }
diff --git a/spec/finders/tags_finder_spec.rb b/spec/finders/tags_finder_spec.rb
index 525c19ba137..378acc67a50 100644
--- a/spec/finders/tags_finder_spec.rb
+++ b/spec/finders/tags_finder_spec.rb
@@ -2,11 +2,15 @@
require 'spec_helper'
-RSpec.describe TagsFinder do
+RSpec.describe TagsFinder, feature_category: :source_code_management do
+ subject(:tags_finder) { described_class.new(repository, params) }
+
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:repository) { project.repository }
+ let(:params) { {} }
+
def load_tags(params, gitaly_pagination: false)
described_class.new(repository, params).execute(gitaly_pagination: gitaly_pagination)
end
@@ -210,4 +214,58 @@ RSpec.describe TagsFinder do
end
end
end
+
+ describe '#next_cursor' do
+ subject { tags_finder.next_cursor }
+
+ it 'is always nil before #execute call' do
+ is_expected.to be_nil
+ end
+
+ context 'after #execute' do
+ context 'with gitaly pagination' do
+ before do
+ tags_finder.execute(gitaly_pagination: true)
+ end
+
+ context 'without pagination params' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with pagination params' do
+ let(:params) { { per_page: 5 } }
+
+ it { is_expected.to be_present }
+
+ context 'when all objects can be returned on the same page' do
+ let(:params) { { per_page: 100 } }
+
+ it { is_expected.to be_present }
+ end
+ end
+ end
+
+ context 'without gitaly pagination' do
+ before do
+ tags_finder.execute(gitaly_pagination: false)
+ end
+
+ context 'without pagination params' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with pagination params' do
+ let(:params) { { per_page: 5 } }
+
+ it { is_expected.to be_nil }
+
+ context 'when all objects can be returned on the same page' do
+ let(:params) { { per_page: 100 } }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/finders/timelogs/timelogs_finder_spec.rb b/spec/finders/timelogs/timelogs_finder_spec.rb
new file mode 100644
index 00000000000..35691a46e23
--- /dev/null
+++ b/spec/finders/timelogs/timelogs_finder_spec.rb
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Timelogs::TimelogsFinder, feature_category: :team_planning do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group_a) { create(:group) }
+ let_it_be(:group_b) { create(:group) }
+ let_it_be(:project_a) { create(:project, :empty_repo, :public, group: group_a) }
+ let_it_be(:project_b) { create(:project, :empty_repo, :public, group: group_a) }
+ let_it_be(:project_c) { create(:project, :empty_repo, :public, group: group_b) }
+
+ let_it_be(:issue_a) { create(:issue, project: project_a) }
+ let_it_be(:issue_b) { create(:issue, project: project_b) }
+ let_it_be(:issue_c) { create(:issue, project: project_c) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project_a) }
+
+ let_it_be(:timelog1) do
+ create(:issue_timelog, issue: issue_a, user: current_user, spent_at: 2.days.ago.beginning_of_day, time_spent: 3000)
+ end
+
+ let_it_be(:timelog2) do
+ create(:issue_timelog, issue: issue_a, user: create(:user), spent_at: 2.days.ago.end_of_day, time_spent: 4000)
+ end
+
+ let_it_be(:timelog3) do
+ create(:merge_request_timelog,
+ merge_request: merge_request,
+ user: current_user,
+ spent_at: 10.days.ago,
+ time_spent: 2000)
+ end
+
+ let_it_be(:timelog4) do
+ create(:issue_timelog, issue: issue_b, user: current_user, spent_at: 1.hour.ago, time_spent: 500)
+ end
+
+ let_it_be(:timelog5) do
+ create(:issue_timelog, issue: issue_c, user: create(:user), spent_at: 7.days.ago.end_of_day, time_spent: 6000)
+ end
+
+ subject(:finder_results) { described_class.new(issuable, params).execute }
+
+ describe '#execute' do
+ let(:issuable) { nil }
+ let(:params) { {} }
+
+ context 'when params is empty' do
+ it 'returns all timelogs' do
+ expect(finder_results).to contain_exactly(timelog1, timelog2, timelog3, timelog4, timelog5)
+ end
+ end
+
+ context 'when an issuable is provided' do
+ let(:issuable) { issue_a }
+
+ it 'returns the issuable timelogs' do
+ expect(finder_results).to contain_exactly(timelog1, timelog2)
+ end
+ end
+
+ context 'when a username is provided' do
+ let(:params) { { username: current_user.username } }
+
+ it 'returns all timelogs created by the user' do
+ expect(finder_results).to contain_exactly(timelog1, timelog3, timelog4)
+ end
+ end
+
+ context 'when a group is provided' do
+ let(:params) { { group_id: group_a.id } }
+
+ it 'returns all timelogs of issuables inside that group' do
+ expect(finder_results).to contain_exactly(timelog1, timelog2, timelog3, timelog4)
+ end
+
+ context 'when the group does not exist' do
+ let(:params) { { group_id: non_existing_record_id } }
+
+ it 'raises an exception' do
+ expect { finder_results }.to raise_error(
+ ActiveRecord::RecordNotFound, /Group with id '\d+' could not be found/)
+ end
+ end
+ end
+
+ context 'when a project is provided' do
+ let(:params) { { project_id: project_a.id } }
+
+ it 'returns all timelogs of issuables inside that project' do
+ expect(finder_results).to contain_exactly(timelog1, timelog2, timelog3)
+ end
+
+ context 'when the project does not exist' do
+ let(:params) { { project_id: non_existing_record_id } }
+
+ it 'returns an empty list and does not raise an exception' do
+ expect(finder_results).to be_empty
+ expect { finder_results }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when a start datetime is provided' do
+ let(:params) { { start_time: 3.days.ago.beginning_of_day } }
+
+ it 'returns all timelogs created after that date' do
+ expect(finder_results).to contain_exactly(timelog1, timelog2, timelog4)
+ end
+ end
+
+ context 'when an end datetime is provided' do
+ let(:params) { { end_time: 3.days.ago.beginning_of_day } }
+
+ it 'returns all timelogs created before that date' do
+ expect(finder_results).to contain_exactly(timelog3, timelog5)
+ end
+ end
+
+ context 'when both a start and an end datetime are provided' do
+ let(:params) { { start_time: 2.days.ago.beginning_of_day, end_time: 1.day.ago.beginning_of_day } }
+
+ it 'returns all timelogs created between those dates' do
+ expect(finder_results).to contain_exactly(timelog1, timelog2)
+ end
+
+ context 'when start time is after end time' do
+ let(:params) { { start_time: 1.day.ago.beginning_of_day, end_time: 2.days.ago.beginning_of_day } }
+
+ it 'raises an exception' do
+ expect { finder_results }.to raise_error(ArgumentError, /Start argument must be before End argument/)
+ end
+ end
+ end
+
+ context 'when sort is provided' do
+ let(:params) { { sort: sort_value } }
+
+ context 'when sorting by spent_at desc' do
+ let(:sort_value) { :spent_at_desc }
+
+ it 'returns timelogs sorted accordingly' do
+ expect(finder_results).to eq([timelog4, timelog2, timelog1, timelog5, timelog3])
+ end
+ end
+
+ context 'when sorting by spent_at asc' do
+ let(:sort_value) { :spent_at_asc }
+
+ it 'returns timelogs sorted accordingly' do
+ expect(finder_results).to eq([timelog3, timelog5, timelog1, timelog2, timelog4])
+ end
+ end
+
+ context 'when sorting by time_spent desc' do
+ let(:sort_value) { :time_spent_desc }
+
+ it 'returns timelogs sorted accordingly' do
+ expect(finder_results).to eq([timelog5, timelog2, timelog1, timelog3, timelog4])
+ end
+ end
+
+ context 'when sorting by time_spent asc' do
+ let(:sort_value) { :time_spent_asc }
+
+ it 'returns timelogs sorted accordingly' do
+ expect(finder_results).to eq([timelog4, timelog3, timelog1, timelog2, timelog5])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/achievements.yml b/spec/fixtures/achievements.yml
deleted file mode 100644
index a24cf42413b..00000000000
--- a/spec/fixtures/achievements.yml
+++ /dev/null
@@ -1,10 +0,0 @@
----
-table_name: achievements
-classes:
-- Achievements::Achievement
-feature_categories:
-- feature_category_example
-description: Achievements which can be created by namespaces to award them to users
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/105871
-milestone: '15.7'
-gitlab_schema: gitlab_main
diff --git a/spec/fixtures/api/schemas/entities/diff_viewer.json b/spec/fixtures/api/schemas/entities/diff_viewer.json
index b16f8d8b1a2..6fa1604e505 100644
--- a/spec/fixtures/api/schemas/entities/diff_viewer.json
+++ b/spec/fixtures/api/schemas/entities/diff_viewer.json
@@ -26,6 +26,12 @@
"boolean"
]
},
+ "generated": {
+ "type": [
+ "boolean",
+ "null"
+ ]
+ },
"whitespace_only": {
"type": [
"boolean",
diff --git a/spec/fixtures/api/schemas/graphql/container_repository.json b/spec/fixtures/api/schemas/graphql/container_repository.json
index 2bb598a14cb..95cc5e28b34 100644
--- a/spec/fixtures/api/schemas/graphql/container_repository.json
+++ b/spec/fixtures/api/schemas/graphql/container_repository.json
@@ -1,6 +1,19 @@
{
"type": "object",
- "required": ["id", "name", "path", "location", "createdAt", "updatedAt", "tagsCount", "canDelete", "expirationPolicyCleanupStatus", "project", "lastCleanupDeletedTagsCount"],
+ "required": [
+ "id",
+ "name",
+ "path",
+ "location",
+ "createdAt",
+ "updatedAt",
+ "tagsCount",
+ "canDelete",
+ "expirationPolicyCleanupStatus",
+ "project",
+ "lastCleanupDeletedTagsCount",
+ "userPermissions"
+ ],
"properties": {
"id": {
"type": "string"
@@ -21,10 +34,16 @@
"type": "string"
},
"expirationPolicyStartedAt": {
- "type": ["string", "null"]
+ "type": [
+ "string",
+ "null"
+ ]
},
"status": {
- "type": ["string", "null"]
+ "type": [
+ "string",
+ "null"
+ ]
},
"tagsCount": {
"type": "integer"
@@ -34,13 +53,30 @@
},
"expirationPolicyCleanupStatus": {
"type": "string",
- "enum": ["UNSCHEDULED", "SCHEDULED", "UNFINISHED", "ONGOING"]
+ "enum": [
+ "UNSCHEDULED",
+ "SCHEDULED",
+ "UNFINISHED",
+ "ONGOING"
+ ]
},
"project": {
"type": "object"
},
"lastCleanupDeletedTagsCount": {
- "type": ["string", "null"]
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "userPermissions": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "destroyContainerRepository": {
+ "type": "boolean"
+ }
+ }
}
}
}
diff --git a/spec/fixtures/api/schemas/graphql/container_repository_details.json b/spec/fixtures/api/schemas/graphql/container_repository_details.json
index 3156b6d58d5..dccdd85274d 100644
--- a/spec/fixtures/api/schemas/graphql/container_repository_details.json
+++ b/spec/fixtures/api/schemas/graphql/container_repository_details.json
@@ -1,17 +1,31 @@
{
"type": "object",
- "required": ["tags"],
- "allOf": [{ "$ref": "./container_repository.json" }],
+ "required": [
+ "tags"
+ ],
+ "allOf": [
+ {
+ "$ref": "./container_repository.json"
+ }
+ ],
"properties": {
"tags": {
"type": "object",
- "required": ["nodes"],
+ "required": [
+ "nodes"
+ ],
"properties": {
"nodes": {
"type": "array",
"items": {
"type": "object",
- "required": ["name", "path", "location", "canDelete"],
+ "required": [
+ "name",
+ "path",
+ "location",
+ "canDelete",
+ "userPermissions"
+ ],
"properties": {
"name": {
"type": "string"
@@ -39,6 +53,15 @@
},
"canDelete": {
"type": "boolean"
+ },
+ "userPermissions": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "destroyContainerRepositoryTag": {
+ "type": "boolean"
+ }
+ }
}
}
}
diff --git a/spec/fixtures/api/schemas/group_link/group_group_link.json b/spec/fixtures/api/schemas/group_link/group_group_link.json
index 689679cbc0f..d67fdaaf762 100644
--- a/spec/fixtures/api/schemas/group_link/group_group_link.json
+++ b/spec/fixtures/api/schemas/group_link/group_group_link.json
@@ -1,21 +1,45 @@
{
"type": "object",
"allOf": [
- { "$ref": "group_link.json" },
+ {
+ "$ref": "group_link.json"
+ },
{
"required": [
- "source"
+ "source",
+ "valid_roles",
+ "can_update",
+ "can_remove"
],
"properties": {
"source": {
"type": "object",
- "required": ["id", "full_name", "web_url"],
+ "required": [
+ "id",
+ "full_name",
+ "web_url"
+ ],
"properties": {
- "id": { "type": "integer" },
- "full_name": { "type": "string" },
- "web_url": { "type": "string" }
+ "id": {
+ "type": "integer"
+ },
+ "full_name": {
+ "type": "string"
+ },
+ "web_url": {
+ "type": "string"
+ }
},
"additionalProperties": false
+ },
+ "valid_roles": {
+ "type": "object"
+ },
+ "can_update": {
+ "type": "boolean"
+ },
+ "can_remove": {
+ "type": "boolean"
}
}
}
diff --git a/spec/fixtures/api/schemas/group_link/group_link.json b/spec/fixtures/api/schemas/group_link/group_link.json
index 4db38952ecc..421864b2bd7 100644
--- a/spec/fixtures/api/schemas/group_link/group_link.json
+++ b/spec/fixtures/api/schemas/group_link/group_link.json
@@ -5,9 +5,6 @@
"created_at",
"expires_at",
"access_level",
- "valid_roles",
- "can_update",
- "can_remove",
"is_direct_member"
],
"properties": {
@@ -41,9 +38,6 @@
},
"additionalProperties": false
},
- "valid_roles": {
- "type": "object"
- },
"is_shared_with_group_private": {
"type": "boolean"
},
@@ -82,12 +76,6 @@
},
"additionalProperties": false
},
- "can_update": {
- "type": "boolean"
- },
- "can_remove": {
- "type": "boolean"
- },
"is_direct_member": {
"type": "boolean"
}
diff --git a/spec/fixtures/api/schemas/group_link/project_group_link.json b/spec/fixtures/api/schemas/group_link/project_group_link.json
index 615c808e5aa..854ae3c8693 100644
--- a/spec/fixtures/api/schemas/group_link/project_group_link.json
+++ b/spec/fixtures/api/schemas/group_link/project_group_link.json
@@ -1,20 +1,41 @@
{
"type": "object",
"allOf": [
- { "$ref": "group_link.json" },
+ {
+ "$ref": "group_link.json"
+ },
{
"required": [
- "source"
+ "source",
+ "valid_roles",
+ "can_update",
+ "can_remove"
],
"properties": {
"source": {
"type": "object",
- "required": ["id", "full_name"],
+ "required": [
+ "id",
+ "full_name"
+ ],
"properties": {
- "id": { "type": "integer" },
- "full_name": { "type": "string" }
+ "id": {
+ "type": "integer"
+ },
+ "full_name": {
+ "type": "string"
+ }
},
"additionalProperties": false
+ },
+ "valid_roles": {
+ "type": "object"
+ },
+ "can_update": {
+ "type": "boolean"
+ },
+ "can_remove": {
+ "type": "boolean"
}
}
}
diff --git a/spec/fixtures/api/schemas/ml/get_model.json b/spec/fixtures/api/schemas/ml/get_model.json
index 6b7ced6845b..2e97fab5134 100644
--- a/spec/fixtures/api/schemas/ml/get_model.json
+++ b/spec/fixtures/api/schemas/ml/get_model.json
@@ -19,13 +19,13 @@
"type": "string"
},
"user_id": {
- "type": "integer"
+ "type": "string"
},
"creation_timestamp": {
- "type": "string"
+ "type": "integer"
},
"last_updated_timestamp": {
- "type": "string"
+ "type": "integer"
},
"tags": {
"type": "array",
diff --git a/spec/fixtures/api/schemas/ml/get_model_version.json b/spec/fixtures/api/schemas/ml/get_model_version.json
new file mode 100644
index 00000000000..214c993ed73
--- /dev/null
+++ b/spec/fixtures/api/schemas/ml/get_model_version.json
@@ -0,0 +1,75 @@
+{
+ "type": "object",
+ "required": [
+ "model_version"
+ ],
+ "properties": {
+ "model_version": {
+ "type": "object",
+ "required": [
+ "name",
+ "version",
+ "creation_timestamp",
+ "last_updated_timestamp",
+ "user_id",
+ "current_stage",
+ "description",
+ "source",
+ "run_id",
+ "status",
+ "status_message",
+ "metadata",
+ "run_link",
+ "aliases"
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "version": {
+ "type": "string"
+ },
+ "creation_timestamp": {
+ "type": "integer"
+ },
+ "last_updated_timestamp": {
+ "type": "integer"
+ },
+ "user_id": {
+ "type": "null"
+ },
+ "current_stage": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "source": {
+ "type": "string"
+ },
+ "run_id": {
+ "type": "string"
+ },
+ "status": {
+ "type": "string"
+ },
+ "status_message": {
+ "type": "string"
+ },
+ "metadata": {
+ "type": "array",
+ "items": {
+ }
+ },
+ "run_link": {
+ "type": "string"
+ },
+ "aliases": {
+ "type": "array",
+ "items": {
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/ml/list_models.json b/spec/fixtures/api/schemas/ml/list_models.json
new file mode 100644
index 00000000000..eaeaa268dde
--- /dev/null
+++ b/spec/fixtures/api/schemas/ml/list_models.json
@@ -0,0 +1,53 @@
+{
+ "type": "object",
+ "required": [
+ "registered_models",
+ "next_page_token"
+ ],
+ "properties": {
+ "registered_models": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "user_id": {
+ "type": "string"
+ },
+ "creation_timestamp": {
+ "type": "integer"
+ },
+ "last_updated_timestamp": {
+ "type": "integer"
+ },
+ "tags": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+ "key",
+ "value"
+ ],
+ "properties": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/project_hook.json b/spec/fixtures/api/schemas/public_api/v4/project_hook.json
index b89f5af8078..c42a4cad712 100644
--- a/spec/fixtures/api/schemas/public_api/v4/project_hook.json
+++ b/spec/fixtures/api/schemas/public_api/v4/project_hook.json
@@ -22,9 +22,11 @@
"releases_events",
"alert_status",
"disabled_until",
- "url_variables",
"emoji_events"
],
+ "optional": [
+ "url_variables"
+ ],
"properties": {
"id": {
"type": "integer"
diff --git a/spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json b/spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json
index cfaa1c28bb7..21f466bc418 100644
--- a/spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json
+++ b/spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json
@@ -8,13 +8,32 @@
"destination_storage_name",
"project"
],
- "properties" : {
- "id": { "type": "integer" },
- "created_at": { "type": "string", "format": "date-time" },
- "state": { "type": "string" },
- "source_storage_name": { "type": "string" },
- "destination_storage_name": { "type": "string" },
- "project": { "type": "object" }
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "created_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "state": {
+ "type": "string"
+ },
+ "source_storage_name": {
+ "type": "string"
+ },
+ "destination_storage_name": {
+ "type": "string"
+ },
+ "error_message": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "project": {
+ "type": "object"
+ }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json b/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json
index b0633a6ff2d..65494e27f5c 100644
--- a/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json
+++ b/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json
@@ -8,13 +8,32 @@
"destination_storage_name",
"snippet"
],
- "properties" : {
- "id": { "type": "integer" },
- "created_at": { "type": "string", "format": "date-time" },
- "state": { "type": "string" },
- "source_storage_name": { "type": "string" },
- "destination_storage_name": { "type": "string" },
- "snippet": { "type": "object" }
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "created_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "state": {
+ "type": "string"
+ },
+ "source_storage_name": {
+ "type": "string"
+ },
+ "destination_storage_name": {
+ "type": "string"
+ },
+ "error_message": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "snippet": {
+ "type": "object"
+ }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/1_create_some_table_on_main_db.rb b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/1_create_some_table_on_main_db.rb
deleted file mode 100644
index 98d71d9507b..00000000000
--- a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/1_create_some_table_on_main_db.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
-class CreateSomeTableOnMainDb < ClickHouse::Migration
- def up
- execute <<~SQL
- CREATE TABLE some (
- id UInt64,
- date Date
- ) ENGINE = MergeTree
- PRIMARY KEY(id)
- SQL
- end
-end
-# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/2_create_some_table_on_another_db.rb b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/2_create_some_table_on_another_db.rb
deleted file mode 100644
index b8cd86a67f5..00000000000
--- a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/2_create_some_table_on_another_db.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
-class CreateSomeTableOnAnotherDb < ClickHouse::Migration
- SCHEMA = :another_db
-
- def up
- execute <<~SQL
- CREATE TABLE some_on_another_db (
- id UInt64,
- date Date
- ) ENGINE = Memory
- SQL
- end
-end
-# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/3_change_some_table_on_main_db.rb b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/3_change_some_table_on_main_db.rb
deleted file mode 100644
index 9112ab79fc5..00000000000
--- a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/3_change_some_table_on_main_db.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
-class ChangeSomeTableOnMainDb < ClickHouse::Migration
- def up
- execute <<~SQL
- ALTER TABLE some RENAME COLUMN date to timestamp
- SQL
- end
-end
-# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/plain_table_creation_on_invalid_database/1_create_some_table.rb b/spec/fixtures/click_house/migrations/table_creation_with_down_method/2_create_another_table.rb
index ee900ef24c5..4a6f841a727 100644
--- a/spec/fixtures/click_house/migrations/plain_table_creation_on_invalid_database/1_create_some_table.rb
+++ b/spec/fixtures/click_house/migrations/table_creation_with_down_method/2_create_another_table.rb
@@ -1,16 +1,20 @@
# frozen_string_literal: true
# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
-class CreateSomeTable < ClickHouse::Migration
- SCHEMA = :unknown_database
-
+class CreateAnotherTable < ClickHouse::Migration
def up
execute <<~SQL
- CREATE TABLE some (
+ CREATE TABLE another (
id UInt64,
date Date
) ENGINE = Memory
SQL
end
+
+ def down
+ execute <<~SQL
+ DROP TABLE another
+ SQL
+ end
end
# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/csv_complex.csv b/spec/fixtures/csv_complex.csv
index 60d8aa5d6f7..b42a5e99d88 100644
--- a/spec/fixtures/csv_complex.csv
+++ b/spec/fixtures/csv_complex.csv
@@ -1,6 +1,6 @@
-title,description,due date
+title,description,due date,milestone
Issue in 中文,Test description,
"Hello","World",
"Title with quote""","Description
/assign @csv_assignee
-/estimate 1h",2022-06-28
+/estimate 1h",2022-06-28,15.10
diff --git a/spec/fixtures/csv_gitlab_export.csv b/spec/fixtures/csv_gitlab_export.csv
index 65422509eef..a39260f9dff 100644
--- a/spec/fixtures/csv_gitlab_export.csv
+++ b/spec/fixtures/csv_gitlab_export.csv
@@ -2,4 +2,4 @@ Issue ID,URL,Title,State,Description,Author,Author Username,Assignee,Assignee Us
1,http://localhost:3000/jashkenas/underscore/issues/1,Title,Open,,Elva Jerde,jamel,Tierra Effertz,aurora_hahn,No,No,,2020-01-17 10:36:26,2020-02-19 10:36:26,,v1.0,,"Brene,Cutlass,Escort,GVM",0,0,,
3,http://localhost:3000/jashkenas/underscore/issues/3,Nihil impedit neque quos totam ut aut enim cupiditate doloribus molestiae.,Open,Omnis aliquid sint laudantium quam.,Marybeth Goodwin,rocio.blanda,Annemarie Von,reynalda_howe,No,No,,2020-01-23 10:36:26,2020-02-19 10:36:27,,v1.0,,"Brene,Cutlass,Escort,GVM",0,0,,
34,http://localhost:3000/jashkenas/underscore/issues/34,Dismiss Cipher with no integrity,Open,,Marybeth Goodwin,rocio.blanda,"","",No,No,,2020-02-19 10:38:49,2020-02-19 10:38:49,,,,,0,0,,
-35,http://localhost:3000/jashkenas/underscore/issues/35,Test Title,Open,Test Description,Marybeth Goodwin,rocio.blanda,"","",No,No,,2020-02-19 10:38:49,2020-02-19 10:38:49,,,,,0,0,,
+35,http://localhost:3000/jashkenas/underscore/issues/35,Test Title,Open,Test Description,Marybeth Goodwin,rocio.blanda,"","",No,No,,2020-02-19 10:38:49,2020-02-19 10:38:49,,v1.0,,,0,0,,
diff --git a/spec/fixtures/importers/bitbucket_server/activities.json b/spec/fixtures/importers/bitbucket_server/activities.json
index ddcb94b8f58..e8873702a61 100644
--- a/spec/fixtures/importers/bitbucket_server/activities.json
+++ b/spec/fixtures/importers/bitbucket_server/activities.json
@@ -1,1146 +1,1208 @@
{
- "isLastPage": true,
- "limit": 25,
- "size": 8,
- "start": 0,
- "values": [
- {
- "action": "COMMENTED",
- "comment": {
+ "isLastPage": true,
+ "limit": 25,
+ "size": 8,
+ "start": 0,
+ "values": [
+ {
+ "action": "COMMENTED",
+ "comment": {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+ {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+ {
"author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
},
"comments": [
- {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [
- {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [],
- "createdDate": 1530164016725,
- "id": 11,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [
- {
- "anchor": {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "createdDate": 1530164016725,
- "id": 11,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "text": "Ok",
- "type": "COMMENT",
- "updatedDate": 1530164016725,
- "version": 0
- },
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "createdDate": 1530164026000,
- "id": 1,
- "permittedOperations": {
- "deletable": true,
- "editable": true,
- "transitionable": true
- },
- "state": "OPEN",
- "text": "here's a task"
- }
- ],
- "text": "Ok",
- "updatedDate": 1530164016725,
- "version": 0
- },
- {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [],
- "createdDate": 1530165543990,
- "id": 12,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "hi",
- "updatedDate": 1530165543990,
- "version": 0
- }
- ],
- "createdDate": 1530164013718,
- "id": 10,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "Hello world",
- "updatedDate": 1530164013718,
- "version": 0
- },
- {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [],
- "createdDate": 1530165549932,
- "id": 13,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "hello",
- "updatedDate": 1530165549932,
- "version": 0
- }
+
],
- "createdDate": 1530161499144,
- "id": 9,
+ "createdDate": 1530164016725,
+ "id": 11,
"permittedOperations": {
- "deletable": true,
- "editable": true
+ "deletable": true,
+ "editable": true
},
"properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "is this a new line?",
- "updatedDate": 1530161499144,
- "version": 0
- },
- "commentAction": "ADDED",
- "commentAnchor": {
- "diffType": "EFFECTIVE",
- "fileType": "TO",
- "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
- "line": 1,
- "lineType": "ADDED",
- "orphaned": false,
- "path": "CHANGELOG.md",
- "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
- },
- "createdDate": 1530161499144,
- "diff": {
- "destination": {
- "components": [
- "CHANGELOG.md"
- ],
- "extension": "md",
- "name": "CHANGELOG.md",
- "parent": "",
- "toString": "CHANGELOG.md"
+ "repositoryId": 1
},
- "hunks": [
- {
- "destinationLine": 1,
- "destinationSpan": 11,
- "segments": [
+ "tasks": [
+ {
+ "anchor": {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
{
- "lines": [
- {
- "commentIds": [
- 9
- ],
- "destination": 1,
- "line": "# Edit 1",
- "source": 1,
- "truncated": false
- },
- {
- "destination": 2,
- "line": "",
- "source": 1,
- "truncated": false
- }
- ],
- "truncated": false,
- "type": "ADDED"
- },
- {
- "lines": [
- {
- "destination": 3,
- "line": "# ChangeLog",
- "source": 1,
- "truncated": false
- },
- {
- "destination": 4,
- "line": "",
- "source": 2,
- "truncated": false
- },
- {
- "destination": 5,
- "line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
- "source": 3,
- "truncated": false
- },
- {
- "destination": 6,
- "line": "we use is semver, although we will often release new lexers in minor versions, as a",
- "source": 4,
- "truncated": false
- },
- {
- "destination": 7,
- "line": "practical matter.",
- "source": 5,
- "truncated": false
- },
- {
- "destination": 8,
- "line": "",
- "source": 6,
- "truncated": false
- },
- {
- "destination": 9,
- "line": "## version TBD: (unreleased)",
- "source": 7,
- "truncated": false
- },
- {
- "destination": 10,
- "line": "",
- "source": 8,
- "truncated": false
- },
- {
- "destination": 11,
- "line": "* General",
- "source": 9,
- "truncated": false
- }
- ],
- "truncated": false,
- "type": "CONTEXT"
+ "href": "http://localhost:7990/users/root"
}
- ],
- "sourceLine": 1,
- "sourceSpan": 9,
- "truncated": false
- }
- ],
- "properties": {
- "current": true,
- "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
- "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
- },
- "source": null,
- "truncated": false
- },
- "id": 19,
- "user": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- }
- },
- {
- "action": "COMMENTED",
- "comment": {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "createdDate": 1530164016725,
+ "id": 11,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "text": "Ok",
+ "type": "COMMENT",
+ "updatedDate": 1530164016725,
+ "version": 0
+ },
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
"self": [
- {
- "href": "http://localhost:7990/users/root"
- }
+ {
+ "href": "http://localhost:7990/users/root"
+ }
]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
},
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "createdDate": 1530164026000,
+ "id": 1,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true,
+ "transitionable": true
+ },
+ "state": "OPEN",
+ "text": "here's a task"
+ }
+ ],
+ "text": "Ok",
+ "updatedDate": 1530164016725,
+ "version": 0
+ },
+ {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
},
- "comments": [],
- "createdDate": 1530053198463,
- "id": 7,
+ "comments": [
+
+ ],
+ "createdDate": 1530165543990,
+ "id": 12,
"permittedOperations": {
- "deletable": true,
- "editable": true
+ "deletable": true,
+ "editable": true
},
"properties": {
- "repositoryId": 1
+ "repositoryId": 1
},
- "tasks": [],
- "text": "What about this line?",
- "updatedDate": 1530053198463,
+ "tasks": [
+
+ ],
+ "text": "hi",
+ "updatedDate": 1530165543990,
"version": 0
+ }
+ ],
+ "createdDate": 1530164013718,
+ "id": 10,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "tasks": [
+
+ ],
+ "text": "Hello world",
+ "updatedDate": 1530164013718,
+ "version": 0
+ },
+ {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+
+ ],
+ "createdDate": 1530165549932,
+ "id": 13,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
},
- "commentAction": "ADDED",
- "commentAnchor": {
- "diffType": "EFFECTIVE",
- "fileType": "FROM",
- "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
- "line": 9,
- "lineType": "CONTEXT",
- "orphaned": false,
- "path": "CHANGELOG.md",
- "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ "properties": {
+ "repositoryId": 1
},
- "createdDate": 1530053198463,
- "diff": {
- "destination": {
- "components": [
- "CHANGELOG.md"
+ "tasks": [
+
+ ],
+ "text": "hello",
+ "updatedDate": 1530165549932,
+ "version": 0
+ }
+ ],
+ "createdDate": 1530161499144,
+ "id": 9,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "tasks": [
+
+ ],
+ "text": "is this a new line?",
+ "updatedDate": 1530161499144,
+ "version": 0
+ },
+ "commentAction": "ADDED",
+ "commentAnchor": {
+ "diffType": "EFFECTIVE",
+ "fileType": "TO",
+ "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
+ "line": 1,
+ "lineType": "ADDED",
+ "orphaned": false,
+ "path": "CHANGELOG.md",
+ "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ },
+ "createdDate": 1530161499144,
+ "diff": {
+ "destination": {
+ "components": [
+ "CHANGELOG.md"
+ ],
+ "extension": "md",
+ "name": "CHANGELOG.md",
+ "parent": "",
+ "toString": "CHANGELOG.md"
+ },
+ "hunks": [
+ {
+ "destinationLine": 1,
+ "destinationSpan": 11,
+ "segments": [
+ {
+ "lines": [
+ {
+ "commentIds": [
+ 9
],
- "extension": "md",
- "name": "CHANGELOG.md",
- "parent": "",
- "toString": "CHANGELOG.md"
- },
- "hunks": [
- {
- "destinationLine": 1,
- "destinationSpan": 12,
- "segments": [
- {
- "lines": [
- {
- "destination": 1,
- "line": "# Edit 1",
- "source": 1,
- "truncated": false
- },
- {
- "destination": 2,
- "line": "",
- "source": 1,
- "truncated": false
- }
- ],
- "truncated": false,
- "type": "ADDED"
- },
- {
- "lines": [
- {
- "destination": 3,
- "line": "# ChangeLog",
- "source": 1,
- "truncated": false
- },
- {
- "destination": 4,
- "line": "",
- "source": 2,
- "truncated": false
- },
- {
- "destination": 5,
- "line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
- "source": 3,
- "truncated": false
- },
- {
- "destination": 6,
- "line": "we use is semver, although we will often release new lexers in minor versions, as a",
- "source": 4,
- "truncated": false
- },
- {
- "destination": 7,
- "line": "practical matter.",
- "source": 5,
- "truncated": false
- },
- {
- "destination": 8,
- "line": "",
- "source": 6,
- "truncated": false
- },
- {
- "destination": 9,
- "line": "## version TBD: (unreleased)",
- "source": 7,
- "truncated": false
- },
- {
- "destination": 10,
- "line": "",
- "source": 8,
- "truncated": false
- },
- {
- "commentIds": [
- 7
- ],
- "destination": 11,
- "line": "* General",
- "source": 9,
- "truncated": false
- },
- {
- "destination": 12,
- "line": " * Load pastie theme ([#809](https://github.com/jneen/rouge/pull/809) by rramsden)",
- "source": 10,
- "truncated": false
- }
- ],
- "truncated": false,
- "type": "CONTEXT"
- }
- ],
- "sourceLine": 1,
- "sourceSpan": 10,
- "truncated": false
- }
+ "destination": 1,
+ "line": "# Edit 1",
+ "source": 1,
+ "truncated": false
+ },
+ {
+ "destination": 2,
+ "line": "",
+ "source": 1,
+ "truncated": false
+ }
],
- "properties": {
- "current": true,
- "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
- "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
- },
- "source": null,
- "truncated": false
- },
- "id": 14,
- "user": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "truncated": false,
+ "type": "ADDED"
+ },
+ {
+ "lines": [
+ {
+ "destination": 3,
+ "line": "# ChangeLog",
+ "source": 1,
+ "truncated": false
+ },
+ {
+ "destination": 4,
+ "line": "",
+ "source": 2,
+ "truncated": false
+ },
+ {
+ "destination": 5,
+ "line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
+ "source": 3,
+ "truncated": false
+ },
+ {
+ "destination": 6,
+ "line": "we use is semver, although we will often release new lexers in minor versions, as a",
+ "source": 4,
+ "truncated": false
+ },
+ {
+ "destination": 7,
+ "line": "practical matter.",
+ "source": 5,
+ "truncated": false
+ },
+ {
+ "destination": 8,
+ "line": "",
+ "source": 6,
+ "truncated": false
+ },
+ {
+ "destination": 9,
+ "line": "## version TBD: (unreleased)",
+ "source": 7,
+ "truncated": false
+ },
+ {
+ "destination": 10,
+ "line": "",
+ "source": 8,
+ "truncated": false
+ },
+ {
+ "destination": 11,
+ "line": "* General",
+ "source": 9,
+ "truncated": false
+ }
+ ],
+ "truncated": false,
+ "type": "CONTEXT"
+ }
+ ],
+ "sourceLine": 1,
+ "sourceSpan": 9,
+ "truncated": false
+ }
+ ],
+ "properties": {
+ "current": true,
+ "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
+ "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ },
+ "source": null,
+ "truncated": false
+ },
+ "id": 19,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ },
+ {
+ "action": "COMMENTED",
+ "comment": {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+
+ ],
+ "createdDate": 1530053198463,
+ "id": 7,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "tasks": [
+
+ ],
+ "text": "What about this line?",
+ "updatedDate": 1530053198463,
+ "version": 0
+ },
+ "commentAction": "ADDED",
+ "commentAnchor": {
+ "diffType": "EFFECTIVE",
+ "fileType": "FROM",
+ "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
+ "line": 9,
+ "lineType": "CONTEXT",
+ "orphaned": false,
+ "path": "CHANGELOG.md",
+ "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ },
+ "createdDate": 1530053198463,
+ "diff": {
+ "destination": {
+ "components": [
+ "CHANGELOG.md"
+ ],
+ "extension": "md",
+ "name": "CHANGELOG.md",
+ "parent": "",
+ "toString": "CHANGELOG.md"
+ },
+ "hunks": [
+ {
+ "destinationLine": 1,
+ "destinationSpan": 12,
+ "segments": [
+ {
+ "lines": [
+ {
+ "destination": 1,
+ "line": "# Edit 1",
+ "source": 1,
+ "truncated": false
+ },
+ {
+ "destination": 2,
+ "line": "",
+ "source": 1,
+ "truncated": false
+ }
+ ],
+ "truncated": false,
+ "type": "ADDED"
+ },
+ {
+ "lines": [
+ {
+ "destination": 3,
+ "line": "# ChangeLog",
+ "source": 1,
+ "truncated": false
+ },
+ {
+ "destination": 4,
+ "line": "",
+ "source": 2,
+ "truncated": false
+ },
+ {
+ "destination": 5,
+ "line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
+ "source": 3,
+ "truncated": false
+ },
+ {
+ "destination": 6,
+ "line": "we use is semver, although we will often release new lexers in minor versions, as a",
+ "source": 4,
+ "truncated": false
+ },
+ {
+ "destination": 7,
+ "line": "practical matter.",
+ "source": 5,
+ "truncated": false
+ },
+ {
+ "destination": 8,
+ "line": "",
+ "source": 6,
+ "truncated": false
+ },
+ {
+ "destination": 9,
+ "line": "## version TBD: (unreleased)",
+ "source": 7,
+ "truncated": false
+ },
+ {
+ "destination": 10,
+ "line": "",
+ "source": 8,
+ "truncated": false
+ },
+ {
+ "commentIds": [
+ 7
+ ],
+ "destination": 11,
+ "line": "* General",
+ "source": 9,
+ "truncated": false
+ },
+ {
+ "destination": 12,
+ "line": " * Load pastie theme ([#809](https://github.com/jneen/rouge/pull/809) by rramsden)",
+ "source": 10,
+ "truncated": false
+ }
+ ],
+ "truncated": false,
+ "type": "CONTEXT"
+ }
+ ],
+ "sourceLine": 1,
+ "sourceSpan": 10,
+ "truncated": false
+ }
+ ],
+ "properties": {
+ "current": true,
+ "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
+ "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ },
+ "source": null,
+ "truncated": false
+ },
+ "id": 14,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
}
+ ]
},
- {
- "action": "COMMENTED",
- "comment": {
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ },
+ {
+ "action": "COMMENTED",
+ "comment": {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+ {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+ {
"author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
},
"comments": [
- {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [
- {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [],
- "createdDate": 1530143330513,
- "id": 8,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "How about this?",
- "updatedDate": 1530143330513,
- "version": 0
- }
- ],
- "createdDate": 1530053193795,
- "id": 6,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "It does.",
- "updatedDate": 1530053193795,
- "version": 0
- }
+
],
- "createdDate": 1530053187904,
- "id": 5,
+ "createdDate": 1530143330513,
+ "id": 8,
"permittedOperations": {
- "deletable": true,
- "editable": true
+ "deletable": true,
+ "editable": true
},
"properties": {
- "repositoryId": 1
+ "repositoryId": 1
},
- "tasks": [],
- "text": "Does this line make sense?",
- "updatedDate": 1530053187904,
+ "tasks": [
+
+ ],
+ "text": "How about this?",
+ "updatedDate": 1530143330513,
"version": 0
+ }
+ ],
+ "createdDate": 1530053193795,
+ "id": 6,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
},
- "commentAction": "ADDED",
- "commentAnchor": {
- "diffType": "EFFECTIVE",
- "fileType": "FROM",
- "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
- "line": 3,
- "lineType": "CONTEXT",
- "orphaned": false,
- "path": "CHANGELOG.md",
- "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ "properties": {
+ "repositoryId": 1
},
- "createdDate": 1530053187904,
- "diff": {
- "destination": {
- "components": [
- "CHANGELOG.md"
+ "tasks": [
+
+ ],
+ "text": "It does.",
+ "updatedDate": 1530053193795,
+ "version": 0
+ }
+ ],
+ "createdDate": 1530053187904,
+ "id": 5,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "tasks": [
+
+ ],
+ "text": "Does this line make sense?",
+ "updatedDate": 1530053187904,
+ "version": 0
+ },
+ "commentAction": "ADDED",
+ "commentAnchor": {
+ "diffType": "EFFECTIVE",
+ "fileType": "FROM",
+ "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
+ "line": 3,
+ "lineType": "CONTEXT",
+ "orphaned": false,
+ "path": "CHANGELOG.md",
+ "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ },
+ "createdDate": 1530053187904,
+ "diff": {
+ "destination": {
+ "components": [
+ "CHANGELOG.md"
+ ],
+ "extension": "md",
+ "name": "CHANGELOG.md",
+ "parent": "",
+ "toString": "CHANGELOG.md"
+ },
+ "hunks": [
+ {
+ "destinationLine": 1,
+ "destinationSpan": 12,
+ "segments": [
+ {
+ "lines": [
+ {
+ "destination": 1,
+ "line": "# Edit 1",
+ "source": 1,
+ "truncated": false
+ },
+ {
+ "destination": 2,
+ "line": "",
+ "source": 1,
+ "truncated": false
+ }
+ ],
+ "truncated": false,
+ "type": "ADDED"
+ },
+ {
+ "lines": [
+ {
+ "destination": 3,
+ "line": "# ChangeLog",
+ "source": 1,
+ "truncated": false
+ },
+ {
+ "destination": 4,
+ "line": "",
+ "source": 2,
+ "truncated": false
+ },
+ {
+ "commentIds": [
+ 5
],
- "extension": "md",
- "name": "CHANGELOG.md",
- "parent": "",
- "toString": "CHANGELOG.md"
- },
- "hunks": [
- {
- "destinationLine": 1,
- "destinationSpan": 12,
- "segments": [
- {
- "lines": [
- {
- "destination": 1,
- "line": "# Edit 1",
- "source": 1,
- "truncated": false
- },
- {
- "destination": 2,
- "line": "",
- "source": 1,
- "truncated": false
- }
- ],
- "truncated": false,
- "type": "ADDED"
- },
- {
- "lines": [
- {
- "destination": 3,
- "line": "# ChangeLog",
- "source": 1,
- "truncated": false
- },
- {
- "destination": 4,
- "line": "",
- "source": 2,
- "truncated": false
- },
- {
- "commentIds": [
- 5
- ],
- "destination": 5,
- "line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
- "source": 3,
- "truncated": false
- },
- {
- "destination": 6,
- "line": "we use is semver, although we will often release new lexers in minor versions, as a",
- "source": 4,
- "truncated": false
- },
- {
- "destination": 7,
- "line": "practical matter.",
- "source": 5,
- "truncated": false
- },
- {
- "destination": 8,
- "line": "",
- "source": 6,
- "truncated": false
- },
- {
- "destination": 9,
- "line": "## version TBD: (unreleased)",
- "source": 7,
- "truncated": false
- },
- {
- "destination": 10,
- "line": "",
- "source": 8,
- "truncated": false
- },
- {
- "destination": 11,
- "line": "* General",
- "source": 9,
- "truncated": false
- },
- {
- "destination": 12,
- "line": " * Load pastie theme ([#809](https://github.com/jneen/rouge/pull/809) by rramsden)",
- "source": 10,
- "truncated": false
- }
- ],
- "truncated": false,
- "type": "CONTEXT"
- }
- ],
- "sourceLine": 1,
- "sourceSpan": 10,
- "truncated": false
- }
+ "destination": 5,
+ "line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
+ "source": 3,
+ "truncated": false
+ },
+ {
+ "destination": 6,
+ "line": "we use is semver, although we will often release new lexers in minor versions, as a",
+ "source": 4,
+ "truncated": false
+ },
+ {
+ "destination": 7,
+ "line": "practical matter.",
+ "source": 5,
+ "truncated": false
+ },
+ {
+ "destination": 8,
+ "line": "",
+ "source": 6,
+ "truncated": false
+ },
+ {
+ "destination": 9,
+ "line": "## version TBD: (unreleased)",
+ "source": 7,
+ "truncated": false
+ },
+ {
+ "destination": 10,
+ "line": "",
+ "source": 8,
+ "truncated": false
+ },
+ {
+ "destination": 11,
+ "line": "* General",
+ "source": 9,
+ "truncated": false
+ },
+ {
+ "destination": 12,
+ "line": " * Load pastie theme ([#809](https://github.com/jneen/rouge/pull/809) by rramsden)",
+ "source": 10,
+ "truncated": false
+ }
],
- "properties": {
- "current": true,
- "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
- "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
- },
- "source": null,
- "truncated": false
- },
- "id": 12,
- "user": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "truncated": false,
+ "type": "CONTEXT"
+ }
+ ],
+ "sourceLine": 1,
+ "sourceSpan": 10,
+ "truncated": false
+ }
+ ],
+ "properties": {
+ "current": true,
+ "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
+ "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ },
+ "source": null,
+ "truncated": false
+ },
+ "id": 12,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
}
+ ]
},
- {
- "action": "COMMENTED",
- "comment": {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [],
- "createdDate": 1529813304164,
- "id": 4,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "Hello world",
- "updatedDate": 1529813304164,
- "version": 0
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ },
+ {
+ "action": "COMMENTED",
+ "comment": {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+
+ ],
+ "createdDate": 1529813304164,
+ "id": 4,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "tasks": [
+
+ ],
+ "text": "Hello world",
+ "updatedDate": 1529813304164,
+ "version": 0
+ },
+ "commentAction": "ADDED",
+ "createdDate": 1529813304164,
+ "id": 11,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ },
+ {
+ "action": "MERGED",
+ "commit": {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "authorTimestamp": 1529727872000,
+ "committer": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "committerTimestamp": 1529727872000,
+ "displayId": "839fa9a2d43",
+ "id": "839fa9a2d434eb697815b8fcafaecc51accfdbbc",
+ "message": "Merge pull request #1 in TEST/rouge from root/CHANGELOGmd-1529725646923 to master\n\n* commit '66fbe6a097803f0acb7342b19563f710657ce5a2':\n CHANGELOG.md edited online with Bitbucket",
+ "parents": [
+ {
+ "author": {
+ "emailAddress": "dblessing@users.noreply.github.com",
+ "name": "Drew Blessing"
},
- "commentAction": "ADDED",
- "createdDate": 1529813304164,
- "id": 11,
- "user": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "authorTimestamp": 1529604583000,
+ "committer": {
+ "emailAddress": "noreply@github.com",
+ "name": "GitHub"
+ },
+ "committerTimestamp": 1529604583000,
+ "displayId": "c5f4288162e",
+ "id": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
+ "message": "Merge pull request #949 from jneen/dblessing-patch-1\n\nAdd 'obj-c', 'obj_c' as ObjectiveC aliases",
+ "parents": [
+ {
+ "displayId": "ea7675f741e",
+ "id": "ea7675f741ee28f3f177ff32a9bde192742ffc59"
+ },
+ {
+ "displayId": "386b95a977b",
+ "id": "386b95a977b331e267497aa5206861774656f0c5"
+ }
+ ]
+ },
+ {
+ "author": {
+ "emailAddress": "test.user@example.com",
+ "name": "root"
+ },
+ "authorTimestamp": 1529725651000,
+ "committer": {
+ "emailAddress": "test.user@example.com",
+ "name": "root"
+ },
+ "committerTimestamp": 1529725651000,
+ "displayId": "66fbe6a0978",
+ "id": "66fbe6a097803f0acb7342b19563f710657ce5a2",
+ "message": "CHANGELOG.md edited online with Bitbucket",
+ "parents": [
+ {
+ "displayId": "c5f4288162e",
+ "id": "c5f4288162e2e6218180779c7f6ac1735bb56eab"
+ }
+ ]
+ }
+ ]
+ },
+ "createdDate": 1529727872302,
+ "id": 7,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
}
+ ]
},
- {
- "action": "MERGED",
- "commit": {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "authorTimestamp": 1529727872000,
- "committer": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "committerTimestamp": 1529727872000,
- "displayId": "839fa9a2d43",
- "id": "839fa9a2d434eb697815b8fcafaecc51accfdbbc",
- "message": "Merge pull request #1 in TEST/rouge from root/CHANGELOGmd-1529725646923 to master\n\n* commit '66fbe6a097803f0acb7342b19563f710657ce5a2':\n CHANGELOG.md edited online with Bitbucket",
- "parents": [
- {
- "author": {
- "emailAddress": "dblessing@users.noreply.github.com",
- "name": "Drew Blessing"
- },
- "authorTimestamp": 1529604583000,
- "committer": {
- "emailAddress": "noreply@github.com",
- "name": "GitHub"
- },
- "committerTimestamp": 1529604583000,
- "displayId": "c5f4288162e",
- "id": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
- "message": "Merge pull request #949 from jneen/dblessing-patch-1\n\nAdd 'obj-c', 'obj_c' as ObjectiveC aliases",
- "parents": [
- {
- "displayId": "ea7675f741e",
- "id": "ea7675f741ee28f3f177ff32a9bde192742ffc59"
- },
- {
- "displayId": "386b95a977b",
- "id": "386b95a977b331e267497aa5206861774656f0c5"
- }
- ]
- },
- {
- "author": {
- "emailAddress": "test.user@example.com",
- "name": "root"
- },
- "authorTimestamp": 1529725651000,
- "committer": {
- "emailAddress": "test.user@example.com",
- "name": "root"
- },
- "committerTimestamp": 1529725651000,
- "displayId": "66fbe6a0978",
- "id": "66fbe6a097803f0acb7342b19563f710657ce5a2",
- "message": "CHANGELOG.md edited online with Bitbucket",
- "parents": [
- {
- "displayId": "c5f4288162e",
- "id": "c5f4288162e2e6218180779c7f6ac1735bb56eab"
- }
- ]
- }
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ },
+ {
+ "action": "COMMENTED",
+ "comment": {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+ {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
},
- "createdDate": 1529727872302,
- "id": 7,
- "user": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "comments": [
+
+ ],
+ "createdDate": 1529813297478,
+ "id": 3,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "tasks": [
+
+ ],
+ "text": "This is a thread",
+ "updatedDate": 1529813297478,
+ "version": 0
+ }
+ ],
+ "createdDate": 1529725692591,
+ "id": 2,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "tasks": [
+
+ ],
+ "text": "What about this?",
+ "updatedDate": 1529725692591,
+ "version": 0
+ },
+ "commentAction": "ADDED",
+ "createdDate": 1529725692591,
+ "id": 6,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
}
+ ]
},
- {
- "action": "COMMENTED",
- "comment": {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [
- {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [],
- "createdDate": 1529813297478,
- "id": 3,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "This is a thread",
- "updatedDate": 1529813297478,
- "version": 0
- }
- ],
- "createdDate": 1529725692591,
- "id": 2,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "What about this?",
- "updatedDate": 1529725692591,
- "version": 0
- },
- "commentAction": "ADDED",
- "createdDate": 1529725692591,
- "id": 6,
- "user": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ },
+ {
+ "action": "COMMENTED",
+ "comment": {
+ "author": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
+ }
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ },
+ "comments": [
+
+ ],
+ "createdDate": 1529725685910,
+ "id": 1,
+ "permittedOperations": {
+ "deletable": true,
+ "editable": true
+ },
+ "properties": {
+ "repositoryId": 1
+ },
+ "tasks": [
+
+ ],
+ "text": "This is a test.\n\n[analyze.json](attachment:1/1f32f09d97%2Fanalyze.json)\n",
+ "updatedDate": 1529725685910,
+ "version": 0
+ },
+ "commentAction": "ADDED",
+ "createdDate": 1529725685910,
+ "id": 5,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
}
+ ]
},
- {
- "action": "COMMENTED",
- "comment": {
- "author": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
- },
- "comments": [],
- "createdDate": 1529725685910,
- "id": 1,
- "permittedOperations": {
- "deletable": true,
- "editable": true
- },
- "properties": {
- "repositoryId": 1
- },
- "tasks": [],
- "text": "This is a test.\n\n[analyze.json](attachment:1/1f32f09d97%2Fanalyze.json)\n",
- "updatedDate": 1529725685910,
- "version": 0
- },
- "commentAction": "ADDED",
- "createdDate": 1529725685910,
- "id": 5,
- "user": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ },
+ {
+ "action": "OPENED",
+ "createdDate": 1529725657542,
+ "id": 4,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
}
+ ]
},
- {
- "action": "OPENED",
- "createdDate": 1529725657542,
- "id": 4,
- "user": {
- "active": true,
- "displayName": "root",
- "emailAddress": "test.user@example.com",
- "id": 1,
- "links": {
- "self": [
- {
- "href": "http://localhost:7990/users/root"
- }
- ]
- },
- "name": "root",
- "slug": "slug",
- "username": "username",
- "type": "NORMAL"
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ },
+ {
+ "action": "APPROVED",
+ "createdDate": 1529725657542,
+ "id": 15,
+ "user": {
+ "active": true,
+ "displayName": "root",
+ "emailAddress": "test.user@example.com",
+ "id": 1,
+ "links": {
+ "self": [
+ {
+ "href": "http://localhost:7990/users/root"
}
- }
- ]
+ ]
+ },
+ "name": "root",
+ "slug": "slug",
+ "username": "username",
+ "type": "NORMAL"
+ }
+ }
+ ]
}
diff --git a/spec/fixtures/scripts/internal_events/events/ee_event_without_identifiers.yml b/spec/fixtures/scripts/internal_events/events/ee_event_without_identifiers.yml
new file mode 100644
index 00000000000..07f606fbe33
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/events/ee_event_without_identifiers.yml
@@ -0,0 +1,14 @@
+---
+description: Internal Event CLI is opened
+category: InternalEventTracking
+action: internal_events_cli_opened
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+milestone: '16.6'
+introduced_by_url: TODO
+distributions:
+- ee
+tiers:
+- premium
+- ultimate
diff --git a/spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml b/spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
new file mode 100644
index 00000000000..5050953920d
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
@@ -0,0 +1,20 @@
+---
+description: Engineer uses Internal Event CLI to define a new event
+category: InternalEventTracking
+action: internal_events_cli_used
+identifiers:
+- project
+- namespace
+- user
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+milestone: '16.6'
+introduced_by_url: TODO
+distributions:
+- ce
+- ee
+tiers:
+- free
+- premium
+- ultimate
diff --git a/spec/fixtures/scripts/internal_events/events/keyboard_smashed_event.yml b/spec/fixtures/scripts/internal_events/events/keyboard_smashed_event.yml
new file mode 100644
index 00000000000..c0ccbc03af7
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/events/keyboard_smashed_event.yml
@@ -0,0 +1,20 @@
+---
+description: random event string
+category: InternalEventTracking
+action: random_name
+identifiers:
+- project
+- namespace
+- user
+product_section: core_platform
+product_stage: manage
+product_group: import_and_integrate
+milestone: '16.6'
+introduced_by_url: TODO
+distributions:
+- ce
+- ee
+tiers:
+- free
+- premium
+- ultimate
diff --git a/spec/fixtures/scripts/internal_events/events/secondary_event_with_identifiers.yml b/spec/fixtures/scripts/internal_events/events/secondary_event_with_identifiers.yml
new file mode 100644
index 00000000000..4e2e77e0c5c
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/events/secondary_event_with_identifiers.yml
@@ -0,0 +1,20 @@
+---
+description: Engineer closes Internal Event CLI
+category: InternalEventTracking
+action: internal_events_cli_closed
+identifiers:
+- project
+- namespace
+- user
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+milestone: '16.6'
+introduced_by_url: TODO
+distributions:
+- ce
+- ee
+tiers:
+- free
+- premium
+- ultimate
diff --git a/spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml
new file mode 100644
index 00000000000..ba56d782871
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml
@@ -0,0 +1,25 @@
+---
+key_path: counts.count_total_internal_events_cli_used_monthly
+description: Monthly count of when an event was defined using the CLI
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: 28d
+data_source: internal_events
+data_category: optional
+instrumentation_class: TotalCountMetric
+distribution:
+- ee
+tier:
+- premium
+- ultimate
+options:
+ events:
+ - internal_events_cli_used
+events:
+- name: internal_events_cli_used
diff --git a/spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml
new file mode 100644
index 00000000000..e6bdcb9d2ae
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml
@@ -0,0 +1,25 @@
+---
+key_path: counts.count_total_internal_events_cli_used_weekly
+description: Weekly count of when an event was defined using the CLI
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: 7d
+data_source: internal_events
+data_category: optional
+instrumentation_class: TotalCountMetric
+distribution:
+- ee
+tier:
+- premium
+- ultimate
+options:
+ events:
+ - internal_events_cli_used
+events:
+- name: internal_events_cli_used
diff --git a/spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml
new file mode 100644
index 00000000000..b1bf89dc095
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml
@@ -0,0 +1,25 @@
+---
+key_path: counts.count_total_internal_events_cli_used
+description: Total count of when an event was defined using the CLI
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: all
+data_source: internal_events
+data_category: optional
+instrumentation_class: TotalCountMetric
+distribution:
+- ee
+tier:
+- premium
+- ultimate
+options:
+ events:
+ - internal_events_cli_used
+events:
+- name: internal_events_cli_used
diff --git a/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml b/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml
new file mode 100644
index 00000000000..8476cb8561b
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml
@@ -0,0 +1,28 @@
+---
+key_path: redis_hll_counters.count_distinct_user_id_from_random_name_monthly
+description: Monthly count of unique users random metric string
+product_section: core_platform
+product_stage: manage
+product_group: import_and_integrate
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: 28d
+data_source: internal_events
+data_category: optional
+instrumentation_class: RedisHLLMetric
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
+options:
+ events:
+ - random_name
+events:
+- name: random_name
+ unique: user.id
diff --git a/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml b/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml
new file mode 100644
index 00000000000..b4cc2fc8b55
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml
@@ -0,0 +1,28 @@
+---
+key_path: redis_hll_counters.count_distinct_user_id_from_random_name_weekly
+description: Weekly count of unique users random metric string
+product_section: core_platform
+product_stage: manage
+product_group: import_and_integrate
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: 7d
+data_source: internal_events
+data_category: optional
+instrumentation_class: RedisHLLMetric
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
+options:
+ events:
+ - random_name
+events:
+- name: random_name
+ unique: user.id
diff --git a/spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml b/spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml
new file mode 100644
index 00000000000..754702c8c74
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml
@@ -0,0 +1,31 @@
+---
+key_path: redis_hll_counters.count_distinct_project_id_from_internal_events_cli_closed_and_internal_events_cli_used_monthly
+description: Monthly count of unique projects where a defition file was created with the CLI
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: 28d
+data_source: internal_events
+data_category: optional
+instrumentation_class: RedisHLLMetric
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
+options:
+ events:
+ - internal_events_cli_closed
+ - internal_events_cli_used
+events:
+- name: internal_events_cli_closed
+ unique: project.id
+- name: internal_events_cli_used
+ unique: project.id
diff --git a/spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml b/spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml
new file mode 100644
index 00000000000..95f429e9b40
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml
@@ -0,0 +1,31 @@
+---
+key_path: redis_hll_counters.count_distinct_project_id_from_internal_events_cli_closed_and_internal_events_cli_used_weekly
+description: Weekly count of unique projects where a defition file was created with the CLI
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: 7d
+data_source: internal_events
+data_category: optional
+instrumentation_class: RedisHLLMetric
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
+options:
+ events:
+ - internal_events_cli_closed
+ - internal_events_cli_used
+events:
+- name: internal_events_cli_closed
+ unique: project.id
+- name: internal_events_cli_used
+ unique: project.id
diff --git a/spec/fixtures/scripts/internal_events/metrics/total_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
new file mode 100644
index 00000000000..5bdb4c45a52
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
@@ -0,0 +1,27 @@
+---
+key_path: counts.count_total_internal_events_cli_used
+description: Total count of when an event was defined using the CLI
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: all
+data_source: internal_events
+data_category: optional
+instrumentation_class: TotalCountMetric
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
+options:
+ events:
+ - internal_events_cli_used
+events:
+- name: internal_events_cli_used
diff --git a/spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml
new file mode 100644
index 00000000000..b176b23b46a
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml
@@ -0,0 +1,28 @@
+---
+key_path: redis_hll_counters.count_distinct_user_id_from_internal_events_cli_used_monthly
+description: Monthly count of unique users who defined an internal event using the CLI
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: 28d
+data_source: internal_events
+data_category: optional
+instrumentation_class: RedisHLLMetric
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
+options:
+ events:
+ - internal_events_cli_used
+events:
+- name: internal_events_cli_used
+ unique: user.id
diff --git a/spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml
new file mode 100644
index 00000000000..8a0fca2cbdc
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml
@@ -0,0 +1,28 @@
+---
+key_path: redis_hll_counters.count_distinct_user_id_from_internal_events_cli_used_weekly
+description: Weekly count of unique users who defined an internal event using the CLI
+product_section: analytics
+product_stage: monitor
+product_group: analytics_instrumentation
+performance_indicator_type: []
+value_type: number
+status: active
+milestone: '16.6'
+introduced_by_url: TODO
+time_frame: 7d
+data_source: internal_events
+data_category: optional
+instrumentation_class: RedisHLLMetric
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
+options:
+ events:
+ - internal_events_cli_used
+events:
+- name: internal_events_cli_used
+ unique: user.id
diff --git a/spec/fixtures/scripts/internal_events/new_events.yml b/spec/fixtures/scripts/internal_events/new_events.yml
new file mode 100644
index 00000000000..6f39fc5e93c
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/new_events.yml
@@ -0,0 +1,183 @@
+- description: Creates a new event and flows directly into metric creation
+ inputs:
+ keystrokes:
+ - "1\n" # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ - "Engineer uses Internal Event CLI to define a new event\n" # Submit description
+ - "internal_events_cli_used\n" # Submit action name
+ - "1\n" # Select: [namespace, project, user]
+ - "\n" # Skip MR URL
+ - "instrumentation" # Filters to the analytics instrumentation group
+ - "\n" # Accept analytics:monitor:analytics_instrumentation
+ - "1\n" # Select: [free, premium, ultimate]
+ - "y\n" # Create file
+ - "1\n" # Select: Create Metric --- define a new metric
+ - "\e[A" # Arrow up to: Total count of events
+ - "\n" # Select: Total count of events
+ - "when an event was defined using the CLI\n" # Input description
+ - "1\n" # Select: Copy & continue
+ - "y\n" # Create file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ - path: config/metrics/counts_all/count_total_internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
+
+- description: Requires description & action before continuing
+ inputs:
+ keystrokes:
+ - "1\n" # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ - "\n" # Attempt to skip writing description --> should get help message
+ - "Engineer uses Internal Event CLI to define a new event\n" # Submit description
+ - "\n" # Attempt to skip naming action --> should get help message
+ - "internal_events_cli_used\n" # Submit action name
+ - "1\n" # Select [namespace, project, user]
+ - "\n" # Skip MR URL
+ - "instrumentation" # Filters to the analytics instrumentation group
+ - "\n" # Accept analytics:monitor:analytics_instrumentation
+ - "1\n" # Select [free, premium, ultimate]
+ - "y\n" # Create file
+ - "3\n" # Exit
+ outputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+
+- description: Does not allow existing events for action
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ keystrokes:
+ - "1\n" # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ - "Engineer closes Internal Event CLI\n" # Submit description
+ - "internal_events_cli_used\n" # Submit already-existing action name
+    - "internal_events_cli_closed\n" # Submit altered action name
+ - "1\n" # Select [namespace, project, user]
+ - "\n" # Skip MR URL
+ - "instrumentation" # Filters to the analytics instrumentation group
+ - "\n" # Accept analytics:monitor:analytics_instrumentation
+ - "1\n" # Select [free, premium, ultimate]
+ - "y\n" # Create file
+ - "3\n" # Exit
+ outputs:
+ files:
+ - path: config/events/internal_events_cli_closed.yml
+ content: spec/fixtures/scripts/internal_events/events/secondary_event_with_identifiers.yml
+
+- description: Creates a new event without identifiers
+ inputs:
+ keystrokes:
+ - "1\n" # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ - "Internal Event CLI is opened\n" # Submit description
+ - "internal_events_cli_opened\n" # Submit action name
+ - "6\n" # Select: None
+ - "\n" # Skip MR URL
+ - "instrumentation" # Filters to the analytics instrumentation group
+ - "\n" # Accept analytics:monitor:analytics_instrumentation
+ - "2\n" # Select [premium, ultimate]
+ - "y\n" # Create file
+ - "3\n" # Exit
+ outputs:
+ files:
+ - path: ee/config/events/internal_events_cli_opened.yml
+ content: spec/fixtures/scripts/internal_events/events/ee_event_without_identifiers.yml
+
+- description: Smashing the keyboard/return creates an event & metrics with the most common attributes, then shows usage
+ inputs:
+ keystrokes:
+ - "\n" # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ - "random event string\n" # Submit keyboard-smashing description
+ - "random_name\n" # Submit keyboard-smashing action name
+ - "\n" # Select: [namespace, project, user]
+ - "\n" # Skip MR URL
+ - "\n" # Select core_platform:manage:import_and_integrate
+ - "\n" # Select [free, premium, ultimate]
+ - "\n" # Create file
+ - "\n" # Select: Create Metric --- define a new metric
+ - "\n" # Select: Weekly/Monthly count of unique users
+ - "random metric string\n" # Submit keyboard-smashing description
+ - "\n" # Accept weekly description for monthly
+ - "\n" # Select: Copy & continue
+ - "\n" # Skip URL
+ - "\n" # Create file
+ - "\n" # Create file
+ - "\n" # Select: View Usage -- look at code examples
+ - "\n" # Select: Ruby/Rails
+ - "8\n" # Exit
+ outputs:
+ files:
+ - path: config/events/random_name.yml
+ content: spec/fixtures/scripts/internal_events/events/keyboard_smashed_event.yml
+ - path: config/metrics/counts_28d/count_distinct_user_id_from_random_name_monthly.yml
+ content: spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml
+ - path: config/metrics/counts_7d/count_distinct_user_id_from_random_name_weekly.yml
+ content: spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml
+
+- description: Creates an event after helping the user figure out next steps
+ inputs:
+ keystrokes:
+ - "4\n" # Enum-select: ...am I in the right place?
+ - "y\n" # Yes --> Are you trying to track customer usage of a GitLab feature?
+ - "y\n" # Yes --> Can usage for the feature be measured by tracking a specific user action?
+ - "n\n" # No --> Is the event already tracked?
+ - "y\n" # Yes --> Ready to start?
+ - "Internal Event CLI is opened\n" # Submit description
+ - "internal_events_cli_opened\n" # Submit action name
+ - "6\n" # Select: None
+ - "\n" # Skip MR URL
+ - "instrumentation" # Filters to the analytics instrumentation group
+ - "\n" # Accept analytics:monitor:analytics_instrumentation
+ - "2\n" # Select [premium, ultimate]
+ - "y\n" # Create file
+ - "3\n" # Exit
+ outputs:
+ files:
+ - path: ee/config/events/internal_events_cli_opened.yml
+ content: spec/fixtures/scripts/internal_events/events/ee_event_without_identifiers.yml
+
+- description: Creates a new event and flows directly into usage examples
+ inputs:
+ keystrokes:
+ - "1\n" # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ - "Engineer uses Internal Event CLI to define a new event\n" # Submit description
+ - "internal_events_cli_used\n" # Submit action name
+ - "1\n" # Select: [namespace, project, user]
+ - "\n" # Skip MR URL
+ - "instrumentation" # Filters to the analytics instrumentation group
+ - "\n" # Accept analytics:monitor:analytics_instrumentation
+ - "1\n" # Select: [free, premium, ultimate]
+ - "y\n" # Create file
+ - "2\n" # Select: View Usage
+ - "8\n" # Exit
+ outputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+
+- description: Skips event creation, then saves event & flows directly into metric creation
+ inputs:
+ keystrokes:
+ - "1\n" # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ - "Engineer uses Internal Event CLI to define a new event\n" # Submit description
+ - "internal_events_cli_used\n" # Submit action name
+ - "1\n" # Select: [namespace, project, user]
+ - "\n" # Skip MR URL
+ - "instrumentation" # Filters to the analytics instrumentation group
+ - "\n" # Accept analytics:monitor:analytics_instrumentation
+ - "1\n" # Select: [free, premium, ultimate]
+ - "n\n" # Create file
+ - "1\n" # Select: Save event & create Metric --- define a new metric
+ - "\e[A" # Arrow up to: Total count of events
+ - "\n" # Select: Total count of events
+ - "when an event was defined using the CLI\n" # Input description
+ - "1\n" # Select: Copy & continue
+ - "y\n" # Create file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ - path: config/metrics/counts_all/count_total_internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
diff --git a/spec/fixtures/scripts/internal_events/new_metrics.yml b/spec/fixtures/scripts/internal_events/new_metrics.yml
new file mode 100644
index 00000000000..2a207ee84f4
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/new_metrics.yml
@@ -0,0 +1,196 @@
+- description: Create a weekly/monthly metric for a single event
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ keystrokes:
+ - "2\n" # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ - "1\n" # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ - 'internal_events_cli_used' # Filters to this event
+ - "\n" # Select: config/events/internal_events_cli_used.yml
+ - "\n" # Select: Weekly count of unique users
+ - "who defined an internal event using the CLI\n" # Input description
+ - "\n" # Submit weekly description for monthly
+ - "1\n" # Enum-select: Copy & continue
+ - "y\n" # Create file
+ - "y\n" # Create file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: config/metrics/counts_28d/count_distinct_user_id_from_internal_events_cli_used_monthly.yml
+ content: spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml
+ - path: config/metrics/counts_7d/count_distinct_user_id_from_internal_events_cli_used_weekly.yml
+ content: spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml
+
+- description: Create a weekly/monthly metric for a multiple events, but select only one event
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ keystrokes:
+ - "2\n" # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ - "2\n" # Enum-select: Multiple events -- count occurrences of several separate events or interactions
+ - 'internal_events_cli_used' # Filters to this event
+ - " " # Multi-select: config/events/internal_events_cli_used.yml
+ - "\n" # Submit selections
+ - "\n" # Select: Weekly count of unique projects
+ - "who defined an internal event using the CLI\n" # Input description
+ - "\n" # Submit weekly description for monthly
+ - "1\n" # Enum-select: Copy & continue
+ - "y\n" # Create file
+ - "y\n" # Create file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: config/metrics/counts_28d/count_distinct_user_id_from_internal_events_cli_used_monthly.yml
+ content: spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml
+ - path: config/metrics/counts_7d/count_distinct_user_id_from_internal_events_cli_used_weekly.yml
+ content: spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml
+
+- description: Create a weekly/monthly metric for multiple events
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ - path: config/events/internal_events_cli_closed.yml
+ content: spec/fixtures/scripts/internal_events/events/secondary_event_with_identifiers.yml
+ keystrokes:
+ - "2\n" # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ - "2\n" # Enum-select: Multiple events -- count occurrences of several separate events or interactions
+ - 'internal_events_cli' # Filters to the relevant events
+ - ' ' # Multi-select: internal_events_cli_closed
+ - "\e[B" # Arrow down to: internal_events_cli_used
+ - ' ' # Multi-select: internal_events_cli_used
+ - "\n" # Submit selections
+ - "\e[B" # Arrow down to: Weekly count of unique projects
+ - "\n" # Select: Weekly count of unique projects
+ - "where a defition file was created with the CLI\n" # Input description
+ - "\n" # Submit weekly description for monthly
+ - "1\n" # Select: Copy & continue
+ - "y\n" # Create file
+ - "y\n" # Create file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: config/metrics/counts_28d/count_distinct_project_id_from_internal_events_cli_closed_and_internal_events_cli_used_monthly.yml
+ content: spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml
+ - path: config/metrics/counts_7d/count_distinct_project_id_from_internal_events_cli_closed_and_internal_events_cli_used_weekly.yml
+ content: spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml
+
+- description: Create an all time total metric for a single event
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ keystrokes:
+ - "2\n" # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ - "1\n" # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ - 'internal_events_cli_used' # Filters to this event
+ - "\n" # Select: config/events/internal_events_cli_used.yml
+ - "\e[A" # Arrow up to: Total count of events
+ - "\n" # Select: Total count of events
+ - "when an event was defined using the CLI\n" # Input description
+ - "1\n" # Select: Copy & continue
+ - "y\n" # Create file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: config/metrics/counts_all/count_total_internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
+
+- description: Try to create a database metric
+ inputs:
+ keystrokes:
+ - "2\n" # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ - "3\n" # Enum-select: Database -- record value of a particular field or count of database rows
+
+- description: Create an all time total metric for a single event, and confirm each attribute copied from event
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ keystrokes:
+ - "2\n" # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ - "1\n" # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ - 'internal_events_cli_used' # Filters to this event
+ - "\n" # Select: config/events/internal_events_cli_used.yml
+ - "\e[A" # Arrow up to: Total count of events
+ - "\n" # Select: Total count of events
+ - "when an event was defined using the CLI\n" # Input description
+ - "2\n" # Enum-select: Modify attributes
+ - "\n" # Accept group/section/stage from event definition
+ - "\n" # Accept URL from event definition
+ - "2\n" # Override tier -> Select: [premium, ultimate]
+ - "y\n" # Create file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: ee/config/metrics/counts_all/count_total_internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml
+
+- description: Create a metric after helping the user figure out next steps
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ keystrokes:
+ - "4\n" # Enum-select: ...am I in the right place?
+ - "y\n" # Yes --> Are you trying to track customer usage of a GitLab feature?
+ - "y\n" # Yes --> Can usage for the feature be measured by tracking a specific user action?
+ - "y\n" # Yes --> Is the event already tracked?
+ - "y\n" # Yes --> Ready to start?
+ - "1\n" # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ - 'internal_events_cli_used' # Filters to this event
+ - "\n" # Select: config/events/internal_events_cli_used.yml
+ - "\e[A" # Arrow up to: Total count of events
+ - "\n" # Select: Total count of events
+ - "when an event was defined using the CLI\n" # Input description
+ - "1\n" # Select: Copy & continue
+ - "y\n" # Create file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: config/metrics/counts_all/count_total_internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
+
+- description: User overwrites metric that already exists
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ - path: config/metrics/counts_all/count_total_internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml # wrong content
+ keystrokes:
+ - "2\n" # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ - "1\n" # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ - 'internal_events_cli_used' # Filters to this event
+ - "\n" # Select: config/events/internal_events_cli_used.yml
+ - "\e[A" # Arrow up to: Total count of events
+ - "\n" # Select: Total count of events
+ - "when an event was defined using the CLI\n" # Input description
+ - "1\n" # Select: Copy & continue
+ - "y\n" # Overwrite file
+ - "2\n" # Exit
+ outputs:
+ files:
+ - path: config/metrics/counts_all/count_total_internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
+
+- description: User opts not to overwrite metric that already exists
+ inputs:
+ files:
+ - path: config/events/internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/events/event_with_identifiers.yml
+ - path: config/metrics/counts_all/count_total_internal_events_cli_used.yml
+ content: spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml # wrong content
+ keystrokes:
+ - "2\n" # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ - "1\n" # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ - 'internal_events_cli_used' # Filters to this event
+ - "\n" # Select: config/events/internal_events_cli_used.yml
+ - "\e[A" # Arrow up to: Total count of events
+ - "\n" # Select: Total count of events
+ - "when an event was defined using the CLI\n" # Input description
+ - "1\n" # Select: Copy & continue
+ - "n\n" # Don't overwrite file
+ - "2\n" # Exit
diff --git a/spec/fixtures/scripts/internal_events/stages.yml b/spec/fixtures/scripts/internal_events/stages.yml
new file mode 100644
index 00000000000..d5db9dcbe6d
--- /dev/null
+++ b/spec/fixtures/scripts/internal_events/stages.yml
@@ -0,0 +1,78 @@
+stages:
+ manage:
+ display_name: "Manage"
+ section: core_platform
+ groups:
+ import_and_integrate:
+ name: Import and Integrate
+ foundations:
+ name: Foundations
+
+ plan:
+ display_name: "Plan"
+ section: dev
+ groups:
+ project_management:
+ name: Project Management
+ product_planning:
+ name: Product Planning
+ knowledge:
+ name: Knowledge
+ optimize:
+ name: Optimize
+
+ create:
+ display_name: "Create"
+ section: dev
+ slack:
+ channel: s_create
+ groups:
+ source_code:
+ name: Source Code
+ code_review:
+ name: Code Review
+ ide:
+ name: IDE
+ editor_extensions:
+ name: Editor Extensions
+ code_creation:
+ name: Code Creation
+
+ verify:
+ display_name: "Verify"
+ section: ci
+ slack:
+ channel: s_verify
+ groups:
+ pipeline_execution:
+ name: "Pipeline Execution"
+ pipeline_authoring:
+ name: "Pipeline Authoring"
+ runner:
+ name: "Runner"
+ runner_saas:
+ name: "Runner SaaS"
+ pipeline_security:
+ name: "Pipeline Security"
+
+ package:
+ display_name: "Package"
+ section: ci
+ slack:
+ channel: s_package
+ groups:
+ package_registry:
+ name: Package Registry
+ container_registry:
+ name: Container Registry
+
+ monitor:
+ display_name: Monitor
+ section: analytics
+ groups:
+ analytics_instrumentation:
+ name: Analytics Instrumentation
+ product_analytics:
+ name: Product Analytics
+ observability:
+ name: "Observability"
diff --git a/spec/frontend/__helpers__/mock_observability_client.js b/spec/frontend/__helpers__/mock_observability_client.js
index 82425aa2842..a65b5233b73 100644
--- a/spec/frontend/__helpers__/mock_observability_client.js
+++ b/spec/frontend/__helpers__/mock_observability_client.js
@@ -7,6 +7,7 @@ export function createMockClient() {
servicesUrl: 'services-url',
operationsUrl: 'operations-url',
metricsUrl: 'metrics-url',
+ metricsSearchUrl: 'metrics-search-url',
});
Object.getOwnPropertyNames(mockClient)
diff --git a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
index 2bd2b17a12d..7785693ff2a 100644
--- a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
+++ b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
@@ -11,7 +11,7 @@ exports[`~/access_tokens/components/expires_at_field should render datepicker wi
arialabel=""
autocomplete=""
container=""
- data-qa-selector="expiry_date_field"
+ data-testid="expiry-date-field"
defaultdate="Wed Aug 05 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
displayfield="true"
firstday="0"
diff --git a/spec/frontend/access_tokens/components/access_token_table_app_spec.js b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
index ae767f8b3f5..dd3fc3a9d98 100644
--- a/spec/frontend/access_tokens/components/access_token_table_app_spec.js
+++ b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
@@ -25,7 +25,7 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expires_soon: true,
expires_at: null,
revoked: false,
- revoke_path: '/-/profile/personal_access_tokens/1/revoke',
+ revoke_path: '/-/user_settings/personal_access_tokens/1/revoke',
role: 'Maintainer',
},
{
@@ -37,7 +37,7 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expires_soon: false,
expires_at: new Date().toISOString(),
revoked: false,
- revoke_path: '/-/profile/personal_access_tokens/2/revoke',
+ revoke_path: '/-/user_settings/personal_access_tokens/2/revoke',
role: 'Maintainer',
},
];
@@ -153,8 +153,8 @@ describe('~/access_tokens/components/access_token_table_app', () => {
let button = cells.at(6).findComponent(GlButton);
expect(button.attributes()).toMatchObject({
'aria-label': __('Revoke'),
- 'data-qa-selector': __('revoke_button'),
- href: '/-/profile/personal_access_tokens/1/revoke',
+ 'data-testid': 'revoke-button',
+ href: '/-/user_settings/personal_access_tokens/1/revoke',
'data-confirm': sprintf(
__(
'Are you sure you want to revoke the %{accessTokenType} "%{tokenName}"? This action cannot be undone.',
@@ -172,7 +172,7 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expect(cells.at(11).text()).toBe(__('Expired'));
expect(cells.at(12).text()).toBe('Maintainer');
button = cells.at(13).findComponent(GlButton);
- expect(button.attributes('href')).toBe('/-/profile/personal_access_tokens/2/revoke');
+ expect(button.attributes('href')).toBe('/-/user_settings/personal_access_tokens/2/revoke');
expect(button.props('category')).toBe('tertiary');
});
diff --git a/spec/frontend/access_tokens/components/new_access_token_app_spec.js b/spec/frontend/access_tokens/components/new_access_token_app_spec.js
index d51ac638f0e..966a69fa60a 100644
--- a/spec/frontend/access_tokens/components/new_access_token_app_spec.js
+++ b/spec/frontend/access_tokens/components/new_access_token_app_spec.js
@@ -81,20 +81,6 @@ describe('~/access_tokens/components/new_access_token_app', () => {
);
});
- it('input field should contain QA-related selectors', async () => {
- const newToken = '12345';
- await triggerSuccess(newToken);
-
- expect(findGlAlertError().exists()).toBe(false);
-
- const inputAttributes = wrapper
- .findByLabelText(sprintf(__('Your new %{accessTokenType}'), { accessTokenType }))
- .attributes();
- expect(inputAttributes).toMatchObject({
- 'data-qa-selector': 'created_access_token_field',
- });
- });
-
it('should render an info alert', async () => {
await triggerSuccess();
diff --git a/spec/frontend/admin/abuse_report/components/abuse_report_notes_spec.js b/spec/frontend/admin/abuse_report/components/abuse_report_notes_spec.js
index 166c735ffbd..a1993e2bde3 100644
--- a/spec/frontend/admin/abuse_report/components/abuse_report_notes_spec.js
+++ b/spec/frontend/admin/abuse_report/components/abuse_report_notes_spec.js
@@ -8,6 +8,7 @@ import SkeletonLoadingContainer from '~/vue_shared/components/notes/skeleton_not
import abuseReportNotesQuery from '~/admin/abuse_report/graphql/notes/abuse_report_notes.query.graphql';
import AbuseReportNotes from '~/admin/abuse_report/components/abuse_report_notes.vue';
import AbuseReportDiscussion from '~/admin/abuse_report/components/notes/abuse_report_discussion.vue';
+import AbuseReportAddNote from '~/admin/abuse_report/components/notes/abuse_report_add_note.vue';
import { mockAbuseReport, mockNotesByIdResponse } from '../mock_data';
@@ -24,6 +25,7 @@ describe('Abuse Report Notes', () => {
const findSkeletonLoaders = () => wrapper.findAllComponents(SkeletonLoadingContainer);
const findAbuseReportDiscussions = () => wrapper.findAllComponents(AbuseReportDiscussion);
+ const findAbuseReportAddNote = () => wrapper.findComponent(AbuseReportAddNote);
const createComponent = ({
queryHandler = notesQueryHandler,
@@ -78,6 +80,16 @@ describe('Abuse Report Notes', () => {
discussion: discussions[1].notes.nodes,
});
});
+
+ it('should show the comment form', () => {
+ expect(findAbuseReportAddNote().exists()).toBe(true);
+
+ expect(findAbuseReportAddNote().props()).toMatchObject({
+ abuseReportId: mockAbuseReportId,
+ discussionId: '',
+ isNewDiscussion: true,
+ });
+ });
});
describe('When there is an error fetching the notes', () => {
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_add_note_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_add_note_spec.js
new file mode 100644
index 00000000000..959b52beaef
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_add_note_spec.js
@@ -0,0 +1,227 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createAlert } from '~/alert';
+import { clearDraft } from '~/lib/utils/autosave';
+import waitForPromises from 'helpers/wait_for_promises';
+import createNoteMutation from '~/admin/abuse_report/graphql/notes/create_abuse_report_note.mutation.graphql';
+import AbuseReportAddNote from '~/admin/abuse_report/components/notes/abuse_report_add_note.vue';
+import AbuseReportCommentForm from '~/admin/abuse_report/components/notes/abuse_report_comment_form.vue';
+
+import { mockAbuseReport, createAbuseReportNoteResponse } from '../../mock_data';
+
+jest.mock('~/alert');
+jest.mock('~/lib/utils/autosave');
+Vue.use(VueApollo);
+
+describe('Abuse Report Add Note', () => {
+ let wrapper;
+
+ const mockAbuseReportId = mockAbuseReport.report.globalId;
+ const mockDiscussionId = 'gid://gitlab/Discussion/9c7228e06fb0339a3d1440fcda960acfd8baa43a';
+
+ const mutationSuccessHandler = jest.fn().mockResolvedValue(createAbuseReportNoteResponse);
+
+ const findTimelineEntry = () => wrapper.findByTestId('abuse-report-note-timeline-entry');
+ const findTimelineEntryInner = () =>
+ wrapper.findByTestId('abuse-report-note-timeline-entry-inner');
+ const findCommentFormWrapper = () => wrapper.findByTestId('abuse-report-comment-form-wrapper');
+
+ const findAbuseReportCommentForm = () => wrapper.findComponent(AbuseReportCommentForm);
+ const findReplyTextarea = () => wrapper.findByTestId('abuse-report-note-reply-textarea');
+
+ const createComponent = ({
+ mutationHandler = mutationSuccessHandler,
+ abuseReportId = mockAbuseReportId,
+ discussionId = '',
+ isNewDiscussion = true,
+ showCommentForm = false,
+ } = {}) => {
+ wrapper = shallowMountExtended(AbuseReportAddNote, {
+ apolloProvider: createMockApollo([[createNoteMutation, mutationHandler]]),
+ propsData: {
+ abuseReportId,
+ discussionId,
+ isNewDiscussion,
+ showCommentForm,
+ },
+ });
+ };
+
+ describe('Default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should show the comment form', () => {
+ expect(findAbuseReportCommentForm().exists()).toBe(true);
+ expect(findAbuseReportCommentForm().props()).toMatchObject({
+ abuseReportId: mockAbuseReportId,
+ isSubmitting: false,
+ autosaveKey: `${mockAbuseReportId}-comment`,
+ commentButtonText: 'Comment',
+ initialValue: '',
+ });
+ });
+
+ it('should not show the reply textarea', () => {
+ expect(findReplyTextarea().exists()).toBe(false);
+ });
+
+ it('should add the correct classList to timeline-entry', () => {
+ expect(findTimelineEntry().classes()).toEqual(
+ expect.arrayContaining(['timeline-entry', 'note-form']),
+ );
+
+ expect(findTimelineEntryInner().classes()).toEqual(['timeline-entry-inner']);
+ });
+ });
+
+ describe('When the main comments has replies', () => {
+ beforeEach(() => {
+ createComponent({
+ discussionId: 'gid://gitlab/Discussion/9c7228e06fb0339a3d1440fcda960acfd8baa43a',
+ isNewDiscussion: false,
+ });
+ });
+
+ it('should add the correct classLists', () => {
+ expect(findTimelineEntry().classes()).toEqual(
+ expect.arrayContaining([
+ 'note',
+ 'note-wrapper',
+ 'note-comment',
+ 'discussion-reply-holder',
+ 'gl-border-t-0!',
+ 'clearfix',
+ ]),
+ );
+
+ expect(findTimelineEntryInner().classes()).toEqual([]);
+
+ expect(findCommentFormWrapper().classes()).toEqual(
+ expect.arrayContaining([
+ 'gl-relative',
+ 'gl-display-flex',
+ 'gl-align-items-flex-start',
+ 'gl-flex-nowrap',
+ ]),
+ );
+ });
+
+    it('should not show the comment form', () => {
+ expect(findAbuseReportCommentForm().exists()).toBe(false);
+ });
+
+ it('should show the reply textarea', () => {
+ expect(findReplyTextarea().exists()).toBe(true);
+ expect(findReplyTextarea().attributes()).toMatchObject({
+ rows: '1',
+ placeholder: 'Reply…',
+ 'aria-label': 'Reply to comment',
+ });
+ });
+ });
+
+ describe('Adding a comment', () => {
+ const noteText = 'mock note';
+
+ beforeEach(() => {
+ createComponent();
+
+ findAbuseReportCommentForm().vm.$emit('submitForm', {
+ commentText: noteText,
+ });
+ });
+
+ it('should call the mutation with provided noteText', async () => {
+ expect(findAbuseReportCommentForm().props('isSubmitting')).toBe(true);
+
+ expect(mutationSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ noteableId: mockAbuseReportId,
+ body: noteText,
+ discussionId: null,
+ },
+ });
+
+ await waitForPromises();
+
+ expect(findAbuseReportCommentForm().props('isSubmitting')).toBe(false);
+ });
+
+ it('should add the correct classList to comment-form wrapper', () => {
+ expect(findCommentFormWrapper().classes()).toEqual([]);
+ });
+
+ it('should clear draft from local storage', async () => {
+ await waitForPromises();
+
+ expect(clearDraft).toHaveBeenCalledWith(`${mockAbuseReportId}-comment`);
+ });
+
+ it('should emit `cancelEditing` event', async () => {
+ await waitForPromises();
+
+ expect(wrapper.emitted('cancelEditing')).toHaveLength(1);
+ });
+
+ it.each`
+ description | errorResponse
+ ${'with an error response'} | ${new Error('The discussion could not be found')}
+      ${'without an error response'} | ${null}
+ `('should show an error when mutation fails $description', async ({ errorResponse }) => {
+ createComponent({
+ mutationHandler: jest.fn().mockRejectedValue(errorResponse),
+ });
+
+ findAbuseReportCommentForm().vm.$emit('submitForm', {
+ commentText: noteText,
+ });
+
+ await waitForPromises();
+
+ const errorMessage = errorResponse
+ ? 'Your comment could not be submitted because the discussion could not be found.'
+ : 'Your comment could not be submitted! Please check your network connection and try again.';
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: errorMessage,
+ captureError: true,
+ parent: expect.anything(),
+ });
+ });
+ });
+
+ describe('Replying to a comment', () => {
+ beforeEach(() => {
+ createComponent({
+ discussionId: mockDiscussionId,
+ isNewDiscussion: false,
+ showCommentForm: false,
+ });
+ });
+
+ it('should not show the comment form', () => {
+ expect(findAbuseReportCommentForm().exists()).toBe(false);
+ });
+
+ it('should show comment form when reply textarea is clicked on', async () => {
+ await findReplyTextarea().trigger('click');
+
+ expect(findAbuseReportCommentForm().exists()).toBe(true);
+ expect(findAbuseReportCommentForm().props('commentButtonText')).toBe('Reply');
+ });
+
+ it('should show comment form if `showCommentForm` is true', () => {
+ createComponent({
+ discussionId: mockDiscussionId,
+ isNewDiscussion: false,
+ showCommentForm: true,
+ });
+
+ expect(findAbuseReportCommentForm().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_comment_form_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_comment_form_spec.js
new file mode 100644
index 00000000000..2265ef7d441
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_comment_form_spec.js
@@ -0,0 +1,214 @@
+import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import { ESC_KEY, ENTER_KEY } from '~/lib/utils/keys';
+import * as autosave from '~/lib/utils/autosave';
+import * as confirmViaGlModal from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
+
+import AbuseReportCommentForm from '~/admin/abuse_report/components/notes/abuse_report_comment_form.vue';
+import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
+
+import { mockAbuseReport } from '../../mock_data';
+
+jest.mock('~/lib/utils/autosave', () => ({
+ updateDraft: jest.fn(),
+ clearDraft: jest.fn(),
+ getDraft: jest.fn().mockReturnValue(''),
+}));
+
+jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal', () => ({
+ confirmAction: jest.fn().mockResolvedValue(true),
+}));
+
+describe('Abuse Report Comment Form', () => {
+ let wrapper;
+
+ const mockAbuseReportId = mockAbuseReport.report.globalId;
+ const mockAutosaveKey = `${mockAbuseReportId}-comment`;
+ const mockInitialValue = 'note text';
+
+ const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
+ const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]');
+ const findCommentButton = () => wrapper.find('[data-testid="comment-button"]');
+
+ const createComponent = ({
+ abuseReportId = mockAbuseReportId,
+ isSubmitting = false,
+ initialValue = mockInitialValue,
+ autosaveKey = mockAutosaveKey,
+ commentButtonText = 'Comment',
+ } = {}) => {
+ wrapper = shallowMount(AbuseReportCommentForm, {
+ propsData: {
+ abuseReportId,
+ isSubmitting,
+ initialValue,
+ autosaveKey,
+ commentButtonText,
+ },
+ provide: {
+ uploadNoteAttachmentPath: 'test-upload-path',
+ },
+ });
+ };
+
+ describe('Markdown editor', () => {
+ it('should show markdown editor', () => {
+ createComponent();
+
+ expect(findMarkdownEditor().exists()).toBe(true);
+
+ expect(findMarkdownEditor().props()).toMatchObject({
+ value: mockInitialValue,
+ renderMarkdownPath: '',
+ uploadsPath: 'test-upload-path',
+ enableContentEditor: false,
+ formFieldProps: {
+ 'aria-label': 'Add a reply',
+ placeholder: 'Write a comment or drag your files here…',
+ id: 'abuse-report-add-or-edit-comment',
+ name: 'abuse-report-add-or-edit-comment',
+ },
+ markdownDocsPath: '/help/user/markdown',
+ });
+ });
+
+ it('should pass the draft from local storage if it exists', () => {
+ jest.spyOn(autosave, 'getDraft').mockImplementation(() => 'draft comment');
+ createComponent();
+
+ expect(findMarkdownEditor().props('value')).toBe('draft comment');
+ });
+
+ it('should pass an empty string if both draft and initialValue are empty', () => {
+ jest.spyOn(autosave, 'getDraft').mockImplementation(() => '');
+ createComponent({ initialValue: '' });
+
+ expect(findMarkdownEditor().props('value')).toBe('');
+ });
+ });
+
+ describe('Markdown Editor input', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should set the correct comment text value', async () => {
+ findMarkdownEditor().vm.$emit('input', 'new comment');
+ await nextTick();
+
+ expect(findMarkdownEditor().props('value')).toBe('new comment');
+ });
+
+ it('should call `updateDraft` with correct parameters', () => {
+ findMarkdownEditor().vm.$emit('input', 'new comment');
+
+ expect(autosave.updateDraft).toHaveBeenCalledWith(mockAutosaveKey, 'new comment');
+ });
+ });
+
+ describe('Submitting a comment', () => {
+ beforeEach(() => {
+ jest.spyOn(autosave, 'getDraft').mockImplementation(() => 'draft comment');
+ createComponent();
+ });
+
+ it('should show comment button', () => {
+ expect(findCommentButton().exists()).toBe(true);
+ expect(findCommentButton().text()).toBe('Comment');
+ });
+
+ it('should show `Reply` button if its not a new discussion', () => {
+ createComponent({ commentButtonText: 'Reply' });
+ expect(findCommentButton().text()).toBe('Reply');
+ });
+
+ describe('when enter with meta key is pressed', () => {
+ beforeEach(() => {
+ findMarkdownEditor().vm.$emit(
+ 'keydown',
+ new KeyboardEvent('keydown', { key: ENTER_KEY, metaKey: true }),
+ );
+ });
+
+ it('should emit `submitForm` event with correct parameters', () => {
+ expect(wrapper.emitted('submitForm')).toEqual([[{ commentText: 'draft comment' }]]);
+ });
+ });
+
+ describe('when ctrl+enter is pressed', () => {
+ beforeEach(() => {
+ findMarkdownEditor().vm.$emit(
+ 'keydown',
+ new KeyboardEvent('keydown', { key: ENTER_KEY, ctrlKey: true }),
+ );
+ });
+
+ it('should emit `submitForm` event with correct parameters', () => {
+ expect(wrapper.emitted('submitForm')).toEqual([[{ commentText: 'draft comment' }]]);
+ });
+ });
+
+ describe('when comment button is clicked', () => {
+ beforeEach(() => {
+ findCommentButton().vm.$emit('click');
+ });
+
+ it('should emit `submitForm` event with correct parameters', () => {
+ expect(wrapper.emitted('submitForm')).toEqual([[{ commentText: 'draft comment' }]]);
+ });
+ });
+ });
+
+ describe('Cancel editing', () => {
+ beforeEach(() => {
+ jest.spyOn(autosave, 'getDraft').mockImplementation(() => 'draft comment');
+ createComponent();
+ });
+
+ it('should show cancel button', () => {
+ expect(findCancelButton().exists()).toBe(true);
+ expect(findCancelButton().text()).toBe('Cancel');
+ });
+
+ describe('when escape key is pressed', () => {
+ beforeEach(() => {
+ findMarkdownEditor().vm.$emit('keydown', new KeyboardEvent('keydown', { key: ESC_KEY }));
+
+ return waitForPromises();
+ });
+
+ it('should confirm a user action if comment text is not empty', () => {
+ expect(confirmViaGlModal.confirmAction).toHaveBeenCalled();
+ });
+
+ it('should clear draft from local storage', () => {
+ expect(autosave.clearDraft).toHaveBeenCalledWith(mockAutosaveKey);
+ });
+
+ it('should emit `cancelEditing` event', () => {
+ expect(wrapper.emitted('cancelEditing')).toHaveLength(1);
+ });
+ });
+
+ describe('when cancel button is clicked', () => {
+ beforeEach(() => {
+ findCancelButton().vm.$emit('click');
+
+ return waitForPromises();
+ });
+
+ it('should confirm a user action if comment text is not empty', () => {
+ expect(confirmViaGlModal.confirmAction).toHaveBeenCalled();
+ });
+
+ it('should clear draft from local storage', () => {
+ expect(autosave.clearDraft).toHaveBeenCalledWith(mockAutosaveKey);
+ });
+
+ it('should emit `cancelEditing` event', () => {
+ expect(wrapper.emitted('cancelEditing')).toHaveLength(1);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_discussion_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_discussion_spec.js
index 86f0939a938..fdc049725a4 100644
--- a/spec/frontend/admin/abuse_report/components/notes/abuse_report_discussion_spec.js
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_discussion_spec.js
@@ -4,6 +4,7 @@ import ToggleRepliesWidget from '~/notes/components/toggle_replies_widget.vue';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import AbuseReportDiscussion from '~/admin/abuse_report/components/notes/abuse_report_discussion.vue';
import AbuseReportNote from '~/admin/abuse_report/components/notes/abuse_report_note.vue';
+import AbuseReportAddNote from '~/admin/abuse_report/components/notes/abuse_report_add_note.vue';
import {
mockAbuseReport,
@@ -19,6 +20,7 @@ describe('Abuse Report Discussion', () => {
const findAbuseReportNotes = () => wrapper.findAllComponents(AbuseReportNote);
const findTimelineEntryItem = () => wrapper.findComponent(TimelineEntryItem);
const findToggleRepliesWidget = () => wrapper.findComponent(ToggleRepliesWidget);
+ const findAbuseReportAddNote = () => wrapper.findComponent(AbuseReportAddNote);
const createComponent = ({
discussion = mockDiscussionWithNoReplies,
@@ -43,6 +45,7 @@ describe('Abuse Report Discussion', () => {
expect(findAbuseReportNote().props()).toMatchObject({
abuseReportId: mockAbuseReportId,
note: mockDiscussionWithNoReplies[0],
+ showReplyButton: true,
});
});
@@ -50,9 +53,13 @@ describe('Abuse Report Discussion', () => {
expect(findTimelineEntryItem().exists()).toBe(false);
});
- it('should not show the the toggle replies widget wrapper when no replies', () => {
+ it('should not show the toggle replies widget wrapper when there are no replies', () => {
expect(findToggleRepliesWidget().exists()).toBe(false);
});
+
+ it('should not show the comment form when there are no replies', () => {
+ expect(findAbuseReportAddNote().exists()).toBe(false);
+ });
});
describe('When the main comments has replies', () => {
@@ -75,5 +82,68 @@ describe('Abuse Report Discussion', () => {
await nextTick();
expect(findAbuseReportNotes()).toHaveLength(1);
});
+
+ it('should show the comment form', () => {
+ expect(findAbuseReportAddNote().exists()).toBe(true);
+
+ expect(findAbuseReportAddNote().props()).toMatchObject({
+ abuseReportId: mockAbuseReportId,
+ discussionId: mockDiscussionWithReplies[0].discussion.id,
+ isNewDiscussion: false,
+ });
+ });
+
+ it('should show the reply button only for the main comment', () => {
+ expect(findAbuseReportNotes().at(0).props('showReplyButton')).toBe(true);
+
+ expect(findAbuseReportNotes().at(1).props('showReplyButton')).toBe(false);
+ expect(findAbuseReportNotes().at(2).props('showReplyButton')).toBe(false);
+ });
+ });
+
+ describe('Replying to a comment when it has no replies', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should show comment form when `startReplying` is emitted', async () => {
+ expect(findAbuseReportAddNote().exists()).toBe(false);
+
+ findAbuseReportNote().vm.$emit('startReplying');
+ await nextTick();
+
+ expect(findAbuseReportAddNote().exists()).toBe(true);
+ expect(findAbuseReportAddNote().props('showCommentForm')).toBe(true);
+ });
+
+ it('should hide the comment form when `cancelEditing` is emitted', async () => {
+ findAbuseReportNote().vm.$emit('startReplying');
+ await nextTick();
+
+ findAbuseReportAddNote().vm.$emit('cancelEditing');
+ await nextTick();
+
+ expect(findAbuseReportAddNote().exists()).toBe(false);
+ });
+ });
+
+ describe('Replying to a comment with replies', () => {
+ beforeEach(() => {
+ createComponent({
+ discussion: mockDiscussionWithReplies,
+ });
+ });
+
+ it('should show reply textarea, but not comment form', () => {
+ expect(findAbuseReportAddNote().exists()).toBe(true);
+ expect(findAbuseReportAddNote().props('showCommentForm')).toBe(false);
+ });
+
+ it('should show comment form when reply button on main comment is clicked', async () => {
+ findAbuseReportNotes().at(0).vm.$emit('startReplying');
+ await nextTick();
+
+ expect(findAbuseReportAddNote().props('showCommentForm')).toBe(true);
+ });
});
});
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_edit_note_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_edit_note_spec.js
new file mode 100644
index 00000000000..88f243b2501
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_edit_note_spec.js
@@ -0,0 +1,129 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createAlert } from '~/alert';
+import { clearDraft } from '~/lib/utils/autosave';
+import waitForPromises from 'helpers/wait_for_promises';
+import updateNoteMutation from '~/admin/abuse_report/graphql/notes/update_abuse_report_note.mutation.graphql';
+import AbuseReportEditNote from '~/admin/abuse_report/components/notes/abuse_report_edit_note.vue';
+import AbuseReportCommentForm from '~/admin/abuse_report/components/notes/abuse_report_comment_form.vue';
+
+import {
+ mockAbuseReport,
+ mockDiscussionWithNoReplies,
+ editAbuseReportNoteResponse,
+} from '../../mock_data';
+
+jest.mock('~/alert');
+jest.mock('~/lib/utils/autosave');
+Vue.use(VueApollo);
+
+describe('Abuse Report Edit Note', () => {
+ let wrapper;
+
+ const mockAbuseReportId = mockAbuseReport.report.globalId;
+ const mockNote = mockDiscussionWithNoReplies[0];
+
+ const mutationSuccessHandler = jest.fn().mockResolvedValue(editAbuseReportNoteResponse);
+
+ const findAbuseReportCommentForm = () => wrapper.findComponent(AbuseReportCommentForm);
+
+ const createComponent = ({
+ mutationHandler = mutationSuccessHandler,
+ abuseReportId = mockAbuseReportId,
+ discussionId = '',
+ note = mockNote,
+ } = {}) => {
+ wrapper = shallowMountExtended(AbuseReportEditNote, {
+ apolloProvider: createMockApollo([[updateNoteMutation, mutationHandler]]),
+ propsData: {
+ abuseReportId,
+ discussionId,
+ note,
+ },
+ });
+ };
+
+ describe('Default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should show the comment form', () => {
+ expect(findAbuseReportCommentForm().exists()).toBe(true);
+ expect(findAbuseReportCommentForm().props()).toMatchObject({
+ abuseReportId: mockAbuseReportId,
+ isSubmitting: false,
+ autosaveKey: `${mockNote.id}-comment`,
+ commentButtonText: 'Save comment',
+ initialValue: mockNote.body,
+ });
+ });
+ });
+
+ describe('Editing a comment', () => {
+ const noteText = 'Updated comment';
+
+ beforeEach(() => {
+ createComponent();
+
+ findAbuseReportCommentForm().vm.$emit('submitForm', {
+ commentText: noteText,
+ });
+ });
+
+ it('should call the mutation with provided noteText', async () => {
+ expect(findAbuseReportCommentForm().props('isSubmitting')).toBe(true);
+
+ expect(mutationSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ id: mockNote.id,
+ body: noteText,
+ },
+ });
+
+ await waitForPromises();
+
+ expect(findAbuseReportCommentForm().props('isSubmitting')).toBe(false);
+ });
+
+ it('should clear draft from local storage', async () => {
+ await waitForPromises();
+
+ expect(clearDraft).toHaveBeenCalledWith(`${mockNote.id}-comment`);
+ });
+
+ it('should emit `cancelEditing` event', async () => {
+ await waitForPromises();
+
+ expect(wrapper.emitted('cancelEditing')).toHaveLength(1);
+ });
+
+ it.each`
+ description | errorResponse
+ ${'with an error response'} | ${new Error('The note could not be found')}
+ ${'without an error response'} | ${null}
+ `('should show an error when mutation fails $description', async ({ errorResponse }) => {
+ createComponent({
+ mutationHandler: jest.fn().mockRejectedValue(errorResponse),
+ });
+
+ findAbuseReportCommentForm().vm.$emit('submitForm', {
+ commentText: noteText,
+ });
+
+ await waitForPromises();
+
+ const errorMessage = errorResponse
+ ? 'Your comment could not be updated because the note could not be found.'
+ : 'Something went wrong while editing your comment. Please try again.';
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: errorMessage,
+ captureError: true,
+ parent: wrapper.vm.$el,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_actions_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_actions_spec.js
new file mode 100644
index 00000000000..1ddfb6145fc
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_actions_spec.js
@@ -0,0 +1,79 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective } from 'helpers/vue_mock_directive';
+import ReplyButton from '~/notes/components/note_actions/reply_button.vue';
+import AbuseReportNoteActions from '~/admin/abuse_report/components/notes/abuse_report_note_actions.vue';
+
+describe('Abuse Report Note Actions', () => {
+ let wrapper;
+
+ const findReplyButton = () => wrapper.findComponent(ReplyButton);
+ const findEditButton = () => wrapper.findComponent(GlButton);
+
+ const createComponent = ({ showReplyButton = true, showEditButton = true } = {}) => {
+ wrapper = shallowMount(AbuseReportNoteActions, {
+ propsData: {
+ showReplyButton,
+ showEditButton,
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ });
+ };
+
+ describe('Default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should show reply button', () => {
+ expect(findReplyButton().exists()).toBe(true);
+ });
+
+ it('should emit `startReplying` when reply button is clicked', () => {
+ findReplyButton().vm.$emit('startReplying');
+
+ expect(wrapper.emitted('startReplying')).toHaveLength(1);
+ });
+
+ it('should show edit button', () => {
+ expect(findEditButton().exists()).toBe(true);
+ expect(findEditButton().attributes()).toMatchObject({
+ icon: 'pencil',
+ title: 'Edit comment',
+ 'aria-label': 'Edit comment',
+ });
+ });
+
+ it('should emit `startEditing` when edit button is clicked', () => {
+ findEditButton().vm.$emit('click');
+
+ expect(wrapper.emitted('startEditing')).toHaveLength(1);
+ });
+ });
+
+ describe('When `showReplyButton` is false', () => {
+ beforeEach(() => {
+ createComponent({
+ showReplyButton: false,
+ });
+ });
+
+ it('should not show reply button', () => {
+ expect(findReplyButton().exists()).toBe(false);
+ });
+ });
+
+ describe('When `showEditButton` is false', () => {
+ beforeEach(() => {
+ createComponent({
+ showEditButton: false,
+ });
+ });
+
+ it('should not show edit button', () => {
+ expect(findEditButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js
index b6908853e46..bc7aa8ef5de 100644
--- a/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js
@@ -2,7 +2,10 @@ import { shallowMount } from '@vue/test-utils';
import { GlAvatarLink, GlAvatar } from '@gitlab/ui';
import AbuseReportNote from '~/admin/abuse_report/components/notes/abuse_report_note.vue';
import NoteHeader from '~/notes/components/note_header.vue';
-import NoteBody from '~/admin/abuse_report/components/notes/abuse_report_note_body.vue';
+import EditedAt from '~/issues/show/components/edited.vue';
+import AbuseReportNoteBody from '~/admin/abuse_report/components/notes/abuse_report_note_body.vue';
+import AbuseReportEditNote from '~/admin/abuse_report/components/notes/abuse_report_edit_note.vue';
+import AbuseReportNoteActions from '~/admin/abuse_report/components/notes/abuse_report_note_actions.vue';
import { mockAbuseReport, mockDiscussionWithNoReplies } from '../../mock_data';
@@ -10,18 +13,29 @@ describe('Abuse Report Note', () => {
let wrapper;
const mockAbuseReportId = mockAbuseReport.report.globalId;
const mockNote = mockDiscussionWithNoReplies[0];
+ const mockShowReplyButton = true;
const findAvatar = () => wrapper.findComponent(GlAvatar);
const findAvatarLink = () => wrapper.findComponent(GlAvatarLink);
const findNoteHeader = () => wrapper.findComponent(NoteHeader);
- const findNoteBody = () => wrapper.findComponent(NoteBody);
+ const findNoteBody = () => wrapper.findComponent(AbuseReportNoteBody);
- const createComponent = ({ note = mockNote, abuseReportId = mockAbuseReportId } = {}) => {
+ const findEditNote = () => wrapper.findComponent(AbuseReportEditNote);
+ const findEditedAt = () => wrapper.findComponent(EditedAt);
+
+ const findNoteActions = () => wrapper.findComponent(AbuseReportNoteActions);
+
+ const createComponent = ({
+ note = mockNote,
+ abuseReportId = mockAbuseReportId,
+ showReplyButton = mockShowReplyButton,
+ } = {}) => {
wrapper = shallowMount(AbuseReportNote, {
propsData: {
note,
abuseReportId,
+ showReplyButton,
},
});
};
@@ -77,4 +91,110 @@ describe('Abuse Report Note', () => {
});
});
});
+
+ describe('Editing', () => {
+ it('should show edit button when resolveNote is true', () => {
+ createComponent({
+ note: { ...mockNote, userPermissions: { resolveNote: true } },
+ });
+
+ expect(findNoteActions().props()).toMatchObject({
+ showEditButton: true,
+ });
+ });
+
+ it('should not show edit button when resolveNote is false', () => {
+ createComponent({
+ note: { ...mockNote, userPermissions: { resolveNote: false } },
+ });
+
+ expect(findNoteActions().props()).toMatchObject({
+ showEditButton: false,
+ });
+ });
+
+ it('should not be in edit mode by default', () => {
+ expect(findEditNote().exists()).toBe(false);
+ });
+
+ it('should trigger edit mode when `startEditing` event is emitted', async () => {
+ await findNoteActions().vm.$emit('startEditing');
+
+ expect(findEditNote().exists()).toBe(true);
+ expect(findEditNote().props()).toMatchObject({
+ abuseReportId: mockAbuseReportId,
+ note: mockNote,
+ });
+
+ expect(findNoteHeader().exists()).toBe(false);
+ expect(findNoteBody().exists()).toBe(false);
+ });
+
+ it('should hide edit mode when `cancelEditing` event is emitted', async () => {
+ await findNoteActions().vm.$emit('startEditing');
+ await findEditNote().vm.$emit('cancelEditing');
+
+ expect(findEditNote().exists()).toBe(false);
+
+ expect(findNoteHeader().exists()).toBe(true);
+ expect(findNoteBody().exists()).toBe(true);
+ });
+ });
+
+ describe('Edited At', () => {
+ it('should not show edited-at if lastEditedBy is null', () => {
+ expect(findEditedAt().exists()).toBe(false);
+ });
+
+ it('should show edited-at if lastEditedBy is not null', () => {
+ createComponent({
+ note: {
+ ...mockNote,
+ lastEditedBy: { name: 'user', webPath: '/user' },
+ lastEditedAt: '2023-10-20T02:46:50Z',
+ },
+ });
+
+ expect(findEditedAt().exists()).toBe(true);
+
+ expect(findEditedAt().props()).toMatchObject({
+ updatedAt: '2023-10-20T02:46:50Z',
+ updatedByName: 'user',
+ updatedByPath: '/user',
+ });
+
+ expect(findEditedAt().classes()).toEqual(
+ expect.arrayContaining(['gl-text-secondary', 'gl-pl-3']),
+ );
+ });
+
+ it('should add the correct classList when showReplyButton is false', () => {
+ createComponent({
+ note: {
+ ...mockNote,
+ lastEditedBy: { name: 'user', webPath: '/user' },
+ lastEditedAt: '2023-10-20T02:46:50Z',
+ },
+ showReplyButton: false,
+ });
+
+ expect(findEditedAt().classes()).toEqual(
+ expect.arrayContaining(['gl-text-secondary', 'gl-pl-8']),
+ );
+ });
+ });
+
+ describe('Replying', () => {
+ it('should show reply button', () => {
+ expect(findNoteActions().props()).toMatchObject({
+ showReplyButton: true,
+ });
+ });
+
+ it('should bubble up `startReplying` event', () => {
+ findNoteActions().vm.$emit('startReplying');
+
+ expect(wrapper.emitted('startReplying')).toHaveLength(1);
+ });
+ });
});
diff --git a/spec/frontend/admin/abuse_report/mock_data.js b/spec/frontend/admin/abuse_report/mock_data.js
index 44c8cbdad7f..9790b44c976 100644
--- a/spec/frontend/admin/abuse_report/mock_data.js
+++ b/spec/frontend/admin/abuse_report/mock_data.js
@@ -139,7 +139,7 @@ export const mockDiscussionWithNoReplies = [
body: 'Comment 1',
bodyHtml: '\u003cp data-sourcepos="1:1-1:9" dir="auto"\u003eComment 1\u003c/p\u003e',
createdAt: '2023-10-19T06:11:13Z',
- lastEditedAt: '2023-10-20T02:46:50Z',
+ lastEditedAt: null,
url: 'http://127.0.0.1:3000/admin/abuse_reports/1#note_1',
resolved: false,
author: {
@@ -153,7 +153,7 @@ export const mockDiscussionWithNoReplies = [
},
lastEditedBy: null,
userPermissions: {
- adminNote: true,
+ resolveNote: true,
__typename: 'NotePermissions',
},
discussion: {
@@ -192,7 +192,7 @@ export const mockDiscussionWithReplies = [
},
lastEditedBy: null,
userPermissions: {
- adminNote: true,
+ resolveNote: true,
__typename: 'NotePermissions',
},
discussion: {
@@ -237,7 +237,7 @@ export const mockDiscussionWithReplies = [
},
lastEditedBy: null,
userPermissions: {
- adminNote: true,
+ resolveNote: true,
__typename: 'NotePermissions',
},
discussion: {
@@ -282,7 +282,7 @@ export const mockDiscussionWithReplies = [
},
lastEditedBy: null,
userPermissions: {
- adminNote: true,
+ resolveNote: true,
__typename: 'NotePermissions',
},
discussion: {
@@ -340,3 +340,83 @@ export const mockNotesByIdResponse = {
},
},
};
+
+export const createAbuseReportNoteResponse = {
+ data: {
+ createNote: {
+ note: {
+ id: 'gid://gitlab/Note/6',
+ discussion: {
+ id: 'gid://gitlab/Discussion/90ca230051611e6e1676c50ba7178e0baeabd98d',
+ notes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Note/6',
+ body: 'Another comment',
+ bodyHtml: '<p data-sourcepos="1:1-1:15" dir="auto">Another comment</p>',
+ createdAt: '2023-11-02T02:45:46Z',
+ lastEditedAt: null,
+ url: 'http://127.0.0.1:3000/admin/abuse_reports/20#note_6',
+ resolved: false,
+ author: {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://127.0.0.1:3000/root',
+ },
+ lastEditedBy: null,
+ userPermissions: {
+ resolveNote: true,
+ },
+ discussion: {
+ id: 'gid://gitlab/Discussion/90ca230051611e6e1676c50ba7178e0baeabd98d',
+ notes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Note/6',
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ },
+ },
+ errors: [],
+ },
+ },
+};
+
+export const editAbuseReportNoteResponse = {
+ data: {
+ updateNote: {
+ errors: [],
+ note: {
+ id: 'gid://gitlab/Note/1',
+ body: 'Updated comment',
+ bodyHtml: '<p data-sourcepos="1:1-1:15" dir="auto">Updated comment</p>',
+ createdAt: '2023-10-20T07:47:42Z',
+ lastEditedAt: '2023-10-20T07:47:42Z',
+ url: 'http://127.0.0.1:3000/admin/abuse_reports/1#note_1',
+ resolved: false,
+ author: {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://127.0.0.1:3000/root',
+ __typename: 'UserCore',
+ },
+ lastEditedBy: 'root',
+ userPermissions: {
+ resolveNote: true,
+ __typename: 'NotePermissions',
+ },
+ },
+ },
+ },
+};
diff --git a/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js b/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js
index 9e55716cc30..463455573ee 100644
--- a/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js
+++ b/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js
@@ -10,7 +10,7 @@ describe('Signup Form', () => {
helpText: 'some help text',
label: 'a label',
value: true,
- dataQaSelector: 'qa_selector',
+ dataTestId: 'test-id',
};
const mountComponent = () => {
@@ -55,7 +55,7 @@ describe('Signup Form', () => {
});
it('gets passed data qa selector', () => {
- expect(findCheckbox().attributes('data-qa-selector')).toBe(props.dataQaSelector);
+ expect(findCheckbox().attributes('data-testid')).toBe(props.dataTestId);
});
});
});
diff --git a/spec/frontend/analytics/cycle_analytics/mock_data.js b/spec/frontend/analytics/cycle_analytics/mock_data.js
index e0b6f4aa8c4..73387606433 100644
--- a/spec/frontend/analytics/cycle_analytics/mock_data.js
+++ b/spec/frontend/analytics/cycle_analytics/mock_data.js
@@ -159,12 +159,12 @@ export const stageMedians = {
};
export const formattedStageMedians = {
- issue: '2d',
- plan: '1d',
- review: '1w',
- code: '1d',
- test: '3d',
- staging: '4d',
+ issue: '2 days',
+ plan: '1 day',
+ review: '1 week',
+ code: '1 day',
+ test: '3 days',
+ staging: '4 days',
};
export const allowedStages = [issueStage, planStage, codeStage];
diff --git a/spec/frontend/analytics/cycle_analytics/store/actions_spec.js b/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
index c3551d3da6f..897d75573f0 100644
--- a/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
@@ -141,7 +141,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('without a selected stage', () => {
it('will select the first stage from the value stream', () => {
const [firstStage] = allowedStages;
- testAction({
+ return testAction({
action: actions.setInitialStage,
state,
payload: null,
@@ -154,7 +154,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('with no value stream stages available', () => {
it('will return SET_NO_ACCESS_ERROR', () => {
state = { ...state, stages: [] };
- testAction({
+ return testAction({
action: actions.setInitialStage,
state,
payload: null,
@@ -299,25 +299,23 @@ describe('Project Value Stream Analytics actions', () => {
name: 'mock default',
};
const mockValueStreams = [mockValueStream, selectedValueStream];
- it('with data, will set the first value stream', () => {
+ it('with data, will set the first value stream', () =>
testAction({
action: actions.receiveValueStreamsSuccess,
state,
payload: mockValueStreams,
expectedMutations: [{ type: 'RECEIVE_VALUE_STREAMS_SUCCESS', payload: mockValueStreams }],
expectedActions: [{ type: 'setSelectedValueStream', payload: mockValueStream }],
- });
- });
+ }));
- it('without data, will set the default value stream', () => {
+ it('without data, will set the default value stream', () =>
testAction({
action: actions.receiveValueStreamsSuccess,
state,
payload: [],
expectedMutations: [{ type: 'RECEIVE_VALUE_STREAMS_SUCCESS', payload: [] }],
expectedActions: [{ type: 'setSelectedValueStream', payload: selectedValueStream }],
- });
- });
+ }));
});
describe('fetchValueStreamStages', () => {
diff --git a/spec/frontend/analytics/cycle_analytics/utils_spec.js b/spec/frontend/analytics/cycle_analytics/utils_spec.js
index ab5d78bde51..5d2fcf97a76 100644
--- a/spec/frontend/analytics/cycle_analytics/utils_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/utils_spec.js
@@ -45,13 +45,13 @@ describe('Value stream analytics utils', () => {
describe('medianTimeToParsedSeconds', () => {
it.each`
value | result
- ${1036800} | ${'1w'}
- ${259200} | ${'3d'}
- ${172800} | ${'2d'}
- ${86400} | ${'1d'}
- ${1000} | ${'16m'}
- ${61} | ${'1m'}
- ${59} | ${'<1m'}
+ ${1036800} | ${'1 week'}
+ ${259200} | ${'3 days'}
+ ${172800} | ${'2 days'}
+ ${86400} | ${'1 day'}
+ ${1000} | ${'16 minutes'}
+ ${61} | ${'1 minute'}
+ ${59} | ${'<1 minute'}
${0} | ${'-'}
`('will correctly parse $value seconds into $result', ({ value, result }) => {
expect(medianTimeToParsedSeconds(value)).toBe(result);
diff --git a/spec/frontend/api/user_api_spec.js b/spec/frontend/api/user_api_spec.js
index a6e08e1cf4b..aeddf6b9ae1 100644
--- a/spec/frontend/api/user_api_spec.js
+++ b/spec/frontend/api/user_api_spec.js
@@ -4,6 +4,8 @@ import projects from 'test_fixtures/api/users/projects/get.json';
import followers from 'test_fixtures/api/users/followers/get.json';
import following from 'test_fixtures/api/users/following/get.json';
import {
+ getUsers,
+ getGroupUsers,
followUser,
unfollowUser,
associationsCount,
@@ -36,6 +38,32 @@ describe('~/api/user_api', () => {
axiosMock.resetHistory();
});
+ describe('getUsers', () => {
+ it('calls correct URL with expected query parameters', async () => {
+ const expectedUrl = '/api/v4/users.json';
+ axiosMock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK);
+
+ await getUsers('den', { without_project_bots: true });
+
+ const { url, params } = axiosMock.history.get[0];
+ expect(url).toBe(expectedUrl);
+ expect(params).toMatchObject({ search: 'den', without_project_bots: true });
+ });
+ });
+
+ describe('getGroupUsers', () => {
+ it('calls correct URL with expected query parameters', async () => {
+ const expectedUrl = '/api/v4/groups/34/users.json';
+ axiosMock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK);
+
+ await getGroupUsers('den', '34', { include_service_accounts: true });
+
+ const { url, params } = axiosMock.history.get[0];
+ expect(url).toBe(expectedUrl);
+ expect(params).toMatchObject({ search: 'den', include_service_accounts: true });
+ });
+ });
+
describe('followUser', () => {
it('calls correct URL and returns expected response', async () => {
const expectedUrl = '/api/v4/users/1/follow';
diff --git a/spec/frontend/authentication/password/components/password_input_spec.js b/spec/frontend/authentication/password/components/password_input_spec.js
index 5b2a9da993b..62438e824cf 100644
--- a/spec/frontend/authentication/password/components/password_input_spec.js
+++ b/spec/frontend/authentication/password/components/password_input_spec.js
@@ -9,7 +9,6 @@ describe('PasswordInput', () => {
title: 'This field is required',
id: 'new_user_password',
minimumPasswordLength: '8',
- qaSelector: 'new_user_password_field',
testid: 'new_user_password',
autocomplete: 'new-password',
name: 'new_user',
diff --git a/spec/frontend/badges/store/actions_spec.js b/spec/frontend/badges/store/actions_spec.js
index 5ca199357f9..1900ebc1e08 100644
--- a/spec/frontend/badges/store/actions_spec.js
+++ b/spec/frontend/badges/store/actions_spec.js
@@ -258,7 +258,7 @@ describe('Badges store actions', () => {
it('dispatches requestLoadBadges and receiveLoadBadges for successful response', async () => {
const dummyData = 'this is just some data';
- const dummyReponse = [
+ const dummyResponse = [
createDummyBadgeResponse(),
createDummyBadgeResponse(),
createDummyBadgeResponse(),
@@ -266,11 +266,11 @@ describe('Badges store actions', () => {
endpointMock.replyOnce(() => {
expect(dispatch.mock.calls).toEqual([['requestLoadBadges', dummyData]]);
dispatch.mockClear();
- return [HTTP_STATUS_OK, dummyReponse];
+ return [HTTP_STATUS_OK, dummyResponse];
});
await actions.loadBadges({ state, dispatch }, dummyData);
- const badges = dummyReponse.map(transformBackendBadge);
+ const badges = dummyResponse.map(transformBackendBadge);
expect(dispatch.mock.calls).toEqual([['receiveLoadBadges', badges]]);
});
@@ -377,15 +377,15 @@ describe('Badges store actions', () => {
});
it('dispatches requestRenderedBadge and receiveRenderedBadge for successful response', async () => {
- const dummyReponse = createDummyBadgeResponse();
+ const dummyResponse = createDummyBadgeResponse();
endpointMock.replyOnce(() => {
expect(dispatch.mock.calls).toEqual([['requestRenderedBadge']]);
dispatch.mockClear();
- return [HTTP_STATUS_OK, dummyReponse];
+ return [HTTP_STATUS_OK, dummyResponse];
});
await actions.renderBadge({ state, dispatch });
- const renderedBadge = transformBackendBadge(dummyReponse);
+ const renderedBadge = transformBackendBadge(dummyResponse);
expect(dispatch.mock.calls).toEqual([['receiveRenderedBadge', renderedBadge]]);
});
diff --git a/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js b/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
index ae7f5416c0c..6db99e796d6 100644
--- a/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
+++ b/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
@@ -30,14 +30,11 @@ describe('ShortcutsIssuable', () => {
</div>`,
);
document.querySelector('.js-new-note-form').classList.add('js-main-target-form');
-
- window.shortcut = new ShortcutsIssuable(true);
});
afterEach(() => {
$(FORM_SELECTOR).remove();
- delete window.shortcut;
resetHTMLFixture();
});
@@ -55,6 +52,15 @@ describe('ShortcutsIssuable', () => {
});
};
+ it('sets up commands on instantiation', () => {
+ const mockShortcutsInstance = { addAll: jest.fn() };
+
+ // eslint-disable-next-line no-new
+ new ShortcutsIssuable(mockShortcutsInstance);
+
+ expect(mockShortcutsInstance.addAll).toHaveBeenCalled();
+ });
+
describe('with empty selection', () => {
it('does not return an error', () => {
ShortcutsIssuable.replyWithSelectedText(true);
diff --git a/spec/frontend/shortcuts_spec.js b/spec/frontend/behaviors/shortcuts/shortcuts_spec.js
index ca72426cb44..5f71eb24758 100644
--- a/spec/frontend/shortcuts_spec.js
+++ b/spec/frontend/behaviors/shortcuts/shortcuts_spec.js
@@ -37,6 +37,16 @@ describe('Shortcuts', () => {
resetHTMLFixture();
});
+ it('does not allow subclassing', () => {
+ const createSubclass = () => {
+ class Subclass extends Shortcuts {}
+
+ return new Subclass();
+ };
+
+ expect(createSubclass).toThrow(/cannot be subclassed/);
+ });
+
describe('markdown shortcuts', () => {
let shortcutElements;
@@ -106,7 +116,6 @@ describe('Shortcuts', () => {
let event;
beforeEach(() => {
- window.gon.use_new_navigation = true;
event = new KeyboardEvent('keydown', { cancelable: true });
Shortcuts.focusSearch(event);
});
@@ -122,12 +131,12 @@ describe('Shortcuts', () => {
});
});
- describe('bindCommand(s)', () => {
- it('bindCommand calls Mousetrap.bind correctly', () => {
+ describe('adding shortcuts', () => {
+ it('add calls Mousetrap.bind correctly', () => {
const mockCommand = { defaultKeys: ['m'] };
const mockCallback = () => {};
- shortcuts.bindCommand(mockCommand, mockCallback);
+ shortcuts.add(mockCommand, mockCallback);
expect(Mousetrap.prototype.bind).toHaveBeenCalledTimes(1);
const [callArguments] = Mousetrap.prototype.bind.mock.calls;
@@ -135,13 +144,13 @@ describe('Shortcuts', () => {
expect(callArguments[1]).toBe(mockCallback);
});
- it('bindCommands calls Mousetrap.bind correctly', () => {
+ it('addAll calls Mousetrap.bind correctly', () => {
const mockCommandsAndCallbacks = [
[{ defaultKeys: ['1'] }, () => {}],
[{ defaultKeys: ['2'] }, () => {}],
];
- shortcuts.bindCommands(mockCommandsAndCallbacks);
+ shortcuts.addAll(mockCommandsAndCallbacks);
expect(Mousetrap.prototype.bind).toHaveBeenCalledTimes(mockCommandsAndCallbacks.length);
const { calls } = Mousetrap.prototype.bind.mock;
@@ -152,4 +161,107 @@ describe('Shortcuts', () => {
});
});
});
+
+ describe('addExtension', () => {
+ it('instantiates the given extension', () => {
+ const MockExtension = jest.fn();
+
+ const returnValue = shortcuts.addExtension(MockExtension, ['foo']);
+
+ expect(MockExtension).toHaveBeenCalledTimes(1);
+ expect(MockExtension).toHaveBeenCalledWith(shortcuts, 'foo');
+ expect(returnValue).toBe(MockExtension.mock.instances[0]);
+ });
+
+ it('instantiates declared dependencies', () => {
+ const MockDependency = jest.fn();
+ const MockExtension = jest.fn();
+
+ MockExtension.dependencies = [MockDependency];
+
+ const returnValue = shortcuts.addExtension(MockExtension, ['foo']);
+
+ expect(MockDependency).toHaveBeenCalledTimes(1);
+ expect(MockDependency.mock.instances).toHaveLength(1);
+ expect(MockDependency).toHaveBeenCalledWith(shortcuts);
+
+ expect(returnValue).toBe(MockExtension.mock.instances[0]);
+ });
+
+ it('does not instantiate an extension more than once', () => {
+ const MockExtension = jest.fn();
+
+ const returnValue = shortcuts.addExtension(MockExtension, ['foo']);
+ const secondReturnValue = shortcuts.addExtension(MockExtension, ['bar']);
+
+ expect(MockExtension).toHaveBeenCalledTimes(1);
+ expect(MockExtension).toHaveBeenCalledWith(shortcuts, 'foo');
+ expect(returnValue).toBe(MockExtension.mock.instances[0]);
+ expect(secondReturnValue).toBe(MockExtension.mock.instances[0]);
+ });
+
+ it('allows extensions to redundantly depend on Shortcuts', () => {
+ const MockExtension = jest.fn();
+ MockExtension.dependencies = [Shortcuts];
+
+ shortcuts.addExtension(MockExtension);
+
+ expect(MockExtension).toHaveBeenCalledTimes(1);
+ expect(MockExtension).toHaveBeenCalledWith(shortcuts);
+
+ // Ensure it wasn't instantiated
+ expect(shortcuts.extensions.has(Shortcuts)).toBe(false);
+ });
+
+ it('allows extensions to incorrectly depend on themselves', () => {
+ const A = jest.fn();
+ A.dependencies = [A];
+ shortcuts.addExtension(A);
+ expect(A).toHaveBeenCalledTimes(1);
+ expect(A).toHaveBeenCalledWith(shortcuts);
+ });
+
+ it('handles extensions with circular dependencies', () => {
+ const A = jest.fn();
+ const B = jest.fn();
+ const C = jest.fn();
+
+ A.dependencies = [B];
+ B.dependencies = [C];
+ C.dependencies = [A];
+
+ shortcuts.addExtension(A);
+
+ expect(A).toHaveBeenCalledTimes(1);
+ expect(B).toHaveBeenCalledTimes(1);
+ expect(C).toHaveBeenCalledTimes(1);
+ });
+
+ it('handles complex (diamond) dependency graphs', () => {
+ const X = jest.fn();
+ const A = jest.fn();
+ const C = jest.fn();
+ const D = jest.fn();
+ const E = jest.fn();
+
+ // Form this dependency graph:
+ //
+ // X ───► A ───► C
+ // │ ▲
+ // └────► D ─────┘
+ // │
+ // └────► E
+ X.dependencies = [A, D];
+ A.dependencies = [C];
+ D.dependencies = [C, E];
+
+ shortcuts.addExtension(X);
+
+ expect(X).toHaveBeenCalledTimes(1);
+ expect(A).toHaveBeenCalledTimes(1);
+ expect(C).toHaveBeenCalledTimes(1);
+ expect(D).toHaveBeenCalledTimes(1);
+ expect(E).toHaveBeenCalledTimes(1);
+ });
+ });
});
diff --git a/spec/frontend/blob_edit/edit_blob_spec.js b/spec/frontend/blob_edit/edit_blob_spec.js
index e58ad4040a9..31be1a86de4 100644
--- a/spec/frontend/blob_edit/edit_blob_spec.js
+++ b/spec/frontend/blob_edit/edit_blob_spec.js
@@ -6,6 +6,7 @@ import EditBlob from '~/blob_edit/edit_blob';
import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base';
import { FileTemplateExtension } from '~/editor/extensions/source_editor_file_template_ext';
import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markdown_ext';
+import { SecurityPolicySchemaExtension } from '~/editor/extensions/source_editor_security_policy_schema_ext';
import { EditorMarkdownPreviewExtension } from '~/editor/extensions/source_editor_markdown_livepreview_ext';
import { ToolbarExtension } from '~/editor/extensions/source_editor_toolbar_ext';
import SourceEditor from '~/editor/source_editor';
@@ -17,6 +18,7 @@ jest.mock('~/editor/extensions/source_editor_file_template_ext');
jest.mock('~/editor/extensions/source_editor_markdown_ext');
jest.mock('~/editor/extensions/source_editor_markdown_livepreview_ext');
jest.mock('~/editor/extensions/source_editor_toolbar_ext');
+jest.mock('~/editor/extensions/source_editor_security_policy_schema_ext');
const PREVIEW_MARKDOWN_PATH = '/foo/bar/preview_markdown';
const defaultExtensions = [
@@ -67,16 +69,18 @@ describe('Blob Editing', () => {
resetHTMLFixture();
});
- const editorInst = (isMarkdown) => {
+ const editorInst = ({ isMarkdown = false, isSecurityPolicy = false }) => {
blobInstance = new EditBlob({
isMarkdown,
previewMarkdownPath: PREVIEW_MARKDOWN_PATH,
+ filePath: isSecurityPolicy ? '.gitlab/security-policies/policy.yml' : '',
+ projectPath: 'path/to/project',
});
return blobInstance;
};
- const initEditor = async (isMarkdown = false) => {
- editorInst(isMarkdown);
+ const initEditor = async ({ isMarkdown = false, isSecurityPolicy = false } = {}) => {
+ editorInst({ isMarkdown, isSecurityPolicy });
await waitForPromises();
};
@@ -93,13 +97,13 @@ describe('Blob Editing', () => {
});
it('loads MarkdownExtension only for the markdown files', async () => {
- await initEditor(true);
+ await initEditor({ isMarkdown: true });
expect(useMock).toHaveBeenCalledTimes(2);
expect(useMock.mock.calls[1]).toEqual([markdownExtensions]);
});
it('correctly handles switching from markdown and un-uses markdown extensions', async () => {
- await initEditor(true);
+ await initEditor({ isMarkdown: true });
expect(unuseMock).not.toHaveBeenCalled();
await emitter.fire({ newLanguage: 'plaintext', oldLanguage: 'markdown' });
expect(unuseMock).toHaveBeenCalledWith(markdownExtensions);
@@ -115,6 +119,19 @@ describe('Blob Editing', () => {
});
});
+ describe('Security Policy Yaml', () => {
+ it('does not load SecurityPolicySchemaExtension by default', async () => {
+ await initEditor();
+ expect(SecurityPolicySchemaExtension).not.toHaveBeenCalled();
+ });
+
+ it('loads SecurityPolicySchemaExtension only for the security policies yml', async () => {
+ await initEditor({ isSecurityPolicy: true });
+ expect(useMock).toHaveBeenCalledTimes(2);
+ expect(useMock.mock.calls[1]).toEqual([[{ definition: SecurityPolicySchemaExtension }]]);
+ });
+ });
+
describe('correctly handles toggling the live-preview panel for different file types', () => {
it.each`
desc | isMarkdown | isPreviewOpened | tabToClick | shouldOpenPreview | shouldClosePreview | expectedDesc
@@ -142,7 +159,7 @@ describe('Blob Editing', () => {
},
},
});
- await initEditor(isMarkdown);
+ await initEditor({ isMarkdown });
blobInstance.markdownLivePreviewOpened = isPreviewOpened;
const elToClick = document.querySelector(`a[href='${tabToClick}']`);
elToClick.dispatchEvent(new Event('click'));
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index c70e461da83..8f2752b6bd8 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -2,8 +2,6 @@ import { GlLabel, GlLoadingIcon } from '@gitlab/ui';
import { range } from 'lodash';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
@@ -13,15 +11,13 @@ import BoardCardInner from '~/boards/components/board_card_inner.vue';
import isShowingLabelsQuery from '~/graphql_shared/client/is_showing_labels.query.graphql';
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
import eventHub from '~/boards/eventhub';
-import defaultStore from '~/boards/stores';
import { TYPE_ISSUE } from '~/issues/constants';
import { updateHistory } from '~/lib/utils/url_utility';
-import { mockLabelList, mockIssue, mockIssueFullPath } from './mock_data';
+import { mockLabelList, mockIssue, mockIssueFullPath, mockIssueDirectNamespace } from './mock_data';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/boards/eventhub');
-Vue.use(Vuex);
Vue.use(VueApollo);
describe('Board card component', () => {
@@ -43,24 +39,12 @@ describe('Board card component', () => {
let wrapper;
let issue;
let list;
- let store;
const findIssuableBlockedIcon = () => wrapper.findComponent(IssuableBlockedIcon);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findHiddenIssueIcon = () => wrapper.findByTestId('hidden-icon');
const findWorkItemIcon = () => wrapper.findComponent(WorkItemTypeIcon);
- const performSearchMock = jest.fn();
-
- const createStore = () => {
- store = new Vuex.Store({
- actions: {
- performSearch: performSearchMock,
- },
- state: defaultStore.state,
- });
- };
-
const mockApollo = createMockApollo();
const createWrapper = ({ props = {}, isGroupBoard = true } = {}) => {
@@ -72,7 +56,6 @@ describe('Board card component', () => {
});
wrapper = mountExtended(BoardCardInner, {
- store,
apolloProvider: mockApollo,
propsData: {
list,
@@ -94,7 +77,6 @@ describe('Board card component', () => {
allowSubEpics: false,
issuableType: TYPE_ISSUE,
isGroupBoard,
- isApolloBoard: false,
},
});
};
@@ -108,14 +90,9 @@ describe('Board card component', () => {
weight: 1,
};
- createStore();
createWrapper({ props: { item: issue, list } });
});
- afterEach(() => {
- store = null;
- });
-
it('renders issue title', () => {
expect(wrapper.find('.board-card-title').text()).toContain(issue.title);
});
@@ -159,14 +136,15 @@ describe('Board card component', () => {
});
it('does not render item reference path', () => {
- createStore();
createWrapper({ isGroupBoard: false });
- expect(wrapper.find('.board-card-number').text()).not.toContain(mockIssueFullPath);
+ expect(wrapper.find('.board-card-number').text()).not.toContain(mockIssueDirectNamespace);
+ expect(wrapper.find('.board-item-path').exists()).toBe(false);
});
- it('renders item reference path', () => {
- expect(wrapper.find('.board-card-number').text()).toContain(mockIssueFullPath);
+ it('renders item direct namespace path with full reference path in a tooltip', () => {
+ expect(wrapper.find('.board-item-path').text()).toBe(mockIssueDirectNamespace);
+ expect(wrapper.find('.board-item-path').attributes('title')).toBe(mockIssueFullPath);
});
describe('blocked', () => {
@@ -458,10 +436,6 @@ describe('Board card component', () => {
expect(updateHistory).toHaveBeenCalledTimes(1);
});
- it('dispatches performSearch vuex action', () => {
- expect(performSearchMock).toHaveBeenCalledTimes(1);
- });
-
it('emits updateTokens event', () => {
expect(eventHub.$emit).toHaveBeenCalledTimes(1);
expect(eventHub.$emit).toHaveBeenCalledWith('updateTokens');
@@ -478,10 +452,6 @@ describe('Board card component', () => {
expect(updateHistory).not.toHaveBeenCalled();
});
- it('does not dispatch performSearch vuex action', () => {
- expect(performSearchMock).not.toHaveBeenCalled();
- });
-
it('does not emit updateTokens event', () => {
expect(eventHub.$emit).not.toHaveBeenCalled();
});
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index 5bafd9a8d0e..e3afd2dec2f 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -1,34 +1,22 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import BoardCard from '~/boards/components/board_card.vue';
import BoardList from '~/boards/components/board_list.vue';
import BoardNewIssue from '~/boards/components/board_new_issue.vue';
import BoardNewItem from '~/boards/components/board_new_item.vue';
-import defaultState from '~/boards/stores/state';
+
import createMockApollo from 'helpers/mock_apollo_helper';
import listQuery from 'ee_else_ce/boards/graphql/board_lists_deferred.query.graphql';
-import {
- mockList,
- mockIssuesByListId,
- issues,
- mockGroupProjects,
- boardListQueryResponse,
-} from './mock_data';
+import { mockList, boardListQueryResponse } from './mock_data';
export default function createComponent({
- listIssueProps = {},
componentProps = {},
listProps = {},
apolloQueryHandlers = [],
- actions = {},
- getters = {},
provide = {},
data = {},
- state = defaultState,
stubs = {
BoardNewIssue,
BoardNewItem,
@@ -37,60 +25,25 @@ export default function createComponent({
issuesCount,
} = {}) {
Vue.use(VueApollo);
- Vue.use(Vuex);
const fakeApollo = createMockApollo([
[listQuery, jest.fn().mockResolvedValue(boardListQueryResponse({ issuesCount }))],
...apolloQueryHandlers,
]);
- const store = new Vuex.Store({
- state: {
- selectedProject: mockGroupProjects[0],
- boardItemsByListId: mockIssuesByListId,
- boardItems: issues,
- pageInfoByListId: {
- 'gid://gitlab/List/1': { hasNextPage: true },
- 'gid://gitlab/List/2': {},
- },
- listsFlags: {
- 'gid://gitlab/List/1': {},
- 'gid://gitlab/List/2': {},
- },
- selectedBoardItems: [],
- ...state,
- },
- getters: {
- isEpicBoard: () => false,
- ...getters,
- },
- actions,
- });
-
const list = {
...mockList,
...listProps,
};
- const issue = {
- title: 'Testing',
- id: 1,
- iid: 1,
- confidential: false,
- referencePath: 'gitlab-org/test-subgroup/gitlab-test#1',
- labels: [],
- assignees: [],
- ...listIssueProps,
- };
+
if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesCount')) {
list.issuesCount = 1;
}
const component = shallowMount(BoardList, {
apolloProvider: fakeApollo,
- store,
propsData: {
list,
- boardItems: [issue],
canAdminList: true,
boardId: 'gid://gitlab/Board/1',
filterParams: {},
@@ -106,12 +59,12 @@ export default function createComponent({
canAdminList: true,
isIssueBoard: true,
isEpicBoard: false,
- isGroupBoard: false,
- isProjectBoard: true,
+ isGroupBoard: true,
+ isProjectBoard: false,
disabled: false,
boardType: 'group',
issuableType: 'issue',
- isApolloBoard: false,
+ isApolloBoard: true,
...provide,
},
stubs,
@@ -122,7 +75,5 @@ export default function createComponent({
},
});
- jest.spyOn(store, 'dispatch').mockImplementation(() => {});
-
return component;
}
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index 30bb4fba4e3..8d59cb2692e 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -8,8 +8,9 @@ import createComponent from 'jest/boards/board_list_helper';
import BoardCard from '~/boards/components/board_card.vue';
import eventHub from '~/boards/eventhub';
import BoardCardMoveToPosition from '~/boards/components/board_card_move_to_position.vue';
+import listIssuesQuery from '~/boards/graphql/lists_issues.query.graphql';
-import { mockIssues, mockList, mockIssuesMore } from './mock_data';
+import { mockIssues, mockList, mockIssuesMore, mockGroupIssuesResponse } from './mock_data';
describe('Board list component', () => {
let wrapper;
@@ -41,8 +42,13 @@ describe('Board list component', () => {
useFakeRequestAnimationFrame();
describe('When Expanded', () => {
- beforeEach(() => {
- wrapper = createComponent({ issuesCount: 1 });
+ beforeEach(async () => {
+ wrapper = createComponent({
+ apolloQueryHandlers: [
+ [listIssuesQuery, jest.fn().mockResolvedValue(mockGroupIssuesResponse())],
+ ],
+ });
+ await waitForPromises();
});
it('renders component', () => {
@@ -62,7 +68,7 @@ describe('Board list component', () => {
});
it('sets data attribute with issue id', () => {
- expect(wrapper.find('.board-card').attributes('data-item-id')).toBe('1');
+ expect(wrapper.find('.board-card').attributes('data-item-id')).toBe('gid://gitlab/Issue/436');
});
it('shows new issue form after eventhub event', async () => {
@@ -107,19 +113,18 @@ describe('Board list component', () => {
describe('load more issues', () => {
describe('when loading is not in progress', () => {
- beforeEach(() => {
+ beforeEach(async () => {
wrapper = createComponent({
- listProps: {
- id: 'gid://gitlab/List/1',
- },
- componentProps: {
- boardItems: mockIssuesMore,
- },
- actions: {
- fetchItemsForList: jest.fn(),
- },
- state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: false } } },
+ apolloQueryHandlers: [
+ [
+ listIssuesQuery,
+ jest
+ .fn()
+ .mockResolvedValue(mockGroupIssuesResponse('gid://gitlab/List/1', mockIssuesMore)),
+ ],
+ ],
});
+ await waitForPromises();
});
it('has intersection observer when the number of board list items are more than 5', () => {
diff --git a/spec/frontend/boards/components/board_add_new_column_spec.js b/spec/frontend/boards/components/board_add_new_column_spec.js
index 1a847d35900..768a93f6970 100644
--- a/spec/frontend/boards/components/board_add_new_column_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_spec.js
@@ -1,14 +1,11 @@
import { GlCollapsibleListbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumn from '~/boards/components/board_add_new_column.vue';
import BoardAddNewColumnForm from '~/boards/components/board_add_new_column_form.vue';
-import defaultState from '~/boards/stores/state';
import createBoardListMutation from 'ee_else_ce/boards/graphql/board_list_create.mutation.graphql';
import boardLabelsQuery from '~/boards/graphql/board_labels.query.graphql';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
@@ -19,7 +16,6 @@ import {
boardListsQueryResponse,
} from '../mock_data';
-Vue.use(Vuex);
Vue.use(VueApollo);
describe('BoardAddNewColumn', () => {
@@ -39,22 +35,8 @@ describe('BoardAddNewColumn', () => {
findDropdown().vm.$emit('select', id);
};
- const createStore = ({ actions = {}, getters = {}, state = {} } = {}) => {
- return new Vuex.Store({
- state: {
- ...defaultState,
- ...state,
- },
- actions,
- getters,
- });
- };
-
const mountComponent = ({
selectedId,
- labels = [],
- getListByLabelId = jest.fn(),
- actions = {},
provide = {},
lists = {},
labelsHandler = labelsQueryHandler,
@@ -83,26 +65,12 @@ describe('BoardAddNewColumn', () => {
selectedId,
};
},
- store: createStore({
- actions: {
- fetchLabels: jest.fn(),
- ...actions,
- },
- getters: {
- getListByLabelId: () => getListByLabelId,
- },
- state: {
- labels,
- labelsLoading: false,
- },
- }),
provide: {
scopedLabelsAvailable: true,
isEpicBoard: false,
issuableType: 'issue',
fullPath: 'gitlab-org/gitlab',
boardType: 'project',
- isApolloBoard: false,
...provide,
},
stubs: {
@@ -126,149 +94,94 @@ describe('BoardAddNewColumn', () => {
cacheUpdates.setError = jest.fn();
});
- describe('Add list button', () => {
- it('calls addList', async () => {
- const getListByLabelId = jest.fn().mockReturnValue(null);
- const highlightList = jest.fn();
- const createList = jest.fn();
+ describe('when list is new', () => {
+ beforeEach(() => {
+ mountComponent({ selectedId: mockLabelList.label.id });
+ });
- mountComponent({
- labels: [mockLabelList.label],
- selectedId: mockLabelList.label.id,
- getListByLabelId,
- actions: {
- createList,
- highlightList,
- },
- });
+ it('fetches labels and adds list', async () => {
+ findDropdown().vm.$emit('show');
+
+ await nextTick();
+ expect(labelsQueryHandler).toHaveBeenCalled();
+
+ selectLabel(mockLabelList.label.id);
findAddNewColumnForm().vm.$emit('add-list');
await nextTick();
- expect(highlightList).not.toHaveBeenCalled();
- expect(createList).toHaveBeenCalledWith(expect.anything(), {
+ expect(wrapper.emitted('highlight-list')).toBeUndefined();
+ expect(createBoardListQueryHandler).toHaveBeenCalledWith({
labelId: mockLabelList.label.id,
+ boardId: 'gid://gitlab/Board/1',
});
});
+ });
- it('highlights existing list if trying to re-add', async () => {
- const getListByLabelId = jest.fn().mockReturnValue(mockLabelList);
- const highlightList = jest.fn();
- const createList = jest.fn();
-
+ describe('when list already exists in board', () => {
+ beforeEach(() => {
mountComponent({
- labels: [mockLabelList.label],
- selectedId: mockLabelList.label.id,
- getListByLabelId,
- actions: {
- createList,
- highlightList,
+ lists: {
+ [mockLabelList.id]: mockLabelList,
},
+ selectedId: mockLabelList.label.id,
});
-
- findAddNewColumnForm().vm.$emit('add-list');
-
- await nextTick();
-
- expect(highlightList).toHaveBeenCalledWith(expect.anything(), mockLabelList.id);
- expect(createList).not.toHaveBeenCalled();
});
- });
- describe('Apollo boards', () => {
- describe('when list is new', () => {
- beforeEach(() => {
- mountComponent({ selectedId: mockLabelList.label.id, provide: { isApolloBoard: true } });
- });
-
- it('fetches labels and adds list', async () => {
- findDropdown().vm.$emit('show');
+ it('highlights existing list if trying to re-add', async () => {
+ findDropdown().vm.$emit('show');
- await nextTick();
- expect(labelsQueryHandler).toHaveBeenCalled();
+ await nextTick();
+ expect(labelsQueryHandler).toHaveBeenCalled();
- selectLabel(mockLabelList.label.id);
+ selectLabel(mockLabelList.label.id);
- findAddNewColumnForm().vm.$emit('add-list');
+ findAddNewColumnForm().vm.$emit('add-list');
- await nextTick();
+ await waitForPromises();
- expect(wrapper.emitted('highlight-list')).toBeUndefined();
- expect(createBoardListQueryHandler).toHaveBeenCalledWith({
- labelId: mockLabelList.label.id,
- boardId: 'gid://gitlab/Board/1',
- });
- });
+ expect(wrapper.emitted('highlight-list')).toEqual([[mockLabelList.id]]);
+ expect(createBoardListQueryHandler).not.toHaveBeenCalledWith();
});
+ });
- describe('when list already exists in board', () => {
- beforeEach(() => {
- mountComponent({
- lists: {
- [mockLabelList.id]: mockLabelList,
- },
- selectedId: mockLabelList.label.id,
- provide: { isApolloBoard: true },
- });
- });
-
- it('highlights existing list if trying to re-add', async () => {
- findDropdown().vm.$emit('show');
-
- await nextTick();
- expect(labelsQueryHandler).toHaveBeenCalled();
-
- selectLabel(mockLabelList.label.id);
-
- findAddNewColumnForm().vm.$emit('add-list');
-
- await waitForPromises();
-
- expect(wrapper.emitted('highlight-list')).toEqual([[mockLabelList.id]]);
- expect(createBoardListQueryHandler).not.toHaveBeenCalledWith();
+ describe('when fetch labels query fails', () => {
+ beforeEach(() => {
+ mountComponent({
+ labelsHandler: labelsQueryHandlerFailure,
});
});
- describe('when fetch labels query fails', () => {
- beforeEach(() => {
- mountComponent({
- provide: { isApolloBoard: true },
- labelsHandler: labelsQueryHandlerFailure,
- });
- });
+ it('sets error', async () => {
+ findDropdown().vm.$emit('show');
- it('sets error', async () => {
- findDropdown().vm.$emit('show');
-
- await waitForPromises();
- expect(cacheUpdates.setError).toHaveBeenCalled();
- });
+ await waitForPromises();
+ expect(cacheUpdates.setError).toHaveBeenCalled();
});
+ });
- describe('when create list mutation fails', () => {
- beforeEach(() => {
- mountComponent({
- selectedId: mockLabelList.label.id,
- provide: { isApolloBoard: true },
- createHandler: createBoardListQueryHandlerFailure,
- });
+ describe('when create list mutation fails', () => {
+ beforeEach(() => {
+ mountComponent({
+ selectedId: mockLabelList.label.id,
+ createHandler: createBoardListQueryHandlerFailure,
});
+ });
- it('sets error', async () => {
- findDropdown().vm.$emit('show');
+ it('sets error', async () => {
+ findDropdown().vm.$emit('show');
- await nextTick();
- expect(labelsQueryHandler).toHaveBeenCalled();
+ await nextTick();
+ expect(labelsQueryHandler).toHaveBeenCalled();
- selectLabel(mockLabelList.label.id);
+ selectLabel(mockLabelList.label.id);
- findAddNewColumnForm().vm.$emit('add-list');
+ findAddNewColumnForm().vm.$emit('add-list');
- await waitForPromises();
+ await waitForPromises();
- expect(cacheUpdates.setError).toHaveBeenCalled();
- });
+ expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/boards/components/board_app_spec.js b/spec/frontend/boards/components/board_app_spec.js
index b16f9b26f40..157c76b4fff 100644
--- a/spec/frontend/boards/components/board_app_spec.js
+++ b/spec/frontend/boards/components/board_app_spec.js
@@ -1,8 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -15,34 +13,15 @@ import { rawIssue, boardListsQueryResponse } from '../mock_data';
describe('BoardApp', () => {
let wrapper;
- let store;
let mockApollo;
const errorMessage = 'Failed to fetch lists';
const boardListQueryHandler = jest.fn().mockResolvedValue(boardListsQueryResponse);
const boardListQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
- Vue.use(Vuex);
Vue.use(VueApollo);
- const createStore = ({ mockGetters = {} } = {}) => {
- store = new Vuex.Store({
- state: {},
- actions: {
- performSearch: jest.fn(),
- },
- getters: {
- isSidebarOpen: () => true,
- ...mockGetters,
- },
- });
- };
-
- const createComponent = ({
- isApolloBoard = false,
- issue = rawIssue,
- handler = boardListQueryHandler,
- } = {}) => {
+ const createComponent = ({ issue = rawIssue, handler = boardListQueryHandler } = {}) => {
mockApollo = createMockApollo([[boardListsQuery, handler]]);
mockApollo.clients.defaultClient.cache.writeQuery({
query: activeBoardItemQuery,
@@ -53,7 +32,6 @@ describe('BoardApp', () => {
wrapper = shallowMount(BoardApp, {
apolloProvider: mockApollo,
- store,
provide: {
fullPath: 'gitlab-org',
initialBoardId: 'gid://gitlab/Board/1',
@@ -62,69 +40,46 @@ describe('BoardApp', () => {
boardType: 'group',
isIssueBoard: true,
isGroupBoard: true,
- isApolloBoard,
},
});
};
- beforeEach(() => {
+ beforeEach(async () => {
cacheUpdates.setError = jest.fn();
- });
- afterEach(() => {
- store = null;
+ createComponent({ isApolloBoard: true });
+ await nextTick();
});
- it("should have 'is-compact' class when sidebar is open", () => {
- createStore();
- createComponent();
+ it('fetches lists', () => {
+ expect(boardListQueryHandler).toHaveBeenCalled();
+ });
+ it('should have is-compact class when a card is selected', () => {
expect(wrapper.classes()).toContain('is-compact');
});
- it("should not have 'is-compact' class when sidebar is closed", () => {
- createStore({ mockGetters: { isSidebarOpen: () => false } });
- createComponent();
+ it('should not have is-compact class when no card is selected', async () => {
+ createComponent({ isApolloBoard: true, issue: {} });
+ await nextTick();
expect(wrapper.classes()).not.toContain('is-compact');
});
- describe('Apollo boards', () => {
- beforeEach(async () => {
- createComponent({ isApolloBoard: true });
- await nextTick();
- });
+ it('refetches lists when updateBoard event is received', async () => {
+ jest.spyOn(eventHub, '$on').mockImplementation(() => {});
- it('fetches lists', () => {
- expect(boardListQueryHandler).toHaveBeenCalled();
- });
+ createComponent({ isApolloBoard: true });
+ await waitForPromises();
- it('should have is-compact class when a card is selected', () => {
- expect(wrapper.classes()).toContain('is-compact');
- });
-
- it('should not have is-compact class when no card is selected', async () => {
- createComponent({ isApolloBoard: true, issue: {} });
- await nextTick();
-
- expect(wrapper.classes()).not.toContain('is-compact');
- });
-
- it('refetches lists when updateBoard event is received', async () => {
- jest.spyOn(eventHub, '$on').mockImplementation(() => {});
-
- createComponent({ isApolloBoard: true });
- await waitForPromises();
-
- expect(eventHub.$on).toHaveBeenCalledWith('updateBoard', wrapper.vm.refetchLists);
- });
+ expect(eventHub.$on).toHaveBeenCalledWith('updateBoard', wrapper.vm.refetchLists);
+ });
- it('sets error on fetch lists failure', async () => {
- createComponent({ isApolloBoard: true, handler: boardListQueryHandlerFailure });
+ it('sets error on fetch lists failure', async () => {
+ createComponent({ isApolloBoard: true, handler: boardListQueryHandlerFailure });
- await waitForPromises();
+ await waitForPromises();
- expect(cacheUpdates.setError).toHaveBeenCalled();
- });
+ expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/boards/components/board_card_move_to_position_spec.js b/spec/frontend/boards/components/board_card_move_to_position_spec.js
index 20beaf2e9bd..d3c43a4e054 100644
--- a/spec/frontend/boards/components/board_card_move_to_position_spec.js
+++ b/spec/frontend/boards/components/board_card_move_to_position_spec.js
@@ -8,7 +8,7 @@ import {
BOARD_CARD_MOVE_TO_POSITIONS_END_OPTION,
} from '~/boards/constants';
import BoardCardMoveToPosition from '~/boards/components/board_card_move_to_position.vue';
-import { mockList, mockIssue2, mockIssue, mockIssue3, mockIssue4 } from 'jest/boards/mock_data';
+import { mockList, mockIssue2 } from 'jest/boards/mock_data';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
Vue.use(Vuex);
@@ -28,30 +28,8 @@ describe('Board Card Move to position', () => {
let wrapper;
let trackingSpy;
let store;
- let dispatch;
const itemIndex = 1;
- const createStoreOptions = () => {
- const state = {
- pageInfoByListId: {
- 'gid://gitlab/List/1': {},
- 'gid://gitlab/List/2': { hasNextPage: true },
- },
- };
- const getters = {
- getBoardItemsByList: () => () => [mockIssue, mockIssue2, mockIssue3, mockIssue4],
- };
- const actions = {
- moveItem: jest.fn(),
- };
-
- return {
- state,
- getters,
- actions,
- };
- };
-
const createComponent = (propsData, isApolloBoard = false) => {
wrapper = shallowMount(BoardCardMoveToPosition, {
store,
@@ -73,7 +51,6 @@ describe('Board Card Move to position', () => {
};
beforeEach(() => {
- store = new Vuex.Store(createStoreOptions());
createComponent();
});
@@ -97,50 +74,6 @@ describe('Board Card Move to position', () => {
describe('Dropdown options', () => {
beforeEach(() => {
- createComponent({ index: itemIndex });
- trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- dispatch = jest.spyOn(store, 'dispatch').mockImplementation(() => {});
- });
-
- afterEach(() => {
- unmockTracking();
- });
-
- it.each`
- dropdownIndex | dropdownItem | trackLabel | positionInList
- ${0} | ${dropdownOptions[0]} | ${'move_to_start'} | ${0}
- ${1} | ${dropdownOptions[1]} | ${'move_to_end'} | ${-1}
- `(
- 'on click of dropdown index $dropdownIndex with label $dropdownLabel should call moveItem action with tracking label $trackLabel',
- async ({ dropdownIndex, dropdownItem, trackLabel, positionInList }) => {
- await findMoveToPositionDropdown().vm.$emit('shown');
-
- expect(findDropdownItemAtIndex(dropdownIndex).text()).toBe(dropdownItem.text);
-
- await findMoveToPositionDropdown().vm.$emit('action', dropdownItem);
-
- expect(trackingSpy).toHaveBeenCalledWith('boards:list', 'click_toggle_button', {
- category: 'boards:list',
- label: trackLabel,
- property: 'type_card',
- });
-
- expect(dispatch).toHaveBeenCalledWith('moveItem', {
- fromListId: mockList.id,
- itemId: mockIssue2.id,
- itemIid: mockIssue2.iid,
- itemPath: mockIssue2.referencePath,
- positionInList,
- toListId: mockList.id,
- allItemsLoadedInList: true,
- atIndex: itemIndex,
- });
- },
- );
- });
-
- describe('Apollo boards', () => {
- beforeEach(() => {
createComponent({ index: itemIndex }, true);
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
});
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index 11f9a4f6ff2..dae0db27104 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -1,7 +1,5 @@
import { GlLabel } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
@@ -9,17 +7,14 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardCard from '~/boards/components/board_card.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
-import { inactiveId } from '~/boards/constants';
import selectedBoardItemsQuery from '~/boards/graphql/client/selected_board_items.query.graphql';
+import activeBoardItemQuery from '~/boards/graphql/client/active_board_item.query.graphql';
import isShowingLabelsQuery from '~/graphql_shared/client/is_showing_labels.query.graphql';
import { mockLabelList, mockIssue, DEFAULT_COLOR } from '../mock_data';
describe('Board card', () => {
let wrapper;
- let store;
- let mockActions;
- Vue.use(Vuex);
Vue.use(VueApollo);
const mockSetActiveBoardItemResolver = jest.fn();
@@ -31,23 +26,6 @@ describe('Board card', () => {
},
});
- const createStore = ({ initialState = {} } = {}) => {
- mockActions = {
- toggleBoardItem: jest.fn(),
- toggleBoardItemMultiSelection: jest.fn(),
- performSearch: jest.fn(),
- };
-
- store = new Vuex.Store({
- state: {
- activeId: inactiveId,
- selectedBoardItems: [],
- ...initialState,
- },
- actions: mockActions,
- });
- };
-
// this particular mount component needs to be used after the root beforeEach because it depends on list being initialized
const mountComponent = ({
propsData = {},
@@ -55,6 +33,7 @@ describe('Board card', () => {
stubs = { BoardCardInner },
item = mockIssue,
selectedBoardItems = [],
+ activeBoardItem = {},
} = {}) => {
mockApollo.clients.defaultClient.cache.writeQuery({
query: isShowingLabelsQuery,
@@ -68,6 +47,12 @@ describe('Board card', () => {
selectedBoardItems,
},
});
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: activeBoardItemQuery,
+ data: {
+ activeBoardItem,
+ },
+ });
wrapper = shallowMountExtended(BoardCard, {
apolloProvider: mockApollo,
@@ -75,7 +60,6 @@ describe('Board card', () => {
...stubs,
BoardCardInner,
},
- store,
propsData: {
list: mockLabelList,
item,
@@ -92,7 +76,7 @@ describe('Board card', () => {
isGroupBoard: true,
disabled: false,
allowSubEpics: false,
- isApolloBoard: false,
+ isApolloBoard: true,
...provide,
},
});
@@ -112,47 +96,32 @@ describe('Board card', () => {
window.gon = { features: {} };
});
- afterEach(() => {
- store = null;
- });
-
describe('when GlLabel is clicked in BoardCardInner', () => {
- it('doesnt call toggleBoardItem', () => {
- createStore();
+ it("doesn't call setSelectedBoardItemsMutation", () => {
mountComponent();
wrapper.findComponent(GlLabel).trigger('mouseup');
- expect(mockActions.toggleBoardItem).toHaveBeenCalledTimes(0);
+ expect(mockSetSelectedBoardItemsResolver).toHaveBeenCalledTimes(0);
});
});
it('should not highlight the card by default', () => {
- createStore();
mountComponent();
expect(wrapper.classes()).not.toContain('is-active');
expect(wrapper.classes()).not.toContain('multi-select');
});
- it('should highlight the card with a correct style when selected', () => {
- createStore({
- initialState: {
- activeId: mockIssue.id,
- },
- });
- mountComponent();
+ it('should highlight the card with a correct style when selected', async () => {
+ mountComponent({ activeBoardItem: mockIssue });
+ await waitForPromises();
expect(wrapper.classes()).toContain('is-active');
expect(wrapper.classes()).not.toContain('multi-select');
});
it('should highlight the card with a correct style when multi-selected', () => {
- createStore({
- initialState: {
- activeId: inactiveId,
- },
- });
mountComponent({ selectedBoardItems: [mockIssue.id] });
expect(wrapper.classes()).toContain('multi-select');
@@ -161,18 +130,22 @@ describe('Board card', () => {
describe('when mouseup event is called on the card', () => {
beforeEach(() => {
- createStore();
mountComponent();
});
describe('when not using multi-select', () => {
- it('should call vuex action "toggleBoardItem" with correct parameters', async () => {
+ it('set active board item on client when clicking on card', async () => {
await selectCard();
+ await waitForPromises();
- expect(mockActions.toggleBoardItem).toHaveBeenCalledTimes(1);
- expect(mockActions.toggleBoardItem).toHaveBeenCalledWith(expect.any(Object), {
- boardItem: mockIssue,
- });
+ expect(mockSetActiveBoardItemResolver).toHaveBeenCalledWith(
+ {},
+ {
+ boardItem: mockIssue,
+ },
+ expect.anything(),
+ expect.anything(),
+ );
});
});
@@ -199,7 +172,6 @@ describe('Board card', () => {
describe('when card is loading', () => {
it('card is disabled and user cannot drag', () => {
- createStore();
mountComponent({ item: { ...mockIssue, isLoading: true } });
expect(wrapper.classes()).toContain('is-disabled');
@@ -209,7 +181,6 @@ describe('Board card', () => {
describe('when card is not loading', () => {
it('user can drag', () => {
- createStore();
mountComponent();
expect(wrapper.classes()).not.toContain('is-disabled');
@@ -220,7 +191,6 @@ describe('Board card', () => {
describe('when Epic colors are enabled', () => {
it('applies the correct color', () => {
window.gon.features = { epicColorHighlight: true };
- createStore();
mountComponent({
item: {
...mockIssue,
@@ -238,7 +208,6 @@ describe('Board card', () => {
describe('when Epic colors are not enabled', () => {
it('applies the correct color', () => {
window.gon.features = { epicColorHighlight: false };
- createStore();
mountComponent({
item: {
...mockIssue,
@@ -252,26 +221,4 @@ describe('Board card', () => {
expect(wrapper.attributes('style')).toBeUndefined();
});
});
-
- describe('Apollo boards', () => {
- beforeEach(async () => {
- createStore();
- mountComponent({ provide: { isApolloBoard: true } });
- await nextTick();
- });
-
- it('set active board item on client when clicking on card', async () => {
- await selectCard();
- await waitForPromises();
-
- expect(mockSetActiveBoardItemResolver).toHaveBeenCalledWith(
- {},
- {
- boardItem: mockIssue,
- },
- expect.anything(),
- expect.anything(),
- );
- });
- });
});
diff --git a/spec/frontend/boards/components/board_column_spec.js b/spec/frontend/boards/components/board_column_spec.js
index 5717031be20..61c53c27187 100644
--- a/spec/frontend/boards/components/board_column_spec.js
+++ b/spec/frontend/boards/components/board_column_spec.js
@@ -4,17 +4,15 @@ import { nextTick } from 'vue';
import { listObj } from 'jest/boards/mock_data';
import BoardColumn from '~/boards/components/board_column.vue';
import { ListType } from '~/boards/constants';
-import { createStore } from '~/boards/stores';
describe('Board Column Component', () => {
let wrapper;
- let store;
- const initStore = () => {
- store = createStore();
- };
-
- const createComponent = ({ listType = ListType.backlog, collapsed = false } = {}) => {
+ const createComponent = ({
+ listType = ListType.backlog,
+ collapsed = false,
+ highlightedLists = [],
+ } = {}) => {
const listMock = {
...listObj,
listType,
@@ -27,14 +25,11 @@ describe('Board Column Component', () => {
}
wrapper = shallowMount(BoardColumn, {
- store,
propsData: {
list: listMock,
boardId: 'gid://gitlab/Board/1',
filters: {},
- },
- provide: {
- isApolloBoard: false,
+ highlightedLists,
},
});
};
@@ -43,10 +38,6 @@ describe('Board Column Component', () => {
const isCollapsed = () => wrapper.classes('is-collapsed');
describe('Given different list types', () => {
- beforeEach(() => {
- initStore();
- });
-
it('is expandable when List Type is `backlog`', () => {
createComponent({ listType: ListType.backlog });
@@ -70,40 +61,11 @@ describe('Board Column Component', () => {
describe('highlighting', () => {
it('scrolls to column when highlighted', async () => {
- createComponent();
-
- store.state.highlightedLists.push(listObj.id);
+ createComponent({ highlightedLists: [listObj.id] });
await nextTick();
expect(wrapper.element.scrollIntoView).toHaveBeenCalled();
});
});
-
- describe('on mount', () => {
- beforeEach(() => {
- initStore();
- jest.spyOn(store, 'dispatch').mockImplementation();
- });
-
- describe('when list is collapsed', () => {
- it('does not call fetchItemsForList when', async () => {
- createComponent({ collapsed: true });
-
- await nextTick();
-
- expect(store.dispatch).toHaveBeenCalledTimes(0);
- });
- });
-
- describe('when the list is not collapsed', () => {
- it('calls fetchItemsForList when', async () => {
- createComponent({ collapsed: false });
-
- await nextTick();
-
- expect(store.dispatch).toHaveBeenCalledWith('fetchItemsForList', { listId: 300 });
- });
- });
- });
});
diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js
index 01eea12bf0a..5fffd4d0c23 100644
--- a/spec/frontend/boards/components/board_content_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_content_sidebar_spec.js
@@ -2,8 +2,6 @@ import { GlDrawer } from '@gitlab/ui';
import { MountingPortal } from 'portal-vue';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import SidebarDropdownWidget from 'ee_else_ce/sidebar/components/sidebar_dropdown_widget.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import { stubComponent } from 'helpers/stub_component';
@@ -13,20 +11,17 @@ import waitForPromises from 'helpers/wait_for_promises';
import activeBoardItemQuery from 'ee_else_ce/boards/graphql/client/active_board_item.query.graphql';
import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue';
import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue';
-import { ISSUABLE } from '~/boards/constants';
import { TYPE_ISSUE } from '~/issues/constants';
import SidebarDateWidget from '~/sidebar/components/date/sidebar_date_widget.vue';
import SidebarSeverityWidget from '~/sidebar/components/severity/sidebar_severity_widget.vue';
import SidebarSubscriptionsWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue';
import SidebarTodoWidget from '~/sidebar/components/todo_toggle/sidebar_todo_widget.vue';
import SidebarLabelsWidget from '~/sidebar/components/labels/labels_select_widget/labels_select_root.vue';
-import { mockActiveIssue, mockIssue, rawIssue } from '../mock_data';
+import { rawIssue } from '../mock_data';
-Vue.use(Vuex);
Vue.use(VueApollo);
describe('BoardContentSidebar', () => {
let wrapper;
- let store;
const mockSetActiveBoardItemResolver = jest.fn();
const mockApollo = createMockApollo([], {
@@ -35,28 +30,11 @@ describe('BoardContentSidebar', () => {
},
});
- const createStore = ({ mockGetters = {}, mockActions = {} } = {}) => {
- store = new Vuex.Store({
- state: {
- sidebarType: ISSUABLE,
- issues: { [mockIssue.id]: { ...mockIssue, epic: null } },
- activeId: mockIssue.id,
- },
- getters: {
- activeBoardItem: () => {
- return { ...mockActiveIssue, epic: null };
- },
- ...mockGetters,
- },
- actions: mockActions,
- });
- };
-
- const createComponent = ({ isApolloBoard = false } = {}) => {
+ const createComponent = ({ issuable = rawIssue } = {}) => {
mockApollo.clients.defaultClient.cache.writeQuery({
query: activeBoardItemQuery,
data: {
- activeBoardItem: rawIssue,
+ activeBoardItem: issuable,
},
});
@@ -68,9 +46,7 @@ describe('BoardContentSidebar', () => {
groupId: 1,
issuableType: TYPE_ISSUE,
isGroupBoard: false,
- isApolloBoard,
},
- store,
stubs: {
GlDrawer: stubComponent(GlDrawer, {
template: '<div><slot name="header"></slot><slot></slot></div>',
@@ -80,7 +56,6 @@ describe('BoardContentSidebar', () => {
};
beforeEach(() => {
- createStore();
createComponent();
});
@@ -97,8 +72,7 @@ describe('BoardContentSidebar', () => {
});
it('does not render GlDrawer when no active item is set', async () => {
- createStore({ mockGetters: { activeBoardItem: () => ({ id: '', iid: '' }) } });
- createComponent();
+ createComponent({ issuable: {} });
await nextTick();
@@ -155,45 +129,10 @@ describe('BoardContentSidebar', () => {
});
describe('when we emit close', () => {
- let toggleBoardItem;
-
- beforeEach(() => {
- toggleBoardItem = jest.fn();
- createStore({ mockActions: { toggleBoardItem } });
- createComponent();
- });
-
- it('calls toggleBoardItem with correct parameters', () => {
- wrapper.findComponent(GlDrawer).vm.$emit('close');
-
- expect(toggleBoardItem).toHaveBeenCalledTimes(1);
- expect(toggleBoardItem).toHaveBeenCalledWith(expect.any(Object), {
- boardItem: { ...mockActiveIssue, epic: null },
- sidebarType: ISSUABLE,
- });
- });
- });
-
- describe('incident sidebar', () => {
beforeEach(() => {
- createStore({
- mockGetters: { activeBoardItem: () => ({ ...mockIssue, epic: null, type: 'INCIDENT' }) },
- });
createComponent();
});
- it('renders SidebarSeverityWidget', () => {
- expect(wrapper.findComponent(SidebarSeverityWidget).exists()).toBe(true);
- });
- });
-
- describe('Apollo boards', () => {
- beforeEach(async () => {
- createStore();
- createComponent({ isApolloBoard: true });
- await nextTick();
- });
-
it('calls setActiveBoardItemMutation on close', async () => {
wrapper.findComponent(GlDrawer).vm.$emit('close');
@@ -209,4 +148,14 @@ describe('BoardContentSidebar', () => {
);
});
});
+
+ describe('incident sidebar', () => {
+ beforeEach(() => {
+ createComponent({ issuable: { ...rawIssue, epic: null, type: 'INCIDENT' } });
+ });
+
+ it('renders SidebarSeverityWidget', () => {
+ expect(wrapper.findComponent(SidebarSeverityWidget).exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index 675b79a8b1a..706f84ad319 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -3,14 +3,11 @@ import { shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import Vue, { nextTick } from 'vue';
import Draggable from 'vuedraggable';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import EpicsSwimlanes from 'ee_component/boards/components/epics_swimlanes.vue';
-import getters from 'ee_else_ce/boards/stores/getters';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import BoardColumn from '~/boards/components/board_column.vue';
import BoardContent from '~/boards/components/board_content.vue';
@@ -27,11 +24,6 @@ import {
} from '../mock_data';
Vue.use(VueApollo);
-Vue.use(Vuex);
-
-const actions = {
- moveList: jest.fn(),
-};
describe('BoardContent', () => {
let wrapper;
@@ -41,26 +33,9 @@ describe('BoardContent', () => {
const errorMessage = 'Failed to update list';
const updateListHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
- const defaultState = {
- isShowingEpicsSwimlanes: false,
- boardLists: mockListsById,
- error: undefined,
- issuableType: 'issue',
- };
-
- const createStore = (state = defaultState) => {
- return new Vuex.Store({
- actions,
- getters,
- state,
- });
- };
-
const createComponent = ({
- state,
props = {},
canAdminList = true,
- isApolloBoard = false,
issuableType = 'issue',
isIssueBoard = true,
isEpicBoard = false,
@@ -75,17 +50,13 @@ describe('BoardContent', () => {
data: boardListsQueryResponse.data,
});
- const store = createStore({
- ...defaultState,
- ...state,
- });
wrapper = shallowMount(BoardContent, {
apolloProvider: mockApollo,
propsData: {
boardId: 'gid://gitlab/Board/1',
filterParams: {},
isSwimlanesOn: false,
- boardListsApollo: mockListsById,
+ boardLists: mockListsById,
listQueryVariables,
addColumnFormVisible: false,
...props,
@@ -98,9 +69,7 @@ describe('BoardContent', () => {
isEpicBoard,
isGroupBoard: true,
disabled: false,
- isApolloBoard,
},
- store,
stubs: {
BoardContentSidebar: stubComponent(BoardContentSidebar, {
template: '<div></div>',
@@ -114,13 +83,26 @@ describe('BoardContent', () => {
const findDraggable = () => wrapper.findComponent(Draggable);
const findError = () => wrapper.findComponent(GlAlert);
+ const moveList = () => {
+ const movableListsOrder = [mockLists[0].id, mockLists[1].id];
+
+ findDraggable().vm.$emit('end', {
+ item: { dataset: { listId: mockLists[0].id, draggableItemType: DraggableItemTypes.list } },
+ newIndex: 1,
+ to: {
+ children: movableListsOrder.map((listId) => ({ dataset: { listId } })),
+ },
+ });
+ };
+
beforeEach(() => {
cacheUpdates.setError = jest.fn();
});
describe('default', () => {
- beforeEach(() => {
+ beforeEach(async () => {
createComponent();
+ await waitForPromises();
});
it('renders a BoardColumn component per list', () => {
@@ -146,63 +128,6 @@ describe('BoardContent', () => {
it('does not show the "add column" form', () => {
expect(findBoardAddNewColumn().exists()).toBe(false);
});
- });
-
- describe('when issuableType is not issue', () => {
- beforeEach(() => {
- createComponent({ issuableType: 'foo', isIssueBoard: false });
- });
-
- it('does not render BoardContentSidebar', () => {
- expect(wrapper.findComponent(BoardContentSidebar).exists()).toBe(false);
- });
- });
-
- describe('can admin list', () => {
- beforeEach(() => {
- createComponent({ canAdminList: true });
- });
-
- it('renders draggable component', () => {
- expect(findDraggable().exists()).toBe(true);
- });
- });
-
- describe('can not admin list', () => {
- beforeEach(() => {
- createComponent({ canAdminList: false });
- });
-
- it('does not render draggable component', () => {
- expect(findDraggable().exists()).toBe(false);
- });
- });
-
- describe('when Apollo boards FF is on', () => {
- const moveList = () => {
- const movableListsOrder = [mockLists[0].id, mockLists[1].id];
-
- findDraggable().vm.$emit('end', {
- item: { dataset: { listId: mockLists[0].id, draggableItemType: DraggableItemTypes.list } },
- newIndex: 1,
- to: {
- children: movableListsOrder.map((listId) => ({ dataset: { listId } })),
- },
- });
- };
-
- beforeEach(async () => {
- createComponent({ isApolloBoard: true });
- await waitForPromises();
- });
-
- it('renders a BoardColumn component per list', () => {
- expect(wrapper.findAllComponents(BoardColumn)).toHaveLength(mockLists.length);
- });
-
- it('renders BoardContentSidebar', () => {
- expect(wrapper.findComponent(BoardContentSidebar).exists()).toBe(true);
- });
it('reorders lists', async () => {
moveList();
@@ -212,7 +137,7 @@ describe('BoardContent', () => {
});
it('sets error on reorder lists failure', async () => {
- createComponent({ isApolloBoard: true, handler: updateListHandlerFailure });
+ createComponent({ handler: updateListHandlerFailure });
moveList();
await waitForPromises();
@@ -222,7 +147,7 @@ describe('BoardContent', () => {
describe('when error is passed', () => {
beforeEach(async () => {
- createComponent({ isApolloBoard: true, props: { apolloError: 'Error' } });
+ createComponent({ props: { apolloError: 'Error' } });
await waitForPromises();
});
@@ -239,6 +164,36 @@ describe('BoardContent', () => {
});
});
+ describe('when issuableType is not issue', () => {
+ beforeEach(() => {
+ createComponent({ issuableType: 'foo', isIssueBoard: false });
+ });
+
+ it('does not render BoardContentSidebar', () => {
+ expect(wrapper.findComponent(BoardContentSidebar).exists()).toBe(false);
+ });
+ });
+
+ describe('can admin list', () => {
+ beforeEach(() => {
+ createComponent({ canAdminList: true });
+ });
+
+ it('renders draggable component', () => {
+ expect(findDraggable().exists()).toBe(true);
+ });
+ });
+
+ describe('can not admin list', () => {
+ beforeEach(() => {
+ createComponent({ canAdminList: false });
+ });
+
+ it('does not render draggable component', () => {
+ expect(findDraggable().exists()).toBe(false);
+ });
+ });
+
describe('when "add column" form is visible', () => {
beforeEach(() => {
createComponent({ props: { addColumnFormVisible: true } });
diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js
index 0bd936c9abd..5e96b508f37 100644
--- a/spec/frontend/boards/components/board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/board_filtered_search_spec.js
@@ -1,7 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import BoardFilteredSearch from '~/boards/components/board_filtered_search.vue';
import { updateHistory } from '~/lib/utils/url_utility';
import {
@@ -20,9 +17,6 @@ import {
import FilteredSearchBarRoot from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import UserToken from '~/vue_shared/components/filtered_search_bar/tokens/user_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
-import { createStore } from '~/boards/stores';
-
-Vue.use(Vuex);
jest.mock('~/lib/utils/url_utility', () => ({
updateHistory: jest.fn(),
@@ -32,7 +26,6 @@ jest.mock('~/lib/utils/url_utility', () => ({
describe('BoardFilteredSearch', () => {
let wrapper;
- let store;
const tokens = [
{
icon: 'labels',
@@ -63,10 +56,12 @@ describe('BoardFilteredSearch', () => {
];
const createComponent = ({ initialFilterParams = {}, props = {}, provide = {} } = {}) => {
- store = createStore();
wrapper = shallowMount(BoardFilteredSearch, {
- provide: { initialFilterParams, fullPath: '', isApolloBoard: false, ...provide },
- store,
+ provide: {
+ initialFilterParams,
+ fullPath: '',
+ ...provide,
+ },
propsData: {
...props,
tokens,
@@ -79,8 +74,6 @@ describe('BoardFilteredSearch', () => {
describe('default', () => {
beforeEach(() => {
createComponent();
-
- jest.spyOn(store, 'dispatch').mockImplementation();
});
it('passes the correct tokens to FilteredSearch', () => {
@@ -88,12 +81,6 @@ describe('BoardFilteredSearch', () => {
});
describe('when onFilter is emitted', () => {
- it('calls performSearch', () => {
- findFilteredSearch().vm.$emit('onFilter', [{ value: { data: '' } }]);
-
- expect(store.dispatch).toHaveBeenCalledWith('performSearch');
- });
-
it('calls historyPushState', () => {
findFilteredSearch().vm.$emit('onFilter', [{ value: { data: 'searchQuery' } }]);
@@ -104,10 +91,22 @@ describe('BoardFilteredSearch', () => {
});
});
});
+
+ it('emits setFilters and updates URL when onFilter is emitted', () => {
+ findFilteredSearch().vm.$emit('onFilter', [{ value: { data: '' } }]);
+
+ expect(updateHistory).toHaveBeenCalledWith({
+ title: '',
+ replace: true,
+ url: 'http://test.host/',
+ });
+
+ expect(wrapper.emitted('setFilters')).toHaveLength(1);
+ });
});
describe('when eeFilters is not empty', () => {
- it('passes the correct initialFilterValue to FitleredSearchBarRoot', () => {
+ it('passes the correct initialFilterValue to FilteredSearchBarRoot', () => {
createComponent({ props: { eeFilters: { labelName: ['label'] } } });
expect(findFilteredSearch().props('initialFilterValue')).toEqual([
@@ -125,8 +124,6 @@ describe('BoardFilteredSearch', () => {
describe('when searching', () => {
beforeEach(() => {
createComponent();
-
- jest.spyOn(store, 'dispatch').mockImplementation();
});
it('sets the url params to the correct results', () => {
@@ -146,7 +143,6 @@ describe('BoardFilteredSearch', () => {
findFilteredSearch().vm.$emit('onFilter', mockFilters);
- expect(store.dispatch).toHaveBeenCalledWith('performSearch');
expect(updateHistory).toHaveBeenCalledWith({
title: '',
replace: true,
@@ -193,21 +189,42 @@ describe('BoardFilteredSearch', () => {
});
});
- describe('when Apollo boards FF is on', () => {
+ describe('when iteration is passed a wildcard value with a cadence id', () => {
+ const url = (arg) => `http://test.host/?iteration_id=${arg}&iteration_cadence_id=1349`;
+
beforeEach(() => {
- createComponent({ provide: { isApolloBoard: true } });
+ createComponent();
});
- it('emits setFilters and updates URL when onFilter is emitted', () => {
- findFilteredSearch().vm.$emit('onFilter', [{ value: { data: '' } }]);
+ it.each([
+ ['Current&1349', url('Current'), 'Current'],
+ ['Any&1349', url('Any'), 'Any'],
+ ])('sets the url param %s', (iterationParam, expected, wildCardId) => {
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: new URL(expected),
+ });
+
+ const mockFilters = [
+ { type: TOKEN_TYPE_ITERATION, value: { data: iterationParam, operator: '=' } },
+ ];
+
+ findFilteredSearch().vm.$emit('onFilter', mockFilters);
expect(updateHistory).toHaveBeenCalledWith({
title: '',
replace: true,
- url: 'http://test.host/',
+ url: expected,
});
- expect(wrapper.emitted('setFilters')).toHaveLength(1);
+ expect(wrapper.emitted('setFilters')).toStrictEqual([
+ [
+ {
+ iterationCadenceId: '1349',
+ iterationId: wildCardId,
+ },
+ ],
+ ]);
});
});
});
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index a0dacf085e2..16947a0512d 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,7 +1,5 @@
import { GlModal } from '@gitlab/ui';
import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
import setWindowLocation from 'helpers/set_window_location_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -23,8 +21,6 @@ jest.mock('~/lib/utils/url_utility', () => ({
}));
jest.mock('~/boards/eventhub');
-Vue.use(Vuex);
-
const currentBoard = {
id: 'gid://gitlab/Board/1',
name: 'test',
@@ -55,14 +51,6 @@ describe('BoardForm', () => {
const findDeleteConfirmation = () => wrapper.findByTestId('delete-confirmation-message');
const findInput = () => wrapper.find('#board-new-name');
- const setBoardMock = jest.fn();
-
- const store = new Vuex.Store({
- actions: {
- setBoard: setBoardMock,
- },
- });
-
const defaultHandlers = {
createBoardMutationHandler: jest.fn().mockResolvedValue({
data: {
@@ -107,7 +95,6 @@ describe('BoardForm', () => {
isProjectBoard: false,
...provide,
},
- store,
attachTo: document.body,
});
};
@@ -220,7 +207,7 @@ describe('BoardForm', () => {
});
await waitForPromises();
- expect(setBoardMock).toHaveBeenCalledTimes(1);
+ expect(wrapper.emitted('addBoard')).toHaveLength(1);
});
it('sets error in state if GraphQL mutation fails', async () => {
@@ -239,31 +226,8 @@ describe('BoardForm', () => {
expect(requestHandlers.createBoardMutationHandler).toHaveBeenCalled();
await waitForPromises();
- expect(setBoardMock).not.toHaveBeenCalled();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
-
- describe('when Apollo boards FF is on', () => {
- it('calls a correct GraphQL mutation and emits addBoard event when creating a board', async () => {
- createComponent({
- props: { canAdminBoard: true, currentPage: formType.new },
- provide: { isApolloBoard: true },
- });
-
- fillForm();
-
- await waitForPromises();
-
- expect(requestHandlers.createBoardMutationHandler).toHaveBeenCalledWith({
- input: expect.objectContaining({
- name: 'test',
- }),
- });
-
- await waitForPromises();
- expect(wrapper.emitted('addBoard')).toHaveLength(1);
- });
- });
});
});
@@ -314,8 +278,12 @@ describe('BoardForm', () => {
});
await waitForPromises();
- expect(setBoardMock).toHaveBeenCalledTimes(1);
expect(global.window.location.href).not.toContain('?group_by=epic');
+ expect(eventHub.$emit).toHaveBeenCalledTimes(1);
+ expect(eventHub.$emit).toHaveBeenCalledWith('updateBoard', {
+ id: 'gid://gitlab/Board/321',
+ webPath: 'test-path',
+ });
});
it('calls GraphQL mutation with correct parameters when issues are grouped by epic', async () => {
@@ -335,7 +303,6 @@ describe('BoardForm', () => {
});
await waitForPromises();
- expect(setBoardMock).toHaveBeenCalledTimes(1);
expect(global.window.location.href).toContain('?group_by=epic');
});
@@ -355,36 +322,8 @@ describe('BoardForm', () => {
expect(requestHandlers.updateBoardMutationHandler).toHaveBeenCalled();
await waitForPromises();
- expect(setBoardMock).not.toHaveBeenCalled();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
-
- describe('when Apollo boards FF is on', () => {
- it('calls a correct GraphQL mutation and emits updateBoard event when updating a board', async () => {
- setWindowLocation('https://test/boards/1');
-
- createComponent({
- props: { canAdminBoard: true, currentPage: formType.edit },
- provide: { isApolloBoard: true },
- });
- findInput().trigger('keyup.enter', { metaKey: true });
-
- await waitForPromises();
-
- expect(requestHandlers.updateBoardMutationHandler).toHaveBeenCalledWith({
- input: expect.objectContaining({
- id: currentBoard.id,
- }),
- });
-
- await waitForPromises();
- expect(eventHub.$emit).toHaveBeenCalledTimes(1);
- expect(eventHub.$emit).toHaveBeenCalledWith('updateBoard', {
- id: 'gid://gitlab/Board/321',
- webPath: 'test-path',
- });
- });
- });
});
describe('when deleting a board', () => {
@@ -427,7 +366,6 @@ describe('BoardForm', () => {
destroyBoardMutationHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
},
});
- jest.spyOn(store, 'dispatch').mockImplementation(() => {});
findModal().vm.$emit('primary');
diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js
index 76e969f1725..b59ed8b6abb 100644
--- a/spec/frontend/boards/components/board_list_header_spec.js
+++ b/spec/frontend/boards/components/board_list_header_spec.js
@@ -1,8 +1,6 @@
import { GlButtonGroup } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -18,15 +16,11 @@ import * as cacheUpdates from '~/boards/graphql/cache_updates';
import listQuery from 'ee_else_ce/boards/graphql/board_lists_deferred.query.graphql';
Vue.use(VueApollo);
-Vue.use(Vuex);
describe('Board List Header Component', () => {
let wrapper;
- let store;
let fakeApollo;
- const updateListSpy = jest.fn();
- const toggleListCollapsedSpy = jest.fn();
const mockClientToggleListCollapsedResolver = jest.fn();
const updateListHandlerSuccess = jest.fn().mockResolvedValue(updateBoardListResponse);
@@ -69,10 +63,6 @@ describe('Board List Header Component', () => {
);
}
- store = new Vuex.Store({
- state: {},
- actions: { updateList: updateListSpy, toggleListCollapsed: toggleListCollapsedSpy },
- });
fakeApollo = createMockApollo(
[
[listQuery, listQueryHandler],
@@ -87,7 +77,6 @@ describe('Board List Header Component', () => {
wrapper = shallowMountExtended(BoardListHeader, {
apolloProvider: fakeApollo,
- store,
propsData: {
list: listMock,
filterParams: {},
@@ -198,26 +187,34 @@ describe('Board List Header Component', () => {
expect(icon.props('icon')).toBe('chevron-lg-right');
});
- it('should dispatch toggleListCollapse when clicking the collapse icon', async () => {
- createComponent();
+    it('calls the client toggle-list-collapsed resolver when clicking the collapse caret', async () => {
+ createComponent({ listType: ListType.label });
+ await nextTick();
findCaret().vm.$emit('click');
-
await nextTick();
- expect(toggleListCollapsedSpy).toHaveBeenCalledTimes(1);
+
+ expect(mockClientToggleListCollapsedResolver).toHaveBeenCalledWith(
+ {},
+ {
+ list: mockLabelList,
+ collapsed: true,
+ },
+ expect.anything(),
+ expect.anything(),
+ );
});
- it("when logged in it calls list update and doesn't set localStorage", async () => {
+ it("when logged in it doesn't set localStorage", async () => {
createComponent({ withLocalStorage: false, currentUserId: 1 });
findCaret().vm.$emit('click');
await nextTick();
- expect(updateListSpy).toHaveBeenCalledTimes(1);
expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.collapsed`)).toBe(null);
});
- it("when logged out it doesn't call list update and sets localStorage", async () => {
+ it('when logged out it sets localStorage', async () => {
createComponent({
currentUserId: null,
});
@@ -225,7 +222,6 @@ describe('Board List Header Component', () => {
findCaret().vm.$emit('click');
await nextTick();
- expect(updateListSpy).not.toHaveBeenCalled();
expect(localStorage.getItem(`${wrapper.vm.uniqueKey}.collapsed`)).toBe(
String(!isCollapsed()),
);
@@ -252,86 +248,67 @@ describe('Board List Header Component', () => {
});
});
- describe('Apollo boards', () => {
- beforeEach(async () => {
- createComponent({ listType: ListType.label, injectedProps: { isApolloBoard: true } });
- await nextTick();
- });
-
- it('set active board item on client when clicking on card', async () => {
- findCaret().vm.$emit('click');
- await nextTick();
-
- expect(mockClientToggleListCollapsedResolver).toHaveBeenCalledWith(
- {},
- {
- list: mockLabelList,
- collapsed: true,
- },
- expect.anything(),
- expect.anything(),
- );
- });
+ beforeEach(async () => {
+ createComponent({ listType: ListType.label });
+ await nextTick();
+ });
- it('does not call update list mutation when user is not logged in', async () => {
- createComponent({ currentUserId: null, injectedProps: { isApolloBoard: true } });
+ it('does not call update list mutation when user is not logged in', async () => {
+ createComponent({ currentUserId: null });
- findCaret().vm.$emit('click');
- await nextTick();
+ findCaret().vm.$emit('click');
+ await nextTick();
- expect(updateListHandlerSuccess).not.toHaveBeenCalled();
- });
+ expect(updateListHandlerSuccess).not.toHaveBeenCalled();
+ });
- it('calls update list mutation when user is logged in', async () => {
- createComponent({ currentUserId: 1, injectedProps: { isApolloBoard: true } });
+ it('calls update list mutation when user is logged in', async () => {
+ createComponent({ currentUserId: 1 });
- findCaret().vm.$emit('click');
- await nextTick();
+ findCaret().vm.$emit('click');
+ await nextTick();
- expect(updateListHandlerSuccess).toHaveBeenCalledWith({
- listId: mockLabelList.id,
- collapsed: true,
- });
+ expect(updateListHandlerSuccess).toHaveBeenCalledWith({
+ listId: mockLabelList.id,
+ collapsed: true,
});
+ });
- describe('when fetch list query fails', () => {
- const errorMessage = 'Failed to fetch list';
- const listQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
+ describe('when fetch list query fails', () => {
+ const errorMessage = 'Failed to fetch list';
+ const listQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
- beforeEach(() => {
- createComponent({
- listQueryHandler: listQueryHandlerFailure,
- injectedProps: { isApolloBoard: true },
- });
+ beforeEach(() => {
+ createComponent({
+ listQueryHandler: listQueryHandlerFailure,
});
+ });
- it('sets error', async () => {
- await waitForPromises();
+ it('sets error', async () => {
+ await waitForPromises();
- expect(cacheUpdates.setError).toHaveBeenCalled();
- });
+ expect(cacheUpdates.setError).toHaveBeenCalled();
});
+ });
- describe('when update list mutation fails', () => {
- const errorMessage = 'Failed to update list';
- const updateListHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
+ describe('when update list mutation fails', () => {
+ const errorMessage = 'Failed to update list';
+ const updateListHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
- beforeEach(() => {
- createComponent({
- currentUserId: 1,
- updateListHandler: updateListHandlerFailure,
- injectedProps: { isApolloBoard: true },
- });
+ beforeEach(() => {
+ createComponent({
+ currentUserId: 1,
+ updateListHandler: updateListHandlerFailure,
});
+ });
- it('sets error', async () => {
- await waitForPromises();
+ it('sets error', async () => {
+ await waitForPromises();
- findCaret().vm.$emit('click');
- await waitForPromises();
+ findCaret().vm.$emit('click');
+ await waitForPromises();
- expect(cacheUpdates.setError).toHaveBeenCalled();
- });
+ expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/boards/components/board_new_issue_spec.js b/spec/frontend/boards/components/board_new_issue_spec.js
index bf2608d0594..dad0d148449 100644
--- a/spec/frontend/boards/components/board_new_issue_spec.js
+++ b/spec/frontend/boards/components/board_new_issue_spec.js
@@ -1,7 +1,5 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import BoardNewIssue from '~/boards/components/board_new_issue.vue';
@@ -15,18 +13,12 @@ import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import {
mockList,
mockGroupProjects,
- mockIssue,
- mockIssue2,
mockProjectBoardResponse,
mockGroupBoardResponse,
} from '../mock_data';
-Vue.use(Vuex);
Vue.use(VueApollo);
-const addListNewIssuesSpy = jest.fn().mockResolvedValue();
-const mockActions = { addListNewIssue: addListNewIssuesSpy };
-
const projectBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockProjectBoardResponse);
const groupBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockGroupBoardResponse);
@@ -36,20 +28,12 @@ const mockApollo = createMockApollo([
]);
const createComponent = ({
- state = {},
- actions = mockActions,
- getters = { getBoardItemsByList: () => () => [] },
isGroupBoard = true,
data = { selectedProject: mockGroupProjects[0] },
provide = {},
} = {}) =>
shallowMount(BoardNewIssue, {
apolloProvider: mockApollo,
- store: new Vuex.Store({
- state,
- actions,
- getters,
- }),
propsData: {
list: mockList,
boardId: 'gid://gitlab/Board/1',
@@ -63,7 +47,6 @@ const createComponent = ({
isGroupBoard,
boardType: 'group',
isEpicBoard: false,
- isApolloBoard: false,
...provide,
},
stubs: {
@@ -82,6 +65,32 @@ describe('Issue boards new issue form', () => {
await nextTick();
});
+ it.each`
+ boardType | queryHandler | notCalledHandler
+ ${WORKSPACE_GROUP} | ${groupBoardQueryHandlerSuccess} | ${projectBoardQueryHandlerSuccess}
+ ${WORKSPACE_PROJECT} | ${projectBoardQueryHandlerSuccess} | ${groupBoardQueryHandlerSuccess}
+ `(
+ 'fetches $boardType board and emits addNewIssue event',
+ async ({ boardType, queryHandler, notCalledHandler }) => {
+ wrapper = createComponent({
+ provide: {
+ boardType,
+ isProjectBoard: boardType === WORKSPACE_PROJECT,
+ isGroupBoard: boardType === WORKSPACE_GROUP,
+ },
+ });
+
+ await nextTick();
+ findBoardNewItem().vm.$emit('form-submit', { title: 'Foo' });
+
+ await nextTick();
+
+ expect(queryHandler).toHaveBeenCalled();
+ expect(notCalledHandler).not.toHaveBeenCalled();
+ expect(wrapper.emitted('addNewIssue')[0][0]).toMatchObject({ title: 'Foo' });
+ },
+ );
+
it('renders board-new-item component', () => {
const boardNewItem = findBoardNewItem();
expect(boardNewItem.exists()).toBe(true);
@@ -93,51 +102,6 @@ describe('Issue boards new issue form', () => {
});
});
- it('calls addListNewIssue action when `board-new-item` emits form-submit event', async () => {
- findBoardNewItem().vm.$emit('form-submit', { title: 'Foo' });
-
- await nextTick();
- expect(addListNewIssuesSpy).toHaveBeenCalledWith(expect.any(Object), {
- list: mockList,
- issueInput: {
- title: 'Foo',
- labelIds: [],
- assigneeIds: [],
- milestoneId: undefined,
- projectPath: mockGroupProjects[0].fullPath,
- moveAfterId: undefined,
- },
- });
- });
-
- describe('when list has an existing issues', () => {
- beforeEach(() => {
- wrapper = createComponent({
- getters: {
- getBoardItemsByList: () => () => [mockIssue, mockIssue2],
- },
- isGroupBoard: true,
- });
- });
-
- it('uses the first issue ID as moveAfterId', async () => {
- findBoardNewItem().vm.$emit('form-submit', { title: 'Foo' });
-
- await nextTick();
- expect(addListNewIssuesSpy).toHaveBeenCalledWith(expect.any(Object), {
- list: mockList,
- issueInput: {
- title: 'Foo',
- labelIds: [],
- assigneeIds: [],
- milestoneId: undefined,
- projectPath: mockGroupProjects[0].fullPath,
- moveAfterId: mockIssue.id,
- },
- });
- });
- });
-
it('emits event `toggle-issue-form` with current list Id suffix on eventHub when `board-new-item` emits form-cancel event', async () => {
jest.spyOn(eventHub, '$emit').mockImplementation();
findBoardNewItem().vm.$emit('form-cancel');
@@ -168,33 +132,4 @@ describe('Issue boards new issue form', () => {
expect(projectSelect.exists()).toBe(false);
});
});
-
- describe('Apollo boards', () => {
- it.each`
- boardType | queryHandler | notCalledHandler
- ${WORKSPACE_GROUP} | ${groupBoardQueryHandlerSuccess} | ${projectBoardQueryHandlerSuccess}
- ${WORKSPACE_PROJECT} | ${projectBoardQueryHandlerSuccess} | ${groupBoardQueryHandlerSuccess}
- `(
- 'fetches $boardType board and emits addNewIssue event',
- async ({ boardType, queryHandler, notCalledHandler }) => {
- wrapper = createComponent({
- provide: {
- boardType,
- isProjectBoard: boardType === WORKSPACE_PROJECT,
- isGroupBoard: boardType === WORKSPACE_GROUP,
- isApolloBoard: true,
- },
- });
-
- await nextTick();
- findBoardNewItem().vm.$emit('form-submit', { title: 'Foo' });
-
- await nextTick();
-
- expect(queryHandler).toHaveBeenCalled();
- expect(notCalledHandler).not.toHaveBeenCalled();
- expect(wrapper.emitted('addNewIssue')[0][0]).toMatchObject({ title: 'Foo' });
- },
- );
- });
});
diff --git a/spec/frontend/boards/components/board_settings_sidebar_spec.js b/spec/frontend/boards/components/board_settings_sidebar_spec.js
index f6ed483dfc5..71c886351b6 100644
--- a/spec/frontend/boards/components/board_settings_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_settings_sidebar_spec.js
@@ -3,32 +3,23 @@ import { shallowMount } from '@vue/test-utils';
import { MountingPortal } from 'portal-vue';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import waitForPromises from 'helpers/wait_for_promises';
import { stubComponent } from 'helpers/stub_component';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BoardSettingsSidebar from '~/boards/components/board_settings_sidebar.vue';
-import { inactiveId, LIST } from '~/boards/constants';
import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
-import actions from '~/boards/stores/actions';
-import getters from '~/boards/stores/getters';
-import mutations from '~/boards/stores/mutations';
-import sidebarEventHub from '~/sidebar/event_hub';
import { mockLabelList, destroyBoardListMutationResponse } from '../mock_data';
Vue.use(VueApollo);
-Vue.use(Vuex);
describe('BoardSettingsSidebar', () => {
let wrapper;
let mockApollo;
const labelTitle = mockLabelList.label.title;
const labelColor = mockLabelList.label.color;
- const listId = mockLabelList.id;
const modalID = 'board-settings-sidebar-modal';
const destroyBoardListMutationHandlerSuccess = jest
@@ -42,26 +33,12 @@ describe('BoardSettingsSidebar', () => {
const createComponent = ({
canAdminList = false,
list = {},
- sidebarType = LIST,
- activeId = inactiveId,
destroyBoardListMutationHandler = destroyBoardListMutationHandlerSuccess,
- isApolloBoard = false,
} = {}) => {
- const boardLists = {
- [listId]: list,
- };
- const store = new Vuex.Store({
- state: { sidebarType, activeId, boardLists },
- getters,
- mutations,
- actions,
- });
-
mockApollo = createMockApollo([[destroyBoardListMutation, destroyBoardListMutationHandler]]);
wrapper = extendedWrapper(
shallowMount(BoardSettingsSidebar, {
- store,
apolloProvider: mockApollo,
provide: {
canAdminList,
@@ -69,7 +46,6 @@ describe('BoardSettingsSidebar', () => {
isIssueBoard: true,
boardType: 'group',
issuableType: 'issue',
- isApolloBoard,
},
propsData: {
listId: list.id || '',
@@ -100,90 +76,50 @@ describe('BoardSettingsSidebar', () => {
cacheUpdates.setError = jest.fn();
});
- it('finds a MountingPortal component', () => {
- createComponent();
-
- expect(wrapper.findComponent(MountingPortal).props()).toMatchObject({
- mountTo: '#js-right-sidebar-portal',
- append: true,
- name: 'board-settings-sidebar',
- });
- });
-
- describe('when sidebarType is "list"', () => {
- it('finds a GlDrawer component', () => {
+ describe('default', () => {
+ beforeEach(() => {
createComponent();
+ });
+ it('renders a MountingPortal component', () => {
+ expect(wrapper.findComponent(MountingPortal).props()).toMatchObject({
+ mountTo: '#js-right-sidebar-portal',
+ append: true,
+ name: 'board-settings-sidebar',
+ });
+ });
+ it('renders a GlDrawer component', () => {
expect(findDrawer().exists()).toBe(true);
});
describe('on close', () => {
it('closes the sidebar', async () => {
- createComponent();
-
findDrawer().vm.$emit('close');
await nextTick();
expect(wrapper.findComponent(GlDrawer).props('open')).toBe(false);
});
-
- it('closes the sidebar when emitting the correct event', async () => {
- createComponent();
-
- sidebarEventHub.$emit('sidebar.closeAll');
-
- await nextTick();
-
- expect(wrapper.findComponent(GlDrawer).props('open')).toBe(false);
- });
});
- describe('when activeId is zero', () => {
+ describe('when there is no active list', () => {
it('renders GlDrawer with open false', () => {
createComponent();
expect(findDrawer().props('open')).toBe(false);
+ expect(findLabel().exists()).toBe(false);
});
});
- describe('when activeId is greater than zero', () => {
- it('renders GlDrawer with open true', () => {
- createComponent({ list: mockLabelList, activeId: listId });
+ describe('when there is an active list', () => {
+ it('renders GlDrawer with list title and label', () => {
+ createComponent({ list: mockLabelList });
expect(findDrawer().props('open')).toBe(true);
- });
- });
-
- describe('when activeId is in state', () => {
- it('renders label title', () => {
- createComponent({ list: mockLabelList, activeId: listId });
-
expect(findLabel().props('title')).toBe(labelTitle);
- });
-
- it('renders label background color', () => {
- createComponent({ list: mockLabelList, activeId: listId });
-
expect(findLabel().props('backgroundColor')).toBe(labelColor);
});
});
-
- describe('when activeId is not in state', () => {
- it('does not render GlLabel', () => {
- createComponent({ list: mockLabelList });
-
- expect(findLabel().exists()).toBe(false);
- });
- });
- });
-
- describe('when sidebarType is not List', () => {
- it('does not render GlDrawer', () => {
- createComponent({ sidebarType: '' });
-
- expect(findDrawer().props('open')).toBe(false);
- });
});
it('does not render "Remove list" when user cannot admin the boards list', () => {
@@ -193,20 +129,15 @@ describe('BoardSettingsSidebar', () => {
});
describe('when user can admin the boards list', () => {
- it('renders "Remove list" button', () => {
- createComponent({ canAdminList: true, activeId: listId, list: mockLabelList });
+ beforeEach(() => {
+ createComponent({ canAdminList: true, list: mockLabelList });
+ });
+ it('renders "Remove list" button', () => {
expect(findRemoveButton().exists()).toBe(true);
});
it('removes the list', () => {
- createComponent({
- canAdminList: true,
- activeId: listId,
- list: mockLabelList,
- isApolloBoard: true,
- });
-
findRemoveButton().vm.$emit('click');
wrapper.findComponent(GlModal).vm.$emit('primary');
@@ -215,23 +146,19 @@ describe('BoardSettingsSidebar', () => {
});
it('has the correct ID on the button', () => {
- createComponent({ canAdminList: true, activeId: listId, list: mockLabelList });
const binding = getBinding(findRemoveButton().element, 'gl-modal');
expect(binding.value).toBe(modalID);
});
it('has the correct ID on the modal', () => {
- createComponent({ canAdminList: true, activeId: listId, list: mockLabelList });
expect(findModal().props('modalId')).toBe(modalID);
});
it('sets error when destroy list mutation fails', async () => {
createComponent({
canAdminList: true,
- activeId: listId,
list: mockLabelList,
destroyBoardListMutationHandler: destroyBoardListMutationHandlerFailure,
- isApolloBoard: true,
});
findRemoveButton().vm.$emit('click');
diff --git a/spec/frontend/boards/components/board_top_bar_spec.js b/spec/frontend/boards/components/board_top_bar_spec.js
index 87abe630688..03526600114 100644
--- a/spec/frontend/boards/components/board_top_bar_spec.js
+++ b/spec/frontend/boards/components/board_top_bar_spec.js
@@ -1,8 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -21,18 +19,11 @@ import projectBoardQuery from '~/boards/graphql/project_board.query.graphql';
import { mockProjectBoardResponse, mockGroupBoardResponse } from '../mock_data';
Vue.use(VueApollo);
-Vue.use(Vuex);
describe('BoardTopBar', () => {
let wrapper;
let mockApollo;
- const createStore = () => {
- return new Vuex.Store({
- state: {},
- });
- };
-
const projectBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockProjectBoardResponse);
const groupBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockGroupBoardResponse);
const errorMessage = 'Failed to fetch board';
@@ -43,14 +34,12 @@ describe('BoardTopBar', () => {
projectBoardQueryHandler = projectBoardQueryHandlerSuccess,
groupBoardQueryHandler = groupBoardQueryHandlerSuccess,
} = {}) => {
- const store = createStore();
mockApollo = createMockApollo([
[projectBoardQuery, projectBoardQueryHandler],
[groupBoardQuery, groupBoardQueryHandler],
]);
wrapper = shallowMount(BoardTopBar, {
- store,
apolloProvider: mockApollo,
propsData: {
boardId: 'gid://gitlab/Board/1',
@@ -67,7 +56,6 @@ describe('BoardTopBar', () => {
isIssueBoard: true,
isEpicBoard: false,
isGroupBoard: true,
- isApolloBoard: false,
...provide,
},
},
stubs: { IssueBoardFilteredSearch },
@@ -127,45 +116,41 @@ describe('BoardTopBar', () => {
});
});
- describe('Apollo boards', () => {
- it.each`
- boardType | queryHandler | notCalledHandler
- ${WORKSPACE_GROUP} | ${groupBoardQueryHandlerSuccess} | ${projectBoardQueryHandlerSuccess}
- ${WORKSPACE_PROJECT} | ${projectBoardQueryHandlerSuccess} | ${groupBoardQueryHandlerSuccess}
- `('fetches $boardType boards', async ({ boardType, queryHandler, notCalledHandler }) => {
- createComponent({
- provide: {
- boardType,
- isProjectBoard: boardType === WORKSPACE_PROJECT,
- isGroupBoard: boardType === WORKSPACE_GROUP,
- isApolloBoard: true,
- },
- });
-
- await nextTick();
-
- expect(queryHandler).toHaveBeenCalled();
- expect(notCalledHandler).not.toHaveBeenCalled();
+ it.each`
+ boardType | queryHandler | notCalledHandler
+ ${WORKSPACE_GROUP} | ${groupBoardQueryHandlerSuccess} | ${projectBoardQueryHandlerSuccess}
+ ${WORKSPACE_PROJECT} | ${projectBoardQueryHandlerSuccess} | ${groupBoardQueryHandlerSuccess}
+ `('fetches $boardType boards', async ({ boardType, queryHandler, notCalledHandler }) => {
+ createComponent({
+ provide: {
+ boardType,
+ isProjectBoard: boardType === WORKSPACE_PROJECT,
+ isGroupBoard: boardType === WORKSPACE_GROUP,
+ },
});
- it.each`
- boardType
- ${WORKSPACE_GROUP}
- ${WORKSPACE_PROJECT}
- `('sets error when $boardType board query fails', async ({ boardType }) => {
- createComponent({
- provide: {
- boardType,
- isProjectBoard: boardType === WORKSPACE_PROJECT,
- isGroupBoard: boardType === WORKSPACE_GROUP,
- isApolloBoard: true,
- },
- groupBoardQueryHandler: boardQueryHandlerFailure,
- projectBoardQueryHandler: boardQueryHandlerFailure,
- });
-
- await waitForPromises();
- expect(cacheUpdates.setError).toHaveBeenCalled();
+ await nextTick();
+
+ expect(queryHandler).toHaveBeenCalled();
+ expect(notCalledHandler).not.toHaveBeenCalled();
+ });
+
+ it.each`
+ boardType
+ ${WORKSPACE_GROUP}
+ ${WORKSPACE_PROJECT}
+ `('sets error when $boardType board query fails', async ({ boardType }) => {
+ createComponent({
+ provide: {
+ boardType,
+ isProjectBoard: boardType === WORKSPACE_PROJECT,
+ isGroupBoard: boardType === WORKSPACE_GROUP,
+ },
+ groupBoardQueryHandler: boardQueryHandlerFailure,
+ projectBoardQueryHandler: boardQueryHandlerFailure,
});
+
+ await waitForPromises();
+ expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index 0a628af9939..8766b1c25f2 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -1,8 +1,6 @@
import { GlCollapsibleListbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
import BoardsSelector from '~/boards/components/boards_selector.vue';
@@ -29,23 +27,10 @@ import {
const throttleDuration = 1;
Vue.use(VueApollo);
-Vue.use(Vuex);
describe('BoardsSelector', () => {
let wrapper;
let fakeApollo;
- let store;
-
- const createStore = () => {
- store = new Vuex.Store({
- actions: {
- setBoardConfig: jest.fn(),
- },
- state: {
- board: mockBoard,
- },
- });
- };
const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
@@ -91,10 +76,10 @@ describe('BoardsSelector', () => {
]);
wrapper = shallowMountExtended(BoardsSelector, {
- store,
apolloProvider: fakeApollo,
propsData: {
throttleDuration,
+ board: mockBoard,
...props,
},
attachTo: document.body,
@@ -109,7 +94,6 @@
boardType: isGroupBoard ? 'group' : 'project',
isGroupBoard,
isProjectBoard,
- isApolloBoard: false,
...provide,
},
},
});
@@ -125,7 +110,6 @@ describe('BoardsSelector', () => {
describe('template', () => {
beforeEach(() => {
- createStore();
createComponent({ isProjectBoard: true });
});
@@ -137,9 +121,6 @@ describe('BoardsSelector', () => {
it('shows loading spinner', async () => {
createComponent({
- provide: {
- isApolloBoard: true,
- },
props: {
isCurrentBoardLoading: true,
},
@@ -243,7 +224,6 @@ describe('BoardsSelector', () => {
${WORKSPACE_GROUP} | ${groupBoardsQueryHandlerSuccess} | ${projectBoardsQueryHandlerSuccess}
${WORKSPACE_PROJECT} | ${projectBoardsQueryHandlerSuccess} | ${groupBoardsQueryHandlerSuccess}
`('fetches $boardType boards', async ({ boardType, queryHandler, notCalledHandler }) => {
- createStore();
createComponent({
isGroupBoard: boardType === WORKSPACE_GROUP,
isProjectBoard: boardType === WORKSPACE_PROJECT,
@@ -265,7 +245,6 @@ describe('BoardsSelector', () => {
${WORKSPACE_GROUP}
${WORKSPACE_PROJECT}
`('sets error when fetching $boardType boards fails', async ({ boardType }) => {
- createStore();
createComponent({
isGroupBoard: boardType === WORKSPACE_GROUP,
isProjectBoard: boardType === WORKSPACE_PROJECT,
@@ -287,7 +266,6 @@ describe('BoardsSelector', () => {
describe('dropdown visibility', () => {
describe('when multipleIssueBoardsAvailable is enabled', () => {
it('show dropdown', () => {
- createStore();
createComponent({ provide: { multipleIssueBoardsAvailable: true } });
expect(findDropdown().exists()).toBe(true);
expect(findDropdown().props('toggleText')).toBe('Select board');
@@ -296,7 +274,6 @@ describe('BoardsSelector', () => {
describe('when multipleIssueBoardsAvailable is disabled but it hasMissingBoards', () => {
it('show dropdown', () => {
- createStore();
createComponent({
provide: { multipleIssueBoardsAvailable: false, hasMissingBoards: true },
});
@@ -307,7 +284,6 @@ describe('BoardsSelector', () => {
describe("when multipleIssueBoardsAvailable is disabled and it dosn't hasMissingBoards", () => {
it('hide dropdown', () => {
- createStore();
createComponent({
provide: { multipleIssueBoardsAvailable: false, hasMissingBoards: false },
});
@@ -320,7 +296,6 @@ describe('BoardsSelector', () => {
it('displays loading state of dropdown while current board is being fetched', () => {
createComponent({
props: { isCurrentBoardLoading: true },
- provide: { isApolloBoard: true },
});
expect(findDropdown().props('loading')).toBe(true);
expect(findDropdown().props('toggleText')).toBe('Select board');
diff --git a/spec/frontend/boards/components/issue_board_filtered_search_spec.js b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
index 1edb6812af0..39cdde295aa 100644
--- a/spec/frontend/boards/components/issue_board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
@@ -23,6 +23,9 @@ describe('IssueBoardFilter', () => {
fullPath: 'gitlab-org',
isGroupBoard: true,
},
+ mocks: {
+ $apollo: {},
+ },
});
};
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
index f354067e226..77b557e7ccd 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
@@ -6,7 +6,6 @@ import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardEditableItem from '~/boards/components/sidebar/board_editable_item.vue';
import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue';
-import { createStore } from '~/boards/stores';
import issueSetTitleMutation from '~/boards/graphql/issue_set_title.mutation.graphql';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import updateEpicTitleMutation from '~/sidebar/queries/update_epic_title.mutation.graphql';
@@ -32,11 +31,10 @@ const TEST_ISSUE_B = {
describe('BoardSidebarTitle', () => {
let wrapper;
- let store;
- let storeDispatch;
let mockApollo;
const issueSetTitleMutationHandlerSuccess = jest.fn().mockResolvedValue(updateIssueTitleResponse);
+ const issueSetTitleMutationHandlerFailure = jest.fn().mockRejectedValue(new Error('error'));
const updateEpicTitleMutationHandlerSuccess = jest
.fn()
.mockResolvedValue(updateEpicTitleResponse);
@@ -47,28 +45,25 @@ describe('BoardSidebarTitle', () => {
afterEach(() => {
localStorage.clear();
- store = null;
});
- const createWrapper = ({ item = TEST_ISSUE_A, provide = {} } = {}) => {
- store = createStore();
- store.state.boardItems = { [item.id]: { ...item } };
- store.dispatch('setActiveId', { id: item.id });
+ const createWrapper = ({
+ item = TEST_ISSUE_A,
+ provide = {},
+ issueSetTitleMutationHandler = issueSetTitleMutationHandlerSuccess,
+ } = {}) => {
mockApollo = createMockApollo([
- [issueSetTitleMutation, issueSetTitleMutationHandlerSuccess],
+ [issueSetTitleMutation, issueSetTitleMutationHandler],
[updateEpicTitleMutation, updateEpicTitleMutationHandlerSuccess],
]);
- storeDispatch = jest.spyOn(store, 'dispatch');
wrapper = shallowMountExtended(BoardSidebarTitle, {
- store,
apolloProvider: mockApollo,
provide: {
canUpdate: true,
fullPath: 'gitlab-org',
issuableType: 'issue',
isEpicBoard: false,
- isApolloBoard: false,
...provide,
},
propsData: {
@@ -122,13 +117,6 @@ describe('BoardSidebarTitle', () => {
expect(findCollapsed().isVisible()).toBe(true);
});
- it('commits change to the server', () => {
- expect(storeDispatch).toHaveBeenCalledWith('setActiveItemTitle', {
- projectPath: 'h/b',
- title: 'New item title',
- });
- });
-
it('renders correct title', async () => {
createWrapper({ item: { ...TEST_ISSUE_A, title: TEST_TITLE } });
await waitForPromises();
@@ -137,6 +125,31 @@ describe('BoardSidebarTitle', () => {
});
});
+ it.each`
+ issuableType | isEpicBoard | queryHandler | notCalledHandler
+ ${'issue'} | ${false} | ${issueSetTitleMutationHandlerSuccess} | ${updateEpicTitleMutationHandlerSuccess}
+ ${'epic'} | ${true} | ${updateEpicTitleMutationHandlerSuccess} | ${issueSetTitleMutationHandlerSuccess}
+ `(
+ 'updates $issuableType title',
+ async ({ issuableType, isEpicBoard, queryHandler, notCalledHandler }) => {
+ createWrapper({
+ provide: {
+ issuableType,
+ isEpicBoard,
+ },
+ });
+
+ await nextTick();
+
+ findFormInput().vm.$emit('input', TEST_TITLE);
+ findForm().vm.$emit('submit', { preventDefault: () => {} });
+ await nextTick();
+
+ expect(queryHandler).toHaveBeenCalled();
+ expect(notCalledHandler).not.toHaveBeenCalled();
+ },
+ );
+
describe('when submitting and invalid title', () => {
beforeEach(async () => {
createWrapper();
@@ -146,8 +159,8 @@ describe('BoardSidebarTitle', () => {
await nextTick();
});
- it('commits change to the server', () => {
- expect(storeDispatch).not.toHaveBeenCalled();
+ it('does not update title', () => {
+ expect(issueSetTitleMutationHandlerSuccess).not.toHaveBeenCalled();
});
});
@@ -194,7 +207,7 @@ describe('BoardSidebarTitle', () => {
});
it('collapses sidebar and render former title', () => {
- expect(storeDispatch).not.toHaveBeenCalled();
+ expect(issueSetTitleMutationHandlerSuccess).not.toHaveBeenCalled();
expect(findCollapsed().isVisible()).toBe(true);
expect(findTitle().text()).toBe(TEST_ISSUE_B.title);
});
@@ -202,47 +215,23 @@ describe('BoardSidebarTitle', () => {
describe('when the mutation fails', () => {
beforeEach(async () => {
- createWrapper({ item: TEST_ISSUE_B });
+ createWrapper({
+ item: TEST_ISSUE_B,
+ issueSetTitleMutationHandler: issueSetTitleMutationHandlerFailure,
+ });
findFormInput().vm.$emit('input', 'Invalid title');
findForm().vm.$emit('submit', { preventDefault: () => {} });
await nextTick();
});
- it('collapses sidebar and renders former item title', () => {
+ it('collapses sidebar and renders former item title', async () => {
expect(findCollapsed().isVisible()).toBe(true);
expect(findTitle().text()).toContain(TEST_ISSUE_B.title);
+ await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalledWith(
expect.objectContaining({ message: 'An error occurred when updating the title' }),
);
});
});
-
- describe('Apollo boards', () => {
- it.each`
- issuableType | isEpicBoard | queryHandler | notCalledHandler
- ${'issue'} | ${false} | ${issueSetTitleMutationHandlerSuccess} | ${updateEpicTitleMutationHandlerSuccess}
- ${'epic'} | ${true} | ${updateEpicTitleMutationHandlerSuccess} | ${issueSetTitleMutationHandlerSuccess}
- `(
- 'updates $issuableType title',
- async ({ issuableType, isEpicBoard, queryHandler, notCalledHandler }) => {
- createWrapper({
- provide: {
- issuableType,
- isEpicBoard,
- isApolloBoard: true,
- },
- });
-
- await nextTick();
-
- findFormInput().vm.$emit('input', TEST_TITLE);
- findForm().vm.$emit('submit', { preventDefault: () => {} });
- await nextTick();
-
- expect(queryHandler).toHaveBeenCalled();
- expect(notCalledHandler).not.toHaveBeenCalled();
- },
- );
- });
});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 0be17db9450..3a5e108ac07 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -275,6 +275,7 @@ export const labels = [
];
export const mockIssueFullPath = 'gitlab-org/test-subgroup/gitlab-test';
+export const mockIssueDirectNamespace = 'gitlab-test';
export const mockEpicFullPath = 'gitlab-org/test-subgroup';
export const rawIssue = {
@@ -331,15 +332,17 @@ export const mockIssue = {
confidential: false,
referencePath: `${mockIssueFullPath}#27`,
path: `/${mockIssueFullPath}/-/issues/27`,
- assignees,
- labels: [
- {
- id: 1,
- title: 'test',
- color: '#F0AD4E',
- description: 'testing',
- },
- ],
+ assignees: { nodes: assignees },
+ labels: {
+ nodes: [
+ {
+ id: 1,
+ title: 'test',
+ color: '#F0AD4E',
+ description: 'testing',
+ },
+ ],
+ },
epic: {
id: 'gid://gitlab/Epic/41',
},
@@ -411,6 +414,7 @@ export const mockActiveIssue = {
};
export const mockIssue2 = {
+ ...rawIssue,
id: 'gid://gitlab/Issue/437',
iid: 28,
title: 'Issue 2',
@@ -420,14 +424,13 @@ export const mockIssue2 = {
confidential: false,
referencePath: 'gitlab-org/test-subgroup/gitlab-test#28',
path: '/gitlab-org/test-subgroup/gitlab-test/-/issues/28',
- assignees,
- labels,
epic: {
id: 'gid://gitlab/Epic/40',
},
};
export const mockIssue3 = {
+ ...rawIssue,
id: 'gid://gitlab/Issue/438',
iid: 29,
title: 'Issue 3',
@@ -436,12 +439,11 @@ export const mockIssue3 = {
timeEstimate: 0,
confidential: false,
path: '/gitlab-org/gitlab-test/-/issues/28',
- assignees,
- labels,
epic: null,
};
export const mockIssue4 = {
+ ...rawIssue,
id: 'gid://gitlab/Issue/439',
iid: 30,
title: 'Issue 4',
@@ -450,12 +452,11 @@ export const mockIssue4 = {
timeEstimate: 0,
confidential: false,
path: '/gitlab-org/gitlab-test/-/issues/28',
- assignees,
- labels,
epic: null,
};
export const mockIssue5 = {
+ ...rawIssue,
id: 'gid://gitlab/Issue/440',
iid: 40,
title: 'Issue 5',
@@ -464,12 +465,11 @@ export const mockIssue5 = {
timeEstimate: 0,
confidential: false,
path: '/gitlab-org/gitlab-test/-/issues/40',
- assignees,
- labels,
epic: null,
};
export const mockIssue6 = {
+ ...rawIssue,
id: 'gid://gitlab/Issue/441',
iid: 41,
title: 'Issue 6',
@@ -478,12 +478,11 @@ export const mockIssue6 = {
timeEstimate: 0,
confidential: false,
path: '/gitlab-org/gitlab-test/-/issues/41',
- assignees,
- labels,
epic: null,
};
export const mockIssue7 = {
+ ...rawIssue,
id: 'gid://gitlab/Issue/442',
iid: 42,
title: 'Issue 6',
@@ -492,8 +491,6 @@ export const mockIssue7 = {
timeEstimate: 0,
confidential: false,
path: '/gitlab-org/gitlab-test/-/issues/42',
- assignees,
- labels,
epic: null,
};
@@ -1085,4 +1082,36 @@ export const mockGroupProjectsResponse = (projects = mockProjects) => ({
},
});
+export const mockGroupIssuesResponse = (
+ listId = 'gid://gitlab/List/1',
+ rawIssues = [rawIssue],
+) => ({
+ data: {
+ group: {
+ id: 'gid://gitlab/Group/1',
+ board: {
+ __typename: 'Board',
+ id: 'gid://gitlab/Board/1',
+ lists: {
+ nodes: [
+ {
+ id: listId,
+ listType: 'backlog',
+ issues: {
+ nodes: rawIssues,
+ pageInfo: {
+ endCursor: null,
+ hasNextPage: true,
+ },
+ },
+ __typename: 'BoardList',
+ },
+ ],
+ },
+ },
+ __typename: 'Group',
+ },
+ },
+});
+
export const DEFAULT_COLOR = '#1068bf';
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index 358cb340802..616bb083211 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -165,7 +165,7 @@ describe('setFilters', () => {
issuableType: TYPE_ISSUE,
};
- testAction(
+ return testAction(
actions.setFilters,
filters,
state,
@@ -441,7 +441,7 @@ describe('fetchMilestones', () => {
describe('createList', () => {
it('should dispatch createIssueList action', () => {
- testAction({
+ return testAction({
action: actions.createList,
payload: { backlog: true },
expectedActions: [{ type: 'createIssueList', payload: { backlog: true } }],
@@ -560,7 +560,7 @@ describe('addList', () => {
};
it('should commit RECEIVE_ADD_LIST_SUCCESS mutation and dispatch fetchItemsForList action', () => {
- testAction({
+ return testAction({
action: actions.addList,
payload: mockLists[1],
state: { ...getters },
@@ -1007,7 +1007,7 @@ describe('moveItem', () => {
it('should dispatch moveIssue action with payload', () => {
const payload = { mock: 'payload' };
- testAction({
+ return testAction({
action: actions.moveItem,
payload,
expectedActions: [{ type: 'moveIssue', payload }],
@@ -1017,7 +1017,7 @@ describe('moveItem', () => {
describe('moveIssue', () => {
it('should dispatch a correct set of actions', () => {
- testAction({
+ return testAction({
action: actions.moveIssue,
payload: mockMoveIssueParams,
state: mockMoveState,
@@ -1092,7 +1092,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('moveIssueCard commits a correct set of actions', () => {
- testAction({
+ return testAction({
action: actions.moveIssueCard,
state,
payload: getMoveData(state, params),
@@ -1101,7 +1101,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('undoMoveIssueCard commits a correct set of actions', () => {
- testAction({
+ return testAction({
action: actions.undoMoveIssueCard,
state,
payload: getMoveData(state, params),
@@ -1169,7 +1169,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('moveIssueCard commits a correct set of actions', () => {
- testAction({
+ return testAction({
action: actions.moveIssueCard,
state,
payload: getMoveData(state, params),
@@ -1178,7 +1178,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('undoMoveIssueCard commits a correct set of actions', () => {
- testAction({
+ return testAction({
action: actions.undoMoveIssueCard,
state,
payload: getMoveData(state, params),
@@ -1244,7 +1244,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('moveIssueCard commits a correct set of actions', () => {
- testAction({
+ return testAction({
action: actions.moveIssueCard,
state,
payload: getMoveData(state, params),
@@ -1253,7 +1253,7 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
});
it('undoMoveIssueCard commits a correct set of actions', () => {
- testAction({
+ return testAction({
action: actions.undoMoveIssueCard,
state,
payload: getMoveData(state, params),
@@ -1298,7 +1298,7 @@ describe('updateMovedIssueCard', () => {
])(
'should commit UPDATE_BOARD_ITEM with a correctly updated issue data when %s',
(_, { state, moveData, updatedIssue }) => {
- testAction({
+ return testAction({
action: actions.updateMovedIssue,
payload: moveData,
state,
@@ -1363,7 +1363,7 @@ describe('updateIssueOrder', () => {
},
});
- testAction(
+ return testAction(
actions.updateIssueOrder,
{ moveData },
state,
@@ -1395,7 +1395,7 @@ describe('updateIssueOrder', () => {
},
});
- testAction(
+ return testAction(
actions.updateIssueOrder,
{ moveData },
state,
@@ -1448,7 +1448,7 @@ describe('addListItem', () => {
inProgress: true,
};
- testAction(
+ return testAction(
actions.addListItem,
payload,
{},
@@ -1475,7 +1475,7 @@ describe('addListItem', () => {
position: 0,
};
- testAction(
+ return testAction(
actions.addListItem,
payload,
{},
@@ -1503,7 +1503,7 @@ describe('removeListItem', () => {
itemId: mockIssue.id,
};
- testAction(actions.removeListItem, payload, {}, [
+ return testAction(actions.removeListItem, payload, {}, [
{ type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload },
{ type: types.REMOVE_BOARD_ITEM, payload: mockIssue.id },
]);
@@ -1608,7 +1608,7 @@ describe('addListNewIssue', () => {
},
});
- testAction({
+ return testAction({
action: actions.addListNewIssue,
payload: {
issueInput: mockIssue,
@@ -1651,7 +1651,7 @@ describe('addListNewIssue', () => {
},
});
- testAction({
+ return testAction({
action: actions.addListNewIssue,
payload: {
issueInput: mockIssue,
@@ -1700,7 +1700,7 @@ describe('setActiveIssueLabels', () => {
value: labels,
};
- testAction(
+ return testAction(
actions.setActiveIssueLabels,
input,
{ ...state, ...getters },
@@ -1721,7 +1721,7 @@ describe('setActiveIssueLabels', () => {
value: [labels[1]],
};
- testAction(
+ return testAction(
actions.setActiveIssueLabels,
{ ...input, removeLabelIds: [getIdFromGraphQLId(labels[0].id)] },
{ ...state, ...getters },
@@ -1962,7 +1962,7 @@ describe('toggleBoardItemMultiSelection', () => {
const boardItem2 = mockIssue2;
it('should commit mutation ADD_BOARD_ITEM_TO_SELECTION if item is not on selection state', () => {
- testAction(
+ return testAction(
actions.toggleBoardItemMultiSelection,
boardItem,
{ selectedBoardItems: [] },
@@ -1977,7 +1977,7 @@ describe('toggleBoardItemMultiSelection', () => {
});
it('should commit mutation REMOVE_BOARD_ITEM_FROM_SELECTION if item is on selection state', () => {
- testAction(
+ return testAction(
actions.toggleBoardItemMultiSelection,
boardItem,
{ selectedBoardItems: [mockIssue] },
@@ -1992,7 +1992,7 @@ describe('toggleBoardItemMultiSelection', () => {
});
it('should additionally commit mutation ADD_BOARD_ITEM_TO_SELECTION for active issue and dispatch unsetActiveId', () => {
- testAction(
+ return testAction(
actions.toggleBoardItemMultiSelection,
boardItem2,
{ activeId: mockActiveIssue.id, activeBoardItem: mockActiveIssue, selectedBoardItems: [] },
@@ -2013,7 +2013,7 @@ describe('toggleBoardItemMultiSelection', () => {
describe('resetBoardItemMultiSelection', () => {
it('should commit mutation RESET_BOARD_ITEM_SELECTION', () => {
- testAction({
+ return testAction({
action: actions.resetBoardItemMultiSelection,
state: { selectedBoardItems: [mockIssue] },
expectedMutations: [
@@ -2027,7 +2027,7 @@ describe('resetBoardItemMultiSelection', () => {
describe('toggleBoardItem', () => {
it('should dispatch resetBoardItemMultiSelection and unsetActiveId when boardItem is the active item', () => {
- testAction({
+ return testAction({
action: actions.toggleBoardItem,
payload: { boardItem: mockIssue },
state: {
@@ -2038,7 +2038,7 @@ describe('toggleBoardItem', () => {
});
it('should dispatch resetBoardItemMultiSelection and setActiveId when boardItem is not the active item', () => {
- testAction({
+ return testAction({
action: actions.toggleBoardItem,
payload: { boardItem: mockIssue },
state: {
@@ -2054,7 +2054,7 @@ describe('toggleBoardItem', () => {
describe('setError', () => {
it('should commit mutation SET_ERROR', () => {
- testAction({
+ return testAction({
action: actions.setError,
payload: { message: 'mayday' },
expectedMutations: [
@@ -2085,7 +2085,7 @@ describe('setError', () => {
describe('unsetError', () => {
it('should commit mutation SET_ERROR with undefined as payload', () => {
- testAction({
+ return testAction({
action: actions.unsetError,
expectedMutations: [
{
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
deleted file mode 100644
index 2d68c070b83..00000000000
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ /dev/null
@@ -1,672 +0,0 @@
-import { cloneDeep } from 'lodash';
-import * as types from '~/boards/stores/mutation_types';
-import mutations from '~/boards/stores/mutations';
-import defaultState from '~/boards/stores/state';
-import { TYPE_ISSUE } from '~/issues/constants';
-import {
- mockBoard,
- mockLists,
- rawIssue,
- mockIssue,
- mockIssue2,
- mockGroupProjects,
- labels,
- mockList,
-} from '../mock_data';
-
-describe('Board Store Mutations', () => {
- let state;
-
- const initialBoardListsState = {
- 'gid://gitlab/List/1': mockLists[0],
- 'gid://gitlab/List/2': mockLists[1],
- };
-
- const setBoardsListsState = () => {
- state = cloneDeep({
- ...state,
- boardItemsByListId: { 'gid://gitlab/List/1': [mockIssue.id] },
- boardLists: { 'gid://gitlab/List/1': mockList },
- });
- };
-
- beforeEach(() => {
- state = defaultState();
- });
-
- describe('REQUEST_CURRENT_BOARD', () => {
- it('Should set isBoardLoading state to true', () => {
- mutations[types.REQUEST_CURRENT_BOARD](state);
-
- expect(state.isBoardLoading).toBe(true);
- });
- });
-
- describe('RECEIVE_BOARD_SUCCESS', () => {
- it('Should set board to state', () => {
- mutations[types.RECEIVE_BOARD_SUCCESS](state, mockBoard);
-
- expect(state.board).toEqual({
- ...mockBoard,
- labels: mockBoard.labels.nodes,
- });
- });
- });
-
- describe('RECEIVE_BOARD_FAILURE', () => {
- it('Should set error in state', () => {
- mutations[types.RECEIVE_BOARD_FAILURE](state);
-
- expect(state.error).toEqual(
- 'An error occurred while fetching the board. Please reload the page.',
- );
- });
- });
-
- describe('SET_INITIAL_BOARD_DATA', () => {
- it('Should set initial Boards data to state', () => {
- const allowSubEpics = true;
- const boardId = 1;
- const fullPath = 'gitlab-org';
- const boardType = 'group';
- const disabled = false;
- const issuableType = TYPE_ISSUE;
-
- mutations[types.SET_INITIAL_BOARD_DATA](state, {
- allowSubEpics,
- boardId,
- fullPath,
- boardType,
- disabled,
- issuableType,
- });
-
- expect(state.allowSubEpics).toBe(allowSubEpics);
- expect(state.boardId).toEqual(boardId);
- expect(state.fullPath).toEqual(fullPath);
- expect(state.boardType).toEqual(boardType);
- expect(state.disabled).toEqual(disabled);
- expect(state.issuableType).toEqual(issuableType);
- });
- });
-
- describe('SET_BOARD_CONFIG', () => {
- it('Should set board config data o state', () => {
- const boardConfig = {
- milestoneId: 1,
- milestoneTitle: 'Milestone 1',
- };
-
- mutations[types.SET_BOARD_CONFIG](state, boardConfig);
-
- expect(state.boardConfig).toEqual(boardConfig);
- });
- });
-
- describe('RECEIVE_BOARD_LISTS_SUCCESS', () => {
- it('Should set boardLists to state', () => {
- mutations[types.RECEIVE_BOARD_LISTS_SUCCESS](state, initialBoardListsState);
-
- expect(state.boardLists).toEqual(initialBoardListsState);
- });
- });
-
- describe('RECEIVE_BOARD_LISTS_FAILURE', () => {
- it('Should set error in state', () => {
- mutations[types.RECEIVE_BOARD_LISTS_FAILURE](state);
-
- expect(state.error).toEqual(
- 'An error occurred while fetching the board lists. Please reload the page.',
- );
- });
- });
-
- describe('SET_ACTIVE_ID', () => {
- const expected = { id: 1, sidebarType: '' };
-
- beforeEach(() => {
- mutations.SET_ACTIVE_ID(state, expected);
- });
-
- it('updates activeListId to be the value that is passed', () => {
- expect(state.activeId).toBe(expected.id);
- });
-
- it('updates sidebarType to be the value that is passed', () => {
- expect(state.sidebarType).toBe(expected.sidebarType);
- });
- });
-
- describe('SET_FILTERS', () => {
- it('updates filterParams to be the value that is passed', () => {
- const filterParams = { labelName: 'label' };
-
- mutations.SET_FILTERS(state, filterParams);
-
- expect(state.filterParams).toBe(filterParams);
- });
- });
-
- describe('CREATE_LIST_FAILURE', () => {
- it('sets error message', () => {
- mutations.CREATE_LIST_FAILURE(state);
-
- expect(state.error).toEqual('An error occurred while creating the list. Please try again.');
- });
- });
-
- describe('RECEIVE_LABELS_REQUEST', () => {
- it('sets labelsLoading on state', () => {
- mutations.RECEIVE_LABELS_REQUEST(state);
-
- expect(state.labelsLoading).toEqual(true);
- });
- });
-
- describe('RECEIVE_LABELS_SUCCESS', () => {
- it('sets labels on state', () => {
- mutations.RECEIVE_LABELS_SUCCESS(state, labels);
-
- expect(state.labels).toEqual(labels);
- expect(state.labelsLoading).toEqual(false);
- });
- });
-
- describe('RECEIVE_LABELS_FAILURE', () => {
- it('sets error message', () => {
- mutations.RECEIVE_LABELS_FAILURE(state);
-
- expect(state.error).toEqual(
- 'An error occurred while fetching labels. Please reload the page.',
- );
- expect(state.labelsLoading).toEqual(false);
- });
- });
-
- describe('GENERATE_DEFAULT_LISTS_FAILURE', () => {
- it('sets error message', () => {
- mutations.GENERATE_DEFAULT_LISTS_FAILURE(state);
-
- expect(state.error).toEqual(
- 'An error occurred while generating lists. Please reload the page.',
- );
- });
- });
-
- describe('RECEIVE_ADD_LIST_SUCCESS', () => {
- it('adds list to boardLists state', () => {
- mutations.RECEIVE_ADD_LIST_SUCCESS(state, mockLists[0]);
-
- expect(state.boardLists).toEqual({
- [mockLists[0].id]: mockLists[0],
- });
- });
- });
-
- describe('MOVE_LISTS', () => {
- it('updates the positions of board lists', () => {
- state = {
- ...state,
- boardLists: initialBoardListsState,
- };
-
- mutations.MOVE_LISTS(state, [
- {
- listId: mockLists[0].id,
- position: 1,
- },
- {
- listId: mockLists[1].id,
- position: 0,
- },
- ]);
-
- expect(state.boardLists[mockLists[0].id].position).toBe(1);
- expect(state.boardLists[mockLists[1].id].position).toBe(0);
- });
- });
-
- describe('TOGGLE_LIST_COLLAPSED', () => {
- it('updates collapsed attribute of list in boardLists state', () => {
- const listId = 'gid://gitlab/List/1';
- state = {
- ...state,
- boardLists: {
- [listId]: mockLists[0],
- },
- };
-
- expect(state.boardLists[listId].collapsed).toEqual(false);
-
- mutations.TOGGLE_LIST_COLLAPSED(state, { listId, collapsed: true });
-
- expect(state.boardLists[listId].collapsed).toEqual(true);
- });
- });
-
- describe('REMOVE_LIST', () => {
- it('removes list from boardLists', () => {
- const [list, secondList] = mockLists;
- const expected = {
- [secondList.id]: secondList,
- };
- state = {
- ...state,
- boardLists: { ...initialBoardListsState },
- };
-
- mutations[types.REMOVE_LIST](state, list.id);
-
- expect(state.boardLists).toEqual(expected);
- });
- });
-
- describe('REMOVE_LIST_FAILURE', () => {
- it('restores lists from backup', () => {
- const backupLists = { ...initialBoardListsState };
-
- mutations[types.REMOVE_LIST_FAILURE](state, backupLists);
-
- expect(state.boardLists).toEqual(backupLists);
- });
-
- it('sets error state', () => {
- const backupLists = { ...initialBoardListsState };
- state = {
- ...state,
- error: undefined,
- };
-
- mutations[types.REMOVE_LIST_FAILURE](state, backupLists);
-
- expect(state.error).toEqual('An error occurred while removing the list. Please try again.');
- });
- });
-
- describe('RESET_ISSUES', () => {
- it('should remove issues from boardItemsByListId state', () => {
- const boardItemsByListId = {
- 'gid://gitlab/List/1': [mockIssue.id],
- };
-
- state = {
- ...state,
- boardItemsByListId,
- };
-
- mutations[types.RESET_ISSUES](state);
-
- expect(state.boardItemsByListId).toEqual({ 'gid://gitlab/List/1': [] });
- });
- });
-
- describe('REQUEST_ITEMS_FOR_LIST', () => {
- const listId = 'gid://gitlab/List/1';
- const boardItemsByListId = {
- [listId]: [mockIssue.id],
- };
-
- it.each`
- fetchNext | isLoading | isLoadingMore
- ${true} | ${undefined} | ${true}
- ${false} | ${true} | ${undefined}
- `(
- 'sets isLoading to $isLoading and isLoadingMore to $isLoadingMore when fetchNext is $fetchNext',
- ({ fetchNext, isLoading, isLoadingMore }) => {
- state = {
- ...state,
- boardItemsByListId,
- listsFlags: {
- [listId]: {},
- },
- };
-
- mutations[types.REQUEST_ITEMS_FOR_LIST](state, { listId, fetchNext });
-
- expect(state.listsFlags[listId].isLoading).toBe(isLoading);
- expect(state.listsFlags[listId].isLoadingMore).toBe(isLoadingMore);
- },
- );
- });
-
- describe('RECEIVE_ITEMS_FOR_LIST_SUCCESS', () => {
- it('updates boardItemsByListId and issues on state', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [mockIssue.id],
- };
- const issues = {
- 1: mockIssue,
- };
-
- state = {
- ...state,
- boardItemsByListId: {
- 'gid://gitlab/List/1': [],
- },
- boardItems: {},
- boardLists: initialBoardListsState,
- };
-
- const listPageInfo = {
- 'gid://gitlab/List/1': {
- endCursor: '',
- hasNextPage: false,
- },
- };
-
- mutations.RECEIVE_ITEMS_FOR_LIST_SUCCESS(state, {
- listItems: { listData: listIssues, boardItems: issues },
- listPageInfo,
- listId: 'gid://gitlab/List/1',
- });
-
- expect(state.boardItemsByListId).toEqual(listIssues);
- expect(state.boardItems).toEqual(issues);
- });
- });
-
- describe('RECEIVE_ITEMS_FOR_LIST_FAILURE', () => {
- it('sets error message', () => {
- state = {
- ...state,
- boardLists: initialBoardListsState,
- error: undefined,
- };
-
- const listId = 'gid://gitlab/List/1';
-
- mutations.RECEIVE_ITEMS_FOR_LIST_FAILURE(state, listId);
-
- expect(state.error).toEqual(
- 'An error occurred while fetching the board issues. Please reload the page.',
- );
- });
- });
-
- describe('UPDATE_BOARD_ITEM_BY_ID', () => {
- const issueId = '1';
- const prop = 'id';
- const value = '2';
- const issue = { [issueId]: { id: 1, title: 'Issue' } };
-
- beforeEach(() => {
- state = {
- ...state,
- error: undefined,
- boardItems: {
- ...issue,
- },
- };
- });
-
- describe('when the issue is in state', () => {
- it('updates the property of the correct issue', () => {
- mutations.UPDATE_BOARD_ITEM_BY_ID(state, {
- itemId: issueId,
- prop,
- value,
- });
-
- expect(state.boardItems[issueId]).toEqual({ ...issue[issueId], id: '2' });
- });
- });
-
- describe('when the issue is not in state', () => {
- it('throws an error', () => {
- expect(() => {
- mutations.UPDATE_BOARD_ITEM_BY_ID(state, {
- itemId: '3',
- prop,
- value,
- });
- }).toThrow(new Error('No issue found.'));
- });
- });
- });
-
- describe('MUTATE_ISSUE_SUCCESS', () => {
- it('updates issue in issues state', () => {
- const issues = {
- [rawIssue.id]: { id: rawIssue.id },
- };
-
- state = {
- ...state,
- boardItems: issues,
- };
-
- mutations.MUTATE_ISSUE_SUCCESS(state, {
- issue: rawIssue,
- });
-
- expect(state.boardItems).toEqual({ [mockIssue.id]: mockIssue });
- });
- });
-
- describe('UPDATE_BOARD_ITEM', () => {
- it('updates the given issue in state.boardItems', () => {
- const updatedIssue = { id: 'some_gid', foo: 'bar' };
- state = { boardItems: { some_gid: { id: 'some_gid' } } };
-
- mutations.UPDATE_BOARD_ITEM(state, updatedIssue);
-
- expect(state.boardItems.some_gid).toEqual(updatedIssue);
- });
- });
-
- describe('REMOVE_BOARD_ITEM', () => {
- it('removes the given issue from state.boardItems', () => {
- state = { boardItems: { some_gid: {}, some_gid2: {} } };
-
- mutations.REMOVE_BOARD_ITEM(state, 'some_gid');
-
- expect(state.boardItems).toEqual({ some_gid2: {} });
- });
- });
-
- describe('ADD_BOARD_ITEM_TO_LIST', () => {
- beforeEach(() => {
- setBoardsListsState();
- });
-
- it.each([
- [
- 'at position 0 by default',
- {
- payload: {
- itemId: mockIssue2.id,
- listId: mockList.id,
- },
- listState: [mockIssue2.id, mockIssue.id],
- },
- ],
- [
- 'at a given position',
- {
- payload: {
- itemId: mockIssue2.id,
- listId: mockList.id,
- atIndex: 1,
- },
- listState: [mockIssue.id, mockIssue2.id],
- },
- ],
- [
- "below the issue with id of 'moveBeforeId'",
- {
- payload: {
- itemId: mockIssue2.id,
- listId: mockList.id,
- moveBeforeId: mockIssue.id,
- },
- listState: [mockIssue.id, mockIssue2.id],
- },
- ],
- [
- "above the issue with id of 'moveAfterId'",
- {
- payload: {
- itemId: mockIssue2.id,
- listId: mockList.id,
- moveAfterId: mockIssue.id,
- },
- listState: [mockIssue2.id, mockIssue.id],
- },
- ],
- [
- 'to the top of the list',
- {
- payload: {
- itemId: mockIssue2.id,
- listId: mockList.id,
- positionInList: 0,
- atIndex: 1,
- },
- listState: [mockIssue2.id, mockIssue.id],
- },
- ],
- [
- 'to the bottom of the list when the list is fully loaded',
- {
- payload: {
- itemId: mockIssue2.id,
- listId: mockList.id,
- positionInList: -1,
- atIndex: 0,
- allItemsLoadedInList: true,
- },
- listState: [mockIssue.id, mockIssue2.id],
- },
- ],
- ])(`inserts an item into a list %s`, (_, { payload, listState }) => {
- mutations.ADD_BOARD_ITEM_TO_LIST(state, payload);
-
- expect(state.boardItemsByListId[payload.listId]).toEqual(listState);
- });
- });
-
- describe('REMOVE_BOARD_ITEM_FROM_LIST', () => {
- beforeEach(() => {
- setBoardsListsState();
- });
-
- it('removes an item from a list', () => {
- expect(state.boardItemsByListId['gid://gitlab/List/1']).toContain(mockIssue.id);
-
- mutations.REMOVE_BOARD_ITEM_FROM_LIST(state, {
- itemId: mockIssue.id,
- listId: mockList.id,
- });
-
- expect(state.boardItemsByListId['gid://gitlab/List/1']).not.toContain(mockIssue.id);
- });
- });
-
- describe('SET_ASSIGNEE_LOADING', () => {
- it('sets isSettingAssignees to the value passed', () => {
- mutations.SET_ASSIGNEE_LOADING(state, true);
-
- expect(state.isSettingAssignees).toBe(true);
- });
- });
-
- describe('REQUEST_GROUP_PROJECTS', () => {
- it('Should set isLoading in groupProjectsFlags to true in state when fetchNext is false', () => {
- mutations[types.REQUEST_GROUP_PROJECTS](state, false);
-
- expect(state.groupProjectsFlags.isLoading).toBe(true);
- });
-
- it('Should set isLoadingMore in groupProjectsFlags to true in state when fetchNext is true', () => {
- mutations[types.REQUEST_GROUP_PROJECTS](state, true);
-
- expect(state.groupProjectsFlags.isLoadingMore).toBe(true);
- });
- });
-
- describe('RECEIVE_GROUP_PROJECTS_SUCCESS', () => {
- it('Should set groupProjects and pageInfo to state and isLoading in groupProjectsFlags to false', () => {
- mutations[types.RECEIVE_GROUP_PROJECTS_SUCCESS](state, {
- projects: mockGroupProjects,
- pageInfo: { hasNextPage: false },
- });
-
- expect(state.groupProjects).toEqual(mockGroupProjects);
- expect(state.groupProjectsFlags.isLoading).toBe(false);
- expect(state.groupProjectsFlags.pageInfo).toEqual({ hasNextPage: false });
- });
-
- it('Should merge projects in groupProjects in state when fetchNext is true', () => {
- state = {
- ...state,
- groupProjects: [mockGroupProjects[0]],
- };
-
- mutations[types.RECEIVE_GROUP_PROJECTS_SUCCESS](state, {
- projects: [mockGroupProjects[1]],
- fetchNext: true,
- });
-
- expect(state.groupProjects).toEqual(mockGroupProjects);
- });
- });
-
- describe('RECEIVE_GROUP_PROJECTS_FAILURE', () => {
- it('Should set error in state and isLoading in groupProjectsFlags to false', () => {
- mutations[types.RECEIVE_GROUP_PROJECTS_FAILURE](state);
-
- expect(state.error).toEqual(
- 'An error occurred while fetching group projects. Please try again.',
- );
- expect(state.groupProjectsFlags.isLoading).toBe(false);
- });
- });
-
- describe('SET_SELECTED_PROJECT', () => {
- it('Should set selectedProject to state', () => {
- mutations[types.SET_SELECTED_PROJECT](state, mockGroupProjects[0]);
-
- expect(state.selectedProject).toEqual(mockGroupProjects[0]);
- });
- });
-
- describe('ADD_BOARD_ITEM_TO_SELECTION', () => {
- it('Should add boardItem to selectedBoardItems state', () => {
- expect(state.selectedBoardItems).toEqual([]);
-
- mutations[types.ADD_BOARD_ITEM_TO_SELECTION](state, mockIssue);
-
- expect(state.selectedBoardItems).toEqual([mockIssue]);
- });
- });
-
- describe('REMOVE_BOARD_ITEM_FROM_SELECTION', () => {
- it('Should remove boardItem to selectedBoardItems state', () => {
- state.selectedBoardItems = [mockIssue];
-
- mutations[types.REMOVE_BOARD_ITEM_FROM_SELECTION](state, mockIssue);
-
- expect(state.selectedBoardItems).toEqual([]);
- });
- });
-
- describe('RESET_BOARD_ITEM_SELECTION', () => {
- it('Should reset selectedBoardItems state', () => {
- state.selectedBoardItems = [mockIssue];
-
- mutations[types.RESET_BOARD_ITEM_SELECTION](state, mockIssue);
-
- expect(state.selectedBoardItems).toEqual([]);
- });
- });
-
- describe('SET_ERROR', () => {
- it('Should set error state', () => {
- state.error = undefined;
-
- mutations[types.SET_ERROR](state, 'mayday');
-
- expect(state.error).toBe('mayday');
- });
- });
-});
diff --git a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
index ba77d90f4e2..36f27d1781e 100644
--- a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
+++ b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
@@ -2,7 +2,7 @@ import { GlLoadingIcon, GlTable, GlLink, GlPagination, GlModal, GlFormCheckbox }
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import getJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import waitForPromises from 'helpers/wait_for_promises';
import JobArtifactsTable from '~/ci/artifacts/components/job_artifacts_table.vue';
import ArtifactsTableRowDetails from '~/ci/artifacts/components/artifacts_table_row_details.vue';
@@ -22,11 +22,12 @@ import {
I18N_FETCH_ERROR,
INITIAL_CURRENT_PAGE,
I18N_BULK_DELETE_ERROR,
- SELECTED_ARTIFACTS_MAX_COUNT,
} from '~/ci/artifacts/constants';
import { totalArtifactsSizeForJob } from '~/ci/artifacts/utils';
import { createAlert } from '~/alert';
+const jobArtifactsCountLimit = 100;
+
jest.mock('~/alert');
Vue.use(VueApollo);
@@ -127,10 +128,10 @@ describe('JobArtifactsTable component', () => {
.map((jobNode) => jobNode.artifacts.nodes.map((artifactNode) => artifactNode.id))
.reduce((artifacts, jobArtifacts) => artifacts.concat(jobArtifacts));
- const maxSelectedArtifacts = new Array(SELECTED_ARTIFACTS_MAX_COUNT).fill('artifact-id');
+ const maxSelectedArtifacts = new Array(jobArtifactsCountLimit).fill('artifact-id');
const maxSelectedArtifactsIncludingCurrentPage = [
...allArtifacts,
- ...new Array(SELECTED_ARTIFACTS_MAX_COUNT - allArtifacts.length).fill('artifact-id'),
+ ...new Array(jobArtifactsCountLimit - allArtifacts.length).fill('artifact-id'),
];
const createComponent = ({
@@ -151,6 +152,7 @@ describe('JobArtifactsTable component', () => {
projectPath: 'project/path',
projectId,
canDestroyArtifacts,
+ jobArtifactsCountLimit,
},
mocks: {
$toast: {
@@ -665,7 +667,7 @@ describe('JobArtifactsTable component', () => {
describe('select all checkbox respects selected artifacts limit', () => {
describe('when selecting all visible artifacts would exceed the limit', () => {
- const selectedArtifactsLength = SELECTED_ARTIFACTS_MAX_COUNT - 1;
+ const selectedArtifactsLength = jobArtifactsCountLimit - 1;
beforeEach(async () => {
createComponent({
@@ -687,9 +689,7 @@ describe('JobArtifactsTable component', () => {
await nextTick();
expect(findSelectAllCheckboxChecked()).toBe(true);
- expect(findBulkDelete().props('selectedArtifacts')).toHaveLength(
- SELECTED_ARTIFACTS_MAX_COUNT,
- );
+ expect(findBulkDelete().props('selectedArtifacts')).toHaveLength(jobArtifactsCountLimit);
expect(findBulkDelete().props('selectedArtifacts')).not.toContain(
allArtifacts[allArtifacts.length - 1],
);
@@ -748,7 +748,7 @@ describe('JobArtifactsTable component', () => {
it('deselects all artifacts when toggled', async () => {
expect(findBulkDelete().props('selectedArtifacts')).toHaveLength(
- SELECTED_ARTIFACTS_MAX_COUNT,
+ jobArtifactsCountLimit,
);
toggleSelectAllCheckbox();
@@ -757,7 +757,7 @@ describe('JobArtifactsTable component', () => {
expect(findSelectAllCheckboxChecked()).toBe(false);
expect(findBulkDelete().props('selectedArtifacts')).toHaveLength(
- SELECTED_ARTIFACTS_MAX_COUNT - allArtifacts.length,
+ jobArtifactsCountLimit - allArtifacts.length,
);
});
});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js
index 382f8e46203..330163e9f39 100644
--- a/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js
@@ -2,7 +2,6 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { resolvers } from '~/ci/catalog/graphql/settings';
import CiResourceComponents from '~/ci/catalog/components/details/ci_resource_components.vue';
import getCiCatalogcomponentComponents from '~/ci/catalog/graphql/queries/get_ci_catalog_resource_components.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -17,15 +16,15 @@ describe('CiResourceComponents', () => {
let wrapper;
let mockComponentsResponse;
- const components = mockComponents.data.ciCatalogResource.components.nodes;
+ const components = mockComponents.data.ciCatalogResource.latestVersion.components.nodes;
- const resourceId = 'gid://gitlab/Ci::Catalog::Resource/1';
+ const resourcePath = 'twitter/project-1';
- const defaultProps = { resourceId };
+ const defaultProps = { resourcePath };
const createComponent = async () => {
const handlers = [[getCiCatalogcomponentComponents, mockComponentsResponse]];
- const mockApollo = createMockApollo(handlers, resolvers);
+ const mockApollo = createMockApollo(handlers);
wrapper = mountExtended(CiResourceComponents, {
propsData: {
@@ -113,10 +112,9 @@ describe('CiResourceComponents', () => {
expect(findComponents()).toHaveLength(components.length);
});
- it('renders the component name, description and snippet', () => {
+ it('renders the component name and snippet', () => {
components.forEach((component) => {
expect(wrapper.text()).toContain(component.name);
- expect(wrapper.text()).toContain(component.description);
expect(wrapper.text()).toContain(component.path);
});
});
@@ -134,9 +132,9 @@ describe('CiResourceComponents', () => {
it('renders the component parameter attributes', () => {
const [firstComponent] = components;
- firstComponent.inputs.nodes.forEach((input) => {
+ firstComponent.inputs.forEach((input) => {
expect(findComponents().at(0).text()).toContain(input.name);
- expect(findComponents().at(0).text()).toContain(input.defaultValue);
+ expect(findComponents().at(0).text()).toContain(input.default);
expect(findComponents().at(0).text()).toContain('Yes');
});
});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js
index 1f7dcf9d4e5..e4b6c1cd046 100644
--- a/spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js
@@ -8,7 +8,7 @@ describe('CiResourceDetails', () => {
let wrapper;
const defaultProps = {
- resourceId: 'gid://gitlab/Ci::Catalog::Resource/1',
+ resourcePath: 'twitter/project-1',
};
const defaultProvide = {
glFeatures: { ciCatalogComponentsTab: true },
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js
index c061332ba13..6af9daabea0 100644
--- a/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js
@@ -3,7 +3,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import CiResourceHeader from '~/ci/catalog/components/details/ci_resource_header.vue';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import CiResourceAbout from '~/ci/catalog/components/details/ci_resource_about.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import { catalogSharedDataMock, catalogAdditionalDetailsMock } from '../../mock';
describe('CiResourceHeader', () => {
@@ -45,9 +45,9 @@ describe('CiResourceHeader', () => {
expect(wrapper.html()).toContain(resource.description);
});
- it('renders the namespace and project path', () => {
- expect(wrapper.html()).toContain(resource.rootNamespace.fullPath);
- expect(wrapper.html()).toContain(resource.rootNamespace.name);
+ it('renders the project path and name', () => {
+ expect(wrapper.html()).toContain(resource.webPath);
+ expect(wrapper.html()).toContain(resource.name);
});
it('renders the avatar', () => {
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js
index 0dadac236a8..ad76b47db57 100644
--- a/spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js
@@ -23,12 +23,13 @@ describe('CiResourceReadme', () => {
data: {
ciCatalogResource: {
id: resourceId,
+ webPath: 'twitter/project-1',
readmeHtml,
},
},
};
- const defaultProps = { resourceId };
+ const defaultProps = { resourcePath: readmeMockData.data.ciCatalogResource.webPath };
const createComponent = ({ props = {} } = {}) => {
const handlers = [[getCiCatalogResourceReadme, mockReadmeResponse]];
diff --git a/spec/frontend/ci/catalog/components/list/catalog_header_spec.js b/spec/frontend/ci/catalog/components/list/catalog_header_spec.js
index 2a5c24d0515..e9d2e68c1a3 100644
--- a/spec/frontend/ci/catalog/components/list/catalog_header_spec.js
+++ b/spec/frontend/ci/catalog/components/list/catalog_header_spec.js
@@ -1,6 +1,7 @@
import { GlBanner, GlButton } from '@gitlab/ui';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import BetaBadge from '~/vue_shared/components/badges/beta_badge.vue';
import CatalogHeader from '~/ci/catalog/components/list/catalog_header.vue';
import { CATALOG_FEEDBACK_DISMISSED_KEY } from '~/ci/catalog/constants';
@@ -16,9 +17,10 @@ describe('CatalogHeader', () => {
};
const findBanner = () => wrapper.findComponent(GlBanner);
+ const findBetaBadge = () => wrapper.findComponent(BetaBadge);
const findFeedbackButton = () => findBanner().findComponent(GlButton);
const findTitle = () => wrapper.find('h1');
- const findDescription = () => wrapper.findByTestId('description');
+ const findDescription = () => wrapper.findByTestId('page-description');
const createComponent = ({ props = {}, provide = {}, stubs = {} } = {}) => {
wrapper = shallowMountExtended(CatalogHeader, {
@@ -33,6 +35,16 @@ describe('CatalogHeader', () => {
});
};
+ describe('Default view', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders a Beta Badge', () => {
+ expect(findBetaBadge().exists()).toBe(true);
+ });
+ });
+
describe('title and description', () => {
describe('when there are no values provided', () => {
beforeEach(() => {
@@ -42,10 +54,11 @@ describe('CatalogHeader', () => {
it('renders the default values', () => {
expect(findTitle().text()).toBe('CI/CD Catalog');
expect(findDescription().text()).toBe(
- 'Discover CI configuration resources for a seamless CI/CD experience.',
+ 'Discover CI/CD components that can improve your pipeline with additional functionality.',
);
});
});
+
describe('when custom values are provided', () => {
beforeEach(() => {
createComponent({ provide: customProvide });
@@ -57,6 +70,7 @@ describe('CatalogHeader', () => {
});
});
});
+
describe('Feedback banner', () => {
describe('when user has never dismissed', () => {
beforeEach(() => {
diff --git a/spec/frontend/ci/catalog/components/list/catalog_search_spec.js b/spec/frontend/ci/catalog/components/list/catalog_search_spec.js
new file mode 100644
index 00000000000..c6f8498f2fd
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/list/catalog_search_spec.js
@@ -0,0 +1,103 @@
+import { GlSearchBoxByClick, GlSorting, GlSortingItem } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CatalogSearch from '~/ci/catalog/components/list/catalog_search.vue';
+import { SORT_ASC, SORT_DESC, SORT_OPTION_CREATED } from '~/ci/catalog/constants';
+
+describe('CatalogSearch', () => {
+ let wrapper;
+
+ const findSearchBar = () => wrapper.findComponent(GlSearchBoxByClick);
+ const findSorting = () => wrapper.findComponent(GlSorting);
+ const findAllSortingItems = () => wrapper.findAllComponents(GlSortingItem);
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(CatalogSearch, {});
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('default UI', () => {
+ it('renders the search bar', () => {
+ expect(findSearchBar().exists()).toBe(true);
+ });
+
+ it('renders the sorting options', () => {
+ expect(findSorting().exists()).toBe(true);
+ expect(findAllSortingItems()).toHaveLength(1);
+ });
+
+ it('renders the `Created at` option as the default', () => {
+ expect(findAllSortingItems().at(0).text()).toBe('Created at');
+ });
+ });
+
+ describe('search', () => {
+ it('passes down the search value to the search component', async () => {
+ const newSearchTerm = 'cat';
+
+ expect(findSearchBar().props().value).toBe('');
+
+ await findSearchBar().vm.$emit('input', newSearchTerm);
+
+ expect(findSearchBar().props().value).toBe(newSearchTerm);
+ });
+
+ it('does not submit only when typing', async () => {
+ expect(wrapper.emitted('update-search-term')).toBeUndefined();
+
+ await findSearchBar().vm.$emit('input', 'new');
+
+ expect(wrapper.emitted('update-search-term')).toBeUndefined();
+ });
+
+ describe('when submitting the search', () => {
+ const newSearchTerm = 'dog';
+
+ beforeEach(async () => {
+ await findSearchBar().vm.$emit('input', newSearchTerm);
+ await findSearchBar().vm.$emit('submit');
+ });
+
+ it('emits the event up with the new payload', () => {
+ expect(wrapper.emitted('update-search-term')).toEqual([[newSearchTerm]]);
+ });
+ });
+
+ describe('when clearing the search', () => {
+ beforeEach(async () => {
+ await findSearchBar().vm.$emit('input', 'new');
+ await findSearchBar().vm.$emit('clear');
+ });
+
+ it('emits an update event with an empty string payload', () => {
+ expect(wrapper.emitted('update-search-term')).toEqual([['']]);
+ });
+ });
+ });
+
+ describe('sort', () => {
+ describe('when changing sort order', () => {
+ it('changes the `isAscending` prop to the sorting component', async () => {
+ expect(findSorting().props().isAscending).toBe(false);
+
+ await findSorting().vm.$emit('sortDirectionChange');
+
+ expect(findSorting().props().isAscending).toBe(true);
+ });
+
+ it('emits an `update-sorting` event with the new direction', async () => {
+ expect(wrapper.emitted('update-sorting')).toBeUndefined();
+
+ await findSorting().vm.$emit('sortDirectionChange');
+ await findSorting().vm.$emit('sortDirectionChange');
+
+ expect(wrapper.emitted('update-sorting')).toEqual([
+ [`${SORT_OPTION_CREATED}_${SORT_ASC}`],
+ [`${SORT_OPTION_CREATED}_${SORT_DESC}`],
+ ]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js b/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js
index 3862195d8c7..d74b133f386 100644
--- a/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js
+++ b/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js
@@ -1,21 +1,22 @@
import Vue from 'vue';
import VueRouter from 'vue-router';
-import { GlAvatar, GlBadge, GlButton, GlSprintf } from '@gitlab/ui';
+import { GlAvatar, GlBadge, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { cleanLeadingSeparator } from '~/lib/utils/url_utility';
import { createRouter } from '~/ci/catalog/router/index';
import CiResourcesListItem from '~/ci/catalog/components/list/ci_resources_list_item.vue';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import { CI_RESOURCE_DETAILS_PAGE_NAME } from '~/ci/catalog/router/constants';
import { catalogSinglePageResponse } from '../../mock';
Vue.use(VueRouter);
-let router;
-let routerPush;
+const defaultEvent = { preventDefault: jest.fn, ctrlKey: false, metaKey: false };
describe('CiResourcesListItem', () => {
let wrapper;
+ let routerPush;
+ const router = createRouter();
const resource = catalogSinglePageResponse.data.ciCatalogResources.nodes[0];
const release = {
author: { name: 'author', webUrl: '/user/1' },
@@ -35,22 +36,19 @@ describe('CiResourcesListItem', () => {
},
stubs: {
GlSprintf,
- RouterLink: true,
- RouterView: true,
},
});
};
const findAvatar = () => wrapper.findComponent(GlAvatar);
const findBadge = () => wrapper.findComponent(GlBadge);
- const findResourceName = () => wrapper.findComponent(GlButton);
+ const findResourceName = () => wrapper.findByTestId('ci-resource-link');
const findResourceDescription = () => wrapper.findByText(defaultProps.resource.description);
const findUserLink = () => wrapper.findByTestId('user-link');
const findTimeAgoMessage = () => wrapper.findComponent(GlSprintf);
const findFavorites = () => wrapper.findByTestId('stats-favorites');
beforeEach(() => {
- router = createRouter();
routerPush = jest.spyOn(router, 'push').mockImplementation(() => {});
});
@@ -70,8 +68,9 @@ describe('CiResourcesListItem', () => {
});
});
- it('renders the resource name button', () => {
+ it('renders the resource name and link', () => {
expect(findResourceName().exists()).toBe(true);
+ expect(findResourceName().attributes().href).toBe(defaultProps.resource.webPath);
});
it('renders the resource version badge', () => {
@@ -81,58 +80,69 @@ describe('CiResourcesListItem', () => {
it('renders the resource description', () => {
expect(findResourceDescription().exists()).toBe(true);
});
+ });
- describe('release time', () => {
- describe('when there is no release data', () => {
- beforeEach(() => {
- createComponent({ props: { resource: { ...resource, latestVersion: null } } });
- });
+ describe('release time', () => {
+ describe('when there is no release data', () => {
+ beforeEach(() => {
+ createComponent({ props: { resource: { ...resource, latestVersion: null } } });
+ });
- it('does not render the release', () => {
- expect(findTimeAgoMessage().exists()).toBe(false);
- });
+ it('does not render the release', () => {
+ expect(findTimeAgoMessage().exists()).toBe(false);
+ });
- it('renders the generic `unreleased` badge', () => {
- expect(findBadge().exists()).toBe(true);
- expect(findBadge().text()).toBe('Unreleased');
- });
+ it('renders the generic `unreleased` badge', () => {
+ expect(findBadge().exists()).toBe(true);
+ expect(findBadge().text()).toBe('Unreleased');
});
+ });
- describe('when there is release data', () => {
- beforeEach(() => {
- createComponent({ props: { resource: { ...resource, latestVersion: { ...release } } } });
- });
+ describe('when there is release data', () => {
+ beforeEach(() => {
+ createComponent({ props: { resource: { ...resource, latestVersion: { ...release } } } });
+ });
- it('renders the user link', () => {
- expect(findUserLink().exists()).toBe(true);
- expect(findUserLink().attributes('href')).toBe(release.author.webUrl);
- });
+ it('renders the user link', () => {
+ expect(findUserLink().exists()).toBe(true);
+ expect(findUserLink().attributes('href')).toBe(release.author.webUrl);
+ });
- it('renders the time since the resource was released', () => {
- expect(findTimeAgoMessage().exists()).toBe(true);
- });
+ it('renders the time since the resource was released', () => {
+ expect(findTimeAgoMessage().exists()).toBe(true);
+ });
- it('renders the version badge', () => {
- expect(findBadge().exists()).toBe(true);
- expect(findBadge().text()).toBe(release.tagName);
- });
+ it('renders the version badge', () => {
+ expect(findBadge().exists()).toBe(true);
+ expect(findBadge().text()).toBe(release.tagName);
});
});
});
describe('when clicking on an item title', () => {
- beforeEach(async () => {
- createComponent();
+ describe('without holding down a modifier key', () => {
+ it('navigates to the details page in the same tab', async () => {
+ createComponent();
+ await findResourceName().vm.$emit('click', defaultEvent);
- await findResourceName().vm.$emit('click');
+ expect(routerPush).toHaveBeenCalledWith({
+ path: cleanLeadingSeparator(resource.webPath),
+ });
+ });
});
- it('navigates to the details page', () => {
- expect(routerPush).toHaveBeenCalledWith({
- name: CI_RESOURCE_DETAILS_PAGE_NAME,
- params: {
- id: getIdFromGraphQLId(resource.id),
- },
+ describe.each`
+ keyName
+ ${'ctrlKey'}
+ ${'metaKey'}
+ `('when $keyName is being held down', ({ keyName }) => {
+ beforeEach(async () => {
+ createComponent();
+ await findResourceName().vm.$emit('click', { ...defaultEvent, [keyName]: true });
+ });
+
+ it('does not call VueRouter push', () => {
+ expect(routerPush).not.toHaveBeenCalled();
});
});
});
@@ -141,43 +151,35 @@ describe('CiResourcesListItem', () => {
beforeEach(async () => {
createComponent();
- await findAvatar().vm.$emit('click');
+ await findAvatar().vm.$emit('click', defaultEvent);
});
it('navigates to the details page', () => {
- expect(routerPush).toHaveBeenCalledWith({
- name: CI_RESOURCE_DETAILS_PAGE_NAME,
- params: {
- id: getIdFromGraphQLId(resource.id),
- },
- });
+ expect(routerPush).toHaveBeenCalledWith({ path: cleanLeadingSeparator(resource.webPath) });
});
});
describe('statistics', () => {
describe('when there are no statistics', () => {
- beforeEach(() => {
+ it('render favorites as 0', () => {
createComponent({
props: {
resource: {
+ ...resource,
starCount: 0,
},
},
});
- });
- it('render favorites as 0', () => {
expect(findFavorites().exists()).toBe(true);
expect(findFavorites().text()).toBe('0');
});
});
describe('where there are statistics', () => {
- beforeEach(() => {
+ it('render favorites', () => {
createComponent();
- });
- it('render favorites', () => {
expect(findFavorites().exists()).toBe(true);
expect(findFavorites().text()).toBe(String(defaultProps.resource.starCount));
});
diff --git a/spec/frontend/ci/catalog/components/list/empty_state_spec.js b/spec/frontend/ci/catalog/components/list/empty_state_spec.js
index f589ad96a9d..5db0c61371d 100644
--- a/spec/frontend/ci/catalog/components/list/empty_state_spec.js
+++ b/spec/frontend/ci/catalog/components/list/empty_state_spec.js
@@ -1,27 +1,83 @@
-import { GlEmptyState } from '@gitlab/ui';
+import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import EmptyState from '~/ci/catalog/components/list/empty_state.vue';
+import { COMPONENTS_DOCS_URL } from '~/ci/catalog/constants';
describe('EmptyState', () => {
let wrapper;
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findComponentsDocLink = () => wrapper.findComponent(GlLink);
const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMountExtended(EmptyState, {
propsData: {
...props,
},
+ stubs: {
+ GlEmptyState,
+ GlSprintf,
+ },
});
};
- describe('when mounted', () => {
+ describe('default', () => {
beforeEach(() => {
createComponent();
});
- it('renders the empty state', () => {
- expect(findEmptyState().exists()).toBe(true);
+ it('renders the default empty state', () => {
+ const emptyState = findEmptyState();
+
+ expect(emptyState.exists()).toBe(true);
+ expect(emptyState.props().title).toBe('Get started with the CI/CD Catalog');
+ expect(emptyState.props().description).toBe(
+ 'Create a pipeline component repository and make reusing pipeline configurations faster and easier.',
+ );
+ });
+ });
+
+ describe('when there is a search query', () => {
+ beforeEach(() => {
+ createComponent({
+ props: { searchTerm: 'a' },
+ });
+ });
+
+ it('renders the search description', () => {
+ expect(findEmptyState().text()).toContain(
+ 'Edit your search and try again. Or learn to create a component repository.',
+ );
+ });
+
+ it('renders the link to the components documentation', () => {
+ const docsLink = findComponentsDocLink();
+ expect(docsLink.exists()).toBe(true);
+ expect(docsLink.attributes().href).toBe(COMPONENTS_DOCS_URL);
+ });
+
+ describe('and it is less than 3 characters', () => {
+ beforeEach(() => {
+ createComponent({
+ props: { searchTerm: 'a' },
+ });
+ });
+
+ it('render the too few chars empty state title', () => {
+ expect(findEmptyState().props().title).toBe('Search must be at least 3 characters');
+ });
+ });
+
+ describe('and it has more than 3 characters', () => {
+ beforeEach(() => {
+ createComponent({
+ props: { searchTerm: 'my component' },
+ });
+ });
+
+ it('renders the search empty state title', () => {
+ expect(findEmptyState().props().title).toBe('No result found');
+ });
});
});
});
diff --git a/spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js b/spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js
index 40f243ed891..015c6504fa5 100644
--- a/spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js
+++ b/spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js
@@ -5,7 +5,8 @@ import { GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { CI_CATALOG_RESOURCE_TYPE, cacheConfig } from '~/ci/catalog/graphql/settings';
+import { cacheConfig } from '~/ci/catalog/graphql/settings';
+import { cleanLeadingSeparator } from '~/lib/utils/url_utility';
import getCiCatalogResourceSharedData from '~/ci/catalog/graphql/queries/get_ci_catalog_resource_shared_data.query.graphql';
import getCiCatalogResourceDetails from '~/ci/catalog/graphql/queries/get_ci_catalog_resource_details.query.graphql';
@@ -17,7 +18,6 @@ import CiResourceHeaderSkeletonLoader from '~/ci/catalog/components/details/ci_r
import { createRouter } from '~/ci/catalog/router/index';
import { CI_RESOURCE_DETAILS_PAGE_NAME } from '~/ci/catalog/router/constants';
-import { convertToGraphQLId } from '~/graphql_shared/utils';
import { catalogSharedDataMock, catalogAdditionalDetailsMock } from '../../mock';
Vue.use(VueApollo);
@@ -75,7 +75,7 @@ describe('CiResourceDetailsPage', () => {
router = createRouter();
await router.push({
name: CI_RESOURCE_DETAILS_PAGE_NAME,
- params: { id: defaultSharedData.id },
+ params: { id: defaultSharedData.webPath },
});
});
@@ -178,7 +178,7 @@ describe('CiResourceDetailsPage', () => {
it('passes expected props', () => {
expect(findDetailsComponent().props()).toEqual({
- resourceId: convertToGraphQLId(CI_CATALOG_RESOURCE_TYPE, defaultAdditionalData.id),
+ resourcePath: cleanLeadingSeparator(defaultSharedData.webPath),
});
});
});
diff --git a/spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js b/spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js
index e18b418b155..e6fbd63f307 100644
--- a/spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js
+++ b/spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js
@@ -7,10 +7,12 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { createAlert } from '~/alert';
import CatalogHeader from '~/ci/catalog/components/list/catalog_header.vue';
+import CatalogSearch from '~/ci/catalog/components/list/catalog_search.vue';
import CiResourcesList from '~/ci/catalog/components/list/ci_resources_list.vue';
import CatalogListSkeletonLoader from '~/ci/catalog/components/list/catalog_list_skeleton_loader.vue';
import EmptyState from '~/ci/catalog/components/list/empty_state.vue';
-import { cacheConfig } from '~/ci/catalog/graphql/settings';
+import { cacheConfig, resolvers } from '~/ci/catalog/graphql/settings';
+import typeDefs from '~/ci/catalog/graphql/typedefs.graphql';
import ciResourcesPage from '~/ci/catalog/components/pages/ci_resources_page.vue';
import getCatalogResources from '~/ci/catalog/graphql/queries/get_ci_catalog_resources.query.graphql';
@@ -24,9 +26,11 @@ describe('CiResourcesPage', () => {
let wrapper;
let catalogResourcesResponse;
+ const defaultQueryVariables = { first: 20 };
+
const createComponent = () => {
const handlers = [[getCatalogResources, catalogResourcesResponse]];
- const mockApollo = createMockApollo(handlers, {}, cacheConfig);
+ const mockApollo = createMockApollo(handlers, resolvers, { cacheConfig, typeDefs });
wrapper = shallowMountExtended(ciResourcesPage, {
apolloProvider: mockApollo,
@@ -36,6 +40,7 @@ describe('CiResourcesPage', () => {
};
const findCatalogHeader = () => wrapper.findComponent(CatalogHeader);
+ const findCatalogSearch = () => wrapper.findComponent(CatalogSearch);
const findCiResourcesList = () => wrapper.findComponent(CiResourcesList);
const findLoadingState = () => wrapper.findComponent(CatalogListSkeletonLoader);
const findEmptyState = () => wrapper.findComponent(EmptyState);
@@ -71,8 +76,14 @@ describe('CiResourcesPage', () => {
});
it('renders the empty state', () => {
- expect(findLoadingState().exists()).toBe(false);
expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('renders the search', () => {
+ expect(findCatalogSearch().exists()).toBe(true);
+ });
+
+ it('does not render the list', () => {
expect(findCiResourcesList().exists()).toBe(false);
});
});
@@ -99,6 +110,10 @@ describe('CiResourcesPage', () => {
totalCount: count,
});
});
+
+ it('renders the search and sort', () => {
+ expect(findCatalogSearch().exists()).toBe(true);
+ });
});
});
@@ -121,11 +136,12 @@ describe('CiResourcesPage', () => {
if (eventName === 'onNextPage') {
expect(catalogResourcesResponse.mock.calls[1][0]).toEqual({
+ ...defaultQueryVariables,
after: pageInfo.endCursor,
- first: 20,
});
} else {
expect(catalogResourcesResponse.mock.calls[1][0]).toEqual({
+ ...defaultQueryVariables,
before: pageInfo.startCursor,
last: 20,
first: null,
@@ -134,8 +150,75 @@ describe('CiResourcesPage', () => {
});
});
+ describe('search and sort', () => {
+ describe('on initial load', () => {
+ beforeEach(async () => {
+ catalogResourcesResponse.mockResolvedValue(catalogResponseBody);
+ await createComponent();
+ });
+
+ it('calls the query without search or sort', () => {
+ expect(catalogResourcesResponse).toHaveBeenCalledTimes(1);
+ expect(catalogResourcesResponse.mock.calls[0][0]).toEqual({
+ ...defaultQueryVariables,
+ });
+ });
+ });
+
+ describe('when sorting changes', () => {
+ const newSort = 'MOST_AWESOME_ASC';
+
+ beforeEach(async () => {
+ catalogResourcesResponse.mockResolvedValue(catalogResponseBody);
+ await createComponent();
+ await findCatalogSearch().vm.$emit('update-sorting', newSort);
+ });
+
+ it('passes it to the graphql query', () => {
+ expect(catalogResourcesResponse).toHaveBeenCalledTimes(2);
+ expect(catalogResourcesResponse.mock.calls[1][0]).toEqual({
+ ...defaultQueryVariables,
+ sortValue: newSort,
+ });
+ });
+ });
+
+ describe('when search component emits a new search term', () => {
+ const newSearch = 'sloths';
+
+ describe('and there are no results', () => {
+ beforeEach(async () => {
+ catalogResourcesResponse.mockResolvedValue(emptyCatalogResponseBody);
+ await createComponent();
+ await findCatalogSearch().vm.$emit('update-search-term', newSearch);
+ });
+
+ it('renders the empty state and passes down the search query', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findEmptyState().props().searchTerm).toBe(newSearch);
+ });
+ });
+
+ describe('and there are results', () => {
+ beforeEach(async () => {
+ catalogResourcesResponse.mockResolvedValue(catalogResponseBody);
+ await createComponent();
+ await findCatalogSearch().vm.$emit('update-search-term', newSearch);
+ });
+
+ it('passes it to the graphql query', () => {
+ expect(catalogResourcesResponse).toHaveBeenCalledTimes(2);
+ expect(catalogResourcesResponse.mock.calls[1][0]).toEqual({
+ ...defaultQueryVariables,
+ searchTerm: newSearch,
+ });
+ });
+ });
+ });
+ });
+
describe('pages count', () => {
- describe('when the fetchMore call suceeds', () => {
+ describe('when the fetchMore call succeeds', () => {
beforeEach(async () => {
catalogResourcesResponse.mockResolvedValue(catalogResponseBody);
@@ -157,6 +240,31 @@ describe('CiResourcesPage', () => {
});
});
+ describe.each`
+ event | payload
+ ${'update-search-term'} | ${'cat'}
+ ${'update-sorting'} | ${'CREATED_ASC'}
+ `('when $event event is emitted', ({ event, payload }) => {
+ beforeEach(async () => {
+ catalogResourcesResponse.mockResolvedValue(catalogResponseBody);
+ await createComponent();
+ });
+
+ it('resets the page count', async () => {
+ expect(findCiResourcesList().props().currentPage).toBe(1);
+
+ findCiResourcesList().vm.$emit('onNextPage');
+ await waitForPromises();
+
+ expect(findCiResourcesList().props().currentPage).toBe(2);
+
+ await findCatalogSearch().vm.$emit(event, payload);
+ await waitForPromises();
+
+ expect(findCiResourcesList().props().currentPage).toBe(1);
+ });
+ });
+
describe('when the fetchMore call fails', () => {
const errorMessage = 'there was an error';
diff --git a/spec/frontend/ci/catalog/mock.js b/spec/frontend/ci/catalog/mock.js
index 125f003224c..e370ac5054f 100644
--- a/spec/frontend/ci/catalog/mock.js
+++ b/spec/frontend/ci/catalog/mock.js
@@ -1,5 +1,3 @@
-import { componentsMockData } from '~/ci/catalog/constants';
-
export const emptyCatalogResponseBody = {
data: {
ciCatalogResources: {
@@ -39,12 +37,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-42',
__typename: 'CiCatalogResource',
},
@@ -55,12 +47,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-41',
__typename: 'CiCatalogResource',
},
@@ -71,12 +57,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-40',
__typename: 'CiCatalogResource',
},
@@ -87,12 +67,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-39',
__typename: 'CiCatalogResource',
},
@@ -103,12 +77,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-38',
__typename: 'CiCatalogResource',
},
@@ -119,12 +87,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-37',
__typename: 'CiCatalogResource',
},
@@ -135,12 +97,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-36',
__typename: 'CiCatalogResource',
},
@@ -151,12 +107,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-35',
__typename: 'CiCatalogResource',
},
@@ -167,12 +117,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-34',
__typename: 'CiCatalogResource',
},
@@ -183,12 +127,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-33',
__typename: 'CiCatalogResource',
},
@@ -199,12 +137,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-32',
__typename: 'CiCatalogResource',
},
@@ -215,12 +147,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-31',
__typename: 'CiCatalogResource',
},
@@ -231,12 +157,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-30',
__typename: 'CiCatalogResource',
},
@@ -247,12 +167,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-29',
__typename: 'CiCatalogResource',
},
@@ -263,12 +177,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-28',
__typename: 'CiCatalogResource',
},
@@ -279,12 +187,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-27',
__typename: 'CiCatalogResource',
},
@@ -295,12 +197,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-26',
__typename: 'CiCatalogResource',
},
@@ -311,12 +207,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-25',
__typename: 'CiCatalogResource',
},
@@ -327,12 +217,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-24',
__typename: 'CiCatalogResource',
},
@@ -343,12 +227,6 @@ export const catalogResponseBody = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-23',
__typename: 'CiCatalogResource',
},
@@ -379,12 +257,6 @@ export const catalogSinglePageResponse = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-45',
__typename: 'CiCatalogResource',
},
@@ -395,12 +267,6 @@ export const catalogSinglePageResponse = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-44',
__typename: 'CiCatalogResource',
},
@@ -411,12 +277,6 @@ export const catalogSinglePageResponse = {
description: 'A simple component',
starCount: 0,
latestVersion: null,
- rootNamespace: {
- id: 'gid://gitlab/Group/185',
- fullPath: 'frontend-fixtures',
- name: 'frontend-fixtures',
- __typename: 'Namespace',
- },
webPath: '/frontend-fixtures/project-43',
__typename: 'CiCatalogResource',
},
@@ -434,7 +294,6 @@ export const catalogSharedDataMock = {
icon: null,
description: 'This is the description of the repo',
name: 'Ruby',
- rootNamespace: { id: 1, fullPath: '/group/project', name: 'my-dumb-project' },
starCount: 1,
latestVersion: {
__typename: 'Release',
@@ -444,7 +303,7 @@ export const catalogSharedDataMock = {
releasedAt: Date.now(),
author: { id: 1, webUrl: 'profile/1', name: 'username' },
},
- webPath: 'path/to/project',
+ webPath: '/path/to/project',
},
},
};
@@ -454,6 +313,7 @@ export const catalogAdditionalDetailsMock = {
ciCatalogResource: {
__typename: 'CiCatalogResource',
id: `gid://gitlab/CiCatalogResource/1`,
+ webPath: '/twitter/project',
openIssuesCount: 4,
openMergeRequestsCount: 10,
readmeHtml: '<h1>Hello world</h1>',
@@ -502,12 +362,6 @@ const generateResourcesNodes = (count = 20, startId = 0) => {
description: `This is a component that does a bunch of stuff and is really just a number: ${i}`,
icon: 'my-icon',
name: `My component #${i}`,
- rootNamespace: {
- id: 1,
- __typename: 'Namespace',
- name: 'namespaceName',
- path: 'namespacePath',
- },
starCount: 10,
latestVersion: {
__typename: 'Release',
@@ -526,13 +380,47 @@ const generateResourcesNodes = (count = 20, startId = 0) => {
export const mockCatalogResourceItem = generateResourcesNodes(1)[0];
+const componentsMockData = {
+ __typename: 'CiComponentConnection',
+ nodes: [
+ {
+ id: 'gid://gitlab/Ci::Component/1',
+ name: 'Ruby gal',
+ description: 'This is a pretty amazing component that does EVERYTHING ruby.',
+ path: 'gitlab.com/gitlab-org/ruby-gal@~latest',
+ inputs: [{ name: 'version', default: '1.0.0', required: true }],
+ },
+ {
+ id: 'gid://gitlab/Ci::Component/2',
+ name: 'Javascript madness',
+ description: 'Adds some spice to your life.',
+ path: 'gitlab.com/gitlab-org/javascript-madness@~latest',
+ inputs: [
+ { name: 'isFun', default: 'true', required: true },
+ { name: 'RandomNumber', default: '10', required: false },
+ ],
+ },
+ {
+ id: 'gid://gitlab/Ci::Component/3',
+ name: 'Go go go',
+ description: 'When you write Go, you gotta go go go.',
+ path: 'gitlab.com/gitlab-org/go-go-go@~latest',
+ inputs: [{ name: 'version', default: '1.0.0', required: true }],
+ },
+ ],
+};
+
export const mockComponents = {
data: {
ciCatalogResource: {
__typename: 'CiCatalogResource',
id: `gid://gitlab/CiCatalogResource/1`,
- components: {
- ...componentsMockData,
+ webPath: '/twitter/project-1',
+ latestVersion: {
+ id: 'gid://gitlab/Version/1',
+ components: {
+ ...componentsMockData,
+ },
},
},
},
@@ -543,7 +431,11 @@ export const mockComponentsEmpty = {
ciCatalogResource: {
__typename: 'CiCatalogResource',
id: `gid://gitlab/CiCatalogResource/1`,
- components: [],
+ webPath: '/twitter/project-1',
+ latestVersion: {
+ id: 'gid://gitlab/Version/1',
+ components: [],
+ },
},
},
};
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
index 610aae3946f..721e2b831fc 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
@@ -1,6 +1,16 @@
import { nextTick } from 'vue';
-import { GlDrawer, GlFormCombobox, GlFormInput, GlFormSelect, GlModal } from '@gitlab/ui';
+import {
+ GlDrawer,
+ GlFormCombobox,
+ GlFormGroup,
+ GlFormInput,
+ GlFormSelect,
+ GlLink,
+ GlModal,
+ GlSprintf,
+} from '@gitlab/ui';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { helpPagePath } from '~/helpers/help_page_helper';
import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
import CiVariableDrawer from '~/ci/ci_variable_list/components/ci_variable_drawer.vue';
import { awsTokenList } from '~/ci/ci_variable_list/components/ci_variable_autocomplete_tokens';
@@ -20,6 +30,8 @@ describe('CI Variable Drawer', () => {
let wrapper;
let trackingSpy;
+ const itif = (condition) => (condition ? it : it.skip);
+
const mockProjectVariable = mockVariablesWithScopes(projectString)[0];
const mockProjectVariableFileType = mockVariablesWithScopes(projectString)[1];
const mockEnvScope = 'staging';
@@ -74,6 +86,7 @@ describe('CI Variable Drawer', () => {
const findDrawer = () => wrapper.findComponent(GlDrawer);
const findEnvironmentScopeDropdown = () => wrapper.findComponent(CiEnvironmentsDropdown);
const findExpandedCheckbox = () => wrapper.findByTestId('ci-variable-expanded-checkbox');
+ const findFlagsDocsLink = () => wrapper.findByTestId('ci-variable-flags-docs-link');
const findKeyField = () => wrapper.findComponent(GlFormCombobox);
const findMaskedCheckbox = () => wrapper.findByTestId('ci-variable-masked-checkbox');
const findProtectedCheckbox = () => wrapper.findByTestId('ci-variable-protected-checkbox');
@@ -81,6 +94,26 @@ describe('CI Variable Drawer', () => {
const findValueLabel = () => wrapper.findByTestId('ci-variable-value-label');
const findTitle = () => findDrawer().find('h2');
const findTypeDropdown = () => wrapper.findComponent(GlFormSelect);
+ const findVariablesPrecedenceDocsLink = () =>
+ wrapper.findByTestId('ci-variable-precedence-docs-link');
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent({ stubs: { GlFormGroup, GlLink, GlSprintf } });
+ });
+
+ it('renders docs link for variables precendece', () => {
+ expect(findVariablesPrecedenceDocsLink().attributes('href')).toBe(
+ helpPagePath('ci/variables/index', { anchor: 'cicd-variable-precedence' }),
+ );
+ });
+
+ it('renders docs link for flags', () => {
+ expect(findFlagsDocsLink().attributes('href')).toBe(
+ helpPagePath('ci/variables/index', { anchor: 'define-a-cicd-variable-in-the-ui' }),
+ );
+ });
+ });
describe('validations', () => {
describe('type dropdown', () => {
@@ -263,12 +296,22 @@ describe('CI Variable Drawer', () => {
expect(findKeyField().props('tokenList')).toBe(awsTokenList);
});
- it('cannot submit with empty key', async () => {
- expect(findConfirmBtn().attributes('disabled')).toBeDefined();
-
- await findKeyField().vm.$emit('input', 'NEW_VARIABLE');
-
- expect(findConfirmBtn().attributes('disabled')).toBeUndefined();
+ const keyFeedbackMessage = "A variable key can only contain letters, numbers, and '_'.";
+ describe.each`
+ key | feedbackMessage | submitButtonDisabledState
+ ${'validKey123'} | ${''} | ${undefined}
+ ${'VALID_KEY'} | ${''} | ${undefined}
+ ${''} | ${''} | ${'true'}
+ ${'invalid!!key'} | ${keyFeedbackMessage} | ${'true'}
+ ${'key with whitespace'} | ${keyFeedbackMessage} | ${'true'}
+ ${'multiline\nkey'} | ${keyFeedbackMessage} | ${'true'}
+ `('key validation', ({ key, feedbackMessage, submitButtonDisabledState }) => {
+ it(`validates key ${key} correctly`, async () => {
+ await findKeyField().vm.$emit('input', key);
+
+ expect(findConfirmBtn().attributes('disabled')).toBe(submitButtonDisabledState);
+ expect(wrapper.text()).toContain(feedbackMessage);
+ });
});
});
@@ -284,52 +327,106 @@ describe('CI Variable Drawer', () => {
expect(findConfirmBtn().attributes('disabled')).toBeUndefined();
});
- describe.each`
- value | canSubmit | trackingErrorProperty
- ${'secretValue'} | ${true} | ${null}
- ${'~v@lid:symbols.'} | ${true} | ${null}
- ${'short'} | ${false} | ${null}
- ${'multiline\nvalue'} | ${false} | ${'\n'}
- ${'dollar$ign'} | ${false} | ${'$'}
- ${'unsupported|char'} | ${false} | ${'|'}
- `('masking requirements', ({ value, canSubmit, trackingErrorProperty }) => {
- beforeEach(async () => {
- createComponent();
-
- trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- await findKeyField().vm.$emit('input', 'NEW_VARIABLE');
- await findValueField().vm.$emit('input', value);
- await findMaskedCheckbox().vm.$emit('input', true);
- });
+ const invalidValues = {
+ short: 'short',
+ multiLine: 'multiline\nvalue',
+ unsupportedChar: 'unsupported|char',
+ twoUnsupportedChars: 'unsupported|chars!',
+ threeUnsupportedChars: '%unsupported|chars!',
+ shortAndMultiLine: 'sho\nrt',
+ shortAndUnsupportedChar: 'short!',
+ shortAndMultiLineAndUnsupportedChar: 'short\n!',
+ multiLineAndUnsupportedChar: 'multiline\nvalue!',
+ };
+ const maskedValidationIssuesText = {
+ short: 'The value must have at least 8 characters.',
+ multiLine:
+ 'This value cannot be masked because it contains the following characters: whitespace characters.',
+ unsupportedChar:
+ 'This value cannot be masked because it contains the following characters: |.',
+ unsupportedDollarChar:
+ 'This value cannot be masked because it contains the following characters: $.',
+ twoUnsupportedChars:
+ 'This value cannot be masked because it contains the following characters: |, !.',
+ threeUnsupportedChars:
+ 'This value cannot be masked because it contains the following characters: %, |, !.',
+ shortAndMultiLine:
+ 'This value cannot be masked because it contains the following characters: whitespace characters. The value must have at least 8 characters.',
+ shortAndUnsupportedChar:
+ 'This value cannot be masked because it contains the following characters: !. The value must have at least 8 characters.',
+ shortAndMultiLineAndUnsupportedChar:
+ 'This value cannot be masked because it contains the following characters: ! and whitespace characters. The value must have at least 8 characters.',
+ multiLineAndUnsupportedChar:
+ 'This value cannot be masked because it contains the following characters: ! and whitespace characters.',
+ };
- it(`${
- canSubmit ? 'can submit' : 'shows validation errors and disables submit button'
- } when value is '${value}'`, () => {
- if (canSubmit) {
+ describe.each`
+ value | canSubmit | trackingErrorProperty | validationIssueKey
+ ${'secretValue'} | ${true} | ${null} | ${''}
+ ${'~v@lid:symbols.'} | ${true} | ${null} | ${''}
+ ${invalidValues.short} | ${false} | ${null} | ${'short'}
+ ${invalidValues.multiLine} | ${false} | ${'\n'} | ${'multiLine'}
+ ${'dollar$ign'} | ${false} | ${'$'} | ${'unsupportedDollarChar'}
+ ${invalidValues.unsupportedChar} | ${false} | ${'|'} | ${'unsupportedChar'}
+ ${invalidValues.twoUnsupportedChars} | ${false} | ${'|!'} | ${'twoUnsupportedChars'}
+ ${invalidValues.threeUnsupportedChars} | ${false} | ${'%|!'} | ${'threeUnsupportedChars'}
+ ${invalidValues.shortAndMultiLine} | ${false} | ${'\n'} | ${'shortAndMultiLine'}
+ ${invalidValues.shortAndUnsupportedChar} | ${false} | ${'!'} | ${'shortAndUnsupportedChar'}
+ ${invalidValues.shortAndMultiLineAndUnsupportedChar} | ${false} | ${'\n!'} | ${'shortAndMultiLineAndUnsupportedChar'}
+ ${invalidValues.multiLineAndUnsupportedChar} | ${false} | ${'\n!'} | ${'multiLineAndUnsupportedChar'}
+ `(
+ 'masking requirements',
+ ({ value, canSubmit, trackingErrorProperty, validationIssueKey }) => {
+ beforeEach(() => {
+ createComponent();
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ findKeyField().vm.$emit('input', 'NEW_VARIABLE');
+ findValueField().vm.$emit('input', value);
+ findMaskedCheckbox().vm.$emit('input', true);
+ });
+
+ itif(canSubmit)(`can submit when value is ${value}`, () => {
+ /* eslint-disable jest/no-standalone-expect */
expect(findValueLabel().attributes('invalid-feedback')).toBe('');
expect(findConfirmBtn().attributes('disabled')).toBeUndefined();
- } else {
- expect(findValueLabel().attributes('invalid-feedback')).toBe(
- 'This variable value does not meet the masking requirements.',
- );
- expect(findConfirmBtn().attributes('disabled')).toBeDefined();
- }
- });
+ /* eslint-enable jest/no-standalone-expect */
+ });
+
+ itif(!canSubmit)(
+ `shows validation errors and disables submit button when value is ${value}`,
+ () => {
+ const validationIssueText = maskedValidationIssuesText[validationIssueKey] || '';
+
+ /* eslint-disable jest/no-standalone-expect */
+ expect(findValueLabel().attributes('invalid-feedback')).toBe(validationIssueText);
+ expect(findConfirmBtn().attributes('disabled')).toBeDefined();
+ /* eslint-enable jest/no-standalone-expect */
+ },
+ );
+
+ itif(trackingErrorProperty)(
+ `sends the correct variable validation tracking event when value is ${value}`,
+ () => {
+ /* eslint-disable jest/no-standalone-expect */
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTION, {
+ label: DRAWER_EVENT_LABEL,
+ property: trackingErrorProperty,
+ });
+ /* eslint-enable jest/no-standalone-expect */
+ },
+ );
- it(`${
- trackingErrorProperty ? 'sends the correct' : 'does not send the'
- } variable validation tracking event when value is '${value}'`, () => {
- const trackingEventSent = trackingErrorProperty ? 1 : 0;
- expect(trackingSpy).toHaveBeenCalledTimes(trackingEventSent);
-
- if (trackingErrorProperty) {
- expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTION, {
- label: DRAWER_EVENT_LABEL,
- property: trackingErrorProperty,
- });
- }
- });
- });
+ itif(!trackingErrorProperty)(
+ `does not send the the correct variable validation tracking event when value is ${value}`,
+ () => {
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(trackingSpy).toHaveBeenCalledTimes(0);
+ },
+ );
+ },
+ );
it('only sends the tracking event once', async () => {
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
diff --git a/spec/frontend/ci/common/pipelines_table_spec.js b/spec/frontend/ci/common/pipelines_table_spec.js
index f6d3121109f..ca07e0ab8c8 100644
--- a/spec/frontend/ci/common/pipelines_table_spec.js
+++ b/spec/frontend/ci/common/pipelines_table_spec.js
@@ -16,7 +16,7 @@ import {
TRACKING_CATEGORIES,
} from '~/ci/constants';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
describe('Pipelines Table', () => {
let wrapper;
diff --git a/spec/frontend/ci/job_details/components/job_header_spec.js b/spec/frontend/ci/job_details/components/job_header_spec.js
index d12267807ac..0b98d5fa935 100644
--- a/spec/frontend/ci/job_details/components/job_header_spec.js
+++ b/spec/frontend/ci/job_details/components/job_header_spec.js
@@ -1,7 +1,7 @@
import { GlButton, GlAvatarLink, GlTooltip } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import JobHeader from '~/ci/job_details/components/job_header.vue';
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
diff --git a/spec/frontend/ci/job_details/components/job_log_controllers_spec.js b/spec/frontend/ci/job_details/components/job_log_controllers_spec.js
index 84c664aca34..078ad4aee34 100644
--- a/spec/frontend/ci/job_details/components/job_log_controllers_spec.js
+++ b/spec/frontend/ci/job_details/components/job_log_controllers_spec.js
@@ -30,17 +30,12 @@ describe('Job log controllers', () => {
jobLog: mockJobLog,
};
- const createWrapper = (props, { jobLogJumpToFailures = false } = {}) => {
+ const createWrapper = (props) => {
wrapper = mount(JobLogControllers, {
propsData: {
...defaultProps,
...props,
},
- provide: {
- glFeatures: {
- jobLogJumpToFailures,
- },
- },
data() {
return {
searchTerm: '82',
@@ -62,6 +57,10 @@ describe('Job log controllers', () => {
const findJobLogSearch = () => wrapper.findComponent(GlSearchBoxByClick);
const findSearchHelp = () => wrapper.findComponent(HelpPopover);
const findScrollFailure = () => wrapper.find('[data-testid="job-controller-scroll-to-failure"]');
+ const findShowFullScreenButton = () =>
+ wrapper.find('[data-testid="job-controller-enter-fullscreen"]');
+ const findExitFullScreenButton = () =>
+ wrapper.find('[data-testid="job-controller-exit-fullscreen"]');
describe('Truncate information', () => {
describe('with isJobLogSizeVisible', () => {
@@ -199,14 +198,6 @@ describe('Job log controllers', () => {
});
describe('scroll to failure button', () => {
- describe('with feature flag disabled', () => {
- it('does not display button', () => {
- createWrapper();
-
- expect(findScrollFailure().exists()).toBe(false);
- });
- });
-
describe('with red text failures on the page', () => {
let firstFailure;
let secondFailure;
@@ -214,7 +205,7 @@ describe('Job log controllers', () => {
beforeEach(() => {
jest.spyOn(document, 'querySelectorAll').mockReturnValueOnce(['mock-element']);
- createWrapper({}, { jobLogJumpToFailures: true });
+ createWrapper();
firstFailure = document.createElement('div');
firstFailure.className = 'term-fg-l-red';
@@ -262,7 +253,7 @@ describe('Job log controllers', () => {
beforeEach(() => {
jest.spyOn(document, 'querySelectorAll').mockReturnValueOnce([]);
- createWrapper({}, { jobLogJumpToFailures: true });
+ createWrapper();
});
it('is disabled', () => {
@@ -274,7 +265,7 @@ describe('Job log controllers', () => {
beforeEach(() => {
jest.spyOn(document, 'querySelectorAll').mockReturnValueOnce(['mock-element']);
- createWrapper({ isComplete: false }, { jobLogJumpToFailures: true });
+ createWrapper();
});
it('is enabled', () => {
@@ -286,7 +277,7 @@ describe('Job log controllers', () => {
beforeEach(() => {
jest.spyOn(commonUtils, 'backOff').mockRejectedValueOnce();
- createWrapper({}, { jobLogJumpToFailures: true });
+ createWrapper();
});
it('stays disabled', () => {
@@ -318,4 +309,53 @@ describe('Job log controllers', () => {
expect(wrapper.emitted('searchResults')).toEqual([[[]]]);
});
});
+
+ describe('Fullscreen controls', () => {
+ it('displays a disabled "Show fullscreen" button', () => {
+ createWrapper();
+
+ expect(findShowFullScreenButton().exists()).toBe(true);
+ expect(findShowFullScreenButton().attributes('disabled')).toBe('disabled');
+ });
+
+ it('displays a enabled "Show fullscreen" button', () => {
+ createWrapper({
+ fullScreenModeAvailable: true,
+ });
+
+ expect(findShowFullScreenButton().exists()).toBe(true);
+ expect(findShowFullScreenButton().attributes('disabled')).toBeUndefined();
+ });
+
+ it('emits a enterFullscreen event when the show fullscreen is clicked', async () => {
+ createWrapper({
+ fullScreenModeAvailable: true,
+ });
+
+ await findShowFullScreenButton().trigger('click');
+
+ expect(wrapper.emitted('enterFullscreen')).toHaveLength(1);
+ });
+
+ it('displays a enabled "Exit fullscreen" button', () => {
+ createWrapper({
+ fullScreenModeAvailable: true,
+ fullScreenEnabled: true,
+ });
+
+ expect(findExitFullScreenButton().exists()).toBe(true);
+ expect(findExitFullScreenButton().attributes('disabled')).toBeUndefined();
+ });
+
+ it('emits a exitFullscreen event when the exit fullscreen is clicked', async () => {
+ createWrapper({
+ fullScreenModeAvailable: true,
+ fullScreenEnabled: true,
+ });
+
+ await findExitFullScreenButton().trigger('click');
+
+ expect(wrapper.emitted('exitFullscreen')).toHaveLength(1);
+ });
+ });
});
diff --git a/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js b/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
deleted file mode 100644
index 5abf2a5ce53..00000000000
--- a/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
+++ /dev/null
@@ -1,103 +0,0 @@
-import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import CollapsibleSection from '~/ci/job_details/components/log/collapsible_section.vue';
-import LogLine from '~/ci/job_details/components/log/line.vue';
-import LogLineHeader from '~/ci/job_details/components/log/line_header.vue';
-import { collapsibleSectionClosed, collapsibleSectionOpened } from './mock_data';
-
-describe('Job Log Collapsible Section', () => {
- let wrapper;
-
- const jobLogEndpoint = 'jobs/335';
-
- const findLogLineHeader = () => wrapper.findComponent(LogLineHeader);
- const findLogLineHeaderSvg = () => findLogLineHeader().find('svg');
- const findLogLines = () => wrapper.findAllComponents(LogLine);
-
- const createComponent = (props = {}) => {
- wrapper = mount(CollapsibleSection, {
- propsData: {
- ...props,
- },
- });
- };
-
- describe('with closed section', () => {
- beforeEach(() => {
- createComponent({
- section: collapsibleSectionClosed,
- jobLogEndpoint,
- });
- });
-
- it('renders clickable header line', () => {
- expect(findLogLineHeader().text()).toBe('1 foo');
- expect(findLogLineHeader().attributes('role')).toBe('button');
- });
-
- it('renders an icon with a closed state', () => {
- expect(findLogLineHeaderSvg().attributes('data-testid')).toBe('chevron-lg-right-icon');
- });
-
- it('does not render collapsed lines', () => {
- expect(findLogLines()).toHaveLength(0);
- });
- });
-
- describe('with opened section', () => {
- beforeEach(() => {
- createComponent({
- section: collapsibleSectionOpened,
- jobLogEndpoint,
- });
- });
-
- it('renders clickable header line', () => {
- expect(findLogLineHeader().text()).toContain('foo');
- expect(findLogLineHeader().attributes('role')).toBe('button');
- });
-
- it('renders an icon with the open state', () => {
- expect(findLogLineHeaderSvg().attributes('data-testid')).toBe('chevron-lg-down-icon');
- });
-
- it('renders collapsible lines', () => {
- expect(findLogLines().at(0).text()).toContain('this is a collapsible nested section');
- expect(findLogLines()).toHaveLength(collapsibleSectionOpened.lines.length);
- });
- });
-
- it('emits onClickCollapsibleLine on click', async () => {
- createComponent({
- section: collapsibleSectionOpened,
- jobLogEndpoint,
- });
-
- findLogLineHeader().trigger('click');
-
- await nextTick();
- expect(wrapper.emitted('onClickCollapsibleLine').length).toBe(1);
- });
-
- describe('with search results', () => {
- it('passes isHighlighted prop correctly', () => {
- const mockSearchResults = [
- {
- content: [{ text: 'foo' }],
- lineNumber: 1,
- offset: 5,
- section: 'prepare-script',
- section_header: true,
- },
- ];
-
- createComponent({
- section: collapsibleSectionOpened,
- jobLogEndpoint,
- searchResults: mockSearchResults,
- });
-
- expect(findLogLineHeader().props('isHighlighted')).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/ci/job_details/components/log/line_header_spec.js b/spec/frontend/ci/job_details/components/log/line_header_spec.js
index c75f5fa30d5..0ac33f5aa5a 100644
--- a/spec/frontend/ci/job_details/components/log/line_header_spec.js
+++ b/spec/frontend/ci/job_details/components/log/line_header_spec.js
@@ -95,12 +95,14 @@ describe('Job Log Header Line', () => {
});
describe('with duration', () => {
- beforeEach(() => {
+ it('renders the duration badge', () => {
createComponent({ ...defaultProps, duration: '00:10' });
+ expect(wrapper.findComponent(DurationBadge).exists()).toBe(true);
});
- it('renders the duration badge', () => {
- expect(wrapper.findComponent(DurationBadge).exists()).toBe(true);
+ it('does not render the duration badge with hidden duration', () => {
+ createComponent({ ...defaultProps, hideDuration: true, duration: '00:10' });
+ expect(wrapper.findComponent(DurationBadge).exists()).toBe(false);
});
});
diff --git a/spec/frontend/ci/job_details/components/log/log_spec.js b/spec/frontend/ci/job_details/components/log/log_spec.js
index 1931d5046dc..de02c7aad6d 100644
--- a/spec/frontend/ci/job_details/components/log/log_spec.js
+++ b/spec/frontend/ci/job_details/components/log/log_spec.js
@@ -6,9 +6,12 @@ import waitForPromises from 'helpers/wait_for_promises';
import { scrollToElement } from '~/lib/utils/common_utils';
import Log from '~/ci/job_details/components/log/log.vue';
import LogLineHeader from '~/ci/job_details/components/log/line_header.vue';
+import LineNumber from '~/ci/job_details/components/log/line_number.vue';
import { logLinesParser } from '~/ci/job_details/store/utils';
import { mockJobLog, mockJobLogLineCount } from './mock_data';
+const mockPagePath = 'project/-/jobs/99';
+
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
scrollToElement: jest.fn(),
@@ -24,7 +27,12 @@ describe('Job Log', () => {
Vue.use(Vuex);
const createComponent = (props) => {
+ store = new Vuex.Store({ actions, state });
+
wrapper = mount(Log, {
+ provide: {
+ pagePath: mockPagePath,
+ },
propsData: {
...props,
},
@@ -36,39 +44,34 @@ describe('Job Log', () => {
toggleCollapsibleLineMock = jest.fn();
actions = {
toggleCollapsibleLine: toggleCollapsibleLineMock,
+ setupFullScreenListeners: jest.fn(),
};
+ const { lines, sections } = logLinesParser(mockJobLog);
+
state = {
- jobLog: logLinesParser(mockJobLog),
- jobLogEndpoint: 'jobs/id',
+ jobLog: lines,
+ jobLogSections: sections,
};
-
- store = new Vuex.Store({
- actions,
- state,
- });
});
- const findCollapsibleLine = () => wrapper.findComponent(LogLineHeader);
- const findAllCollapsibleLines = () => wrapper.findAllComponents(LogLineHeader);
+ const findLineNumbers = () => wrapper.findAllComponents(LineNumber);
+ const findLineHeader = () => wrapper.findComponent(LogLineHeader);
+ const findLineHeaders = () => wrapper.findAllComponents(LogLineHeader);
describe('line numbers', () => {
beforeEach(() => {
createComponent();
});
- it.each([...Array(mockJobLogLineCount).keys()])(
- 'renders a line number for each line %d',
- (index) => {
- const lineNumber = wrapper
- .findAll('.js-log-line')
- .at(index)
- .find(`#L${index + 1}`);
+ it('renders a line number for each line with an href', () => {
+ for (let i = 0; i < mockJobLogLineCount; i += 1) {
+ const w = findLineNumbers().at(i);
- expect(lineNumber.text()).toBe(`${index + 1}`);
- expect(lineNumber.attributes('href')).toBe(`${state.jobLogEndpoint}#L${index + 1}`);
- },
- );
+ expect(w.text()).toBe(`${i + 1}`);
+ expect(w.attributes('href')).toBe(`${mockPagePath}#L${i + 1}`);
+ }
+ });
});
describe('collapsible sections', () => {
@@ -77,22 +80,54 @@ describe('Job Log', () => {
});
it('renders a clickable header section', () => {
- expect(findCollapsibleLine().attributes('role')).toBe('button');
+ expect(findLineHeader().attributes('role')).toBe('button');
});
it('renders an icon with the open state', () => {
- expect(findCollapsibleLine().find('[data-testid="chevron-lg-down-icon"]').exists()).toBe(
- true,
- );
+ expect(findLineHeader().find('[data-testid="chevron-lg-down-icon"]').exists()).toBe(true);
});
describe('on click header section', () => {
it('calls toggleCollapsibleLine', () => {
- findCollapsibleLine().trigger('click');
+ findLineHeader().trigger('click');
expect(toggleCollapsibleLineMock).toHaveBeenCalled();
});
});
+
+ describe('duration', () => {
+ it('shows duration', () => {
+ expect(findLineHeader().props('duration')).toBe('00:00');
+ expect(findLineHeader().props('hideDuration')).toBe(false);
+ });
+
+ it('hides duration', () => {
+ state.jobLogSections['resolve-secrets'].hideDuration = true;
+ createComponent();
+
+ expect(findLineHeader().props('duration')).toBe('00:00');
+ expect(findLineHeader().props('hideDuration')).toBe(true);
+ });
+ });
+
+ describe('when a section is collapsed', () => {
+ beforeEach(() => {
+ state.jobLogSections['prepare-executor'].isClosed = true;
+
+ createComponent();
+ });
+
+ it('hides lines in section', () => {
+ expect(findLineNumbers().wrappers.map((w) => w.text())).toEqual([
+ '1',
+ '2',
+ '3',
+ '4',
+ // closed section not shown
+ '7',
+ ]);
+ });
+ });
});
describe('anchor scrolling', () => {
@@ -119,19 +154,19 @@ describe('Job Log', () => {
it('scrolls to line number', async () => {
createComponent();
- state.jobLog = logLinesParser(mockJobLog, [], '#L6');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L6').lines;
await waitForPromises();
expect(scrollToElement).toHaveBeenCalledTimes(1);
- state.jobLog = logLinesParser(mockJobLog, [], '#L7');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L7').lines;
await waitForPromises();
expect(scrollToElement).toHaveBeenCalledTimes(1);
});
it('line number within collapsed section is visible', () => {
- state.jobLog = logLinesParser(mockJobLog, [], '#L6');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L6').lines;
createComponent();
@@ -150,15 +185,14 @@ describe('Job Log', () => {
},
],
section: 'prepare-executor',
- section_header: true,
lineNumber: 3,
},
];
createComponent({ searchResults: mockSearchResults });
- expect(findAllCollapsibleLines().at(0).props('isHighlighted')).toBe(true);
- expect(findAllCollapsibleLines().at(1).props('isHighlighted')).toBe(false);
+ expect(findLineHeaders().at(0).props('isHighlighted')).toBe(true);
+ expect(findLineHeaders().at(1).props('isHighlighted')).toBe(false);
});
});
});
diff --git a/spec/frontend/ci/job_details/components/log/mock_data.js b/spec/frontend/ci/job_details/components/log/mock_data.js
index d9b1354f475..066f783586b 100644
--- a/spec/frontend/ci/job_details/components/log/mock_data.js
+++ b/spec/frontend/ci/job_details/components/log/mock_data.js
@@ -65,141 +65,182 @@ export const mockContentSection = [
},
];
-export const mockJobLog = [...mockJobLines, ...mockEmptySection, ...mockContentSection];
-
-export const mockJobLogLineCount = 6; // `text` entries in mockJobLog
-
-export const originalTrace = [
+export const mockJobLogEnd = [
{
- offset: 1,
- content: [
- {
- text: 'Downloading',
- },
- ],
+ offset: 1008,
+ content: [{ text: 'Job succeeded' }],
},
];
-export const regularIncremental = [
- {
- offset: 2,
- content: [
- {
- text: 'log line',
- },
- ],
- },
+export const mockJobLog = [
+ ...mockJobLines,
+ ...mockEmptySection,
+ ...mockContentSection,
+ ...mockJobLogEnd,
];
-export const regularIncrementalRepeated = [
+export const mockJobLogLineCount = 7; // `text` entries in mockJobLog
+
+export const mockContentSectionClosed = [
{
- offset: 1,
+ offset: 0,
content: [
{
- text: 'log line',
+ text: 'Using Docker executor with image dev.gitlab.org3',
},
],
+ section: 'mock-closed-section',
+ section_header: true,
+ section_options: { collapsed: true },
+ },
+ {
+ offset: 1003,
+ content: [{ text: 'Docker executor with image registry.gitlab.com ...' }],
+ section: 'mock-closed-section',
+ },
+ {
+ offset: 1004,
+ content: [{ text: 'Starting service ...', style: 'term-fg-l-green' }],
+ section: 'mock-closed-section',
+ },
+ {
+ offset: 1005,
+ content: [],
+ section: 'mock-closed-section',
+ section_footer: true,
+ section_duration: '00:09',
},
];
-export const headerTrace = [
+export const mockContentSectionHiddenDuration = [
{
- offset: 1,
+ offset: 0,
+ content: [{ text: 'Line 1' }],
+ section: 'mock-hidden-duration-section',
section_header: true,
- content: [
- {
- text: 'log line',
- },
- ],
- section: 'section',
+ section_options: { hide_duration: 'true' },
+ },
+ {
+ offset: 1001,
+ content: [{ text: 'Line 2' }],
+ section: 'mock-hidden-duration-section',
+ },
+ {
+ offset: 1002,
+ content: [],
+ section: 'mock-hidden-duration-section',
+ section_footer: true,
+ section_duration: '00:09',
},
];
-export const headerTraceIncremental = [
+export const mockContentSubsection = [
{
- offset: 1,
+ offset: 0,
+ content: [{ text: 'Line 1' }],
+ section: 'mock-section',
section_header: true,
- content: [
- {
- text: 'updated log line',
- },
- ],
- section: 'section',
},
-];
-
-export const collapsibleTrace = [
{
- offset: 1,
+ offset: 1002,
+ content: [{ text: 'Line 2 - section content' }],
+ section: 'mock-section',
+ },
+ {
+ offset: 1003,
+ content: [{ text: 'Line 3 - sub section header' }],
+ section: 'sub-section',
section_header: true,
- content: [
- {
- text: 'log line',
- },
- ],
- section: 'section',
},
{
- offset: 2,
- content: [
- {
- text: 'log line',
- },
- ],
- section: 'section',
+ offset: 1004,
+ content: [{ text: 'Line 4 - sub section content' }],
+ section: 'sub-section',
+ },
+ {
+ offset: 1005,
+ content: [{ text: 'Line 5 - sub sub section header with no content' }],
+ section: 'sub-sub-section',
+ section_header: true,
+ },
+ {
+ offset: 1006,
+ content: [],
+ section: 'sub-sub-section',
+ section_footer: true,
+ section_duration: '00:00',
+ },
+
+ {
+ offset: 1007,
+ content: [{ text: 'Line 6 - sub section content 2' }],
+ section: 'sub-section',
+ },
+ {
+ offset: 1008,
+ content: [],
+ section: 'sub-section',
+ section_footer: true,
+ section_duration: '00:29',
+ },
+ {
+ offset: 1009,
+ content: [{ text: 'Line 7 - section content' }],
+ section: 'mock-section',
+ },
+ {
+ offset: 1010,
+ content: [],
+ section: 'mock-section',
+ section_footer: true,
+ section_duration: '00:59',
+ },
+ {
+ offset: 1011,
+ content: [{ text: 'Job succeeded' }],
},
];
-export const collapsibleTraceIncremental = [
+export const mockTruncatedBottomSection = [
+ // only the top of a section is obtained, such as when a job gets cancelled
{
- offset: 2,
+ offset: 1004,
content: [
{
- text: 'updated log line',
+ text: 'Starting job',
},
],
- section: 'section',
+ section: 'mock-section',
+ section_header: true,
+ },
+ {
+ offset: 1005,
+ content: [{ text: 'Job interrupted' }],
+ section: 'mock-section',
},
];
-export const collapsibleSectionClosed = {
- offset: 5,
- section_header: true,
- isHeader: true,
- isClosed: true,
- line: {
- content: [{ text: 'foo' }],
- section: 'prepare-script',
- lineNumber: 1,
- },
- section_duration: '00:03',
- lines: [
- {
- offset: 80,
- content: [{ text: 'this is a collapsible nested section' }],
- section: 'prepare-script',
- lineNumber: 2,
- },
- ],
-};
-
-export const collapsibleSectionOpened = {
- offset: 5,
- section_header: true,
- isHeader: true,
- isClosed: false,
- line: {
- content: [{ text: 'foo' }],
- section: 'prepare-script',
- lineNumber: 1,
- },
- section_duration: '00:03',
- lines: [
- {
- offset: 80,
- content: [{ text: 'this is a collapsible nested section' }],
- section: 'prepare-script',
- lineNumber: 2,
- },
- ],
-};
+export const mockTruncatedTopSection = [
+ // only the bottom half of a section is obtained, such as when jobs are cut off due to large sizes
+ {
+ offset: 1008,
+ content: [{ text: 'Line N - incomplete section content' }],
+ section: 'mock-section',
+ },
+ {
+ offset: 1009,
+ content: [{ text: 'Line N+1 - incomplete section content' }],
+ section: 'mock-section',
+ },
+ {
+ offset: 1010,
+ content: [],
+ section: 'mock-section',
+ section_footer: true,
+ section_duration: '00:59',
+ },
+ {
+ offset: 1011,
+ content: [{ text: 'Job succeeded' }],
+ },
+];
diff --git a/spec/frontend/ci/job_details/components/manual_variables_form_spec.js b/spec/frontend/ci/job_details/components/manual_variables_form_spec.js
index 3391cafb4fc..4961b605ee3 100644
--- a/spec/frontend/ci/job_details/components/manual_variables_form_spec.js
+++ b/spec/frontend/ci/job_details/components/manual_variables_form_spec.js
@@ -1,7 +1,6 @@
import { GlSprintf, GlLink } from '@gitlab/ui';
-import { createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
import { createAlert } from '~/alert';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -24,9 +23,8 @@ import {
mockJobRetryMutationData,
} from '../mock_data';
-const localVue = createLocalVue();
jest.mock('~/alert');
-localVue.use(VueApollo);
+Vue.use(VueApollo);
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
@@ -62,7 +60,6 @@ describe('Manual Variables Form', () => {
]);
const options = {
- localVue,
apolloProvider: mockApollo,
};
@@ -180,6 +177,9 @@ describe('Manual Variables Form', () => {
beforeEach(async () => {
await createComponent({
handlers: {
+ getJobQueryResponseHandlerWithVariables: jest
+ .fn()
+ .mockResolvedValue(mockJobWithVariablesResponse),
playJobMutationHandler: jest.fn().mockResolvedValue(mockJobPlayMutationData),
},
});
@@ -211,6 +211,15 @@ describe('Manual Variables Form', () => {
expect(requestHandlers.playJobMutationHandler).toHaveBeenCalledTimes(1);
expect(redirectTo).toHaveBeenCalledWith(mockJobPlayMutationData.data.jobPlay.job.webPath); // eslint-disable-line import/no-deprecated
});
+
+ it('does not refetch variables after job is run', async () => {
+ expect(requestHandlers.getJobQueryResponseHandlerWithVariables).toHaveBeenCalledTimes(1);
+
+ findRunBtn().vm.$emit('click');
+ await waitForPromises();
+
+ expect(requestHandlers.getJobQueryResponseHandlerWithVariables).toHaveBeenCalledTimes(1);
+ });
});
describe('when play mutation is unsuccessful', () => {
@@ -237,6 +246,9 @@ describe('Manual Variables Form', () => {
await createComponent({
props: { isRetryable: true },
handlers: {
+ getJobQueryResponseHandlerWithVariables: jest
+ .fn()
+ .mockResolvedValue(mockJobWithVariablesResponse),
retryJobMutationHandler: jest.fn().mockResolvedValue(mockJobRetryMutationData),
},
});
@@ -253,6 +265,15 @@ describe('Manual Variables Form', () => {
expect(requestHandlers.retryJobMutationHandler).toHaveBeenCalledTimes(1);
expect(redirectTo).toHaveBeenCalledWith(mockJobRetryMutationData.data.jobRetry.job.webPath); // eslint-disable-line import/no-deprecated
});
+
+ it('does not refetch variables after job is rerun', async () => {
+ expect(requestHandlers.getJobQueryResponseHandlerWithVariables).toHaveBeenCalledTimes(1);
+
+ findRunBtn().vm.$emit('click');
+ await waitForPromises();
+
+ expect(requestHandlers.getJobQueryResponseHandlerWithVariables).toHaveBeenCalledTimes(1);
+ });
});
describe('when retry mutation is unsuccessful', () => {
diff --git a/spec/frontend/ci/job_details/components/sidebar/job_container_item_spec.js b/spec/frontend/ci/job_details/components/sidebar/job_container_item_spec.js
index 0eabaefd5de..697235dbe54 100644
--- a/spec/frontend/ci/job_details/components/sidebar/job_container_item_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/job_container_item_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import delayedJobFixture from 'test_fixtures/jobs/delayed.json';
import JobContainerItem from '~/ci/job_details/components/sidebar/job_container_item.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import job from 'jest/ci/jobs_mock_data';
describe('JobContainerItem', () => {
diff --git a/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js b/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js
index 37a2ca75df0..3b6cc85472b 100644
--- a/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js
@@ -3,7 +3,7 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import DetailRow from '~/ci/job_details/components/sidebar/sidebar_detail_row.vue';
import SidebarJobDetailsContainer from '~/ci/job_details/components/sidebar/sidebar_job_details_container.vue';
import createStore from '~/ci/job_details/store';
-import job from 'jest/ci/jobs_mock_data';
+import job, { testSummaryData, testSummaryDataWithFailures } from 'jest/ci/jobs_mock_data';
describe('Job Sidebar Details Container', () => {
let store;
@@ -12,6 +12,7 @@ describe('Job Sidebar Details Container', () => {
const findJobTimeout = () => wrapper.findByTestId('job-timeout');
const findJobTags = () => wrapper.findByTestId('job-tags');
const findAllDetailsRow = () => wrapper.findAllComponents(DetailRow);
+ const findTestSummary = () => wrapper.findByTestId('test-summary');
const createWrapper = ({ props = {} } = {}) => {
store = createStore();
@@ -22,6 +23,9 @@ describe('Job Sidebar Details Container', () => {
stubs: {
DetailRow,
},
+ provide: {
+ pipelineTestReportUrl: '/root/test-unit-test-reports/-/pipelines/512/test_report',
+ },
}),
);
};
@@ -90,6 +94,37 @@ describe('Job Sidebar Details Container', () => {
});
});
+ describe('Test summary details', () => {
+ it('displays the test summary section', async () => {
+ createWrapper();
+
+ await store.dispatch('receiveJobSuccess', job);
+ await store.dispatch('receiveTestSummarySuccess', testSummaryData);
+
+ expect(findTestSummary().exists()).toBe(true);
+ expect(findTestSummary().text()).toContain('Test summary');
+ expect(findTestSummary().text()).toContain('1');
+ });
+
+ it('does not display the test summary section', async () => {
+ createWrapper();
+
+ await store.dispatch('receiveJobSuccess', job);
+
+ expect(findTestSummary().exists()).toBe(false);
+ });
+
+ it('displays the failure count message', async () => {
+ createWrapper();
+
+ await store.dispatch('receiveJobSuccess', job);
+ await store.dispatch('receiveTestSummarySuccess', testSummaryDataWithFailures);
+
+ expect(findTestSummary().text()).toContain('Test summary');
+ expect(findTestSummary().text()).toContain('1 of 2 failed');
+ });
+ });
+
describe('timeout', () => {
const {
metadata: { timeout_human_readable, timeout_source },
diff --git a/spec/frontend/ci/job_details/components/sidebar/stages_dropdown_spec.js b/spec/frontend/ci/job_details/components/sidebar/stages_dropdown_spec.js
index 54c5a73f757..a629c1c185a 100644
--- a/spec/frontend/ci/job_details/components/sidebar/stages_dropdown_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/stages_dropdown_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { Mousetrap } from '~/lib/mousetrap';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import StagesDropdown from '~/ci/job_details/components/sidebar/stages_dropdown.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import * as copyToClipboard from '~/behaviors/copy_to_clipboard';
import {
mockPipelineWithoutRef,
diff --git a/spec/frontend/ci/job_details/job_app_spec.js b/spec/frontend/ci/job_details/job_app_spec.js
index 2bd0429ef56..8601850a403 100644
--- a/spec/frontend/ci/job_details/job_app_spec.js
+++ b/spec/frontend/ci/job_details/job_app_spec.js
@@ -4,7 +4,6 @@ import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { TEST_HOST } from 'helpers/test_constants';
import EmptyState from '~/ci/job_details/components/empty_state.vue';
import EnvironmentsBlock from '~/ci/job_details/components/environments_block.vue';
import ErasedBlock from '~/ci/job_details/components/erased_block.vue';
@@ -29,8 +28,9 @@ describe('Job App', () => {
let mock;
const initSettings = {
- endpoint: `${TEST_HOST}jobs/123.json`,
- pagePath: `${TEST_HOST}jobs/123`,
+ jobEndpoint: '/group1/project1/-/jobs/99.json',
+ logEndpoint: '/group1/project1/-/jobs/99/trace',
+ testReportSummaryUrl: '/group1/project1/-/jobs/99/test_report_summary.json',
};
const props = {
@@ -50,8 +50,8 @@ describe('Job App', () => {
};
const setupAndMount = async ({ jobData = {}, jobLogData = {} } = {}) => {
- mock.onGet(initSettings.endpoint).replyOnce(HTTP_STATUS_OK, { ...job, ...jobData });
- mock.onGet(`${initSettings.pagePath}/trace.json`).reply(HTTP_STATUS_OK, jobLogData);
+ mock.onGet(initSettings.jobEndpoint).replyOnce(HTTP_STATUS_OK, { ...job, ...jobData });
+ mock.onGet(initSettings.logEndpoint).reply(HTTP_STATUS_OK, jobLogData);
const asyncInit = store.dispatch('init', initSettings);
diff --git a/spec/frontend/ci/job_details/store/actions_spec.js b/spec/frontend/ci/job_details/store/actions_spec.js
index 849f55ac444..9c4b241b6eb 100644
--- a/spec/frontend/ci/job_details/store/actions_spec.js
+++ b/spec/frontend/ci/job_details/store/actions_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
import {
- setJobLogOptions,
+ init,
clearEtagPoll,
stopPolling,
requestJob,
@@ -15,7 +15,6 @@ import {
fetchJobLog,
startPollingJobLog,
stopPollingJobLog,
- receiveJobLogSuccess,
receiveJobLogError,
toggleCollapsibleLine,
requestJobsForStage,
@@ -25,11 +24,24 @@ import {
hideSidebar,
showSidebar,
toggleSidebar,
+ receiveTestSummarySuccess,
+ requestTestSummary,
+ enterFullscreenSuccess,
+ exitFullscreenSuccess,
+ fullScreenContainerSetUpResult,
} from '~/ci/job_details/store/actions';
+import { isScrolledToBottom } from '~/lib/utils/scroll_utils';
+
import * as types from '~/ci/job_details/store/mutation_types';
import state from '~/ci/job_details/store/state';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import { testSummaryData } from 'jest/ci/jobs_mock_data';
+
+jest.mock('~/lib/utils/scroll_utils');
+
+const mockJobEndpoint = '/group1/project1/-/jobs/99.json';
+const mockLogEndpoint = '/group1/project1/-/jobs/99/trace';
describe('Job State actions', () => {
let mockedState;
@@ -38,22 +50,28 @@ describe('Job State actions', () => {
mockedState = state();
});
- describe('setJobLogOptions', () => {
+ describe('init', () => {
it('should commit SET_JOB_LOG_OPTIONS mutation', () => {
return testAction(
- setJobLogOptions,
- { endpoint: '/group1/project1/-/jobs/99.json', pagePath: '/group1/project1/-/jobs/99' },
+ init,
+ {
+ jobEndpoint: mockJobEndpoint,
+ logEndpoint: mockLogEndpoint,
+ testReportSummaryUrl: '/group1/project1/-/jobs/99/test_report_summary.json',
+ },
mockedState,
[
{
type: types.SET_JOB_LOG_OPTIONS,
payload: {
- endpoint: '/group1/project1/-/jobs/99.json',
- pagePath: '/group1/project1/-/jobs/99',
+ fullScreenAPIAvailable: false,
+ jobEndpoint: mockJobEndpoint,
+ logEndpoint: mockLogEndpoint,
+ testReportSummaryUrl: '/group1/project1/-/jobs/99/test_report_summary.json',
},
},
],
- [],
+ [{ type: 'fetchJob' }],
);
});
});
@@ -96,7 +114,7 @@ describe('Job State actions', () => {
let mock;
beforeEach(() => {
- mockedState.jobEndpoint = `${TEST_HOST}/endpoint.json`;
+ mockedState.jobEndpoint = mockJobEndpoint;
mock = new MockAdapter(axios);
});
@@ -108,9 +126,7 @@ describe('Job State actions', () => {
describe('success', () => {
it('dispatches requestJob and receiveJobSuccess', () => {
- mock
- .onGet(`${TEST_HOST}/endpoint.json`)
- .replyOnce(HTTP_STATUS_OK, { id: 121212, name: 'karma' });
+ mock.onGet(mockJobEndpoint).replyOnce(HTTP_STATUS_OK, { id: 121212, name: 'karma' });
return testAction(
fetchJob,
@@ -200,7 +216,7 @@ describe('Job State actions', () => {
let mock;
beforeEach(() => {
- mockedState.jobLogEndpoint = `${TEST_HOST}/endpoint`;
+ mockedState.logEndpoint = mockLogEndpoint;
mock = new MockAdapter(axios);
});
@@ -211,46 +227,46 @@ describe('Job State actions', () => {
});
describe('success', () => {
- it('dispatches requestJobLog, receiveJobLogSuccess and stopPollingJobLog when job is complete', () => {
- mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(HTTP_STATUS_OK, {
- html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
- complete: true,
+ let jobLogPayload;
+
+ beforeEach(() => {
+ isScrolledToBottom.mockReturnValue(false);
+ });
+
+ describe('when job is complete', () => {
+ beforeEach(() => {
+ jobLogPayload = {
+ html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
+ complete: true,
+ };
+
+ mock.onGet(mockLogEndpoint).replyOnce(HTTP_STATUS_OK, jobLogPayload);
});
- return testAction(
- fetchJobLog,
- null,
- mockedState,
- [],
- [
- {
- type: 'toggleScrollisInBottom',
- payload: true,
- },
- {
- payload: {
- html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
- complete: true,
+ it('commits RECEIVE_JOB_LOG_SUCCESS, dispatches stopPollingJobLog and requestTestSummary', () => {
+ return testAction(
+ fetchJobLog,
+ null,
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_JOB_LOG_SUCCESS,
+ payload: jobLogPayload,
},
- type: 'receiveJobLogSuccess',
- },
- {
- type: 'stopPollingJobLog',
- },
- ],
- );
+ ],
+ [{ type: 'stopPollingJobLog' }, { type: 'requestTestSummary' }],
+ );
+ });
});
describe('when job is incomplete', () => {
- let jobLogPayload;
-
beforeEach(() => {
jobLogPayload = {
html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
complete: false,
};
- mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(HTTP_STATUS_OK, jobLogPayload);
+ mock.onGet(mockLogEndpoint).replyOnce(HTTP_STATUS_OK, jobLogPayload);
});
it('dispatches startPollingJobLog', () => {
@@ -258,12 +274,13 @@ describe('Job State actions', () => {
fetchJobLog,
null,
mockedState,
- [],
[
- { type: 'toggleScrollisInBottom', payload: true },
- { type: 'receiveJobLogSuccess', payload: jobLogPayload },
- { type: 'startPollingJobLog' },
+ {
+ type: types.RECEIVE_JOB_LOG_SUCCESS,
+ payload: jobLogPayload,
+ },
],
+ [{ type: 'startPollingJobLog' }],
);
});
@@ -274,10 +291,44 @@ describe('Job State actions', () => {
fetchJobLog,
null,
mockedState,
+ [
+ {
+ type: types.RECEIVE_JOB_LOG_SUCCESS,
+ payload: jobLogPayload,
+ },
+ ],
[],
+ );
+ });
+ });
+
+ describe('when user scrolled to the bottom', () => {
+ beforeEach(() => {
+ isScrolledToBottom.mockReturnValue(true);
+
+ jobLogPayload = {
+ html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
+ complete: true,
+ };
+
+ mock.onGet(mockLogEndpoint).replyOnce(HTTP_STATUS_OK, jobLogPayload);
+ });
+
+ it('should auto scroll to bottom by dispatching scrollBottom', () => {
+ return testAction(
+ fetchJobLog,
+ null,
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_JOB_LOG_SUCCESS,
+ payload: jobLogPayload,
+ },
+ ],
[
- { type: 'toggleScrollisInBottom', payload: true },
- { type: 'receiveJobLogSuccess', payload: jobLogPayload },
+ { type: 'stopPollingJobLog' },
+ { type: 'requestTestSummary' },
+ { type: 'scrollBottom' },
],
);
});
@@ -286,7 +337,7 @@ describe('Job State actions', () => {
describe('server error', () => {
beforeEach(() => {
- mock.onGet(`${TEST_HOST}/endpoint/trace.json`).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+ mock.onGet(mockLogEndpoint).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
});
it('dispatches requestJobLog and receiveJobLogError', () => {
@@ -306,7 +357,7 @@ describe('Job State actions', () => {
describe('unexpected error', () => {
beforeEach(() => {
- mock.onGet(`${TEST_HOST}/endpoint/trace.json`).reply(() => {
+ mock.onGet(mockLogEndpoint).reply(() => {
throw new Error('an error');
});
});
@@ -389,18 +440,6 @@ describe('Job State actions', () => {
});
});
- describe('receiveJobLogSuccess', () => {
- it('should commit RECEIVE_JOB_LOG_SUCCESS mutation', () => {
- return testAction(
- receiveJobLogSuccess,
- 'hello world',
- mockedState,
- [{ type: types.RECEIVE_JOB_LOG_SUCCESS, payload: 'hello world' }],
- [],
- );
- });
- });
-
describe('receiveJobLogError', () => {
it('should commit stop polling job log', () => {
return testAction(receiveJobLogError, null, mockedState, [], [{ type: 'stopPollingJobLog' }]);
@@ -516,4 +555,95 @@ describe('Job State actions', () => {
);
});
});
+
+ describe('requestTestSummarySuccess', () => {
+ it('should commit RECEIVE_TEST_SUMMARY_SUCCESS mutation', () => {
+ return testAction(
+ receiveTestSummarySuccess,
+ { total: {}, test_suites: [] },
+ mockedState,
+ [{ type: types.RECEIVE_TEST_SUMMARY_SUCCESS, payload: { total: {}, test_suites: [] } }],
+ [],
+ );
+ });
+ });
+
+ describe('requestTestSummary', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ stopPolling();
+ clearEtagPoll();
+ });
+
+ describe('success', () => {
+ it('dispatches receiveTestSummarySuccess', () => {
+ mockedState.testReportSummaryUrl = `${TEST_HOST}/test_report_summary.json`;
+
+ mock
+ .onGet(`${TEST_HOST}/test_report_summary.json`)
+ .replyOnce(HTTP_STATUS_OK, testSummaryData);
+
+ return testAction(
+ requestTestSummary,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_TEST_SUMMARY_COMPLETE }],
+ [
+ {
+ payload: testSummaryData,
+ type: 'receiveTestSummarySuccess',
+ },
+ ],
+ );
+ });
+ });
+
+ describe('without testReportSummaryUrl', () => {
+ it('does not dispatch any actions or mutations', () => {
+ return testAction(requestTestSummary, null, mockedState, [], []);
+ });
+ });
+ });
+
+ describe('enterFullscreenSuccess', () => {
+ it('should commit ENTER_FULLSCREEN_SUCCESS mutation', () => {
+ return testAction(
+ enterFullscreenSuccess,
+ {},
+ mockedState,
+ [{ type: types.ENTER_FULLSCREEN_SUCCESS }],
+ [],
+ );
+ });
+ });
+
+ describe('exitFullscreenSuccess', () => {
+ it('should commit EXIT_FULLSCREEN_SUCCESS mutation', () => {
+ return testAction(
+ exitFullscreenSuccess,
+ {},
+ mockedState,
+ [{ type: types.EXIT_FULLSCREEN_SUCCESS }],
+ [],
+ );
+ });
+ });
+
+ describe('fullScreenContainerSetUpResult', () => {
+ it('should commit FULL_SCREEN_CONTAINER_SET_UP mutation', () => {
+ return testAction(
+ fullScreenContainerSetUpResult,
+ {},
+ mockedState,
+ [{ type: types.FULL_SCREEN_CONTAINER_SET_UP, payload: {} }],
+ [],
+ );
+ });
+ });
});
diff --git a/spec/frontend/ci/job_details/store/mutations_spec.js b/spec/frontend/ci/job_details/store/mutations_spec.js
index 601dff47584..d42e4c40107 100644
--- a/spec/frontend/ci/job_details/store/mutations_spec.js
+++ b/spec/frontend/ci/job_details/store/mutations_spec.js
@@ -16,13 +16,15 @@ describe('Jobs Store Mutations', () => {
describe('SET_JOB_LOG_OPTIONS', () => {
it('should set jobEndpoint', () => {
mutations[types.SET_JOB_LOG_OPTIONS](stateCopy, {
- endpoint: '/group1/project1/-/jobs/99.json',
- pagePath: '/group1/project1/-/jobs/99',
+ jobEndpoint: '/group1/project1/-/jobs/99.json',
+ logEndpoint: '/group1/project1/-/jobs/99/trace',
+ testReportSummaryUrl: '/group1/project1/-/jobs/99/test_report_summary.json',
});
expect(stateCopy).toMatchObject({
- jobLogEndpoint: '/group1/project1/-/jobs/99',
jobEndpoint: '/group1/project1/-/jobs/99.json',
+ logEndpoint: '/group1/project1/-/jobs/99/trace',
+ testReportSummaryUrl: '/group1/project1/-/jobs/99/test_report_summary.json',
});
});
});
@@ -113,7 +115,7 @@ describe('Jobs Store Mutations', () => {
it('sets the parsed log', () => {
mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, mockLog);
- expect(utils.logLinesParser).toHaveBeenCalledWith(mockLog.lines, [], '');
+ expect(utils.logLinesParser).toHaveBeenCalledWith(mockLog.lines, {}, '');
expect(stateCopy.jobLog).toEqual([
{
@@ -133,7 +135,7 @@ describe('Jobs Store Mutations', () => {
it('sets the parsed log', () => {
mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, mockLog);
- expect(utils.logLinesParser).toHaveBeenCalledWith(mockLog.lines, [], '#L1');
+ expect(utils.logLinesParser).toHaveBeenCalledWith(mockLog.lines, {}, '#L1');
expect(stateCopy.jobLog).toEqual([
{
@@ -214,9 +216,17 @@ describe('Jobs Store Mutations', () => {
describe('TOGGLE_COLLAPSIBLE_LINE', () => {
it('toggles the `isClosed` property of the provided object', () => {
- const section = { isClosed: true };
- mutations[types.TOGGLE_COLLAPSIBLE_LINE](stateCopy, section);
- expect(section.isClosed).toEqual(false);
+ stateCopy.jobLogSections = {
+ 'step-script': { isClosed: true },
+ };
+
+ mutations[types.TOGGLE_COLLAPSIBLE_LINE](stateCopy, 'step-script');
+
+ expect(stateCopy.jobLogSections['step-script'].isClosed).toEqual(false);
+
+ mutations[types.TOGGLE_COLLAPSIBLE_LINE](stateCopy, 'step-script');
+
+ expect(stateCopy.jobLogSections['step-script'].isClosed).toEqual(true);
});
});
@@ -314,4 +324,34 @@ describe('Jobs Store Mutations', () => {
expect(stateCopy.jobs).toEqual([]);
});
});
+
+ describe('ENTER_FULLSCREEN_SUCCESS', () => {
+ beforeEach(() => {
+ mutations[types.ENTER_FULLSCREEN_SUCCESS](stateCopy);
+ });
+
+ it('sets fullScreenEnabled to true', () => {
+ expect(stateCopy.fullScreenEnabled).toEqual(true);
+ });
+ });
+
+ describe('EXIT_FULLSCREEN_SUCCESS', () => {
+ beforeEach(() => {
+ mutations[types.EXIT_FULLSCREEN_SUCCESS](stateCopy);
+ });
+
+ it('sets fullScreenEnabled to false', () => {
+ expect(stateCopy.fullScreenEnabled).toEqual(false);
+ });
+ });
+
+ describe('FULL_SCREEN_CONTAINER_SET_UP', () => {
+ beforeEach(() => {
+ mutations[types.FULL_SCREEN_CONTAINER_SET_UP](stateCopy, true);
+ });
+
+ it('sets fullScreenEnabled to true', () => {
+ expect(stateCopy.fullScreenContainerSetUp).toEqual(true);
+ });
+ });
});
diff --git a/spec/frontend/ci/job_details/store/utils_spec.js b/spec/frontend/ci/job_details/store/utils_spec.js
index 8fc4eeb0ca8..6105c53a306 100644
--- a/spec/frontend/ci/job_details/store/utils_spec.js
+++ b/spec/frontend/ci/job_details/store/utils_spec.js
@@ -1,524 +1,305 @@
+import { logLinesParser } from '~/ci/job_details/store/utils';
+
import {
- logLinesParser,
- updateIncrementalJobLog,
- parseHeaderLine,
- parseLine,
- addDurationToHeader,
- isCollapsibleSection,
- findOffsetAndRemove,
- getNextLineNumber,
-} from '~/ci/job_details/store/utils';
-import {
- mockJobLog,
- originalTrace,
- regularIncremental,
- regularIncrementalRepeated,
- headerTrace,
- headerTraceIncremental,
- collapsibleTrace,
- collapsibleTraceIncremental,
+ mockJobLines,
+ mockEmptySection,
+ mockContentSection,
+ mockContentSectionClosed,
+ mockContentSectionHiddenDuration,
+ mockContentSubsection,
+ mockTruncatedBottomSection,
+ mockTruncatedTopSection,
} from '../components/log/mock_data';
describe('Jobs Store Utils', () => {
- describe('parseHeaderLine', () => {
- it('returns a new object with the header keys and the provided line parsed', () => {
- const headerLine = { content: [{ text: 'foo' }] };
- const parsedHeaderLine = parseHeaderLine(headerLine, 2);
+ describe('logLinesParser', () => {
+ it('parses plain lines', () => {
+ const result = logLinesParser(mockJobLines);
- expect(parsedHeaderLine).toEqual({
- isClosed: false,
- isHeader: true,
- line: {
- ...headerLine,
- lineNumber: 2,
- },
- lines: [],
+ expect(result).toEqual({
+ lines: [
+ {
+ offset: 0,
+ content: [
+ {
+ text: 'Running with gitlab-runner 12.1.0 (de7731dd)',
+ style: 'term-fg-l-cyan term-bold',
+ },
+ ],
+ lineNumber: 1,
+ },
+ {
+ offset: 1001,
+ content: [{ text: ' on docker-auto-scale-com 8a6210b8' }],
+ lineNumber: 2,
+ },
+ ],
+ sections: {},
});
});
- it('pre-closes a section when specified in options', () => {
- const headerLine = { content: [{ text: 'foo' }], section_options: { collapsed: 'true' } };
-
- const parsedHeaderLine = parseHeaderLine(headerLine, 2);
-
- expect(parsedHeaderLine.isClosed).toBe(true);
- });
-
- it('expands all pre-closed sections if hash is present', () => {
- const headerLine = { content: [{ text: 'foo' }], section_options: { collapsed: 'true' } };
-
- const parsedHeaderLine = parseHeaderLine(headerLine, 2, '#L33');
-
- expect(parsedHeaderLine.isClosed).toBe(false);
- });
- });
-
- describe('parseLine', () => {
- it('returns a new object with the lineNumber key added to the provided line object', () => {
- const line = { content: [{ text: 'foo' }] };
- const parsed = parseLine(line, 1);
- expect(parsed.content).toEqual(line.content);
- expect(parsed.lineNumber).toEqual(1);
- });
- });
+ it('parses an empty section', () => {
+ const result = logLinesParser(mockEmptySection);
- describe('addDurationToHeader', () => {
- const duration = {
- offset: 106,
- content: [],
- section: 'prepare-script',
- section_duration: '00:03',
- };
-
- it('adds the section duration to the correct header', () => {
- const parsed = [
- {
- isClosed: false,
- isHeader: true,
- line: {
- section: 'prepare-script',
- content: [{ text: 'foo' }],
+ expect(result).toEqual({
+ lines: [
+ {
+ offset: 1002,
+ content: [
+ {
+ text: 'Resolving secrets',
+ style: 'term-fg-l-cyan term-bold',
+ },
+ ],
+ lineNumber: 1,
+ section: 'resolve-secrets',
+ isHeader: true,
},
- lines: [],
- },
- {
- isClosed: false,
- isHeader: true,
- line: {
- section: 'foo-bar',
- content: [{ text: 'foo' }],
+ ],
+ sections: {
+ 'resolve-secrets': {
+ startLineNumber: 1,
+ endLineNumber: 1,
+ duration: '00:00',
+ isClosed: false,
},
- lines: [],
},
- ];
-
- addDurationToHeader(parsed, duration);
-
- expect(parsed[0].line.section_duration).toEqual(duration.section_duration);
- expect(parsed[1].line.section_duration).toEqual(undefined);
+ });
});
- it('does not add the section duration when the headers do not match', () => {
- const parsed = [
- {
- isClosed: false,
- isHeader: true,
- line: {
- section: 'bar-foo',
- content: [{ text: 'foo' }],
+ it('parses a section with content', () => {
+ const result = logLinesParser(mockContentSection);
+
+ expect(result).toEqual({
+ lines: [
+ {
+ content: [{ text: 'Using Docker executor with image dev.gitlab.org3' }],
+ isHeader: true,
+ lineNumber: 1,
+ offset: 1004,
+ section: 'prepare-executor',
},
- lines: [],
- },
- {
- isClosed: false,
- isHeader: true,
- line: {
- section: 'foo-bar',
- content: [{ text: 'foo' }],
+ {
+ content: [{ text: 'Docker executor with image registry.gitlab.com ...' }],
+ lineNumber: 2,
+ offset: 1005,
+ section: 'prepare-executor',
+ },
+ {
+ content: [{ style: 'term-fg-l-green', text: 'Starting service ...' }],
+ lineNumber: 3,
+ offset: 1006,
+ section: 'prepare-executor',
+ },
+ ],
+ sections: {
+ 'prepare-executor': {
+ startLineNumber: 1,
+ endLineNumber: 3,
+ duration: '00:09',
+ isClosed: false,
},
- lines: [],
- },
- ];
- addDurationToHeader(parsed, duration);
-
- expect(parsed[0].line.section_duration).toEqual(undefined);
- expect(parsed[1].line.section_duration).toEqual(undefined);
- });
-
- it('does not add when content has no headers', () => {
- const parsed = [
- {
- section: 'bar-foo',
- content: [{ text: 'foo' }],
- lineNumber: 1,
- },
- {
- section: 'foo-bar',
- content: [{ text: 'foo' }],
- lineNumber: 2,
},
- ];
-
- addDurationToHeader(parsed, duration);
-
- expect(parsed[0].line).toEqual(undefined);
- expect(parsed[1].line).toEqual(undefined);
- });
- });
-
- describe('isCollapsibleSection', () => {
- const header = {
- isHeader: true,
- line: {
- section: 'foo',
- },
- };
- const line = {
- lineNumber: 1,
- section: 'foo',
- content: [],
- };
-
- it('returns true when line belongs to the last section', () => {
- expect(isCollapsibleSection([header], header, { section: 'foo', content: [] })).toEqual(true);
- });
-
- it('returns false when last line was not an header', () => {
- expect(isCollapsibleSection([line], line, { section: 'bar' })).toEqual(false);
- });
-
- it('returns false when accumulator is empty', () => {
- expect(isCollapsibleSection([], { isHeader: true }, { section: 'bar' })).toEqual(false);
- });
-
- it('returns false when section_duration is defined', () => {
- expect(isCollapsibleSection([header], header, { section_duration: '10:00' })).toEqual(false);
- });
-
- it('returns false when `section` is not a match', () => {
- expect(isCollapsibleSection([header], header, { section: 'bar' })).toEqual(false);
- });
-
- it('returns false when no parameters are provided', () => {
- expect(isCollapsibleSection()).toEqual(false);
- });
- });
- describe('logLinesParser', () => {
- let result;
-
- beforeEach(() => {
- result = logLinesParser(mockJobLog);
- });
-
- describe('regular line', () => {
- it('adds a lineNumber property with correct index', () => {
- expect(result[0].lineNumber).toEqual(1);
- expect(result[1].lineNumber).toEqual(2);
- expect(result[2].line.lineNumber).toEqual(3);
- expect(result[3].line.lineNumber).toEqual(4);
- expect(result[3].lines[0].lineNumber).toEqual(5);
- expect(result[3].lines[1].lineNumber).toEqual(6);
});
});
- describe('collapsible section', () => {
- it('adds a `isClosed` property', () => {
- expect(result[2].isClosed).toEqual(false);
- expect(result[3].isClosed).toEqual(false);
- });
-
- it('adds a `isHeader` property', () => {
- expect(result[2].isHeader).toEqual(true);
- expect(result[3].isHeader).toEqual(true);
- });
+ it('parses a closed section with content', () => {
+ const result = logLinesParser(mockContentSectionClosed);
- it('creates a lines array property with the content of the collapsible section', () => {
- expect(result[3].lines.length).toEqual(2);
- expect(result[3].lines[0].content).toEqual(mockJobLog[5].content);
- expect(result[3].lines[1].content).toEqual(mockJobLog[6].content);
+ expect(result.sections['mock-closed-section']).toMatchObject({
+ isClosed: true,
});
});
- describe('section duration', () => {
- it('adds the section information to the header section', () => {
- expect(result[2].line.section_duration).toEqual(mockJobLog[3].section_duration);
- expect(result[3].line.section_duration).toEqual(mockJobLog[7].section_duration);
- });
-
- it('does not add section duration as a line', () => {
- expect(result[2].lines.includes(mockJobLog[5])).toEqual(false);
- expect(result[3].lines.includes(mockJobLog[9])).toEqual(false);
- });
- });
- });
-
- describe('findOffsetAndRemove', () => {
- describe('when last item is header', () => {
- const existingLog = [
- {
- isHeader: true,
- isClosed: false,
- line: { content: [{ text: 'bar' }], offset: 10, lineNumber: 1 },
- },
- ];
-
- describe('and matches the offset', () => {
- it('returns an array with the item removed', () => {
- const newData = [{ offset: 10, content: [{ text: 'foobar' }] }];
- const result = findOffsetAndRemove(newData, existingLog);
-
- expect(result).toEqual([]);
- });
- });
+ it('parses a closed section as open when hash is present', () => {
+ const result = logLinesParser(mockContentSectionClosed, {}, '#L1');
- describe('and does not match the offset', () => {
- it('returns the provided existing log', () => {
- const newData = [{ offset: 110, content: [{ text: 'foobar' }] }];
- const result = findOffsetAndRemove(newData, existingLog);
-
- expect(result).toEqual(existingLog);
- });
- });
- });
-
- describe('when last item is a regular line', () => {
- const existingLog = [{ content: [{ text: 'bar' }], offset: 10, lineNumber: 1 }];
-
- describe('and matches the offset', () => {
- it('returns an array with the item removed', () => {
- const newData = [{ offset: 10, content: [{ text: 'foobar' }] }];
- const result = findOffsetAndRemove(newData, existingLog);
-
- expect(result).toEqual([]);
- });
- });
-
- describe('and does not match the fofset', () => {
- it('returns the provided old log', () => {
- const newData = [{ offset: 101, content: [{ text: 'foobar' }] }];
- const result = findOffsetAndRemove(newData, existingLog);
-
- expect(result).toEqual(existingLog);
- });
+ expect(result.sections['mock-closed-section']).toMatchObject({
+ isClosed: false,
});
});
- describe('when last item is nested', () => {
- const existingLog = [
- {
- isHeader: true,
- isClosed: false,
- lines: [{ offset: 101, content: [{ text: 'foobar' }], lineNumber: 2 }],
- line: {
- offset: 10,
- lineNumber: 1,
- section_duration: '10:00',
- },
- },
- ];
-
- describe('and matches the offset', () => {
- it('returns an array with the last nested line item removed', () => {
- const newData = [{ offset: 101, content: [{ text: 'foobar' }] }];
+ it('parses a section with a hidden duration', () => {
+ const result = logLinesParser(mockContentSectionHiddenDuration);
- const result = findOffsetAndRemove(newData, existingLog);
- expect(result[0].lines).toEqual([]);
- });
- });
-
- describe('and does not match the offset', () => {
- it('returns the provided old log', () => {
- const newData = [{ offset: 120, content: [{ text: 'foobar' }] }];
-
- const result = findOffsetAndRemove(newData, existingLog);
- expect(result).toEqual(existingLog);
- });
+ expect(result.sections['mock-hidden-duration-section']).toMatchObject({
+ hideDuration: true,
+ duration: '00:09',
});
});
- describe('when no data is provided', () => {
- it('returns an empty array', () => {
- const result = findOffsetAndRemove();
- expect(result).toEqual([]);
- });
- });
- });
-
- describe('getNextLineNumber', () => {
- describe('when there is no previous log', () => {
- it('returns 1', () => {
- expect(getNextLineNumber([])).toEqual(1);
- expect(getNextLineNumber(undefined)).toEqual(1);
- });
- });
+ it('parses a section with a sub section', () => {
+ const result = logLinesParser(mockContentSubsection);
- describe('when last line is 1', () => {
- it('returns 1', () => {
- const log = [
+ expect(result).toEqual({
+ lines: [
{
- content: [],
+ offset: 0,
+ content: [{ text: 'Line 1' }],
lineNumber: 1,
+ section: 'mock-section',
+ isHeader: true,
},
- ];
-
- expect(getNextLineNumber(log)).toEqual(2);
- });
- });
-
- describe('with unnested line', () => {
- it('returns the lineNumber of the last item in the array', () => {
- const log = [
{
- content: [],
- lineNumber: 10,
+ offset: 1002,
+ content: [{ text: 'Line 2 - section content' }],
+ lineNumber: 2,
+ section: 'mock-section',
},
{
- content: [],
- lineNumber: 101,
+ offset: 1003,
+ content: [{ text: 'Line 3 - sub section header' }],
+ lineNumber: 3,
+ section: 'sub-section',
+ isHeader: true,
},
- ];
-
- expect(getNextLineNumber(log)).toEqual(102);
- });
- });
-
- describe('when last line is the header section', () => {
- it('returns the lineNumber of the last item in the array', () => {
- const log = [
{
- content: [],
- lineNumber: 10,
+ offset: 1004,
+ content: [{ text: 'Line 4 - sub section content' }],
+ lineNumber: 4,
+ section: 'sub-section',
},
{
+ offset: 1005,
+ content: [{ text: 'Line 5 - sub sub section header with no content' }],
+ lineNumber: 5,
+ section: 'sub-sub-section',
isHeader: true,
- line: {
- lineNumber: 101,
- content: [],
- },
- lines: [],
},
- ];
-
- expect(getNextLineNumber(log)).toEqual(102);
- });
- });
-
- describe('when last line is a nested line', () => {
- it('returns the lineNumber of the last item in the nested array', () => {
- const log = [
{
- content: [],
- lineNumber: 10,
+ offset: 1007,
+ content: [{ text: 'Line 6 - sub section content 2' }],
+ lineNumber: 6,
+ section: 'sub-section',
},
{
- isHeader: true,
- line: {
- lineNumber: 101,
- content: [],
- },
- lines: [
- {
- lineNumber: 102,
- content: [],
- },
- { lineNumber: 103, content: [] },
- ],
+ offset: 1009,
+ content: [{ text: 'Line 7 - section content' }],
+ lineNumber: 7,
+ section: 'mock-section',
+ },
+ {
+ offset: 1011,
+ content: [{ text: 'Job succeeded' }],
+ lineNumber: 8,
+ },
+ ],
+ sections: {
+ 'mock-section': {
+ startLineNumber: 1,
+ endLineNumber: 7,
+ duration: '00:59',
+ isClosed: false,
},
- ];
+ 'sub-section': {
+ startLineNumber: 3,
+ endLineNumber: 6,
+ duration: '00:29',
+ isClosed: false,
+ },
+ 'sub-sub-section': {
+ startLineNumber: 5,
+ endLineNumber: 5,
+ duration: '00:00',
+ isClosed: false,
+ },
+ },
+ });
+ });
- expect(getNextLineNumber(log)).toEqual(104);
+ it('parsing repeated lines returns the same result', () => {
+ const result1 = logLinesParser(mockJobLines);
+ const result2 = logLinesParser(mockJobLines, {
+ currentLines: result1.lines,
+ currentSections: result1.sections,
});
+
+ // `toBe` is used to ensure objects do not change and trigger Vue reactivity
+ expect(result1.lines).toBe(result2.lines);
+ expect(result1.sections).toBe(result2.sections);
});
- });
- describe('updateIncrementalJobLog', () => {
- describe('without repeated section', () => {
- it('concats and parses both arrays', () => {
- const oldLog = logLinesParser(originalTrace);
- const result = updateIncrementalJobLog(regularIncremental, oldLog);
+ it('discards repeated lines and adds new ones', () => {
+ const result1 = logLinesParser(mockContentSection);
+ const result2 = logLinesParser(
+ [
+ ...mockContentSection,
+ {
+ content: [{ text: 'offset is too low, is ignored' }],
+ offset: 500,
+ },
+ {
+ content: [{ text: 'one new line' }],
+ offset: 1007,
+ },
+ ],
+ {
+ currentLines: result1.lines,
+ currentSections: result1.sections,
+ },
+ );
- expect(result).toEqual([
+ expect(result2).toEqual({
+ lines: [
{
- offset: 1,
- content: [
- {
- text: 'Downloading',
- },
- ],
+ content: [{ text: 'Using Docker executor with image dev.gitlab.org3' }],
+ isHeader: true,
lineNumber: 1,
+ offset: 1004,
+ section: 'prepare-executor',
},
{
- offset: 2,
- content: [
- {
- text: 'log line',
- },
- ],
+ content: [{ text: 'Docker executor with image registry.gitlab.com ...' }],
lineNumber: 2,
+ offset: 1005,
+ section: 'prepare-executor',
},
- ]);
- });
- });
-
- describe('with regular line repeated offset', () => {
- it('updates the last line and formats with the incremental part', () => {
- const oldLog = logLinesParser(originalTrace);
- const result = updateIncrementalJobLog(regularIncrementalRepeated, oldLog);
-
- expect(result).toEqual([
{
- offset: 1,
- content: [
- {
- text: 'log line',
- },
- ],
- lineNumber: 1,
+ content: [{ style: 'term-fg-l-green', text: 'Starting service ...' }],
+ lineNumber: 3,
+ offset: 1006,
+ section: 'prepare-executor',
+ },
+ {
+ content: [{ text: 'one new line' }],
+ lineNumber: 4,
+ offset: 1007,
},
- ]);
+ ],
+ sections: {
+ 'prepare-executor': {
+ startLineNumber: 1,
+ endLineNumber: 3,
+ duration: '00:09',
+ isClosed: false,
+ },
+ },
});
});
- describe('with header line repeated', () => {
- it('updates the header line and formats with the incremental part', () => {
- const oldLog = logLinesParser(headerTrace);
- const result = updateIncrementalJobLog(headerTraceIncremental, oldLog);
+ it('parses an interrupted job', () => {
+ const result = logLinesParser(mockTruncatedBottomSection);
- expect(result).toEqual([
- {
- isClosed: false,
- isHeader: true,
- line: {
- offset: 1,
- section_header: true,
- content: [
- {
- text: 'updated log line',
- },
- ],
- section: 'section',
- lineNumber: 1,
- },
- lines: [],
- },
- ]);
+ expect(result.sections).toEqual({
+ 'mock-section': {
+ startLineNumber: 1,
+ endLineNumber: Infinity,
+ duration: null,
+ isClosed: false,
+ },
});
});
- describe('with collapsible line repeated', () => {
- it('updates the collapsible line and formats with the incremental part', () => {
- const oldLog = logLinesParser(collapsibleTrace);
- const result = updateIncrementalJobLog(collapsibleTraceIncremental, oldLog);
+ it('parses the ending of an incomplete section', () => {
+ const result = logLinesParser(mockTruncatedTopSection);
- expect(result).toEqual([
- {
- isClosed: false,
- isHeader: true,
- line: {
- offset: 1,
- section_header: true,
- content: [
- {
- text: 'log line',
- },
- ],
- section: 'section',
- lineNumber: 1,
- },
- lines: [
- {
- offset: 2,
- content: [
- {
- text: 'updated log line',
- },
- ],
- section: 'section',
- lineNumber: 2,
- },
- ],
- },
- ]);
+ expect(result.sections).toEqual({
+ 'mock-section': {
+ startLineNumber: 0,
+ endLineNumber: 2,
+ duration: '00:59',
+ isClosed: false,
+ },
});
});
});
diff --git a/spec/frontend/ci/jobs_mock_data.js b/spec/frontend/ci/jobs_mock_data.js
index c428de3b9d8..12833524fd9 100644
--- a/spec/frontend/ci/jobs_mock_data.js
+++ b/spec/frontend/ci/jobs_mock_data.js
@@ -1627,3 +1627,53 @@ export const mockJobLog = [
lineNumber: 23,
},
];
+
+export const testSummaryData = {
+ total: {
+ time: 0.001,
+ count: 1,
+ success: 1,
+ failed: 0,
+ skipped: 0,
+ error: 0,
+ suite_error: null,
+ },
+ test_suites: [
+ {
+ name: 'javascript',
+ total_time: 0.001,
+ total_count: 1,
+ success_count: 1,
+ failed_count: 0,
+ skipped_count: 0,
+ error_count: 0,
+ build_ids: [3633],
+ suite_error: null,
+ },
+ ],
+};
+
+export const testSummaryDataWithFailures = {
+ total: {
+ time: 0.001,
+ count: 2,
+ success: 1,
+ failed: 1,
+ skipped: 0,
+ error: 0,
+ suite_error: null,
+ },
+ test_suites: [
+ {
+ name: 'javascript',
+ total_time: 0.001,
+ total_count: 2,
+ success_count: 1,
+ failed_count: 1,
+ skipped_count: 0,
+ error_count: 0,
+ build_ids: [3633],
+ suite_error: null,
+ },
+ ],
+};
diff --git a/spec/frontend/ci/jobs_page/components/job_cells/actions_cell_spec.js b/spec/frontend/ci/jobs_page/components/job_cells/actions_cell_spec.js
index 1ffd680118e..7af333543b8 100644
--- a/spec/frontend/ci/jobs_page/components/job_cells/actions_cell_spec.js
+++ b/spec/frontend/ci/jobs_page/components/job_cells/actions_cell_spec.js
@@ -43,6 +43,7 @@ describe('Job actions cell', () => {
const cannotPlayJob = findMockJob('playable', mockJobsNodesAsGuest);
const cannotRetryJob = findMockJob('retryable', mockJobsNodesAsGuest);
const cannotPlayScheduledJob = findMockJob('scheduled', mockJobsNodesAsGuest);
+ const cannotCancelJob = findMockJob('cancelable', mockJobsNodesAsGuest);
const findRetryButton = () => wrapper.findByTestId('retry');
const findPlayButton = () => wrapper.findByTestId('play');
@@ -99,6 +100,7 @@ describe('Job actions cell', () => {
${findPlayButton} | ${'play'} | ${cannotPlayJob}
${findRetryButton} | ${'retry'} | ${cannotRetryJob}
${findPlayScheduledJobButton} | ${'play scheduled'} | ${cannotPlayScheduledJob}
+ ${findCancelButton} | ${'cancel'} | ${cannotCancelJob}
`('does not display the $action button if user cannot update build', ({ button, jobType }) => {
createComponent(jobType);
diff --git a/spec/frontend/ci/jobs_page/components/jobs_table_spec.js b/spec/frontend/ci/jobs_page/components/jobs_table_spec.js
index d14afe7dd3e..a865b7a0c0c 100644
--- a/spec/frontend/ci/jobs_page/components/jobs_table_spec.js
+++ b/spec/frontend/ci/jobs_page/components/jobs_table_spec.js
@@ -3,7 +3,7 @@ import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import JobsTable from '~/ci/jobs_page/components/jobs_table.vue';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import { DEFAULT_FIELDS_ADMIN } from '~/ci/admin/jobs_table/constants';
import ProjectCell from '~/ci/admin/jobs_table/components/cells/project_cell.vue';
import RunnerCell from '~/ci/admin/jobs_table/components/cells/runner_cell.vue';
diff --git a/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js b/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js
index 10db7f398fe..432775d469c 100644
--- a/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js
@@ -5,7 +5,7 @@ import JobItem from '~/ci/pipeline_details/graph/components/job_item.vue';
import axios from '~/lib/utils/axios_utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import ActionComponent from '~/ci/common/private/job_action_component.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
diff --git a/spec/frontend/ci/pipeline_details/graph/components/job_name_component_spec.js b/spec/frontend/ci/pipeline_details/graph/components/job_name_component_spec.js
index 1da85ad9f78..b84ca77081a 100644
--- a/spec/frontend/ci/pipeline_details/graph/components/job_name_component_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/job_name_component_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
import jobNameComponent from '~/ci/common/private/job_name_component.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
describe('job name component', () => {
let wrapper;
diff --git a/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js b/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js
index 72be51575d7..e6f89910a97 100644
--- a/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js
@@ -10,7 +10,7 @@ import { ACTION_FAILURE, UPSTREAM, DOWNSTREAM } from '~/ci/pipeline_details/grap
import LinkedPipelineComponent from '~/ci/pipeline_details/graph/components/linked_pipeline.vue';
import CancelPipelineMutation from '~/ci/pipeline_details/graphql/mutations/cancel_pipeline.mutation.graphql';
import RetryPipelineMutation from '~/ci/pipeline_details/graphql/mutations/retry_pipeline.mutation.graphql';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import mockPipeline from './linked_pipelines_mock_data';
describe('Linked pipeline', () => {
diff --git a/spec/frontend/ci/pipeline_details/header/pipeline_details_header_spec.js b/spec/frontend/ci/pipeline_details/header/pipeline_details_header_spec.js
index e8e178ed148..86b8c416a07 100644
--- a/spec/frontend/ci/pipeline_details/header/pipeline_details_header_spec.js
+++ b/spec/frontend/ci/pipeline_details/header/pipeline_details_header_spec.js
@@ -7,7 +7,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import PipelineDetailsHeader from '~/ci/pipeline_details/header/pipeline_details_header.vue';
import { BUTTON_TOOLTIP_RETRY, BUTTON_TOOLTIP_CANCEL } from '~/ci/constants';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import cancelPipelineMutation from '~/ci/pipeline_details/graphql/mutations/cancel_pipeline.mutation.graphql';
import deletePipelineMutation from '~/ci/pipeline_details/graphql/mutations/delete_pipeline.mutation.graphql';
import retryPipelineMutation from '~/ci/pipeline_details/graphql/mutations/retry_pipeline.mutation.graphql';
@@ -15,6 +15,7 @@ import getPipelineDetailsQuery from '~/ci/pipeline_details/header/graphql/querie
import {
pipelineHeaderSuccess,
pipelineHeaderRunning,
+ pipelineHeaderRunningNoPermissions,
pipelineHeaderRunningWithDuration,
pipelineHeaderFailed,
pipelineRetryMutationResponseSuccess,
@@ -33,6 +34,9 @@ describe('Pipeline details header', () => {
const successHandler = jest.fn().mockResolvedValue(pipelineHeaderSuccess);
const runningHandler = jest.fn().mockResolvedValue(pipelineHeaderRunning);
+ const runningHandlerNoPermissions = jest
+ .fn()
+ .mockResolvedValue(pipelineHeaderRunningNoPermissions);
const runningHandlerWithDuration = jest.fn().mockResolvedValue(pipelineHeaderRunningWithDuration);
const failedHandler = jest.fn().mockResolvedValue(pipelineHeaderFailed);
@@ -65,7 +69,6 @@ describe('Pipeline details header', () => {
const findPipelineName = () => wrapper.findByTestId('pipeline-name');
const findCommitTitle = () => wrapper.findByTestId('pipeline-commit-title');
const findTotalJobs = () => wrapper.findByTestId('total-jobs');
- const findComputeMinutes = () => wrapper.findByTestId('compute-minutes');
const findCommitLink = () => wrapper.findByTestId('commit-link');
const findPipelineRunningText = () => wrapper.findByTestId('pipeline-running-text').text();
const findPipelineRefText = () => wrapper.findByTestId('pipeline-ref-text').text();
@@ -82,31 +85,12 @@ describe('Pipeline details header', () => {
paths: {
pipelinesPath: '/namespace/my-project/-/pipelines',
fullProject: '/namespace/my-project',
- triggeredByPath: '',
},
};
const defaultProps = {
- name: 'Ruby 3.0 master branch pipeline',
- totalJobs: '50',
- computeMinutes: '0.65',
- yamlErrors: 'errors',
- failureReason: 'pipeline failed',
- badges: {
- schedule: true,
- trigger: false,
- child: false,
- latest: true,
- mergeTrainPipeline: false,
- mergedResultsPipeline: false,
- invalid: false,
- failed: false,
- autoDevops: false,
- detached: false,
- stuck: false,
- },
- refText:
- 'Related merge request <a class="mr-iid" href="/root/ci-project/-/merge_requests/1">!1</a> to merge <a class="ref-name" href="/root/ci-project/-/commits/test">test</a>',
+ yamlErrors: '',
+ trigger: false,
};
const createMockApolloProvider = (handlers) => {
@@ -159,11 +143,11 @@ describe('Pipeline details header', () => {
});
it('displays pipeline name', () => {
- expect(findPipelineName().text()).toBe(defaultProps.name);
+ expect(findPipelineName().text()).toBe('Build pipeline');
});
it('displays total jobs', () => {
- expect(findTotalJobs().text()).toBe('50 Jobs');
+ expect(findTotalJobs().text()).toBe('3 Jobs');
});
it('has link to commit', () => {
@@ -178,13 +162,13 @@ describe('Pipeline details header', () => {
it('displays correct badges', () => {
expect(findAllBadges()).toHaveLength(2);
- expect(wrapper.findByText('latest').exists()).toBe(true);
+ expect(wrapper.findByText('merged results').exists()).toBe(true);
expect(wrapper.findByText('Scheduled').exists()).toBe(true);
expect(wrapper.findByText('trigger token').exists()).toBe(false);
});
it('displays ref text', () => {
- expect(findPipelineRefText()).toBe('Related merge request !1 to merge test');
+ expect(findPipelineRefText()).toBe('Related merge request !1 to merge master into feature');
});
it('displays pipeline user link with required user popover attributes', () => {
@@ -209,7 +193,7 @@ describe('Pipeline details header', () => {
beforeEach(async () => {
createComponent(defaultHandlers, {
...defaultProps,
- badges: { ...defaultProps.badges, trigger: true },
+ trigger: true,
});
await waitForPromises();
@@ -222,7 +206,7 @@ describe('Pipeline details header', () => {
describe('without pipeline name', () => {
it('displays commit title', async () => {
- createComponent(defaultHandlers, { ...defaultProps, name: '' });
+ createComponent([[getPipelineDetailsQuery, runningHandler]]);
await waitForPromises();
@@ -234,22 +218,6 @@ describe('Pipeline details header', () => {
});
describe('finished pipeline', () => {
- it('displays compute minutes when not zero', async () => {
- createComponent();
-
- await waitForPromises();
-
- expect(findComputeMinutes().text()).toBe('0.65');
- });
-
- it('does not display compute minutes when zero', async () => {
- createComponent(defaultHandlers, { ...defaultProps, computeMinutes: '0.0' });
-
- await waitForPromises();
-
- expect(findComputeMinutes().exists()).toBe(false);
- });
-
it('does not display created time ago', async () => {
createComponent();
@@ -284,10 +252,6 @@ describe('Pipeline details header', () => {
await waitForPromises();
});
- it('does not display compute minutes', () => {
- expect(findComputeMinutes().exists()).toBe(false);
- });
-
it('does not display finished time ago', () => {
expect(findFinishedTimeAgo().exists()).toBe(false);
});
@@ -374,46 +338,58 @@ describe('Pipeline details header', () => {
});
describe('cancel action', () => {
- it('should call cancelPipeline Mutation with pipeline id', async () => {
- createComponent([
- [getPipelineDetailsQuery, runningHandler],
- [cancelPipelineMutation, cancelMutationHandlerSuccess],
- ]);
+ describe('with permissions', () => {
+ it('should call cancelPipeline Mutation with pipeline id', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, runningHandler],
+ [cancelPipelineMutation, cancelMutationHandlerSuccess],
+ ]);
- await waitForPromises();
+ await waitForPromises();
- findCancelButton().vm.$emit('click');
+ findCancelButton().vm.$emit('click');
- expect(cancelMutationHandlerSuccess).toHaveBeenCalledWith({
- id: pipelineHeaderRunning.data.project.pipeline.id,
+ expect(cancelMutationHandlerSuccess).toHaveBeenCalledWith({
+ id: pipelineHeaderRunning.data.project.pipeline.id,
+ });
+ expect(findAlert().exists()).toBe(false);
});
- expect(findAlert().exists()).toBe(false);
- });
- it('should render cancel action tooltip', async () => {
- createComponent([
- [getPipelineDetailsQuery, runningHandler],
- [cancelPipelineMutation, cancelMutationHandlerSuccess],
- ]);
+ it('should render cancel action tooltip', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, runningHandler],
+ [cancelPipelineMutation, cancelMutationHandlerSuccess],
+ ]);
- await waitForPromises();
+ await waitForPromises();
- expect(findCancelButton().attributes('title')).toBe(BUTTON_TOOLTIP_CANCEL);
- });
+ expect(findCancelButton().attributes('title')).toBe(BUTTON_TOOLTIP_CANCEL);
+ });
- it('should display error message on failure', async () => {
- createComponent([
- [getPipelineDetailsQuery, runningHandler],
- [cancelPipelineMutation, cancelMutationHandlerFailed],
- ]);
+ it('should display error message on failure', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, runningHandler],
+ [cancelPipelineMutation, cancelMutationHandlerFailed],
+ ]);
- await waitForPromises();
+ await waitForPromises();
- findCancelButton().vm.$emit('click');
+ findCancelButton().vm.$emit('click');
- await waitForPromises();
+ await waitForPromises();
- expect(findAlert().exists()).toBe(true);
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('without permissions', () => {
+ it('should not display cancel pipeline button', async () => {
+ createComponent([[getPipelineDetailsQuery, runningHandlerNoPermissions]]);
+
+ await waitForPromises();
+
+ expect(findCancelButton().exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/ci/pipeline_details/mock_data.js b/spec/frontend/ci/pipeline_details/mock_data.js
index 56365622544..48570b2515f 100644
--- a/spec/frontend/ci/pipeline_details/mock_data.js
+++ b/spec/frontend/ci/pipeline_details/mock_data.js
@@ -1,5 +1,7 @@
+// pipeline header fixtures located in spec/frontend/fixtures/pipeline_header.rb
import pipelineHeaderSuccess from 'test_fixtures/graphql/pipelines/pipeline_header_success.json';
import pipelineHeaderRunning from 'test_fixtures/graphql/pipelines/pipeline_header_running.json';
+import pipelineHeaderRunningNoPermissions from 'test_fixtures/graphql/pipelines/pipeline_header_running_no_permissions.json';
import pipelineHeaderRunningWithDuration from 'test_fixtures/graphql/pipelines/pipeline_header_running_with_duration.json';
import pipelineHeaderFailed from 'test_fixtures/graphql/pipelines/pipeline_header_failed.json';
@@ -13,6 +15,7 @@ threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
export {
pipelineHeaderSuccess,
pipelineHeaderRunning,
+ pipelineHeaderRunningNoPermissions,
pipelineHeaderRunningWithDuration,
pipelineHeaderFailed,
};
diff --git a/spec/frontend/ci/pipeline_details/test_reports/stores/utils_spec.js b/spec/frontend/ci/pipeline_details/test_reports/stores/utils_spec.js
index c0ffc2b34fb..ecc61ab43c0 100644
--- a/spec/frontend/ci/pipeline_details/test_reports/stores/utils_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/stores/utils_spec.js
@@ -36,5 +36,33 @@ describe('Test reports utils', () => {
expect(result).toBe('4.82s');
});
});
+
+ describe('when time is greater than a minute', () => {
+ it('should return time in minutes', () => {
+ const result = formattedTime(99);
+ expect(result).toBe('1m 39s');
+ });
+ });
+
+ describe('when time is greater than a hour', () => {
+ it('should return time in hours', () => {
+ const result = formattedTime(3606);
+ expect(result).toBe('1h 6s');
+ });
+ });
+
+ describe('when time is exact a hour', () => {
+ it('should return time as one hour', () => {
+ const result = formattedTime(3600);
+ expect(result).toBe('1h');
+ });
+ });
+
+ describe('when time is greater than a hour with some minutes', () => {
+ it('should return time in hours', () => {
+ const result = formattedTime(3662);
+ expect(result).toBe('1h 1m 2s');
+ });
+ });
});
});
diff --git a/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js b/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js
index 8ff060026da..d318aa36bcf 100644
--- a/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js
@@ -5,6 +5,7 @@ import Vue from 'vue';
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { getParameterValues } from '~/lib/utils/url_utility';
import EmptyState from '~/ci/pipeline_details/test_reports/empty_state.vue';
import TestReports from '~/ci/pipeline_details/test_reports/test_reports.vue';
import TestSummary from '~/ci/pipeline_details/test_reports/test_summary.vue';
@@ -13,6 +14,11 @@ import * as getters from '~/ci/pipeline_details/stores/test_reports/getters';
Vue.use(Vuex);
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ getParameterValues: jest.fn().mockReturnValue([]),
+}));
+
describe('Test reports app', () => {
let wrapper;
let store;
@@ -100,6 +106,22 @@ describe('Test reports app', () => {
});
});
+ describe('when a job name is provided as a query parameter', () => {
+ beforeEach(() => {
+ getParameterValues.mockReturnValue(['javascript']);
+ createComponent();
+ });
+
+ it('shows tests details', () => {
+ expect(testsDetail().exists()).toBe(true);
+ });
+
+ it('should call setSelectedSuiteIndex and fetchTestSuite', () => {
+ expect(actionSpies.setSelectedSuiteIndex).toHaveBeenCalled();
+ expect(actionSpies.fetchTestSuite).toHaveBeenCalled();
+ });
+ });
+
describe('when a suite is clicked', () => {
beforeEach(() => {
createComponent({ state: { hasFullReport: true } });
diff --git a/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js b/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js
index f6247fb4a19..46ef8a0d771 100644
--- a/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js
@@ -16,14 +16,15 @@ describe('CI Editor Header', () => {
const createComponent = ({
showHelpDrawer = false,
showJobAssistantDrawer = false,
- showAiAssistantDrawer = false,
aiChatAvailable = false,
aiCiConfigGenerator = false,
+ ciCatalogPath = '/explore/catalog',
} = {}) => {
wrapper = extendedWrapper(
shallowMount(CiEditorHeader, {
provide: {
aiChatAvailable,
+ ciCatalogPath,
glFeatures: {
aiCiConfigGenerator,
},
@@ -31,7 +32,6 @@ describe('CI Editor Header', () => {
propsData: {
showHelpDrawer,
showJobAssistantDrawer,
- showAiAssistantDrawer,
},
}),
);
@@ -39,7 +39,7 @@ describe('CI Editor Header', () => {
const findLinkBtn = () => wrapper.findByTestId('template-repo-link');
const findHelpBtn = () => wrapper.findByTestId('drawer-toggle');
- const findAiAssistnantBtn = () => wrapper.findByTestId('ai-assistant-drawer-toggle');
+ const findCatalogRepoLinkButton = () => wrapper.findByTestId('catalog-repo-link');
afterEach(() => {
unmockTracking();
@@ -55,29 +55,32 @@ describe('CI Editor Header', () => {
label,
});
};
- describe('Ai Assistant toggle button', () => {
- describe('when feature is unavailable', () => {
- it('should not show ai button when feature toggle is off', () => {
- createComponent({ aiChatAvailable: true });
- mockTracking(undefined, wrapper.element, jest.spyOn);
- expect(findAiAssistnantBtn().exists()).toBe(false);
- });
- it('should not show ai button when feature is unavailable', () => {
- createComponent({ aiCiConfigGenerator: true });
- mockTracking(undefined, wrapper.element, jest.spyOn);
- expect(findAiAssistnantBtn().exists()).toBe(false);
- });
+ describe('component repo link button', () => {
+ beforeEach(() => {
+ createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
});
- describe('when feature is available', () => {
- it('should show ai button', () => {
- createComponent({ aiCiConfigGenerator: true, aiChatAvailable: true });
- mockTracking(undefined, wrapper.element, jest.spyOn);
- expect(findAiAssistnantBtn().exists()).toBe(true);
- });
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('finds the CI/CD Catalog button', () => {
+ expect(findCatalogRepoLinkButton().exists()).toBe(true);
+ });
+
+ it('has the external-link icon', () => {
+ expect(findCatalogRepoLinkButton().props('icon')).toBe('external-link');
+ });
+
+ it('tracks the click on the Catalog button', () => {
+ const { browseCatalog } = pipelineEditorTrackingOptions.actions;
+
+ testTracker(findCatalogRepoLinkButton(), browseCatalog);
});
});
+
describe('link button', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
index 69e91f11309..43620a58572 100644
--- a/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -65,6 +65,7 @@ describe('Pipeline editor tabs component', () => {
},
provide: {
aiChatAvailable: false,
+ ciCatalogPath: '/explore/catalog',
ciConfigPath: '/path/to/ci-config',
ciLintPath: mockCiLintPath,
currentBranch: 'main',
diff --git a/spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js b/spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js
index f2818277c59..b66b44e5f06 100644
--- a/spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js
@@ -1,5 +1,12 @@
import Vue from 'vue';
-import { GlAlert, GlDisclosureDropdown, GlIcon, GlLoadingIcon, GlPopover } from '@gitlab/ui';
+import {
+ GlAlert,
+ GlDisclosureDropdown,
+ GlEmptyState,
+ GlIcon,
+ GlLoadingIcon,
+ GlPopover,
+} from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import MockAdapter from 'axios-mock-adapter';
@@ -70,7 +77,7 @@ describe('Pipeline Editor Validate Tab', () => {
const findCta = () => wrapper.findByTestId('simulate-pipeline-button');
const findDisabledCtaTooltip = () => wrapper.findByTestId('cta-tooltip');
const findHelpIcon = () => wrapper.findComponent(GlIcon);
- const findIllustration = () => wrapper.findByRole('img');
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findPipelineSource = () => wrapper.findComponent(GlDisclosureDropdown);
const findPopover = () => wrapper.findComponent(GlPopover);
@@ -283,7 +290,7 @@ describe('Pipeline Editor Validate Tab', () => {
it('returns to init state', async () => {
// init state
- expect(findIllustration().exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(true);
expect(findCiLintResults().exists()).toBe(false);
// mutations should have successful results
@@ -294,7 +301,7 @@ describe('Pipeline Editor Validate Tab', () => {
await findCancelBtn().vm.$emit('click');
// should still render init state
- expect(findIllustration().exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(true);
expect(findCiLintResults().exists()).toBe(false);
});
});
diff --git a/spec/frontend/ci/pipeline_editor/mock_data.js b/spec/frontend/ci/pipeline_editor/mock_data.js
index e08c35f1555..e700411ec57 100644
--- a/spec/frontend/ci/pipeline_editor/mock_data.js
+++ b/spec/frontend/ci/pipeline_editor/mock_data.js
@@ -22,18 +22,17 @@ export const commonOptions = {
usesExternalConfig: 'false',
validateTabIllustrationPath: 'illustrations/tab',
ymlHelpPagePath: 'help/ci/yml',
- aiChatAvailable: 'true',
};
export const editorDatasetOptions = {
initialBranchName: 'production',
pipelineEtag: 'pipelineEtag',
+ ciCatalogPath: '/explore/catalog',
...commonOptions,
};
export const expectedInjectValues = {
...commonOptions,
- aiChatAvailable: true,
usesExternalConfig: false,
totalBranches: 10,
};
diff --git a/spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js
index ca5f80f331c..fd0d17ee05b 100644
--- a/spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/ci/pipeline_editor/pipeline_editor_home_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import { GlButton, GlModal } from '@gitlab/ui';
+import { GlModal } from '@gitlab/ui';
+import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import CommitSection from '~/ci/pipeline_editor/components/commit/commit_section.vue';
@@ -60,7 +61,7 @@ describe('Pipeline editor home wrapper', () => {
const findPipelineEditorFileTree = () => wrapper.findComponent(PipelineEditorFileTree);
const findPipelineEditorHeader = () => wrapper.findComponent(PipelineEditorHeader);
const findPipelineEditorTabs = () => wrapper.findComponent(PipelineEditorTabs);
- const findFileTreeBtn = () => wrapper.findByTestId('file-tree-toggle');
+ const findPipelineEditorFileNav = () => wrapper.findComponent(PipelineEditorFileNav);
const clickHelpBtn = async () => {
await findPipelineEditorDrawer().vm.$emit('switch-drawer', EDITOR_APP_DRAWER_HELP);
@@ -279,24 +280,16 @@ describe('Pipeline editor home wrapper', () => {
describe('file tree', () => {
const toggleFileTree = async () => {
- await findFileTreeBtn().vm.$emit('click');
+ findPipelineEditorFileNav().vm.$emit('toggle-file-tree');
+ await nextTick();
};
- describe('button toggle', () => {
+ describe('file navigation', () => {
beforeEach(() => {
- createComponent({
- stubs: {
- GlButton,
- PipelineEditorFileNav,
- },
- });
- });
-
- it('shows button toggle', () => {
- expect(findFileTreeBtn().exists()).toBe(true);
+ createComponent({});
});
- it('toggles the drawer on button click', async () => {
+ it('toggles the drawer on `toggle-file-tree` event', async () => {
await toggleFileTree();
expect(findPipelineEditorFileTree().exists()).toBe(true);
diff --git a/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js b/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js
index 87df7676bf1..95fa82adc9e 100644
--- a/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js
@@ -2,7 +2,7 @@ import { GlDropdown } from '@gitlab/ui';
import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import LegacyPipelineStage from '~/ci/pipeline_mini_graph/legacy_pipeline_stage.vue';
diff --git a/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js b/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
index 55ce3c79039..4f0bf3767cd 100644
--- a/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import LinkedPipelinesMiniList from '~/ci/pipeline_mini_graph/linked_pipelines_mini_list.vue';
import mockData from './linked_pipelines_mock_data';
diff --git a/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js b/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js
index b79e7c6e251..b79662e7a89 100644
--- a/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js
@@ -1,5 +1,5 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import PipelineScheduleLastPipeline from '~/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline.vue';
import { mockPipelineScheduleNodes } from '../../../mock_data';
diff --git a/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js b/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js
index fbef4aa08eb..f824dab9ae1 100644
--- a/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js
@@ -1,4 +1,5 @@
import '~/commons';
+import { GlButton } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import PipelinesCiTemplates from '~/ci/pipelines_page/components/empty_state/pipelines_ci_templates.vue';
@@ -14,12 +15,15 @@ describe('Pipelines CI Templates', () => {
return shallowMountExtended(PipelinesCiTemplates, {
provide: {
pipelineEditorPath,
+ showJenkinsCiPrompt: false,
...propsData,
},
stubs,
});
};
+ const findMigrateFromJenkinsPrompt = () => wrapper.findByTestId('migrate-from-jenkins-prompt');
+ const findMigrationPlanBtn = () => findMigrateFromJenkinsPrompt().findComponent(GlButton);
const findTestTemplateLink = () => wrapper.findByTestId('test-template-link');
const findCiTemplates = () => wrapper.findComponent(CiTemplates);
@@ -34,6 +38,27 @@ describe('Pipelines CI Templates', () => {
);
expect(findCiTemplates().exists()).toBe(true);
});
+
+ it('does not show migrate from jenkins prompt', () => {
+ expect(findMigrateFromJenkinsPrompt().exists()).toBe(false);
+ });
+
+ describe('when Jenkinsfile is detected', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ showJenkinsCiPrompt: true });
+ });
+
+ it('shows migrate from jenkins prompt', () => {
+ expect(findMigrateFromJenkinsPrompt().exists()).toBe(true);
+ });
+
+ it('opens correct link in new tab after clicking migration plan CTA', () => {
+ expect(findMigrationPlanBtn().attributes('href')).toBe(
+ '/help/ci/migration/plan_a_migration',
+ );
+ expect(findMigrationPlanBtn().attributes('target')).toBe('_blank');
+ });
+ });
});
describe('tracking', () => {
@@ -54,5 +79,27 @@ describe('Pipelines CI Templates', () => {
label: 'Getting-Started',
});
});
+
+ describe('when Jenkinsfile detected', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ showJenkinsCiPrompt: true });
+ });
+
+ it('creates render event on page load', () => {
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'render', {
+ label: 'migrate_from_jenkins_prompt',
+ });
+ });
+
+ it('sends an event when migration plan is clicked', () => {
+ findMigrationPlanBtn().vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledTimes(2);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'template_clicked', {
+ label: 'migrate_from_jenkins_prompt',
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/ci/pipelines_page/pipelines_spec.js b/spec/frontend/ci/pipelines_page/pipelines_spec.js
index 97192058ff6..f3c28b17339 100644
--- a/spec/frontend/ci/pipelines_page/pipelines_spec.js
+++ b/spec/frontend/ci/pipelines_page/pipelines_spec.js
@@ -110,6 +110,7 @@ describe('Pipelines', () => {
suggestedCiTemplates: [],
ciRunnerSettingsPath: defaultProps.ciRunnerSettingsPath,
anyRunnersAvailable: true,
+ showJenkinsCiPrompt: false,
},
propsData: {
...defaultProps,
diff --git a/spec/frontend/ci/reports/codequality_report/store/actions_spec.js b/spec/frontend/ci/reports/codequality_report/store/actions_spec.js
deleted file mode 100644
index a606bce3d78..00000000000
--- a/spec/frontend/ci/reports/codequality_report/store/actions_spec.js
+++ /dev/null
@@ -1,190 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import testAction from 'helpers/vuex_action_helper';
-import { TEST_HOST } from 'spec/test_constants';
-import axios from '~/lib/utils/axios_utils';
-import {
- HTTP_STATUS_INTERNAL_SERVER_ERROR,
- HTTP_STATUS_NO_CONTENT,
- HTTP_STATUS_OK,
-} from '~/lib/utils/http_status';
-import createStore from '~/ci/reports/codequality_report/store';
-import * as actions from '~/ci/reports/codequality_report/store/actions';
-import * as types from '~/ci/reports/codequality_report/store/mutation_types';
-import { STATUS_NOT_FOUND } from '~/ci/reports/constants';
-import { reportIssues, parsedReportIssues } from '../mock_data';
-
-const pollInterval = 123;
-const pollIntervalHeader = {
- 'Poll-Interval': pollInterval,
-};
-
-describe('Codequality Reports actions', () => {
- let localState;
- let localStore;
-
- beforeEach(() => {
- localStore = createStore();
- localState = localStore.state;
- });
-
- describe('setPaths', () => {
- it('should commit SET_PATHS mutation', () => {
- const paths = {
- baseBlobPath: 'baseBlobPath',
- headBlobPath: 'headBlobPath',
- reportsPath: 'reportsPath',
- };
-
- return testAction(
- actions.setPaths,
- paths,
- localState,
- [{ type: types.SET_PATHS, payload: paths }],
- [],
- );
- });
- });
-
- describe('fetchReports', () => {
- const endpoint = `${TEST_HOST}/codequality_reports.json`;
- let mock;
-
- beforeEach(() => {
- localState.reportsPath = endpoint;
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- describe('on success', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', () => {
- mock.onGet(endpoint).reply(HTTP_STATUS_OK, reportIssues);
-
- return testAction(
- actions.fetchReports,
- null,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [
- {
- payload: parsedReportIssues,
- type: 'receiveReportsSuccess',
- },
- ],
- );
- });
- });
-
- describe('on error', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', () => {
- mock.onGet(endpoint).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
-
- return testAction(
- actions.fetchReports,
- null,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError', payload: expect.any(Error) }],
- );
- });
- });
-
- describe('when base report is not found', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', () => {
- const data = { status: STATUS_NOT_FOUND };
- mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(HTTP_STATUS_OK, data);
-
- return testAction(
- actions.fetchReports,
- null,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError', payload: data }],
- );
- });
- });
-
- describe('while waiting for report results', () => {
- it('continues polling until it receives data', () => {
- mock
- .onGet(endpoint)
- .replyOnce(HTTP_STATUS_NO_CONTENT, undefined, pollIntervalHeader)
- .onGet(endpoint)
- .reply(HTTP_STATUS_OK, reportIssues);
-
- return Promise.all([
- testAction(
- actions.fetchReports,
- null,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [
- {
- payload: parsedReportIssues,
- type: 'receiveReportsSuccess',
- },
- ],
- ),
- axios
- // wait for initial NO_CONTENT response to be fulfilled
- .waitForAll()
- .then(() => {
- jest.advanceTimersByTime(pollInterval);
- }),
- ]);
- });
-
- it('continues polling until it receives an error', () => {
- mock
- .onGet(endpoint)
- .replyOnce(HTTP_STATUS_NO_CONTENT, undefined, pollIntervalHeader)
- .onGet(endpoint)
- .reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
-
- return Promise.all([
- testAction(
- actions.fetchReports,
- null,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError', payload: expect.any(Error) }],
- ),
- axios
- // wait for initial NO_CONTENT response to be fulfilled
- .waitForAll()
- .then(() => {
- jest.advanceTimersByTime(pollInterval);
- }),
- ]);
- });
- });
- });
-
- describe('receiveReportsSuccess', () => {
- it('commits RECEIVE_REPORTS_SUCCESS', () => {
- const data = { issues: [] };
-
- return testAction(
- actions.receiveReportsSuccess,
- data,
- localState,
- [{ type: types.RECEIVE_REPORTS_SUCCESS, payload: data }],
- [],
- );
- });
- });
-
- describe('receiveReportsError', () => {
- it('commits RECEIVE_REPORTS_ERROR', () => {
- return testAction(
- actions.receiveReportsError,
- null,
- localState,
- [{ type: types.RECEIVE_REPORTS_ERROR, payload: null }],
- [],
- );
- });
- });
-});
diff --git a/spec/frontend/ci/reports/codequality_report/store/getters_spec.js b/spec/frontend/ci/reports/codequality_report/store/getters_spec.js
deleted file mode 100644
index f4505204f67..00000000000
--- a/spec/frontend/ci/reports/codequality_report/store/getters_spec.js
+++ /dev/null
@@ -1,94 +0,0 @@
-import createStore from '~/ci/reports/codequality_report/store';
-import * as getters from '~/ci/reports/codequality_report/store/getters';
-import { LOADING, ERROR, SUCCESS, STATUS_NOT_FOUND } from '~/ci/reports/constants';
-
-describe('Codequality reports store getters', () => {
- let localState;
- let localStore;
-
- beforeEach(() => {
- localStore = createStore();
- localState = localStore.state;
- });
-
- describe('hasCodequalityIssues', () => {
- describe('when there are issues', () => {
- it('returns true', () => {
- localState.newIssues = [{ reason: 'repetitive code' }];
- localState.resolvedIssues = [];
-
- expect(getters.hasCodequalityIssues(localState)).toEqual(true);
-
- localState.newIssues = [];
- localState.resolvedIssues = [{ reason: 'repetitive code' }];
-
- expect(getters.hasCodequalityIssues(localState)).toEqual(true);
- });
- });
-
- describe('when there are no issues', () => {
- it('returns false when there are no issues', () => {
- expect(getters.hasCodequalityIssues(localState)).toEqual(false);
- });
- });
- });
-
- describe('codequalityStatus', () => {
- describe('when loading', () => {
- it('returns loading status', () => {
- localState.isLoading = true;
-
- expect(getters.codequalityStatus(localState)).toEqual(LOADING);
- });
- });
-
- describe('on error', () => {
- it('returns error status', () => {
- localState.hasError = true;
-
- expect(getters.codequalityStatus(localState)).toEqual(ERROR);
- });
- });
-
- describe('when successfully loaded', () => {
- it('returns error status', () => {
- expect(getters.codequalityStatus(localState)).toEqual(SUCCESS);
- });
- });
- });
-
- describe('codequalityText', () => {
- it.each`
- resolvedIssues | newIssues | expectedText
- ${0} | ${0} | ${'No changes to code quality'}
- ${0} | ${1} | ${'Code quality degraded due to 1 new issue'}
- ${2} | ${0} | ${'Code quality improved due to 2 resolved issues'}
- ${1} | ${2} | ${'Code quality scanning detected 3 changes in merged results'}
- `(
- 'returns a summary containing $resolvedIssues resolved issues and $newIssues new issues',
- ({ newIssues, resolvedIssues, expectedText }) => {
- localState.newIssues = new Array(newIssues).fill({ reason: 'Repetitive code' });
- localState.resolvedIssues = new Array(resolvedIssues).fill({ reason: 'Repetitive code' });
-
- expect(getters.codequalityText(localState)).toEqual(expectedText);
- },
- );
- });
-
- describe('codequalityPopover', () => {
- describe('when base report is not available', () => {
- it('returns a popover with a documentation link', () => {
- localState.status = STATUS_NOT_FOUND;
- localState.helpPath = 'codequality_help.html';
-
- expect(getters.codequalityPopover(localState).title).toEqual(
- 'Base pipeline codequality artifact not found',
- );
- expect(getters.codequalityPopover(localState).content).toContain(
- 'Learn more about codequality reports',
- 'href="codequality_help.html"',
- );
- });
- });
- });
-});
diff --git a/spec/frontend/ci/reports/codequality_report/store/mutations_spec.js b/spec/frontend/ci/reports/codequality_report/store/mutations_spec.js
deleted file mode 100644
index 22ff86b1040..00000000000
--- a/spec/frontend/ci/reports/codequality_report/store/mutations_spec.js
+++ /dev/null
@@ -1,100 +0,0 @@
-import createStore from '~/ci/reports/codequality_report/store';
-import mutations from '~/ci/reports/codequality_report/store/mutations';
-import { STATUS_NOT_FOUND } from '~/ci/reports/constants';
-
-describe('Codequality Reports mutations', () => {
- let localState;
- let localStore;
-
- beforeEach(() => {
- localStore = createStore();
- localState = localStore.state;
- });
-
- describe('SET_PATHS', () => {
- it('sets paths to given values', () => {
- const baseBlobPath = 'base/blob/path/';
- const headBlobPath = 'head/blob/path/';
- const reportsPath = 'reports.json';
- const helpPath = 'help.html';
-
- mutations.SET_PATHS(localState, {
- baseBlobPath,
- headBlobPath,
- reportsPath,
- helpPath,
- });
-
- expect(localState.baseBlobPath).toEqual(baseBlobPath);
- expect(localState.headBlobPath).toEqual(headBlobPath);
- expect(localState.reportsPath).toEqual(reportsPath);
- expect(localState.helpPath).toEqual(helpPath);
- });
- });
-
- describe('REQUEST_REPORTS', () => {
- it('sets isLoading to true', () => {
- mutations.REQUEST_REPORTS(localState);
-
- expect(localState.isLoading).toEqual(true);
- });
- });
-
- describe('RECEIVE_REPORTS_SUCCESS', () => {
- it('sets isLoading to false', () => {
- mutations.RECEIVE_REPORTS_SUCCESS(localState, {});
-
- expect(localState.isLoading).toEqual(false);
- });
-
- it('sets hasError to false', () => {
- mutations.RECEIVE_REPORTS_SUCCESS(localState, {});
-
- expect(localState.hasError).toEqual(false);
- });
-
- it('clears status and statusReason', () => {
- mutations.RECEIVE_REPORTS_SUCCESS(localState, {});
-
- expect(localState.status).toEqual('');
- expect(localState.statusReason).toEqual('');
- });
-
- it('sets newIssues and resolvedIssues from response data', () => {
- const data = { newIssues: [{ id: 1 }], resolvedIssues: [{ id: 2 }] };
- mutations.RECEIVE_REPORTS_SUCCESS(localState, data);
-
- expect(localState.newIssues).toEqual(data.newIssues);
- expect(localState.resolvedIssues).toEqual(data.resolvedIssues);
- });
- });
-
- describe('RECEIVE_REPORTS_ERROR', () => {
- it('sets isLoading to false', () => {
- mutations.RECEIVE_REPORTS_ERROR(localState);
-
- expect(localState.isLoading).toEqual(false);
- });
-
- it('sets hasError to true', () => {
- mutations.RECEIVE_REPORTS_ERROR(localState);
-
- expect(localState.hasError).toEqual(true);
- });
-
- it('sets status based on error object', () => {
- const error = { status: STATUS_NOT_FOUND };
- mutations.RECEIVE_REPORTS_ERROR(localState, error);
-
- expect(localState.status).toEqual(error.status);
- });
-
- it('sets statusReason to string from error response data', () => {
- const data = { status_reason: 'This merge request does not have codequality reports' };
- const error = { response: { data } };
- mutations.RECEIVE_REPORTS_ERROR(localState, error);
-
- expect(localState.statusReason).toEqual(data.status_reason);
- });
- });
-});
diff --git a/spec/frontend/ci/reports/codequality_report/store/utils/codequality_parser_spec.js b/spec/frontend/ci/reports/codequality_report/utils/codequality_parser_spec.js
index f7d82d2b662..953e6173662 100644
--- a/spec/frontend/ci/reports/codequality_report/store/utils/codequality_parser_spec.js
+++ b/spec/frontend/ci/reports/codequality_report/utils/codequality_parser_spec.js
@@ -1,5 +1,5 @@
import { reportIssues, parsedReportIssues } from 'jest/ci/reports/codequality_report/mock_data';
-import { parseCodeclimateMetrics } from '~/ci/reports/codequality_report/store/utils/codequality_parser';
+import { parseCodeclimateMetrics } from '~/ci/reports/codequality_report/utils/codequality_parser';
describe('Codequality report store utils', () => {
let result;
diff --git a/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
index 4f5f9c43cb4..798cef252c9 100644
--- a/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
@@ -50,11 +50,13 @@ import {
} from '~/ci/runner/constants';
import allRunnersQuery from 'ee_else_ce/ci/runner/graphql/list/all_runners.query.graphql';
import allRunnersCountQuery from 'ee_else_ce/ci/runner/graphql/list/all_runners_count.query.graphql';
+import runnerJobCountQuery from '~/ci/runner/graphql/list/runner_job_count.query.graphql';
import { captureException } from '~/ci/runner/sentry_utils';
import {
allRunnersData,
runnersCountData,
+ runnerJobCountData,
allRunnersDataPaginated,
onlineContactTimeoutSecs,
staleTimeoutSecs,
@@ -68,6 +70,7 @@ const mockRunnersCount = runnersCountData.data.runners.count;
const mockRunnersHandler = jest.fn();
const mockRunnersCountHandler = jest.fn();
+const mockRunnerJobCountHandler = jest.fn();
jest.mock('~/alert');
jest.mock('~/ci/runner/sentry_utils');
@@ -108,6 +111,7 @@ describe('AdminRunnersApp', () => {
const handlers = [
[allRunnersQuery, mockRunnersHandler],
[allRunnersCountQuery, mockRunnersCountHandler],
+ [runnerJobCountQuery, mockRunnerJobCountHandler],
];
wrapper = mountFn(AdminRunnersApp, {
@@ -137,11 +141,13 @@ describe('AdminRunnersApp', () => {
beforeEach(() => {
mockRunnersHandler.mockResolvedValue(allRunnersData);
mockRunnersCountHandler.mockResolvedValue(runnersCountData);
+ mockRunnerJobCountHandler.mockResolvedValue(runnerJobCountData);
});
afterEach(() => {
mockRunnersHandler.mockReset();
mockRunnersCountHandler.mockReset();
+ mockRunnerJobCountHandler.mockReset();
showToast.mockReset();
});
diff --git a/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js b/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
index 27fb288c462..2504458efff 100644
--- a/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
+++ b/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
@@ -4,6 +4,7 @@ import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_help
import RunnerSummaryCell from '~/ci/runner/components/cells/runner_summary_cell.vue';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import RunnerCreatedAt from '~/ci/runner/components/runner_created_at.vue';
+import RunnerJobCount from '~/ci/runner/components/runner_job_count.vue';
import RunnerManagersBadge from '~/ci/runner/components/runner_managers_badge.vue';
import RunnerTags from '~/ci/runner/components/runner_tags.vue';
import RunnerSummaryField from '~/ci/runner/components/cells/runner_summary_field.vue';
@@ -157,23 +158,9 @@ describe('RunnerTypeCell', () => {
});
it('Displays job count', () => {
- expect(findRunnerSummaryField('pipeline').text()).toContain(`${mockRunner.jobCount}`);
- });
-
- it('Formats large job counts', () => {
- createComponent({
- runner: { jobCount: 1000 },
- });
-
- expect(findRunnerSummaryField('pipeline').text()).toContain('1,000');
- });
-
- it('Formats large job counts with a plus symbol', () => {
- createComponent({
- runner: { jobCount: 1001 },
- });
-
- expect(findRunnerSummaryField('pipeline').text()).toContain('1,000+');
+ expect(
+ findRunnerSummaryField('pipeline').findComponent(RunnerJobCount).props('runner'),
+ ).toEqual(mockRunner);
});
it('Displays creation info', () => {
diff --git a/spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js b/spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js
index ffc19d66cac..62ab40b2ebb 100644
--- a/spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js
+++ b/spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js
@@ -1,4 +1,4 @@
-import { GlFilteredSearch, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlFilteredSearch, GlSorting } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { assertProps } from 'helpers/assert_props';
@@ -32,7 +32,12 @@ describe('RunnerList', () => {
const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
const findGlFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
- const findSortOptions = () => wrapper.findAllComponents(GlDropdownItem);
+ const findGlSorting = () => wrapper.findComponent(GlSorting);
+ const getSortOptions = () => findGlSorting().props('sortOptions');
+ const getSelectedSortOption = () => {
+ const sortBy = findGlSorting().props('sortBy');
+ return getSortOptions().find(({ value }) => sortBy === value)?.text;
+ };
const mockOtherSort = CONTACTED_DESC;
const mockFilters = [
@@ -56,8 +61,6 @@ describe('RunnerList', () => {
stubs: {
FilteredSearch,
GlFilteredSearch,
- GlDropdown,
- GlDropdownItem,
},
...options,
});
@@ -74,9 +77,10 @@ describe('RunnerList', () => {
it('sets sorting options', () => {
const SORT_OPTIONS_COUNT = 2;
- expect(findSortOptions()).toHaveLength(SORT_OPTIONS_COUNT);
- expect(findSortOptions().at(0).text()).toBe('Created date');
- expect(findSortOptions().at(1).text()).toBe('Last contact');
+ const sortOptionsProp = getSortOptions();
+ expect(sortOptionsProp).toHaveLength(SORT_OPTIONS_COUNT);
+ expect(sortOptionsProp[0].text).toBe('Created date');
+ expect(sortOptionsProp[1].text).toBe('Last contact');
});
it('sets tokens to the filtered search', () => {
@@ -141,12 +145,7 @@ describe('RunnerList', () => {
});
it('sort option is selected', () => {
- expect(
- findSortOptions()
- .filter((w) => w.props('isChecked'))
- .at(0)
- .text(),
- ).toEqual('Last contact');
+ expect(getSelectedSortOption()).toBe('Last contact');
});
it('when the user sets a filter, the "search" preserves the other filters', async () => {
@@ -181,7 +180,7 @@ describe('RunnerList', () => {
});
it('when the user sets a sorting method, the "search" is emitted with the sort', () => {
- findSortOptions().at(1).vm.$emit('click');
+ findGlSorting().vm.$emit('sortByChange', 2);
expectToHaveLastEmittedInput({
runnerType: null,
diff --git a/spec/frontend/ci/runner/components/runner_job_count_spec.js b/spec/frontend/ci/runner/components/runner_job_count_spec.js
new file mode 100644
index 00000000000..01b5ca5332e
--- /dev/null
+++ b/spec/frontend/ci/runner/components/runner_job_count_spec.js
@@ -0,0 +1,74 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import runnerJobCountQuery from '~/ci/runner/graphql/list/runner_job_count.query.graphql';
+
+import RunnerJobCount from '~/ci/runner/components/runner_job_count.vue';
+
+import { runnerJobCountData } from '../mock_data';
+
+const mockRunner = runnerJobCountData.data.runner;
+
+Vue.use(VueApollo);
+
+describe('RunnerJobCount', () => {
+ let wrapper;
+ let runnerJobCountHandler;
+
+ const createComponent = ({ props = {}, ...options } = {}, mountFn = shallowMountExtended) => {
+ wrapper = mountFn(RunnerJobCount, {
+ apolloProvider: createMockApollo([[runnerJobCountQuery, runnerJobCountHandler]]),
+ propsData: {
+ runner: mockRunner,
+ ...props,
+ },
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ runnerJobCountHandler = jest.fn().mockReturnValue(new Promise(() => {}));
+ });
+
+ it('Loads data while it displays empty content', () => {
+ createComponent();
+
+ expect(runnerJobCountHandler).toHaveBeenCalledWith({ id: mockRunner.id });
+ expect(wrapper.text()).toBe('-');
+ });
+
+ it('Sets a batch key for the "jobCount" query', () => {
+ createComponent();
+
+ expect(wrapper.vm.$apollo.queries.jobCount.options.context.batchKey).toBe('RunnerJobCount');
+ });
+
+ it('Displays job count', async () => {
+ runnerJobCountHandler.mockResolvedValue(runnerJobCountData);
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe('999');
+ });
+
+ it('Displays formatted job count', async () => {
+ runnerJobCountHandler.mockResolvedValue({
+ data: {
+ runner: {
+ ...mockRunner,
+ jobCount: 1001,
+ },
+ },
+ });
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe('1,000+');
+ });
+});
diff --git a/spec/frontend/ci/runner/components/runner_managers_detail_spec.js b/spec/frontend/ci/runner/components/runner_managers_detail_spec.js
index 3435292394f..6db9bb1d091 100644
--- a/spec/frontend/ci/runner/components/runner_managers_detail_spec.js
+++ b/spec/frontend/ci/runner/components/runner_managers_detail_spec.js
@@ -85,7 +85,7 @@ describe('RunnerJobs', () => {
});
it('is collapsed', () => {
- expect(findCollapse().attributes('visible')).toBeUndefined();
+ expect(findCollapse().props('visible')).toBe(false);
});
describe('when expanded', () => {
@@ -99,7 +99,7 @@ describe('RunnerJobs', () => {
});
it('shows loading state', () => {
- expect(findCollapse().attributes('visible')).toBe('true');
+ expect(findCollapse().props('visible')).toBe(true);
expect(findSkeletonLoader().exists()).toBe(true);
});
@@ -156,14 +156,14 @@ describe('RunnerJobs', () => {
});
it('shows rows', () => {
- expect(findCollapse().attributes('visible')).toBe('true');
+ expect(findCollapse().props('visible')).toBe(true);
expect(findRunnerManagersTable().props('items')).toEqual(mockRunnerManagers);
});
it('collapses when clicked', async () => {
await findHideDetails().trigger('click');
- expect(findCollapse().attributes('visible')).toBeUndefined();
+ expect(findCollapse().props('visible')).toBe(false);
});
});
});
diff --git a/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js b/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js
index f3d7ae85e0d..3e4cdecb07b 100644
--- a/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js
+++ b/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js
@@ -50,12 +50,14 @@ import {
} from '~/ci/runner/constants';
import groupRunnersQuery from 'ee_else_ce/ci/runner/graphql/list/group_runners.query.graphql';
import groupRunnersCountQuery from 'ee_else_ce/ci/runner/graphql/list/group_runners_count.query.graphql';
+import runnerJobCountQuery from '~/ci/runner/graphql/list/runner_job_count.query.graphql';
import GroupRunnersApp from '~/ci/runner/group_runners/group_runners_app.vue';
import { captureException } from '~/ci/runner/sentry_utils';
import {
groupRunnersData,
groupRunnersDataPaginated,
groupRunnersCountData,
+ runnerJobCountData,
onlineContactTimeoutSecs,
staleTimeoutSecs,
mockRegistrationToken,
@@ -72,6 +74,7 @@ const mockGroupRunnersCount = mockGroupRunnersEdges.length;
const mockGroupRunnersHandler = jest.fn();
const mockGroupRunnersCountHandler = jest.fn();
+const mockRunnerJobCountHandler = jest.fn();
jest.mock('~/alert');
jest.mock('~/ci/runner/sentry_utils');
@@ -108,6 +111,7 @@ describe('GroupRunnersApp', () => {
const handlers = [
[groupRunnersQuery, mockGroupRunnersHandler],
[groupRunnersCountQuery, mockGroupRunnersCountHandler],
+ [runnerJobCountQuery, mockRunnerJobCountHandler],
];
wrapper = mountFn(GroupRunnersApp, {
@@ -138,11 +142,13 @@ describe('GroupRunnersApp', () => {
beforeEach(() => {
mockGroupRunnersHandler.mockResolvedValue(groupRunnersData);
mockGroupRunnersCountHandler.mockResolvedValue(groupRunnersCountData);
+ mockRunnerJobCountHandler.mockResolvedValue(runnerJobCountData);
});
afterEach(() => {
mockGroupRunnersHandler.mockReset();
mockGroupRunnersCountHandler.mockReset();
+ mockRunnerJobCountHandler.mockReset();
});
it('shows the runner tabs with a runner count for each type', async () => {
diff --git a/spec/frontend/ci/runner/mock_data.js b/spec/frontend/ci/runner/mock_data.js
index 51556650c32..58d8e0ee74a 100644
--- a/spec/frontend/ci/runner/mock_data.js
+++ b/spec/frontend/ci/runner/mock_data.js
@@ -43,6 +43,15 @@ const emptyPageInfo = {
endCursor: '',
};
+const runnerJobCountData = {
+ data: {
+ runner: {
+ id: 'gid://gitlab/Ci::Runner/99',
+ jobCount: 999,
+ },
+ },
+};
+
// Other mock data
// Mock searches and their corresponding urls
@@ -348,6 +357,7 @@ export {
groupRunnersCountData,
emptyPageInfo,
runnerData,
+ runnerJobCountData,
runnerWithGroupData,
runnerProjectsData,
runnerJobsData,
diff --git a/spec/frontend/clusters/agents/components/integration_status_spec.js b/spec/frontend/clusters/agents/components/integration_status_spec.js
index 28a59391578..0f3da3e02be 100644
--- a/spec/frontend/clusters/agents/components/integration_status_spec.js
+++ b/spec/frontend/clusters/agents/components/integration_status_spec.js
@@ -58,7 +58,7 @@ describe('IntegrationStatus', () => {
});
it('sets collapse component as invisible by default', () => {
- expect(findCollapse().props('visible')).toBeUndefined();
+ expect(findCollapse().props('visible')).toBe(false);
});
});
@@ -73,7 +73,7 @@ describe('IntegrationStatus', () => {
});
it('sets collapse component as visible', () => {
- expect(findCollapse().attributes('visible')).toBe('true');
+ expect(findCollapse().props('visible')).toBe(true);
});
});
diff --git a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
index 24b2677f497..97b8e1f7fc8 100644
--- a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
+++ b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
@@ -22,7 +22,7 @@ exports[`Comment templates list item component renders list item 1`] = `
<button
aria-controls="reference-1"
aria-labelledby="reference-0"
- class="btn btn-default btn-default-tertiary btn-md gl-button gl-new-dropdown-icon-only gl-new-dropdown-toggle gl-new-dropdown-toggle-no-caret"
+ class="btn btn-default btn-default-tertiary btn-icon btn-md gl-button gl-new-dropdown-icon-only gl-new-dropdown-toggle gl-new-dropdown-toggle-no-caret"
data-testid="base-dropdown-toggle"
id="reference-0"
type="button"
diff --git a/spec/frontend/commit/commit_pipeline_status_spec.js b/spec/frontend/commit/commit_pipeline_status_spec.js
index 08a7ec17785..6d407ed886a 100644
--- a/spec/frontend/commit/commit_pipeline_status_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_spec.js
@@ -6,7 +6,7 @@ import fixture from 'test_fixtures/pipelines/pipelines.json';
import { createAlert } from '~/alert';
import Poll from '~/lib/utils/poll';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
jest.mock('~/lib/utils/poll');
jest.mock('visibilityjs');
diff --git a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
index 008a1b2c068..37ce234c61c 100644
--- a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
+++ b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
@@ -5,7 +5,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
import CommitBoxPipelineStatus from '~/projects/commit_box/info/components/commit_box_pipeline_status.vue';
import {
COMMIT_BOX_POLL_INTERVAL,
diff --git a/spec/frontend/commons/nav/user_merge_requests_spec.js b/spec/frontend/commons/nav/user_merge_requests_spec.js
deleted file mode 100644
index 114cbbf812c..00000000000
--- a/spec/frontend/commons/nav/user_merge_requests_spec.js
+++ /dev/null
@@ -1,154 +0,0 @@
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import * as UserApi from '~/api/user_api';
-import {
- openUserCountsBroadcast,
- closeUserCountsBroadcast,
- refreshUserMergeRequestCounts,
-} from '~/commons/nav/user_merge_requests';
-
-jest.mock('~/api');
-
-const TEST_COUNT = 1000;
-const MR_COUNT_CLASS = 'js-merge-requests-count';
-
-describe('User Merge Requests', () => {
- let channelMock;
- let newBroadcastChannelMock;
-
- beforeEach(() => {
- jest.spyOn(document, 'dispatchEvent').mockReturnValue(false);
-
- global.gon.current_user_id = 123;
- global.gon.use_new_navigation = false;
-
- channelMock = {
- postMessage: jest.fn(),
- close: jest.fn(),
- };
- newBroadcastChannelMock = jest.fn().mockImplementation(() => channelMock);
-
- global.BroadcastChannel = newBroadcastChannelMock;
- setHTMLFixture(
- `<div><div class="${MR_COUNT_CLASS}">0</div><div class="js-assigned-mr-count"></div><div class="js-reviewer-mr-count"></div></div>`,
- );
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- const findMRCountText = () => document.body.querySelector(`.${MR_COUNT_CLASS}`).textContent;
-
- describe('refreshUserMergeRequestCounts', () => {
- beforeEach(() => {
- jest.spyOn(UserApi, 'getUserCounts').mockResolvedValue({
- data: {
- assigned_merge_requests: TEST_COUNT,
- review_requested_merge_requests: TEST_COUNT,
- },
- });
- });
-
- describe('with open broadcast channel', () => {
- beforeEach(() => {
- openUserCountsBroadcast();
-
- return refreshUserMergeRequestCounts();
- });
-
- it('updates the top count of merge requests', () => {
- expect(findMRCountText()).toEqual(Number(TEST_COUNT + TEST_COUNT).toLocaleString());
- });
-
- it('calls the API', () => {
- expect(UserApi.getUserCounts).toHaveBeenCalled();
- });
-
- it('posts count to BroadcastChannel', () => {
- expect(channelMock.postMessage).toHaveBeenCalledWith(TEST_COUNT + TEST_COUNT);
- });
- });
-
- describe('without open broadcast channel', () => {
- beforeEach(() => refreshUserMergeRequestCounts());
-
- it('does not post anything', () => {
- expect(channelMock.postMessage).not.toHaveBeenCalled();
- });
- });
-
- it('does not emit event to refetch counts', () => {
- expect(document.dispatchEvent).not.toHaveBeenCalled();
- });
- });
-
- describe('openUserCountsBroadcast', () => {
- beforeEach(() => {
- openUserCountsBroadcast();
- });
-
- it('creates BroadcastChannel that updates DOM on message received', () => {
- expect(findMRCountText()).toEqual('0');
-
- channelMock.onmessage({ data: TEST_COUNT });
-
- expect(newBroadcastChannelMock).toHaveBeenCalled();
- expect(findMRCountText()).toEqual(TEST_COUNT.toLocaleString());
- });
-
- it('closes if called while already open', () => {
- expect(channelMock.close).not.toHaveBeenCalled();
-
- openUserCountsBroadcast();
-
- expect(newBroadcastChannelMock).toHaveBeenCalled();
- expect(channelMock.close).toHaveBeenCalled();
- });
- });
-
- describe('closeUserCountsBroadcast', () => {
- describe('when not opened', () => {
- it('does nothing', () => {
- expect(channelMock.close).not.toHaveBeenCalled();
- });
- });
-
- describe('when opened', () => {
- beforeEach(() => {
- openUserCountsBroadcast();
- });
-
- it('closes', () => {
- expect(channelMock.close).not.toHaveBeenCalled();
-
- closeUserCountsBroadcast();
-
- expect(channelMock.close).toHaveBeenCalled();
- });
- });
- });
-
- describe('if new navigation is enabled', () => {
- beforeEach(() => {
- global.gon.use_new_navigation = true;
- jest.spyOn(UserApi, 'getUserCounts');
- });
-
- it('openUserCountsBroadcast is a noop', () => {
- openUserCountsBroadcast();
- expect(newBroadcastChannelMock).not.toHaveBeenCalled();
- });
-
- describe('refreshUserMergeRequestCounts', () => {
- it('does not call api', async () => {
- await refreshUserMergeRequestCounts();
- expect(UserApi.getUserCounts).not.toHaveBeenCalled();
- });
-
- it('emits event to refetch counts', async () => {
- await refreshUserMergeRequestCounts();
- expect(document.dispatchEvent).toHaveBeenCalledWith(new CustomEvent('todo:toggle'));
- });
- });
- });
-});
diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
index a708f7d5f47..0fafd42095b 100644
--- a/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
+++ b/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
@@ -3,7 +3,7 @@
exports[`content_editor/components/toolbar_button displays tertiary, medium button with a provided label and icon 1`] = `
<b-button-stub
aria-label="Bold"
- class="btn-default-tertiary btn-icon gl-button gl-mr-3"
+ class="btn-default-tertiary btn-icon gl-button gl-mr-2"
size="sm"
tag="button"
title="Bold"
diff --git a/spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js b/spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js
index 2a6ab75227c..6e8a6092667 100644
--- a/spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js
+++ b/spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js
@@ -80,6 +80,7 @@ describe('content_editor/components/bubble_menus/code_block_bubble_menu', () =>
await emitEditorEvent({ event: 'transaction', tiptapEditor });
expect(wrapper.findComponent(GlDropdown).props('text')).toBe('Plain text');
+ expect(wrapper.findComponent(GlDropdown).attributes('contenteditable')).toBe(String(false));
});
it('selects appropriate language based on the code block', async () => {
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
index 816c9458201..bbc0203344c 100644
--- a/spec/frontend/content_editor/components/content_editor_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -1,6 +1,8 @@
import { GlAlert, GlLink, GlSprintf } from '@gitlab/ui';
import { EditorContent, Editor } from '@tiptap/vue-2';
import { nextTick } from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from 'axios';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ContentEditor from '~/content_editor/components/content_editor.vue';
import ContentEditorAlert from '~/content_editor/components/content_editor_alert.vue';
@@ -16,11 +18,10 @@ import waitForPromises from 'helpers/wait_for_promises';
import { KEYDOWN_EVENT } from '~/content_editor/constants';
import EditorModeSwitcher from '~/vue_shared/components/markdown/editor_mode_switcher.vue';
-jest.mock('~/emoji');
-
describe('ContentEditor', () => {
let wrapper;
let renderMarkdown;
+ let mock;
const uploadsPath = '/uploads';
const findEditorElement = () => wrapper.findByTestId('content-editor');
@@ -32,6 +33,7 @@ describe('ContentEditor', () => {
wrapper = shallowMountExtended(ContentEditor, {
propsData: {
renderMarkdown,
+ markdownDocsPath: '/docs/markdown',
uploadsPath,
markdown,
autofocus,
@@ -49,9 +51,17 @@ describe('ContentEditor', () => {
};
beforeEach(() => {
+ mock = new MockAdapter(axios);
+ // ignore /-/emojis requests
+ mock.onGet().reply(200, []);
+
renderMarkdown = jest.fn();
});
+ afterEach(() => {
+ mock.restore();
+ });
+
it('triggers initialized event', () => {
createWrapper();
diff --git a/spec/frontend/content_editor/components/suggestions_dropdown_spec.js b/spec/frontend/content_editor/components/suggestions_dropdown_spec.js
index ee3ad59bf9a..b17a1b5fc11 100644
--- a/spec/frontend/content_editor/components/suggestions_dropdown_spec.js
+++ b/spec/frontend/content_editor/components/suggestions_dropdown_spec.js
@@ -1,5 +1,6 @@
-import { GlAvatarLabeled, GlLoadingIcon } from '@gitlab/ui';
+import { GlAvatar, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import SuggestionsDropdown from '~/content_editor/components/suggestions_dropdown.vue';
@@ -14,11 +15,17 @@ describe('~/content_editor/components/suggestions_dropdown', () => {
command: jest.fn(),
...propsData,
},
+ stubs: ['gl-emoji'],
}),
);
};
- const exampleUser = { username: 'root', avatar_url: 'root_avatar.png', type: 'User' };
+ const exampleUser = {
+ username: 'root',
+ avatar_url: 'root_avatar.png',
+ type: 'User',
+ name: 'Administrator',
+ };
const exampleIssue = { iid: 123, title: 'Test Issue' };
const exampleMergeRequest = { iid: 224, title: 'Test MR' };
const exampleMilestone1 = { iid: 21, title: '13' };
@@ -61,11 +68,14 @@ describe('~/content_editor/components/suggestions_dropdown', () => {
title: 'Project creation QueryRecorder logs',
};
const exampleEmoji = {
- c: 'people',
- e: '😃',
- d: 'smiling face with open mouth',
- u: '6.0',
- name: 'smiley',
+ emoji: {
+ c: 'people',
+ e: '😃',
+ d: 'smiling face with open mouth',
+ u: '6.0',
+ name: 'smiley',
+ },
+ fieldValue: 'smiley',
};
const insertedEmojiProps = {
@@ -95,6 +105,68 @@ describe('~/content_editor/components/suggestions_dropdown', () => {
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(loading);
});
+ it('selects first item if query is not empty and items are available', async () => {
+ buildWrapper({
+ propsData: {
+ char: '@',
+ nodeType: 'reference',
+ nodeProps: {
+ referenceType: 'member',
+ },
+ items: [exampleUser],
+ query: 'ro',
+ },
+ });
+
+ await nextTick();
+
+ expect(
+ wrapper.findByTestId('content-editor-suggestions-dropdown').find('li').classes(),
+ ).toContain('focused');
+ });
+
+ describe('when query is defined', () => {
+ it.each`
+ nodeType | referenceType | reference | query | expectedHTML
+ ${'reference'} | ${'user'} | ${exampleUser} | ${'r'} | ${'<strong class="gl-text-body!">r</strong>oot'}
+ ${'reference'} | ${'user'} | ${exampleUser} | ${'r'} | ${'Administ<strong class="gl-text-body!">r</strong>ator'}
+ ${'reference'} | ${'issue'} | ${exampleIssue} | ${'test'} | ${'<strong class="gl-text-body!">Test</strong> Issue'}
+ ${'reference'} | ${'issue'} | ${exampleIssue} | ${'12'} | ${'<strong class="gl-text-body!">12</strong>3'}
+ ${'reference'} | ${'merge_request'} | ${exampleMergeRequest} | ${'test'} | ${'<strong class="gl-text-body!">Test</strong> MR'}
+ ${'reference'} | ${'merge_request'} | ${exampleMergeRequest} | ${'22'} | ${'<strong class="gl-text-body!">22</strong>4'}
+ ${'reference'} | ${'epic'} | ${exampleEpic} | ${'rem'} | ${'❓ <strong class="gl-text-body!">Rem</strong>ote Development | Solution validation'}
+ ${'reference'} | ${'epic'} | ${exampleEpic} | ${'88'} | ${'gitlab-org&amp;<strong class="gl-text-body!">88</strong>84'}
+ ${'reference'} | ${'milestone'} | ${exampleMilestone1} | ${'1'} | ${'<strong class="gl-text-body!">1</strong>3'}
+ ${'reference'} | ${'command'} | ${exampleCommand} | ${'due'} | ${'<strong class="gl-text-body!">due</strong>'}
+ ${'reference'} | ${'command'} | ${exampleCommand} | ${'due'} | ${'Set <strong class="gl-text-body!">due</strong> date'}
+ ${'reference'} | ${'label'} | ${exampleLabel1} | ${'c'} | ${'<strong class="gl-text-body!">C</strong>reate'}
+ ${'reference'} | ${'vulnerability'} | ${exampleVulnerability} | ${'network'} | ${'System procs <strong class="gl-text-body!">network</strong> activity'}
+ ${'reference'} | ${'vulnerability'} | ${exampleVulnerability} | ${'85'} | ${'60<strong class="gl-text-body!">85</strong>0147'}
+ ${'reference'} | ${'snippet'} | ${exampleSnippet} | ${'project'} | ${'<strong class="gl-text-body!">Project</strong> creation QueryRecorder logs'}
+ ${'reference'} | ${'snippet'} | ${exampleSnippet} | ${'242'} | ${'<strong class="gl-text-body!">242</strong>0859'}
+ ${'emoji'} | ${'emoji'} | ${exampleEmoji} | ${'sm'} | ${'<strong class="gl-text-body!">sm</strong>iley'}
+ `(
+ 'highlights query as bolded in $referenceType text',
+ ({ nodeType, referenceType, reference, query, expectedHTML }) => {
+ buildWrapper({
+ propsData: {
+ char: '@',
+ nodeType,
+ nodeProps: {
+ referenceType,
+ },
+ items: [reference],
+ query,
+ },
+ });
+
+ expect(wrapper.findByTestId('content-editor-suggestions-dropdown').html()).toContain(
+ expectedHTML,
+ );
+ },
+ );
+ });
+
describe('on item select', () => {
it.each`
nodeType | referenceType | char | reference | insertedText | insertedProps
@@ -146,7 +218,7 @@ describe('~/content_editor/components/suggestions_dropdown', () => {
});
describe('rendering user references', () => {
- it('displays avatar labeled component', () => {
+ it('displays avatar component', () => {
buildWrapper({
propsData: {
char: '@',
@@ -157,13 +229,11 @@ describe('~/content_editor/components/suggestions_dropdown', () => {
},
});
- expect(wrapper.findComponent(GlAvatarLabeled).attributes()).toEqual(
- expect.objectContaining({
- label: exampleUser.username,
- shape: 'circle',
- src: exampleUser.avatar_url,
- }),
- );
+ expect(wrapper.findComponent(GlAvatar).attributes()).toMatchObject({
+ entityname: exampleUser.username,
+ shape: 'circle',
+ src: exampleUser.avatar_url,
+ });
});
describe.each`
@@ -273,20 +343,46 @@ describe('~/content_editor/components/suggestions_dropdown', () => {
it('displays emoji', () => {
const testEmojis = [
{
- c: 'people',
- e: '😄',
- d: 'smiling face with open mouth and smiling eyes',
- u: '6.0',
- name: 'smile',
+ emoji: {
+ c: 'people',
+ e: '😄',
+ d: 'smiling face with open mouth and smiling eyes',
+ u: '6.0',
+ name: 'smile',
+ },
+ fieldValue: 'smile',
+ },
+ {
+ emoji: {
+ c: 'people',
+ e: '😸',
+ d: 'grinning cat face with smiling eyes',
+ u: '6.0',
+ name: 'smile_cat',
+ },
+ fieldValue: 'smile_cat',
+ },
+ {
+ emoji: {
+ c: 'people',
+ e: '😃',
+ d: 'smiling face with open mouth',
+ u: '6.0',
+ name: 'smiley',
+ },
+ fieldValue: 'smiley',
},
{
- c: 'people',
- e: '😸',
- d: 'grinning cat face with smiling eyes',
- u: '6.0',
- name: 'smile_cat',
+ emoji: {
+ c: 'custom',
+ e: null,
+ d: 'party-parrot',
+ u: 'custom',
+ name: 'party-parrot',
+ src: 'https://cultofthepartyparrot.com/parrots/hd/parrot.gif',
+ },
+ fieldValue: 'party-parrot',
},
- { c: 'people', e: '😃', d: 'smiling face with open mouth', u: '6.0', name: 'smiley' },
];
buildWrapper({
@@ -298,11 +394,41 @@ describe('~/content_editor/components/suggestions_dropdown', () => {
},
});
- testEmojis.forEach((testEmoji) => {
- expect(wrapper.text()).toContain(testEmoji.e);
- expect(wrapper.text()).toContain(testEmoji.d);
- expect(wrapper.text()).toContain(testEmoji.name);
- });
+ expect(wrapper.findAllComponents('gl-emoji-stub').at(0).html()).toMatchInlineSnapshot(`
+ <gl-emoji-stub
+ data-name="smile"
+ data-unicode-version="6.0"
+ title="smiling face with open mouth and smiling eyes"
+ >
+ 😄
+ </gl-emoji-stub>
+ `);
+ expect(wrapper.findAllComponents('gl-emoji-stub').at(1).html()).toMatchInlineSnapshot(`
+ <gl-emoji-stub
+ data-name="smile_cat"
+ data-unicode-version="6.0"
+ title="grinning cat face with smiling eyes"
+ >
+ 😸
+ </gl-emoji-stub>
+ `);
+ expect(wrapper.findAllComponents('gl-emoji-stub').at(2).html()).toMatchInlineSnapshot(`
+ <gl-emoji-stub
+ data-name="smiley"
+ data-unicode-version="6.0"
+ title="smiling face with open mouth"
+ >
+ 😃
+ </gl-emoji-stub>
+ `);
+ expect(wrapper.findAllComponents('gl-emoji-stub').at(3).html()).toMatchInlineSnapshot(`
+ <gl-emoji-stub
+ data-fallback-src="https://cultofthepartyparrot.com/parrots/hd/parrot.gif"
+ data-name="party-parrot"
+ data-unicode-version="custom"
+ title="party-parrot"
+ />
+ `);
});
});
});
diff --git a/spec/frontend/content_editor/components/wrappers/code_block_spec.js b/spec/frontend/content_editor/components/wrappers/code_block_spec.js
index 0093393eceb..1f15dc17f7f 100644
--- a/spec/frontend/content_editor/components/wrappers/code_block_spec.js
+++ b/spec/frontend/content_editor/components/wrappers/code_block_spec.js
@@ -97,6 +97,7 @@ describe('content/components/wrappers/code_block', () => {
const label = wrapper.findByTestId('frontmatter-label');
expect(label.text()).toEqual('frontmatter:yaml');
+ expect(label.attributes('contenteditable')).toBe('false');
expect(label.classes()).toEqual(['gl-absolute', 'gl-top-0', 'gl-right-3']);
});
@@ -128,6 +129,9 @@ describe('content/components/wrappers/code_block', () => {
await nextTick();
expect(wrapper.find('img').attributes('src')).toBe('url/to/some/diagram');
+ expect(wrapper.findByTestId('sandbox-preview').attributes('contenteditable')).toBe(
+ String(false),
+ );
jest.spyOn(tiptapEditor, 'isActive').mockReturnValue(false);
@@ -214,6 +218,9 @@ describe('content/components/wrappers/code_block', () => {
});
it('shows a code suggestion block', () => {
+ expect(wrapper.findByTestId('code-suggestion-box').attributes('contenteditable')).toBe(
+ 'false',
+ );
expect(findCodeSuggestionBoxText()).toContain('Suggested change From line 5 to 5');
expect(findCodeDeleted()).toMatchInlineSnapshot(`
<code
diff --git a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
index 275f48ea857..94628f2b2c5 100644
--- a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
+++ b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
@@ -165,6 +165,9 @@ describe('content/components/wrappers/table_cell_base', () => {
it('does not allow adding a row before the header', () => {
expect(findDropdown().text()).not.toContain('Insert row before');
+ expect(wrapper.findByTestId('actions-dropdown').attributes('contenteditable')).toBe(
+ 'false',
+ );
});
it('does not allow removing the header row', async () => {
diff --git a/spec/frontend/content_editor/extensions/copy_paste_spec.js b/spec/frontend/content_editor/extensions/copy_paste_spec.js
index e290b4e5137..6969f4985a1 100644
--- a/spec/frontend/content_editor/extensions/copy_paste_spec.js
+++ b/spec/frontend/content_editor/extensions/copy_paste_spec.js
@@ -20,12 +20,6 @@ import waitForPromises from 'helpers/wait_for_promises';
import MarkdownSerializer from '~/content_editor/services/markdown_serializer';
import { createTestEditor, createDocBuilder, waitUntilNextDocTransaction } from '../test_utils';
-const CODE_BLOCK_HTML = '<pre class="js-syntax-highlight" lang="javascript">var a = 2;</pre>';
-const CODE_SUGGESTION_HTML =
- '<pre data-lang-params="-0+0" class="js-syntax-highlight language-suggestion" lang="suggestion">Suggested code</pre>';
-const DIAGRAM_HTML =
- '<img data-diagram="nomnoml" data-diagram-src="data:text/plain;base64,WzxmcmFtZT5EZWNvcmF0b3IgcGF0dGVybl0=">';
-const FRONTMATTER_HTML = '<pre lang="yaml" data-lang-params="frontmatter">key: value</pre>';
const PARAGRAPH_HTML =
'<p dir="auto">Some text with <strong>bold</strong> and <em>italic</em> text.</p>';
@@ -123,19 +117,6 @@ describe('content_editor/extensions/copy_paste', () => {
expect(await triggerPasteEventHandler(buildClipboardEvent({ types, data }))).toBe(true);
});
- it.each`
- nodeType | html | handled | desc
- ${'codeBlock'} | ${CODE_BLOCK_HTML} | ${false} | ${'does not handle'}
- ${'codeSuggestion'} | ${CODE_SUGGESTION_HTML} | ${false} | ${'does not handle'}
- ${'diagram'} | ${DIAGRAM_HTML} | ${false} | ${'does not handle'}
- ${'frontmatter'} | ${FRONTMATTER_HTML} | ${false} | ${'does not handle'}
- ${'paragraph'} | ${PARAGRAPH_HTML} | ${true} | ${'handles'}
- `('$desc paste if currently a `$nodeType` is in focus', async ({ html, handled }) => {
- tiptapEditor.commands.insertContent(html);
-
- expect(await triggerPasteEventHandler(buildClipboardEvent())).toBe(handled);
- });
-
describe.each`
eventName | expectedDoc
${'cut'} | ${() => doc(p())}
diff --git a/spec/frontend/content_editor/extensions/reference_spec.js b/spec/frontend/content_editor/extensions/reference_spec.js
index c25c7c41d75..d4b07d5127e 100644
--- a/spec/frontend/content_editor/extensions/reference_spec.js
+++ b/spec/frontend/content_editor/extensions/reference_spec.js
@@ -1,9 +1,15 @@
import Reference from '~/content_editor/extensions/reference';
+import ReferenceLabel from '~/content_editor/extensions/reference_label';
import AssetResolver from '~/content_editor/services/asset_resolver';
import {
RESOLVED_ISSUE_HTML,
RESOLVED_MERGE_REQUEST_HTML,
RESOLVED_EPIC_HTML,
+ RESOLVED_LABEL_HTML,
+ RESOLVED_SNIPPET_HTML,
+ RESOLVED_MILESTONE_HTML,
+ RESOLVED_USER_HTML,
+ RESOLVED_VULNERABILITY_HTML,
} from '../test_constants';
import {
createTestEditor,
@@ -17,6 +23,7 @@ describe('content_editor/extensions/reference', () => {
let doc;
let p;
let reference;
+ let referenceLabel;
let renderMarkdown;
let assetResolver;
@@ -25,33 +32,54 @@ describe('content_editor/extensions/reference', () => {
assetResolver = new AssetResolver({ renderMarkdown });
tiptapEditor = createTestEditor({
- extensions: [Reference.configure({ assetResolver })],
+ extensions: [Reference.configure({ assetResolver }), ReferenceLabel],
});
({
- builders: { doc, p, reference },
+ builders: { doc, p, reference, referenceLabel },
} = createDocBuilder({
tiptapEditor,
names: {
reference: { nodeType: Reference.name },
+ referenceLabel: { nodeType: ReferenceLabel.name },
},
}));
});
describe('when typing a valid reference input rule', () => {
- const buildExpectedDoc = (href, originalText, referenceType, text) =>
+ const buildExpectedDoc = (href, originalText, referenceType, text = originalText) =>
doc(p(reference({ className: null, href, originalText, referenceType, text }), ' '));
+ const buildExpectedDocForLabel = (href, originalText, text, color) =>
+ doc(
+ p(
+ referenceLabel({
+ className: null,
+ referenceType: 'label',
+ href,
+ originalText,
+ text,
+ color,
+ }),
+ ' ',
+ ),
+ );
+
it.each`
- inputRuleText | mockReferenceHtml | expectedDoc
- ${'#1 '} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1', 'issue', '#1 (closed)')}
- ${'#1+ '} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1+', 'issue', '500 error on MR approvers edit page (#1 - closed)')}
- ${'#1+s '} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1+s', 'issue', '500 error on MR approvers edit page (#1 - closed) • Unassigned')}
- ${'!1 '} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1', 'merge_request', '!1 (merged)')}
- ${'!1+ '} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1+', 'merge_request', 'Enhance the LDAP group synchronization (!1 - merged)')}
- ${'!1+s '} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1+s', 'merge_request', 'Enhance the LDAP group synchronization (!1 - merged) • John Doe')}
- ${'&1 '} | ${RESOLVED_EPIC_HTML} | ${() => buildExpectedDoc('/groups/gitlab-org/-/epics/1', '&1', 'epic', '&1')}
- ${'&1+ '} | ${RESOLVED_EPIC_HTML} | ${() => buildExpectedDoc('/groups/gitlab-org/-/epics/1', '&1+', 'epic', 'Approvals in merge request list (&1)')}
+ inputRuleText | mockReferenceHtml | expectedDoc
+ ${'#1'} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1', 'issue', '#1 (closed)')}
+ ${'#1+'} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1+', 'issue', '500 error on MR approvers edit page (#1 - closed)')}
+ ${'#1+s'} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1+s', 'issue', '500 error on MR approvers edit page (#1 - closed) • Unassigned')}
+ ${'!1'} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1', 'merge_request', '!1 (merged)')}
+ ${'!1+'} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1+', 'merge_request', 'Enhance the LDAP group synchronization (!1 - merged)')}
+ ${'!1+s'} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1+s', 'merge_request', 'Enhance the LDAP group synchronization (!1 - merged) • John Doe')}
+ ${'&1'} | ${RESOLVED_EPIC_HTML} | ${() => buildExpectedDoc('/groups/gitlab-org/-/epics/1', '&1', 'epic', '&1')}
+ ${'&1+'} | ${RESOLVED_EPIC_HTML} | ${() => buildExpectedDoc('/groups/gitlab-org/-/epics/1', '&1+', 'epic', 'Approvals in merge request list (&1)')}
+ ${'@root'} | ${RESOLVED_USER_HTML} | ${() => buildExpectedDoc('/root', '@root', 'user')}
+ ${'~Aquanix'} | ${RESOLVED_LABEL_HTML} | ${() => buildExpectedDocForLabel('/gitlab-org/gitlab-shell/-/issues?label_name=Aquanix', '~Aquanix', 'Aquanix', 'rgb(230, 84, 49)')}
+ ${'%v4.0'} | ${RESOLVED_MILESTONE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab-shell/-/milestones/5', '%v4.0', 'milestone')}
+ ${'$25'} | ${RESOLVED_SNIPPET_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab-shell/-/snippets/25', '$25', 'snippet')}
+ ${'[vulnerability:1]'} | ${RESOLVED_VULNERABILITY_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab-shell/-/security/vulnerabilities/1', '[vulnerability:1]', 'vulnerability')}
`(
'replaces the input rule ($inputRuleText) with a reference node',
async ({ inputRuleText, mockReferenceHtml, expectedDoc }) => {
@@ -61,8 +89,8 @@ describe('content_editor/extensions/reference', () => {
action() {
renderMarkdown.mockResolvedValueOnce(mockReferenceHtml);
- tiptapEditor.commands.insertContent({ type: 'text', text: inputRuleText });
- triggerNodeInputRule({ tiptapEditor, inputRuleText });
+ tiptapEditor.commands.insertContent({ type: 'text', text: `${inputRuleText} ` });
+ triggerNodeInputRule({ tiptapEditor, inputRuleText: `${inputRuleText} ` });
},
});
diff --git a/spec/frontend/content_editor/services/__snapshots__/data_source_factory_spec.js.snap b/spec/frontend/content_editor/services/__snapshots__/data_source_factory_spec.js.snap
new file mode 100644
index 00000000000..2d16c6b1a2f
--- /dev/null
+++ b/spec/frontend/content_editor/services/__snapshots__/data_source_factory_spec.js.snap
@@ -0,0 +1,256 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`DataSourceFactory filters items based on command "/assign" for reference type "user" and command 1`] = `
+Array [
+ "florida.schoen",
+ "root",
+ "all",
+ "lakeesha.batz",
+ "laurene_blick",
+ "myrtis",
+ "patty",
+ "Commit451",
+ "flightjs",
+ "gitlab-instance-ade037f9",
+ "gitlab-org",
+ "gnuwget",
+ "h5bp",
+ "jashkenas",
+ "twitter",
+]
+`;
+
+exports[`DataSourceFactory filters items based on command "/assign_reviewer" for reference type "user" and command 1`] = `
+Array [
+ "florida.schoen",
+ "root",
+ "all",
+ "errol",
+ "evelynn_olson",
+ "Commit451",
+ "flightjs",
+ "gitlab-instance-ade037f9",
+ "gitlab-org",
+ "gnuwget",
+ "h5bp",
+ "jashkenas",
+ "twitter",
+]
+`;
+
+exports[`DataSourceFactory filters items based on command "/label" for reference type "label" and command 1`] = `
+Array [
+ "Bronce",
+ "Contour",
+ "Corolla",
+ "Cygsync",
+ "Frontier",
+ "Grand Am",
+ "Onesync",
+ "Phone",
+ "Pynefunc",
+ "Trinix",
+ "Trounswood",
+ "group::knowledge",
+ "scoped label",
+ "type::one",
+ "type::two",
+]
+`;
+
+exports[`DataSourceFactory filters items based on command "/reassign" for reference type "user" and command 1`] = `
+Array [
+ "florida.schoen",
+ "root",
+ "all",
+ "errol",
+ "evelynn_olson",
+ "lakeesha.batz",
+ "laurene_blick",
+ "myrtis",
+ "patty",
+ "Commit451",
+ "flightjs",
+ "gitlab-instance-ade037f9",
+ "gitlab-org",
+ "gnuwget",
+ "h5bp",
+]
+`;
+
+exports[`DataSourceFactory filters items based on command "/reassign_reviewer" for reference type "user" and command 1`] = `
+Array [
+ "florida.schoen",
+ "root",
+ "all",
+ "errol",
+ "evelynn_olson",
+ "lakeesha.batz",
+ "laurene_blick",
+ "myrtis",
+ "patty",
+ "Commit451",
+ "flightjs",
+ "gitlab-instance-ade037f9",
+ "gitlab-org",
+ "gnuwget",
+ "h5bp",
+]
+`;
+
+exports[`DataSourceFactory filters items based on command "/relabel" for reference type "label" and command 1`] = `
+Array [
+ "Amsche",
+ "Brioffe",
+ "Bronce",
+ "Bryncefunc",
+ "Contour",
+ "Corolla",
+ "Cygsync",
+ "Frontier",
+ "Ghost",
+ "Grand Am",
+ "Onesync",
+ "Phone",
+ "Pynefunc",
+ "Trinix",
+ "Trounswood",
+]
+`;
+
+exports[`DataSourceFactory filters items based on command "/unassign" for reference type "user" and command 1`] = `
+Array [
+ "errol",
+ "evelynn_olson",
+]
+`;
+
+exports[`DataSourceFactory filters items based on command "/unassign_reviewer" for reference type "user" and command 1`] = `
+Array [
+ "lakeesha.batz",
+ "laurene_blick",
+ "myrtis",
+ "patty",
+]
+`;
+
+exports[`DataSourceFactory filters items based on command "/unlabel" for reference type "label" and command 1`] = `
+Array [
+ "Amsche",
+ "Brioffe",
+ "Bryncefunc",
+ "Ghost",
+]
+`;
+
+exports[`DataSourceFactory for reference type "command", searches for "re" correctly 1`] = `
+Array [
+ "relabel",
+ "remove_milestone",
+ "remove_estimate",
+ "remove_time_spent",
+ "relate",
+ "remove_epic",
+ "reassign",
+ "create_merge_request",
+]
+`;
+
+exports[`DataSourceFactory for reference type "epic", searches for "n" correctly 1`] = `
+Array [
+ "Nobis quidem aspernatur reprehenderit sunt ut ipsum tempora sapiente sed iste.",
+ "Minus eius ut omnis quos sunt dicta ex ipsum.",
+ "Quae nostrum possimus rerum aliquam pariatur a eos aut id.",
+ "Dicta incidunt vel dignissimos sint sit esse est quibusdam quidem consequatur.",
+ "Doloremque a quisquam qui culpa numquam doloribus similique iure enim.",
+]
+`;
+
+exports[`DataSourceFactory for reference type "issue", searches for "q" correctly 1`] = `
+Array [
+ "Quasi id et et nihil sint autem.",
+ "Eaque omnis eius quas necessitatibus hic ut et corrupti.",
+ "Aut quisquam magnam eos distinctio incidunt perferendis fugit.",
+ "Dolorem quisquam cupiditate consequatur perspiciatis sequi eligendi ullam.",
+ "Nesciunt quia molestiae in aliquam amet et dolorem.",
+ "Porro tempore qui qui culpa saepe et nam quos.",
+ "Sed sint a est consequatur quae quasi autem debitis alias.",
+ "Molestiae minima maxime optio nihil quam eveniet dolor.",
+ "Et laboriosam aut ratione voluptatem quasi recusandae.",
+ "Et molestiae delectus voluptates velit vero illo aut rerum quo et.",
+]
+`;
+
+exports[`DataSourceFactory for reference type "label", searches for "c" correctly 1`] = `
+Array [
+ "Contour",
+ "Corolla",
+ "Cygsync",
+ "scoped label",
+ "Amsche",
+ "Bronce",
+ "Bryncefunc",
+ "Onesync",
+ "Pynefunc",
+]
+`;
+
+exports[`DataSourceFactory for reference type "merge_request", searches for "n" correctly 1`] = `
+Array [
+ "Blanditiis maxime voluptatem ut pariatur vel autem vero non quod libero.",
+ "Optio nemo qui dolorem sit ipsum qui saepe.",
+ "Draft: Alunny/publish lib",
+ "Draft: Fix event current target",
+ "Draft: Resolve \\"hgvbbvnnb\\"",
+ "Autem eaque et sed provident enim corrupti molestiae.",
+ "Always call registry's trigger method from withRegistration",
+]
+`;
+
+exports[`DataSourceFactory for reference type "milestone", searches for "16" correctly 1`] = `
+Array [
+ "16.7",
+ "16.8",
+ "16.9",
+ "16.10",
+ "16.11",
+ "16.0 (expired)",
+ "16.1 (expired)",
+ "16.2 (expired)",
+ "16.3 (expired)",
+ "16.4 (expired)",
+ "16.5 (expired)",
+ "16.6 (expired)",
+]
+`;
+
+exports[`DataSourceFactory for reference type "snippet", searches for "s" correctly 1`] = `
+Array [
+ "ss",
+ "test snippet",
+ "another test snippet",
+]
+`;
+
+exports[`DataSourceFactory for reference type "user", searches for "r" correctly 1`] = `
+Array [
+ "root",
+ "errol",
+ "lakeesha.batz",
+ "myrtis",
+ "florida.schoen",
+ "laurene_blick",
+ "all",
+ "twitter",
+ "gitlab-org",
+ "evelynn_olson",
+]
+`;
+
+exports[`DataSourceFactory for reference type "vulnerability", searches for "cross" correctly 1`] = `
+Array [
+ "Cross Site Scripting (Persistent)",
+ "Cross Site Scripting (Persistent)",
+ "Cross Site Scripting (Persistent)",
+]
+`;
diff --git a/spec/frontend/content_editor/services/asset_resolver_spec.js b/spec/frontend/content_editor/services/asset_resolver_spec.js
index 292eec6db77..b0135a6bc9f 100644
--- a/spec/frontend/content_editor/services/asset_resolver_spec.js
+++ b/spec/frontend/content_editor/services/asset_resolver_spec.js
@@ -3,6 +3,11 @@ import {
RESOLVED_ISSUE_HTML,
RESOLVED_MERGE_REQUEST_HTML,
RESOLVED_EPIC_HTML,
+ RESOLVED_LABEL_HTML,
+ RESOLVED_SNIPPET_HTML,
+ RESOLVED_MILESTONE_HTML,
+ RESOLVED_USER_HTML,
+ RESOLVED_VULNERABILITY_HTML,
} from '../test_constants';
describe('content_editor/services/asset_resolver', () => {
@@ -48,6 +53,32 @@ describe('content_editor/services/asset_resolver', () => {
text: '!1 (merged)',
};
+ const resolvedLabel = {
+ backgroundColor: 'rgb(230, 84, 49)',
+ href: '/gitlab-org/gitlab-shell/-/issues?label_name=Aquanix',
+ text: 'Aquanix',
+ };
+
+ const resolvedSnippet = {
+ href: '/gitlab-org/gitlab-shell/-/snippets/25',
+ text: '$25',
+ };
+
+ const resolvedMilestone = {
+ href: '/gitlab-org/gitlab-shell/-/milestones/5',
+ text: '%v4.0',
+ };
+
+ const resolvedUser = {
+ href: '/root',
+ text: '@root',
+ };
+
+ const resolvedVulnerability = {
+ href: '/gitlab-org/gitlab-shell/-/security/vulnerabilities/1',
+ text: '[vulnerability:1]',
+ };
+
describe.each`
referenceType | referenceId | sentMarkdown | returnedHtml | resolvedReference
${'issue'} | ${'#1'} | ${'#1 #1+ #1+s'} | ${RESOLVED_ISSUE_HTML} | ${resolvedIssue}
@@ -59,7 +90,9 @@ describe('content_editor/services/asset_resolver', () => {
it(`resolves ${referenceType} reference to href, text, title and summary`, async () => {
renderMarkdown.mockResolvedValue(returnedHtml);
- expect(await assetResolver.resolveReference(referenceId)).toEqual(resolvedReference);
+ expect(await assetResolver.resolveReference(referenceId)).toMatchObject(
+ resolvedReference,
+ );
});
it.each`
@@ -74,6 +107,26 @@ describe('content_editor/services/asset_resolver', () => {
},
);
+ describe.each`
+ referenceType | referenceId | returnedHtml | resolvedReference
+ ${'label'} | ${'~Aquanix'} | ${RESOLVED_LABEL_HTML} | ${resolvedLabel}
+ ${'snippet'} | ${'$25'} | ${RESOLVED_SNIPPET_HTML} | ${resolvedSnippet}
+ ${'milestone'} | ${'%v4.0'} | ${RESOLVED_MILESTONE_HTML} | ${resolvedMilestone}
+ ${'user'} | ${'@root'} | ${RESOLVED_USER_HTML} | ${resolvedUser}
+ ${'vulnerability'} | ${'[vulnerability:1]'} | ${RESOLVED_VULNERABILITY_HTML} | ${resolvedVulnerability}
+ `(
+ 'for reference type $referenceType',
+ ({ referenceType, referenceId, returnedHtml, resolvedReference }) => {
+ it(`resolves ${referenceType} reference to href, text and additional props (if any)`, async () => {
+ renderMarkdown.mockResolvedValue(returnedHtml);
+
+ expect(await assetResolver.resolveReference(referenceId)).toMatchObject(
+ resolvedReference,
+ );
+ });
+ },
+ );
+
it.each`
case | sentMarkdown | returnedHtml
${'no html is returned'} | ${''} | ${''}
diff --git a/spec/frontend/content_editor/services/autocomplete_mock_data.js b/spec/frontend/content_editor/services/autocomplete_mock_data.js
new file mode 100644
index 00000000000..c1bf2a6ae5b
--- /dev/null
+++ b/spec/frontend/content_editor/services/autocomplete_mock_data.js
@@ -0,0 +1,967 @@
+export const MOCK_MEMBERS = [
+ {
+ type: 'User',
+ username: 'florida.schoen',
+ name: 'Anglea Durgan',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/ac82b5615d3308ecbcacedad361af8e7?s=80\u0026d=identicon',
+ availability: null,
+ },
+ {
+ type: 'User',
+ username: 'root',
+ name: 'Administrator',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ availability: null,
+ },
+ {
+ username: 'all',
+ name: 'All Project and Group Members',
+ count: 8,
+ },
+ {
+ type: 'User',
+ username: 'errol',
+ name: "Linnie O'Connell",
+ avatar_url:
+ 'https://www.gravatar.com/avatar/d3d9a468a9884eb217fad5ca5b2b9bd7?s=80\u0026d=identicon',
+ availability: null,
+ },
+ {
+ type: 'User',
+ username: 'evelynn_olson',
+ name: 'Dimple Dare',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/bc1e51ee3512c2b4442f51732d655107?s=80\u0026d=identicon',
+ availability: null,
+ },
+ {
+ type: 'User',
+ username: 'lakeesha.batz',
+ name: 'Larae Veum',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e5605cb9bbb1a28640d65f25f256e541?s=80\u0026d=identicon',
+ availability: null,
+ },
+ {
+ type: 'User',
+ username: 'laurene_blick',
+ name: 'Evelina Murray',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/389768eef61b7b2d125c64ee01c240fb?s=80\u0026d=identicon',
+ availability: null,
+ },
+ {
+ type: 'User',
+ username: 'myrtis',
+ name: 'Fernanda Adams',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/719d5569bd31d4a70e350b4205fa2cb5?s=80\u0026d=identicon',
+ availability: null,
+ },
+ {
+ type: 'User',
+ username: 'patty',
+ name: 'Emily Toy',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/dca2077b662338808459dc11e70d6688?s=80\u0026d=identicon',
+ availability: null,
+ },
+ {
+ type: 'Group',
+ username: 'Commit451',
+ name: 'Commit451',
+ avatar_url: null,
+ count: 5,
+ mentionsDisabled: null,
+ },
+ {
+ type: 'Group',
+ username: 'flightjs',
+ name: 'Flightjs',
+ avatar_url: null,
+ count: 5,
+ mentionsDisabled: null,
+ },
+ {
+ type: 'Group',
+ username: 'gitlab-instance-ade037f9',
+ name: 'GitLab Instance',
+ avatar_url: null,
+ count: 1,
+ mentionsDisabled: null,
+ },
+ {
+ type: 'Group',
+ username: 'gitlab-org',
+ name: 'Gitlab Org',
+ avatar_url: null,
+ count: 5,
+ mentionsDisabled: null,
+ },
+ {
+ type: 'Group',
+ username: 'gnuwget',
+ name: 'Gnuwget',
+ avatar_url: null,
+ count: 5,
+ mentionsDisabled: null,
+ },
+ {
+ type: 'Group',
+ username: 'h5bp',
+ name: 'H5bp',
+ avatar_url: null,
+ count: 4,
+ mentionsDisabled: null,
+ },
+ {
+ type: 'Group',
+ username: 'jashkenas',
+ name: 'Jashkenas',
+ avatar_url: null,
+ count: 5,
+ mentionsDisabled: null,
+ },
+ {
+ type: 'Group',
+ username: 'twitter',
+ name: 'Twitter',
+ avatar_url: null,
+ count: 5,
+ mentionsDisabled: null,
+ },
+];
+
+export const MOCK_ASSIGNEES = MOCK_MEMBERS.filter(
+ ({ username }) => username === 'errol' || username === 'evelynn_olson',
+);
+
+export const MOCK_REVIEWERS = MOCK_MEMBERS.filter(
+ ({ username }) =>
+ username === 'lakeesha.batz' ||
+ username === 'laurene_blick' ||
+ username === 'myrtis' ||
+ username === 'patty',
+);
+
+export const MOCK_ISSUES = [
+ {
+ iid: 31,
+ title: 'rdfhdfj',
+ id: null,
+ },
+ {
+ iid: 30,
+ title: 'incident1',
+ id: null,
+ },
+ {
+ iid: 29,
+ title: 'example feature rollout',
+ id: null,
+ },
+ {
+ iid: 28,
+ title: 'sagasg',
+ id: null,
+ },
+ {
+ iid: 26,
+ title: 'Quasi id et et nihil sint autem.',
+ id: null,
+ },
+ {
+ iid: 25,
+ title: 'Dolorem quisquam cupiditate consequatur perspiciatis sequi eligendi ullam.',
+ id: null,
+ },
+ {
+ iid: 24,
+ title: 'Et molestiae delectus voluptates velit vero illo aut rerum quo et.',
+ id: null,
+ },
+ {
+ iid: 23,
+ title: 'Nesciunt quia molestiae in aliquam amet et dolorem.',
+ id: null,
+ },
+ {
+ iid: 22,
+ title: 'Sint asperiores unde vel autem delectus ullam dolor nihil et.',
+ id: null,
+ },
+ {
+ iid: 21,
+ title: 'Eaque omnis eius quas necessitatibus hic ut et corrupti.',
+ id: null,
+ },
+ {
+ iid: 20,
+ title: 'Porro tempore qui qui culpa saepe et nam quos.',
+ id: null,
+ },
+ {
+ iid: 19,
+ title: 'Molestiae minima maxime optio nihil quam eveniet dolor.',
+ id: null,
+ },
+ {
+ iid: 18,
+ title: 'Sed sint a est consequatur quae quasi autem debitis alias.',
+ id: null,
+ },
+ {
+ iid: 6,
+ title: 'Et laboriosam aut ratione voluptatem quasi recusandae.',
+ id: null,
+ },
+ {
+ iid: 2,
+ title: 'Aut quisquam magnam eos distinctio incidunt perferendis fugit.',
+ id: null,
+ },
+];
+
+export const MOCK_EPICS = [
+ {
+ iid: 6,
+ title: 'sgs',
+ reference: 'flightjs\u00266',
+ },
+ {
+ iid: 5,
+ title: 'Doloremque a quisquam qui culpa numquam doloribus similique iure enim.',
+ reference: 'flightjs\u00265',
+ },
+ {
+ iid: 4,
+ title: 'Minus eius ut omnis quos sunt dicta ex ipsum.',
+ reference: 'flightjs\u00264',
+ },
+ {
+ iid: 3,
+ title: 'Quae nostrum possimus rerum aliquam pariatur a eos aut id.',
+ reference: 'flightjs\u00263',
+ },
+ {
+ iid: 2,
+ title: 'Nobis quidem aspernatur reprehenderit sunt ut ipsum tempora sapiente sed iste.',
+ reference: 'flightjs\u00262',
+ },
+ {
+ iid: 1,
+ title: 'Dicta incidunt vel dignissimos sint sit esse est quibusdam quidem consequatur.',
+ reference: 'flightjs\u00261',
+ },
+];
+
+export const MOCK_MERGE_REQUESTS = [
+ {
+ iid: 12,
+ title: "Always call registry's trigger method from withRegistration",
+ id: null,
+ },
+ {
+ iid: 11,
+ title: 'Draft: Alunny/publish lib',
+ id: null,
+ },
+ {
+ iid: 10,
+ title: 'Draft: Resolve "hgvbbvnnb"',
+ id: null,
+ },
+ {
+ iid: 9,
+ title: 'Draft: Fix event current target',
+ id: null,
+ },
+ {
+ iid: 3,
+ title: 'Autem eaque et sed provident enim corrupti molestiae.',
+ id: null,
+ },
+ {
+ iid: 2,
+ title: 'Blanditiis maxime voluptatem ut pariatur vel autem vero non quod libero.',
+ id: null,
+ },
+ {
+ iid: 1,
+ title: 'Optio nemo qui dolorem sit ipsum qui saepe.',
+ id: null,
+ },
+];
+
+export const MOCK_SNIPPETS = [
+ {
+ id: 24,
+ title: 'ss',
+ },
+ {
+ id: 22,
+ title: 'another test snippet',
+ },
+ {
+ id: 21,
+ title: 'test snippet',
+ },
+];
+
+export const MOCK_LABELS = [
+ {
+ title: 'Amsche',
+ color: '#9964cf',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ set: true,
+ },
+ {
+ title: 'Brioffe',
+ color: '#203e13',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ set: true,
+ },
+ {
+ title: 'Bronce',
+ color: '#c0b7f2',
+ type: 'GroupLabel',
+ textColor: '#1F1E24',
+ },
+ {
+ title: 'Bryncefunc',
+ color: '#8baa5e',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ set: true,
+ },
+ {
+ title: 'Contour',
+ color: '#8cf3a3',
+ type: 'ProjectLabel',
+ textColor: '#1F1E24',
+ },
+ {
+ title: 'Corolla',
+ color: '#0384f3',
+ type: 'ProjectLabel',
+ textColor: '#FFFFFF',
+ },
+ {
+ title: 'Cygsync',
+ color: '#1308c3',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ },
+ {
+ title: 'Frontier',
+ color: '#85db43',
+ type: 'ProjectLabel',
+ textColor: '#1F1E24',
+ },
+ {
+ title: 'Ghost',
+ color: '#df1bc4',
+ type: 'ProjectLabel',
+ textColor: '#FFFFFF',
+ set: true,
+ },
+ {
+ title: 'Grand Am',
+ color: '#a1d7ee',
+ type: 'ProjectLabel',
+ textColor: '#1F1E24',
+ },
+ {
+ title: 'Onesync',
+ color: '#a73ba0',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ },
+ {
+ title: 'Phone',
+ color: '#63dceb',
+ type: 'GroupLabel',
+ textColor: '#1F1E24',
+ },
+ {
+ title: 'Pynefunc',
+ color: '#974b19',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ },
+ {
+ title: 'Trinix',
+ color: '#2c894f',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ },
+ {
+ title: 'Trounswood',
+ color: '#ad0370',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ },
+ {
+ title: 'group::knowledge',
+ color: '#8fbc8f',
+ type: 'ProjectLabel',
+ textColor: '#1F1E24',
+ },
+ {
+ title: 'scoped label',
+ color: '#6699cc',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ },
+ {
+ title: 'type::one',
+ color: '#9400d3',
+ type: 'ProjectLabel',
+ textColor: '#FFFFFF',
+ },
+ {
+ title: 'type::two',
+ color: '#013220',
+ type: 'ProjectLabel',
+ textColor: '#FFFFFF',
+ },
+];
+
+export const MOCK_MILESTONES = [
+ {
+ iid: 65,
+ title: '15.0',
+ due_date: '2022-05-17',
+ id: null,
+ },
+ {
+ iid: 73,
+ title: '15.1',
+ due_date: '2022-06-17',
+ id: null,
+ },
+ {
+ iid: 74,
+ title: '15.2',
+ due_date: '2022-07-17',
+ id: null,
+ },
+ {
+ iid: 75,
+ title: '15.3',
+ due_date: '2022-08-17',
+ id: null,
+ },
+ {
+ iid: 76,
+ title: '15.4',
+ due_date: '2022-09-17',
+ id: null,
+ },
+ {
+ iid: 77,
+ title: '15.5',
+ due_date: '2022-10-17',
+ id: null,
+ },
+ {
+ iid: 81,
+ title: '15.6',
+ due_date: '2022-11-17',
+ id: null,
+ },
+ {
+ iid: 82,
+ title: '15.7',
+ due_date: '2022-12-17',
+ id: null,
+ },
+ {
+ iid: 83,
+ title: '15.8',
+ due_date: '2023-01-17',
+ id: null,
+ },
+ {
+ iid: 84,
+ title: '15.9',
+ due_date: '2023-02-17',
+ id: null,
+ },
+ {
+ iid: 85,
+ title: '15.10',
+ due_date: '2023-03-17',
+ id: null,
+ },
+ {
+ iid: 86,
+ title: '15.11',
+ due_date: '2023-04-17',
+ id: null,
+ },
+ {
+ iid: 80,
+ title: '16.0',
+ due_date: '2023-05-17',
+ id: null,
+ },
+ {
+ iid: 88,
+ title: '16.1',
+ due_date: '2023-06-17',
+ id: null,
+ },
+ {
+ iid: 89,
+ title: '16.2',
+ due_date: '2023-07-17',
+ id: null,
+ },
+ {
+ iid: 90,
+ title: '16.3',
+ due_date: '2023-08-17',
+ id: null,
+ },
+ {
+ iid: 91,
+ title: '16.4',
+ due_date: '2023-09-17',
+ id: null,
+ },
+ {
+ iid: 92,
+ title: '16.5',
+ due_date: '2023-10-17',
+ id: null,
+ },
+ {
+ iid: 93,
+ title: '16.6',
+ due_date: '2023-11-10',
+ id: null,
+ },
+ {
+ iid: 95,
+ title: '16.7',
+ due_date: '2023-12-15',
+ id: null,
+ },
+ {
+ iid: 94,
+ title: '16.8',
+ due_date: '2024-01-12',
+ id: null,
+ },
+ {
+ iid: 96,
+ title: '16.9',
+ due_date: '2024-02-09',
+ id: null,
+ },
+ {
+ iid: 97,
+ title: '16.10',
+ due_date: '2024-03-15',
+ id: null,
+ },
+ {
+ iid: 98,
+ title: '16.11',
+ due_date: '2024-04-12',
+ id: null,
+ },
+ {
+ iid: 87,
+ title: '17.0',
+ due_date: '2024-05-10',
+ id: null,
+ },
+ {
+ iid: 48,
+ title: 'Next 1-3 releases',
+ due_date: null,
+ id: null,
+ },
+ {
+ iid: 24,
+ title: 'Awaiting further demand',
+ due_date: null,
+ id: null,
+ },
+ {
+ iid: 14,
+ title: 'Backlog',
+ due_date: null,
+ id: null,
+ },
+ {
+ iid: 11,
+ title: 'Next 4-7 releases',
+ due_date: null,
+ id: null,
+ },
+ {
+ iid: 10,
+ title: 'Next 3-4 releases',
+ due_date: null,
+ id: null,
+ },
+ {
+ iid: 6,
+ title: 'Next 7-13 releases',
+ due_date: null,
+ id: null,
+ },
+];
+
+export const MOCK_VULNERABILITIES = [
+ {
+ id: 99499903,
+ title: 'Cross Site Scripting (Persistent)',
+ },
+ {
+ id: 99495085,
+ title: 'Possible SQL injection',
+ },
+ {
+ id: 99490610,
+ title: 'GitLab Runner Authentication Token',
+ },
+ {
+ id: 99288920,
+ title: 'Cross Site Scripting (Persistent)',
+ },
+ {
+ id: 99258720,
+ title: 'Cross Site Scripting (Persistent)',
+ },
+];
+
+export const MOCK_COMMANDS = [
+ {
+ name: 'due',
+ aliases: [],
+ description: 'Set due date',
+ warning: '',
+ icon: '',
+ params: ['\u003cin 2 days | this Friday | December 31st\u003e'],
+ },
+ {
+ name: 'duplicate',
+ aliases: [],
+ description: 'Mark this issue as a duplicate of another issue',
+ warning: '',
+ icon: '',
+ params: ['#issue'],
+ },
+ {
+ name: 'clone',
+ aliases: [],
+ description: 'Clone this issue',
+ warning: '',
+ icon: '',
+ params: ['path/to/project [--with_notes]'],
+ },
+ {
+ name: 'move',
+ aliases: [],
+ description: 'Move this issue to another project.',
+ warning: '',
+ icon: '',
+ params: ['path/to/project'],
+ },
+ {
+ name: 'create_merge_request',
+ aliases: [],
+ description: 'Create a merge request',
+ warning: '',
+ icon: '',
+ params: ['\u003cbranch name\u003e'],
+ },
+ {
+ name: 'zoom',
+ aliases: [],
+ description: 'Add Zoom meeting',
+ warning: '',
+ icon: '',
+ params: ['\u003cZoom URL\u003e'],
+ },
+ {
+ name: 'promote_to_incident',
+ aliases: [],
+ description: 'Promote issue to incident',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'close',
+ aliases: [],
+ description: 'Close this issue',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'title',
+ aliases: [],
+ description: 'Change title',
+ warning: '',
+ icon: '',
+ params: ['\u003cNew title\u003e'],
+ },
+ {
+ name: 'label',
+ aliases: ['labels'],
+ description: 'Add labels',
+ warning: '',
+ icon: '',
+ params: ['~label1 ~"label 2"'],
+ },
+ {
+ name: 'unlabel',
+ aliases: ['remove_label'],
+ description: 'Remove all or specific labels',
+ warning: '',
+ icon: '',
+ params: ['~label1 ~"label 2"'],
+ },
+ {
+ name: 'relabel',
+ aliases: [],
+ description: 'Replace all labels',
+ warning: '',
+ icon: '',
+ params: ['~label1 ~"label 2"'],
+ },
+ {
+ name: 'todo',
+ aliases: [],
+ description: 'Add a to do',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'unsubscribe',
+ aliases: [],
+ description: 'Unsubscribe',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'award',
+ aliases: [],
+ description: 'Toggle emoji award',
+ warning: '',
+ icon: '',
+ params: [':emoji:'],
+ },
+ {
+ name: 'shrug',
+ aliases: [],
+ description: 'Append the comment with ¯\\_(ツ)_/¯',
+ warning: '',
+ icon: '',
+ params: ['\u003cComment\u003e'],
+ },
+ {
+ name: 'tableflip',
+ aliases: [],
+ description: 'Append the comment with (╯°□°)╯︵ ┻━┻',
+ warning: '',
+ icon: '',
+ params: ['\u003cComment\u003e'],
+ },
+ {
+ name: 'confidential',
+ aliases: [],
+ description: 'Make issue confidential',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'assign',
+ aliases: [],
+ description: 'Assign',
+ warning: '',
+ icon: '',
+ params: ['@user1 @user2'],
+ },
+ {
+ name: 'unassign',
+ aliases: [],
+ description: 'Remove all or specific assignees',
+ warning: '',
+ icon: '',
+ params: ['@user1 @user2'],
+ },
+ {
+ name: 'milestone',
+ aliases: [],
+ description: 'Set milestone',
+ warning: '',
+ icon: '',
+ params: ['%"milestone"'],
+ },
+ {
+ name: 'remove_milestone',
+ aliases: [],
+ description: 'Remove milestone',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'copy_metadata',
+ aliases: [],
+ description: 'Copy labels and milestone from other issue or merge request in this project',
+ warning: '',
+ icon: '',
+ params: ['#issue | !merge_request'],
+ },
+ {
+ name: 'estimate',
+ aliases: ['estimate_time'],
+ description: 'Set time estimate',
+ warning: '',
+ icon: '',
+ params: ['\u003c1w 3d 2h 14m\u003e'],
+ },
+ {
+ name: 'spend',
+ aliases: ['spent', 'spend_time'],
+ description: 'Add or subtract spent time',
+ warning: '',
+ icon: '',
+ params: ['\u003ctime(1h30m | -1h30m)\u003e \u003cdate(YYYY-MM-DD)\u003e'],
+ },
+ {
+ name: 'remove_estimate',
+ aliases: ['remove_time_estimate'],
+ description: 'Remove time estimate',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'remove_time_spent',
+ aliases: [],
+ description: 'Remove spent time',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'lock',
+ aliases: [],
+ description: 'Lock the discussion',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'cc',
+ aliases: [],
+ description: 'CC',
+ warning: '',
+ icon: '',
+ params: ['@user'],
+ },
+ {
+ name: 'relate',
+ aliases: [],
+ description: 'Mark this issue as related to another issue',
+ warning: '',
+ icon: '',
+ params: ['\u003c#issue | group/project#issue | issue URL\u003e'],
+ },
+ {
+ name: 'unlink',
+ aliases: [],
+ description: 'Remove link with another issue',
+ warning: '',
+ icon: '',
+ params: ['\u003c#issue | group/project#issue | issue URL\u003e'],
+ },
+ {
+ name: 'epic',
+ aliases: [],
+ description: 'Add to epic',
+ warning: '',
+ icon: '',
+ params: ['\u003c\u0026epic | group\u0026epic | Epic URL\u003e'],
+ },
+ {
+ name: 'remove_epic',
+ aliases: [],
+ description: 'Remove from epic',
+ warning: '',
+ icon: '',
+ params: [],
+ },
+ {
+ name: 'promote',
+ aliases: [],
+ description: 'Promote issue to an epic',
+ warning: '',
+ icon: 'confidential',
+ params: [],
+ },
+ {
+ name: 'iteration',
+ aliases: [],
+ description: 'Set iteration',
+ warning: '',
+ icon: '',
+ params: ['*iteration:"iteration name" | *iteration:\u003cID\u003e'],
+ },
+ {
+ name: 'health_status',
+ aliases: [],
+ description: 'Set health status',
+ warning: '',
+ icon: '',
+ params: ['\u003con_track|needs_attention|at_risk\u003e'],
+ },
+ {
+ name: 'reassign',
+ aliases: [],
+ description: 'Change assignees',
+ warning: '',
+ icon: '',
+ params: ['@user1 @user2'],
+ },
+ {
+ name: 'weight',
+ aliases: [],
+ description: 'Set weight',
+ warning: '',
+ icon: '',
+ params: ['0, 1, 2, …'],
+ },
+ {
+ name: 'blocks',
+ aliases: [],
+ description: 'Specifies that this issue blocks other issues',
+ warning: '',
+ icon: '',
+ params: ['\u003c#issue | group/project#issue | issue URL\u003e'],
+ },
+ {
+ name: 'blocked_by',
+ aliases: [],
+ description: 'Mark this issue as blocked by other issues',
+ warning: '',
+ icon: '',
+ params: ['\u003c#issue | group/project#issue | issue URL\u003e'],
+ },
+];
diff --git a/spec/frontend/content_editor/services/data_source_factory_spec.js b/spec/frontend/content_editor/services/data_source_factory_spec.js
new file mode 100644
index 00000000000..d540f11711d
--- /dev/null
+++ b/spec/frontend/content_editor/services/data_source_factory_spec.js
@@ -0,0 +1,202 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import DataSourceFactory, {
+ defaultSorter,
+ customSorter,
+ createDataSource,
+} from '~/content_editor/services/data_source_factory';
+import {
+ MOCK_MEMBERS,
+ MOCK_COMMANDS,
+ MOCK_EPICS,
+ MOCK_ISSUES,
+ MOCK_LABELS,
+ MOCK_MILESTONES,
+ MOCK_SNIPPETS,
+ MOCK_VULNERABILITIES,
+ MOCK_MERGE_REQUESTS,
+ MOCK_ASSIGNEES,
+ MOCK_REVIEWERS,
+} from './autocomplete_mock_data';
+
+jest.mock('~/emoji');
+
+describe('defaultSorter', () => {
+ it('returns items as is if query is empty', () => {
+ const items = [{ name: 'abc' }, { name: 'bcd' }, { name: 'cde' }];
+ const sorter = defaultSorter(['name']);
+ expect(sorter(items, '')).toEqual(items);
+ });
+
+ it('sorts items based on query match', () => {
+ const items = [{ name: 'abc' }, { name: 'bcd' }, { name: 'cde' }];
+ const sorter = defaultSorter(['name']);
+ expect(sorter(items, 'b')).toEqual([{ name: 'bcd' }, { name: 'abc' }, { name: 'cde' }]);
+ });
+
+ it('sorts items based on query match in multiple fields', () => {
+ const items = [
+ { name: 'wabc', description: 'xyz' },
+ { name: 'bcd', description: 'wxy' },
+ { name: 'cde', description: 'vwx' },
+ ];
+ const sorter = defaultSorter(['name', 'description']);
+ expect(sorter(items, 'w')).toEqual([
+ { name: 'wabc', description: 'xyz' },
+ { name: 'bcd', description: 'wxy' },
+ { name: 'cde', description: 'vwx' },
+ ]);
+ });
+});
+
+describe('customSorter', () => {
+ it('sorts items based on custom sorter function', () => {
+ const items = [3, 1, 2];
+ const sorter = customSorter((a, b) => a - b);
+ expect(sorter(items)).toEqual([1, 2, 3]);
+ });
+});
+
+describe('createDataSource', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('fetches data from source and filters based on query', async () => {
+ const data = [
+ { name: 'abc', description: 'xyz' },
+ { name: 'bcd', description: 'wxy' },
+ { name: 'cde', description: 'vwx' },
+ ];
+ mock.onGet('/source').reply(200, data);
+
+ const dataSource = createDataSource({
+ source: '/source',
+ searchFields: ['name', 'description'],
+ });
+
+ const results = await dataSource.search('b');
+ expect(results).toEqual([
+ { name: 'bcd', description: 'wxy' },
+ { name: 'abc', description: 'xyz' },
+ ]);
+ });
+
+ it('handles source fetch errors', async () => {
+ mock.onGet('/source').reply(500);
+
+ const dataSource = createDataSource({
+ source: '/source',
+ searchFields: ['name', 'description'],
+ sorter: (items) => items,
+ });
+
+ const results = await dataSource.search('b');
+ expect(results).toEqual([]);
+ });
+});
+
+describe('DataSourceFactory', () => {
+ let mock;
+ let autocompleteHelper;
+ let dateNowOld;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ const dataSourceUrls = {
+ members: '/members',
+ issues: '/issues',
+ snippets: '/snippets',
+ labels: '/labels',
+ epics: '/epics',
+ milestones: '/milestones',
+ mergeRequests: '/mergeRequests',
+ vulnerabilities: '/vulnerabilities',
+ commands: '/commands',
+ };
+
+ mock.onGet('/members').reply(200, MOCK_MEMBERS);
+ mock.onGet('/issues').reply(200, MOCK_ISSUES);
+ mock.onGet('/snippets').reply(200, MOCK_SNIPPETS);
+ mock.onGet('/labels').reply(200, MOCK_LABELS);
+ mock.onGet('/epics').reply(200, MOCK_EPICS);
+ mock.onGet('/milestones').reply(200, MOCK_MILESTONES);
+ mock.onGet('/mergeRequests').reply(200, MOCK_MERGE_REQUESTS);
+ mock.onGet('/vulnerabilities').reply(200, MOCK_VULNERABILITIES);
+ mock.onGet('/commands').reply(200, MOCK_COMMANDS);
+
+ const sidebarMediator = {
+ store: {
+ assignees: MOCK_ASSIGNEES,
+ reviewers: MOCK_REVIEWERS,
+ },
+ };
+
+ autocompleteHelper = new DataSourceFactory({
+ dataSourceUrls,
+ sidebarMediator,
+ });
+
+ dateNowOld = Date.now();
+
+ jest.spyOn(Date, 'now').mockImplementation(() => new Date('2023-11-14').getTime());
+ });
+
+ afterEach(() => {
+ mock.restore();
+
+ jest.spyOn(Date, 'now').mockImplementation(() => dateNowOld);
+ });
+
+ it.each`
+ referenceType | query
+ ${'user'} | ${'r'}
+ ${'issue'} | ${'q'}
+ ${'snippet'} | ${'s'}
+ ${'label'} | ${'c'}
+ ${'epic'} | ${'n'}
+ ${'milestone'} | ${'16'}
+ ${'merge_request'} | ${'n'}
+ ${'vulnerability'} | ${'cross'}
+ ${'command'} | ${'re'}
+ `(
+ 'for reference type "$referenceType", searches for "$query" correctly',
+ async ({ referenceType, query }) => {
+ const dataSource = autocompleteHelper.getDataSource(referenceType);
+ const results = await dataSource.search(query);
+
+ expect(
+ results.map(({ title, name, username }) => username || name || title),
+ ).toMatchSnapshot();
+ },
+ );
+
+ it.each`
+ referenceType | command
+ ${'label'} | ${'/label'}
+ ${'label'} | ${'/unlabel'}
+ ${'label'} | ${'/relabel'}
+ ${'user'} | ${'/assign'}
+ ${'user'} | ${'/reassign'}
+ ${'user'} | ${'/unassign'}
+ ${'user'} | ${'/assign_reviewer'}
+ ${'user'} | ${'/unassign_reviewer'}
+ ${'user'} | ${'/reassign_reviewer'}
+ `(
+ 'filters items based on command "$command" for reference type "$referenceType" and command',
+ async ({ referenceType, command }) => {
+ const dataSource = autocompleteHelper.getDataSource(referenceType, { command });
+ const results = await dataSource.search();
+
+ expect(
+ results.map(({ username, name, title }) => username || name || title),
+ ).toMatchSnapshot();
+ },
+ );
+});
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 548c6030ed7..c329a12bcc4 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -152,19 +152,26 @@ describe('markdownSerializer', () => {
expect(serialize(paragraph(italic('italics')))).toBe('_italics_');
});
- it('correctly serializes code blocks wrapped by italics and bold marks', () => {
- const codeBlockContent = 'code block';
-
- expect(serialize(paragraph(italic(code(codeBlockContent))))).toBe(`_\`${codeBlockContent}\`_`);
- expect(serialize(paragraph(code(italic(codeBlockContent))))).toBe(`_\`${codeBlockContent}\`_`);
- expect(serialize(paragraph(bold(code(codeBlockContent))))).toBe(`**\`${codeBlockContent}\`**`);
- expect(serialize(paragraph(code(bold(codeBlockContent))))).toBe(`**\`${codeBlockContent}\`**`);
- expect(serialize(paragraph(strike(code(codeBlockContent))))).toBe(
- `~~\`${codeBlockContent}\`~~`,
- );
- expect(serialize(paragraph(code(strike(codeBlockContent))))).toBe(
- `~~\`${codeBlockContent}\`~~`,
- );
+ it.each`
+ input | output
+ ${'code'} | ${'`code`'}
+ ${'code `with` backticks'} | ${'``code `with` backticks``'}
+ ${'this is `inline-code`'} | ${'`` this is `inline-code` ``'}
+ ${'`inline-code` in markdown'} | ${'`` `inline-code` in markdown ``'}
+ ${'```js'} | ${'`` ```js ``'}
+ `('correctly serializes inline code ("$input")', ({ input, output }) => {
+ expect(serialize(paragraph(code(input)))).toBe(output);
+ });
+
+ it('correctly serializes inline code wrapped by italics and bold marks', () => {
+ const content = 'code';
+
+ expect(serialize(paragraph(italic(code(content))))).toBe(`_\`${content}\`_`);
+ expect(serialize(paragraph(code(italic(content))))).toBe(`_\`${content}\`_`);
+ expect(serialize(paragraph(bold(code(content))))).toBe(`**\`${content}\`**`);
+ expect(serialize(paragraph(code(bold(content))))).toBe(`**\`${content}\`**`);
+ expect(serialize(paragraph(strike(code(content))))).toBe(`~~\`${content}\`~~`);
+ expect(serialize(paragraph(code(strike(content))))).toBe(`~~\`${content}\`~~`);
});
it('correctly serializes inline diff', () => {
@@ -461,6 +468,52 @@ this is not really json:table but just trying out whether this case works or not
);
});
+ it('correctly serializes a markdown code block containing a nested code block', () => {
+ expect(
+ serialize(
+ codeBlock(
+ { language: 'markdown' },
+ 'markdown code block **bold** _italic_ `code`\n\n```js\nvar a = 0;\n```\n\nend markdown code block',
+ ),
+ ),
+ ).toBe(
+ `
+\`\`\`\`markdown
+markdown code block **bold** _italic_ \`code\`
+
+\`\`\`js
+var a = 0;
+\`\`\`
+
+end markdown code block
+\`\`\`\`
+ `.trim(),
+ );
+ });
+
+ it('correctly serializes a markdown code block containing a markdown code block containing another code block', () => {
+ expect(
+ serialize(
+ codeBlock(
+ { language: 'markdown' },
+ '````md\na nested code block\n\n```js\nvar a = 0;\n```\n````',
+ ),
+ ),
+ ).toBe(
+ `
+\`\`\`\`\`markdown
+\`\`\`\`md
+a nested code block
+
+\`\`\`js
+var a = 0;
+\`\`\`
+\`\`\`\`
+\`\`\`\`\`
+ `.trim(),
+ );
+ });
+
it('correctly serializes emoji', () => {
expect(serialize(paragraph(emoji({ name: 'dog' })))).toBe(':dog:');
});
@@ -607,6 +660,34 @@ this is not really json:table but just trying out whether this case works or not
);
});
+ it('correctly serializes bullet task list with different bullet styles', () => {
+ expect(
+ serialize(
+ taskList(
+ { bullet: '+' },
+ taskItem({ checked: true }, paragraph('list item 1')),
+ taskItem(paragraph('list item 2')),
+ taskItem(
+ paragraph('list item 3'),
+ taskList(
+ { bullet: '-' },
+ taskItem({ checked: true }, paragraph('sub-list item 1')),
+ taskItem(paragraph('sub-list item 2')),
+ ),
+ ),
+ ),
+ ),
+ ).toBe(
+ `
++ [x] list item 1
++ [ ] list item 2
++ [ ] list item 3
+ - [x] sub-list item 1
+ - [ ] sub-list item 2
+ `.trim(),
+ );
+ });
+
it('correctly serializes a numeric list', () => {
expect(
serialize(
diff --git a/spec/frontend/content_editor/services/markdown_sourcemap_spec.js b/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
index 2efc73ddef8..4428fa682e7 100644
--- a/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
+++ b/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
@@ -1,6 +1,8 @@
import { Extension } from '@tiptap/core';
import BulletList from '~/content_editor/extensions/bullet_list';
import ListItem from '~/content_editor/extensions/list_item';
+import TaskList from '~/content_editor/extensions/task_list';
+import TaskItem from '~/content_editor/extensions/task_item';
import Paragraph from '~/content_editor/extensions/paragraph';
import markdownDeserializer from '~/content_editor/services/gl_api_markdown_deserializer';
import { getMarkdownSource, getFullSource } from '~/content_editor/services/markdown_sourcemap';
@@ -18,6 +20,20 @@ const BULLET_LIST_HTML = `<ul data-sourcepos="1:1-3:24" dir="auto">
</li>
</ul>`;
+const BULLET_TASK_LIST_MARKDOWN = `- [ ] list item 1
++ [x] checked list item 2
+ + [ ] embedded list item 1
+ - [x] checked embedded list item 2`;
+const BULLET_TASK_LIST_HTML = `<ul data-sourcepos="1:1-4:36" class="task-list" dir="auto">
+ <li data-sourcepos="1:1-1:17" class="task-list-item"><input type="checkbox" class="task-list-item-checkbox"> list item 1</li>
+ <li data-sourcepos="2:1-4:36" class="task-list-item"><input type="checkbox" class="task-list-item-checkbox" checked> checked list item 2
+ <ul data-sourcepos="3:3-4:36" class="task-list">
+ <li data-sourcepos="3:3-3:28" class="task-list-item"><input type="checkbox" class="task-list-item-checkbox"> embedded list item 1</li>
+ <li data-sourcepos="4:3-4:36" class="task-list-item"><input type="checkbox" class="task-list-item-checkbox" checked> checked embedded list item 2</li>
+ </ul>
+ </li>
+</ul>`;
+
const SourcemapExtension = Extension.create({
// lets add `source` attribute to every element using `getMarkdownSource`
addGlobalAttributes() {
@@ -38,19 +54,68 @@ const SourcemapExtension = Extension.create({
});
const tiptapEditor = createTestEditor({
- extensions: [BulletList, ListItem, SourcemapExtension],
+ extensions: [BulletList, ListItem, TaskList, TaskItem, SourcemapExtension],
});
const {
- builders: { doc, bulletList, listItem, paragraph },
+ builders: { doc, bulletList, listItem, taskList, taskItem, paragraph },
} = createDocBuilder({
tiptapEditor,
names: {
bulletList: { nodeType: BulletList.name },
listItem: { nodeType: ListItem.name },
+ taskList: { nodeType: TaskList.name },
+ taskItem: { nodeType: TaskItem.name },
},
});
+const bulletListDoc = () =>
+ doc(
+ bulletList(
+ { bullet: '+', source: '+ list item 1\n+ list item 2\n - embedded list item 3' },
+ listItem({ source: '+ list item 1' }, paragraph('list item 1')),
+ listItem(
+ { source: '+ list item 2\n - embedded list item 3' },
+ paragraph('list item 2'),
+ bulletList(
+ { bullet: '-', source: '- embedded list item 3' },
+ listItem({ source: '- embedded list item 3' }, paragraph('embedded list item 3')),
+ ),
+ ),
+ ),
+ );
+
+const bulletTaskListDoc = () =>
+ doc(
+ taskList(
+ {
+ bullet: '-',
+ source:
+ '- [ ] list item 1\n+ [x] checked list item 2\n + [ ] embedded list item 1\n - [x] checked embedded list item 2',
+ },
+ taskItem({ source: '- [ ] list item 1' }, paragraph('list item 1')),
+ taskItem(
+ {
+ source:
+ '+ [x] checked list item 2\n + [ ] embedded list item 1\n - [x] checked embedded list item 2',
+ checked: true,
+ },
+ paragraph('checked list item 2'),
+ taskList(
+ {
+ bullet: '+',
+ source: '+ [ ] embedded list item 1\n - [x] checked embedded list item 2',
+ },
+ taskItem({ source: '+ [ ] embedded list item 1' }, paragraph('embedded list item 1')),
+ taskItem(
+ { source: '- [x] checked embedded list item 2', checked: true },
+ paragraph('checked embedded list item 2'),
+ ),
+ ),
+ ),
+ ),
+ );
+
describe('content_editor/services/markdown_sourcemap', () => {
describe('getFullSource', () => {
it.each`
@@ -72,29 +137,21 @@ describe('content_editor/services/markdown_sourcemap', () => {
});
});
- it('gets markdown source for a rendered HTML element', async () => {
- const { document } = await markdownDeserializer({
- render: () => BULLET_LIST_HTML,
- }).deserialize({
- schema: tiptapEditor.schema,
- markdown: BULLET_LIST_MARKDOWN,
- });
-
- const expected = doc(
- bulletList(
- { bullet: '+', source: '+ list item 1\n+ list item 2' },
- listItem({ source: '+ list item 1' }, paragraph('list item 1')),
- listItem(
- { source: '+ list item 2' },
- paragraph('list item 2'),
- bulletList(
- { bullet: '-', source: '- embedded list item 3' },
- listItem({ source: '- embedded list item 3' }, paragraph('embedded list item 3')),
- ),
- ),
- ),
- );
+ it.each`
+ description | sourceMarkdown | sourceHTML | expectedDoc
+ ${'bullet list'} | ${BULLET_LIST_MARKDOWN} | ${BULLET_LIST_HTML} | ${bulletListDoc}
+ ${'bullet task list'} | ${BULLET_TASK_LIST_MARKDOWN} | ${BULLET_TASK_LIST_HTML} | ${bulletTaskListDoc}
+ `(
+ 'gets markdown source for a rendered $description',
+ async ({ sourceMarkdown, sourceHTML, expectedDoc }) => {
+ const { document } = await markdownDeserializer({
+ render: () => sourceHTML,
+ }).deserialize({
+ schema: tiptapEditor.schema,
+ markdown: sourceMarkdown,
+ });
- expect(document.toJSON()).toEqual(expected.toJSON());
- });
+ expect(document.toJSON()).toEqual(expectedDoc().toJSON());
+ },
+ );
});
diff --git a/spec/frontend/content_editor/test_constants.js b/spec/frontend/content_editor/test_constants.js
index cbd4f555e97..255a7104eaf 100644
--- a/spec/frontend/content_editor/test_constants.js
+++ b/spec/frontend/content_editor/test_constants.js
@@ -44,3 +44,18 @@ export const RESOLVED_MERGE_REQUEST_HTML =
export const RESOLVED_EPIC_HTML =
'<p data-sourcepos="1:1-1:11" dir="auto"><a href="/groups/gitlab-org/-/epics/1" data-reference-type="epic" data-original="&amp;amp;1" data-link="false" data-link-reference="false" data-group="9970" data-epic="1" data-container="body" data-placement="top" title="Approvals in merge request list" class="gfm gfm-epic has-tooltip">&amp;1</a> <a href="/groups/gitlab-org/-/epics/1" data-reference-type="epic" data-original="&amp;amp;1+" data-link="false" data-link-reference="false" data-group="9970" data-epic="1" data-reference-format="+" data-container="body" data-placement="top" title="Approvals in merge request list" class="gfm gfm-epic has-tooltip">Approvals in merge request list (&amp;1)</a> <a href="/groups/gitlab-org/-/epics/1" data-reference-type="epic" data-original="&amp;amp;1+s" data-link="false" data-link-reference="false" data-group="9970" data-epic="1" data-reference-format="+s" data-container="body" data-placement="top" title="Approvals in merge request list" class="gfm gfm-epic has-tooltip">Approvals in merge request list (&amp;1)</a></p>';
+
+export const RESOLVED_LABEL_HTML =
+ '<p data-sourcepos="1:1-1:29" dir="auto"><span class="gl-label gl-label-sm"><a href="/gitlab-org/gitlab-shell/-/issues?label_name=Aquanix" data-reference-type="label" data-original="~Aquanix" data-link="false" data-link-reference="false" data-project="2" data-label="5" data-container="body" data-placement="top" title="" class="gfm gfm-label has-tooltip gl-link gl-label-link"><span class="gl-label-text gl-label-text-light" data-container="body" data-html="true" style="background-color: #e65431">Aquanix</span></a></span> <span class="gl-label gl-label-sm"><a href="/gitlab-org/gitlab-shell/-/issues?label_name=Aquanix" data-reference-type="label" data-original="~Aquanix" data-link="false" data-link-reference="false" data-project="2" data-label="5" data-container="body" data-placement="top" title="" class="gfm gfm-label has-tooltip gl-link gl-label-link"><span class="gl-label-text gl-label-text-light" data-container="body" data-html="true" style="background-color: #e65431">Aquanix</span></a></span>+ <span class="gl-label gl-label-sm"><a href="/gitlab-org/gitlab-shell/-/issues?label_name=Aquanix" data-reference-type="label" data-original="~Aquanix" data-link="false" data-link-reference="false" data-project="2" data-label="5" data-container="body" data-placement="top" title="" class="gfm gfm-label has-tooltip gl-link gl-label-link"><span class="gl-label-text gl-label-text-light" data-container="body" data-html="true" style="background-color: #e65431">Aquanix</span></a></span>+s</p>';
+
+export const RESOLVED_SNIPPET_HTML =
+ '<p data-sourcepos="1:1-1:14" dir="auto"><a href="/gitlab-org/gitlab-shell/-/snippets/25" data-reference-type="snippet" data-original="$25" data-link="false" data-link-reference="false" data-project="2" data-snippet="25" data-container="body" data-placement="top" title="test" class="gfm gfm-snippet has-tooltip">$25</a> <a href="/gitlab-org/gitlab-shell/-/snippets/25" data-reference-type="snippet" data-original="$25" data-link="false" data-link-reference="false" data-project="2" data-snippet="25" data-container="body" data-placement="top" title="test" class="gfm gfm-snippet has-tooltip">$25</a>+ <a href="/gitlab-org/gitlab-shell/-/snippets/25" data-reference-type="snippet" data-original="$25" data-link="false" data-link-reference="false" data-project="2" data-snippet="25" data-container="body" data-placement="top" title="test" class="gfm gfm-snippet has-tooltip">$25</a>+s</p>';
+
+export const RESOLVED_MILESTONE_HTML =
+ '<p data-sourcepos="1:1-1:20" dir="auto"><a href="/gitlab-org/gitlab-shell/-/milestones/5" data-reference-type="milestone" data-original="%v4.0" data-link="false" data-link-reference="false" data-project="2" data-milestone="10" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%v4.0</a> <a href="/gitlab-org/gitlab-shell/-/milestones/5" data-reference-type="milestone" data-original="%v4.0" data-link="false" data-link-reference="false" data-project="2" data-milestone="10" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%v4.0</a>+ %v4.0+s</p>';
+
+export const RESOLVED_USER_HTML =
+ '<p data-sourcepos="1:1-1:20" dir="auto"><a href="/root" data-reference-type="user" data-user="1" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="Administrator">@root</a> <a href="/root" data-reference-type="user" data-user="1" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="Administrator">@root</a>+ <a href="/root" data-reference-type="user" data-user="1" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="Administrator">@root</a>+s</p>';
+
+export const RESOLVED_VULNERABILITY_HTML =
+ '<p data-sourcepos="1:1-1:56" dir="auto"><a href="/gitlab-org/gitlab-shell/-/security/vulnerabilities/1" data-reference-type="vulnerability" data-original="[vulnerability:1]" data-link="false" data-link-reference="false" data-project="2" data-vulnerability="1" data-container="body" data-placement="top" title="oh no!" class="gfm gfm-vulnerability has-tooltip">[vulnerability:1]</a> <a href="/gitlab-org/gitlab-shell/-/security/vulnerabilities/1" data-reference-type="vulnerability" data-original="[vulnerability:1]" data-link="false" data-link-reference="false" data-project="2" data-vulnerability="1" data-container="body" data-placement="top" title="oh no!" class="gfm gfm-vulnerability has-tooltip">[vulnerability:1]</a>+ <a href="/gitlab-org/gitlab-shell/-/security/vulnerabilities/1" data-reference-type="vulnerability" data-original="[vulnerability:1]" data-link="false" data-link-reference="false" data-project="2" data-vulnerability="1" data-container="body" data-placement="top" title="oh no!" class="gfm gfm-vulnerability has-tooltip">[vulnerability:1]</a>+s</p>';
diff --git a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
index 8b76a627c1e..50a4a21ef1f 100644
--- a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
+++ b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
@@ -45,22 +45,32 @@ exports[`Contributors charts should render charts and a RefSelector when loading
Excluding merge commits. Limited to 6,000 commits.
</span>
<glareachart-stub
- annotations=""
class="gl-mb-5"
data="[object Object]"
+ format-tooltip-text="function () { [native code] }"
height="264"
- includelegendavgmax="true"
- legendaveragetext="Avg"
- legendcurrenttext="Current"
- legendlayout="inline"
- legendmaxtext="Max"
- legendmintext="Min"
- legendseriesinfo=""
option="[object Object]"
responsive=""
- thresholds=""
width="auto"
- />
+ >
+ <div
+ data-testid="tooltip-title"
+ />
+ <div
+ class="gl-display-flex gl-gap-6 gl-justify-content-space-between"
+ >
+ <span
+ data-testid="tooltip-label"
+ >
+ Number of commits
+ </span>
+ <span
+ data-testid="tooltip-value"
+ >
+ []
+ </span>
+ </div>
+ </glareachart-stub>
<div
class="row"
>
@@ -78,21 +88,31 @@ exports[`Contributors charts should render charts and a RefSelector when loading
2 commits (jawnnypoo@gmail.com)
</p>
<glareachart-stub
- annotations=""
data="[object Object]"
+ format-tooltip-text="function () { [native code] }"
height="216"
- includelegendavgmax="true"
- legendaveragetext="Avg"
- legendcurrenttext="Current"
- legendlayout="inline"
- legendmaxtext="Max"
- legendmintext="Min"
- legendseriesinfo=""
option="[object Object]"
responsive=""
- thresholds=""
width="auto"
- />
+ >
+ <div
+ data-testid="tooltip-title"
+ />
+ <div
+ class="gl-display-flex gl-gap-6 gl-justify-content-space-between"
+ >
+ <span
+ data-testid="tooltip-label"
+ >
+ Commits
+ </span>
+ <span
+ data-testid="tooltip-value"
+ >
+ []
+ </span>
+ </div>
+ </glareachart-stub>
</div>
</div>
</div>
diff --git a/spec/frontend/contributors/component/contributor_area_chart_spec.js b/spec/frontend/contributors/component/contributor_area_chart_spec.js
new file mode 100644
index 00000000000..262c3e8afee
--- /dev/null
+++ b/spec/frontend/contributors/component/contributor_area_chart_spec.js
@@ -0,0 +1,92 @@
+import { GlAreaChart } from '@gitlab/ui/dist/charts';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContributorAreaChart from '~/contributors/components/contributor_area_chart.vue';
+
+describe('Contributor area chart', () => {
+ let wrapper;
+
+ const defaultProps = {
+ data: [
+ {
+ name: 'Commits',
+ data: [
+ ['2015-01-01', 1],
+ ['2015-01-02', 2],
+ ['2015-01-03', 3],
+ ],
+ },
+ ],
+ height: 100,
+ option: {
+ xAxis: { name: '', type: 'time' },
+ yAxis: { name: 'Number of commits' },
+ grid: {
+ top: 10,
+ bottom: 10,
+ left: 10,
+ right: 10,
+ },
+ },
+ };
+
+ const createWrapper = (props = {}) => {
+ wrapper = shallowMountExtended(ContributorAreaChart, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
+
+ const findAreaChart = () => wrapper.findComponent(GlAreaChart);
+ const findTooltipTitle = () => wrapper.findByTestId('tooltip-title').text();
+ const findTooltipLabel = () => wrapper.findByTestId('tooltip-label').text();
+ const findTooltipValue = () => wrapper.findByTestId('tooltip-value').text();
+
+ const setTooltipData = async (title, value) => {
+ findAreaChart().vm.formatTooltipText({ seriesData: [{ data: [title, value] }] });
+ await nextTick();
+ };
+
+ describe('default inputs', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('renders the area chart', () => {
+ expect(findAreaChart().exists()).toBe(true);
+ expect(findAreaChart().props()).toMatchObject(defaultProps);
+ });
+
+ it('emits the area chart created event', () => {
+ const payload = 'test';
+ findAreaChart().vm.$emit('created', payload);
+
+ expect(wrapper.emitted('created')).toHaveLength(1);
+ expect(wrapper.emitted('created')[0]).toEqual([payload]);
+ });
+
+ it('shows the tooltip with the formatted chart data', async () => {
+ await setTooltipData('01-01-2000', 10);
+
+ expect(findTooltipTitle()).toEqual('Jan 01, 2000');
+ expect(findTooltipLabel()).toEqual(defaultProps.option.yAxis.name);
+ expect(findTooltipValue()).toEqual('10');
+ });
+ });
+
+ describe('Y axis has no name', () => {
+ beforeEach(() => {
+ createWrapper({
+ option: {
+ ...defaultProps.option,
+ yAxis: {},
+ },
+ });
+ });
+
+ it('shows a default tooltip label if the Y axis name is missing', async () => {
+ await setTooltipData('01-01-2000', 10);
+
+ expect(findTooltipLabel()).toEqual('Value');
+ });
+ });
+});
diff --git a/spec/frontend/contributors/component/contributors_spec.js b/spec/frontend/contributors/component/contributors_spec.js
index 7d863a8eb78..6235d2610a9 100644
--- a/spec/frontend/contributors/component/contributors_spec.js
+++ b/spec/frontend/contributors/component/contributors_spec.js
@@ -12,6 +12,8 @@ import { SET_CHART_DATA, SET_LOADING_STATE } from '~/contributors/stores/mutatio
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
+ joinPaths: jest.fn(),
+ setUrlFragment: jest.fn(),
}));
let wrapper;
diff --git a/spec/frontend/deploy_freeze/store/actions_spec.js b/spec/frontend/deploy_freeze/store/actions_spec.js
index d39577baa59..86b72c673bc 100644
--- a/spec/frontend/deploy_freeze/store/actions_spec.js
+++ b/spec/frontend/deploy_freeze/store/actions_spec.js
@@ -36,7 +36,7 @@ describe('deploy freeze store actions', () => {
describe('setSelectedFreezePeriod', () => {
it('commits SET_SELECTED_TIMEZONE mutation', () => {
- testAction(
+ return testAction(
actions.setFreezePeriod,
{
id: 3,
@@ -69,7 +69,7 @@ describe('deploy freeze store actions', () => {
describe('setSelectedTimezone', () => {
it('commits SET_SELECTED_TIMEZONE mutation', () => {
- testAction(actions.setSelectedTimezone, {}, {}, [
+ return testAction(actions.setSelectedTimezone, {}, {}, [
{
payload: {},
type: types.SET_SELECTED_TIMEZONE,
@@ -80,7 +80,7 @@ describe('deploy freeze store actions', () => {
describe('setFreezeStartCron', () => {
it('commits SET_FREEZE_START_CRON mutation', () => {
- testAction(actions.setFreezeStartCron, {}, {}, [
+ return testAction(actions.setFreezeStartCron, {}, {}, [
{
type: types.SET_FREEZE_START_CRON,
},
@@ -90,7 +90,7 @@ describe('deploy freeze store actions', () => {
describe('setFreezeEndCron', () => {
it('commits SET_FREEZE_END_CRON mutation', () => {
- testAction(actions.setFreezeEndCron, {}, {}, [
+ return testAction(actions.setFreezeEndCron, {}, {}, [
{
type: types.SET_FREEZE_END_CRON,
},
diff --git a/spec/frontend/deploy_keys/components/key_spec.js b/spec/frontend/deploy_keys/components/key_spec.js
index 3c4fa2a6de6..e57da4df150 100644
--- a/spec/frontend/deploy_keys/components/key_spec.js
+++ b/spec/frontend/deploy_keys/components/key_spec.js
@@ -4,7 +4,7 @@ import data from 'test_fixtures/deploy_keys/keys.json';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import key from '~/deploy_keys/components/key.vue';
import DeployKeysStore from '~/deploy_keys/store';
-import { getTimeago, formatDate } from '~/lib/utils/datetime_utility';
+import { getTimeago, localeDateFormat } from '~/lib/utils/datetime_utility';
describe('Deploy keys key', () => {
let wrapper;
@@ -64,7 +64,9 @@ describe('Deploy keys key', () => {
const expiryComponent = wrapper.find('[data-testid="expires-at-tooltip"]');
const tooltip = getBinding(expiryComponent.element, 'gl-tooltip');
expect(tooltip).toBeDefined();
- expect(expiryComponent.attributes('title')).toBe(`${formatDate(expiresAt)}`);
+ expect(expiryComponent.attributes('title')).toBe(
+ `${localeDateFormat.asDateTimeFull.format(expiresAt)}`,
+ );
});
it('renders never when no expiration date', () => {
createComponent({
diff --git a/spec/frontend/deploy_keys/graphql/resolvers_spec.js b/spec/frontend/deploy_keys/graphql/resolvers_spec.js
new file mode 100644
index 00000000000..458232697cb
--- /dev/null
+++ b/spec/frontend/deploy_keys/graphql/resolvers_spec.js
@@ -0,0 +1,249 @@
+import MockAdapter from 'axios-mock-adapter';
+import { HTTP_STATUS_OK, HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
+import axios from '~/lib/utils/axios_utils';
+import pageInfoQuery from '~/graphql_shared/client/page_info.query.graphql';
+import currentPageQuery from '~/deploy_keys/graphql/queries/current_page.query.graphql';
+import currentScopeQuery from '~/deploy_keys/graphql/queries/current_scope.query.graphql';
+import confirmRemoveKeyQuery from '~/deploy_keys/graphql/queries/confirm_remove_key.query.graphql';
+import { resolvers } from '~/deploy_keys/graphql/resolvers';
+
+const ENDPOINTS = {
+ enabledKeysEndpoint: '/enabled_keys',
+ availableProjectKeysEndpoint: '/available_project_keys',
+ availablePublicKeysEndpoint: '/available_public_keys',
+};
+
+describe('~/deploy_keys/graphql/resolvers', () => {
+ let mockResolvers;
+ let mock;
+ let client;
+
+ beforeEach(() => {
+ mockResolvers = resolvers(ENDPOINTS);
+ mock = new MockAdapter(axios);
+ client = {
+ writeQuery: jest.fn(),
+ readQuery: jest.fn(),
+ readFragment: jest.fn(),
+ cache: { evict: jest.fn(), gc: jest.fn() },
+ };
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ describe('deployKeys', () => {
+ const key = { id: 1, title: 'hello', edit_path: '/edit' };
+
+ it.each(['enabledKeys', 'availableProjectKeys', 'availablePublicKeys'])(
+ 'should request the endpoint for the %s scope',
+ async (scope) => {
+ mock.onGet(ENDPOINTS[`${scope}Endpoint`]).reply(HTTP_STATUS_OK, { keys: [key] });
+
+ const keys = await mockResolvers.Project.deployKeys(null, { scope, page: 1 }, { client });
+
+ expect(keys).toEqual([
+ { id: 1, title: 'hello', editPath: '/edit', __typename: 'LocalDeployKey' },
+ ]);
+ },
+ );
+
+ it('should default to enabled keys if a bad scope is given', async () => {
+ const scope = 'bad';
+ mock.onGet(ENDPOINTS.enabledKeysEndpoint).reply(HTTP_STATUS_OK, { keys: [key] });
+
+ const keys = await mockResolvers.Project.deployKeys(null, { scope, page: 1 }, { client });
+
+ expect(keys).toEqual([
+ { id: 1, title: 'hello', editPath: '/edit', __typename: 'LocalDeployKey' },
+ ]);
+ });
+
+ it('should request the given page', async () => {
+ const scope = 'enabledKeys';
+ const page = 2;
+ mock
+ .onGet(ENDPOINTS.enabledKeysEndpoint, { params: { page } })
+ .reply(HTTP_STATUS_OK, { keys: [key] });
+
+ const keys = await mockResolvers.Project.deployKeys(null, { scope, page }, { client });
+
+ expect(keys).toEqual([
+ { id: 1, title: 'hello', editPath: '/edit', __typename: 'LocalDeployKey' },
+ ]);
+ });
+
+ it('should write pagination info to the cache', async () => {
+ const scope = 'enabledKeys';
+ const page = 1;
+
+ mock.onGet(ENDPOINTS.enabledKeysEndpoint).reply(
+ HTTP_STATUS_OK,
+ { keys: [key] },
+ {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '5',
+ },
+ );
+
+ await mockResolvers.Project.deployKeys(null, { scope, page }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: pageInfoQuery,
+ variables: { input: { scope, page } },
+ data: {
+ pageInfo: {
+ total: 37,
+ perPage: 2,
+ previousPage: NaN,
+ totalPages: 5,
+ nextPage: 2,
+ page: 1,
+ __typename: 'LocalPageInfo',
+ },
+ },
+ });
+ });
+
+ it('should not write page info if the request fails', async () => {
+ const scope = 'enabledKeys';
+ const page = 1;
+
+ mock.onGet(ENDPOINTS.enabledKeysEndpoint).reply(HTTP_STATUS_NOT_FOUND);
+
+ try {
+ await mockResolvers.Project.deployKeys(null, { scope, page }, { client });
+ } catch {
+ expect(client.writeQuery).not.toHaveBeenCalled();
+ }
+ });
+ });
+
+ describe('currentPage', () => {
+ it('sets the current page', () => {
+ const page = 5;
+ mockResolvers.Mutation.currentPage(null, { page }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: currentPageQuery,
+ data: { currentPage: page },
+ });
+ });
+ });
+
+ describe('currentScope', () => {
+ let scope;
+
+ beforeEach(() => {
+ scope = 'enabledKeys';
+ mockResolvers.Mutation.currentScope(null, { scope }, { client });
+ });
+
+ it('sets the current scope', () => {
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: currentScopeQuery,
+ data: { currentScope: scope },
+ });
+ });
+
+ it('resets the page to 1', () => {
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: currentPageQuery,
+ data: { currentPage: 1 },
+ });
+ });
+ });
+
+ describe('disableKey', () => {
+ it('disables the key that is pending confirmation', async () => {
+ const key = { id: 1, title: 'hello', disablePath: '/disable', __typename: 'LocalDeployKey' };
+ client.readQuery.mockReturnValue({ deployKeyToRemove: key });
+ client.readFragment.mockReturnValue(key);
+ mock.onPut(key.disablePath).reply(HTTP_STATUS_OK);
+ await mockResolvers.Mutation.disableKey(null, null, { client });
+
+ expect(client.readQuery).toHaveBeenCalledWith({ query: confirmRemoveKeyQuery });
+ expect(client.readFragment).toHaveBeenCalledWith(
+ expect.objectContaining({ id: `LocalDeployKey:${key.id}` }),
+ );
+ expect(client.cache.evict).toHaveBeenCalledWith({ fieldName: 'deployKeyToRemove' });
+ expect(client.cache.evict).toHaveBeenCalledWith({ id: `LocalDeployKey:${key.id}` });
+ expect(client.cache.gc).toHaveBeenCalled();
+ });
+
+ it('does not remove the key from the cache on fail', async () => {
+ const key = { id: 1, title: 'hello', disablePath: '/disable', __typename: 'LocalDeployKey' };
+ client.readQuery.mockReturnValue({ deployKeyToRemove: key });
+ client.readFragment.mockReturnValue(key);
+ mock.onPut(key.disablePath).reply(HTTP_STATUS_NOT_FOUND);
+
+ try {
+ await mockResolvers.Mutation.disableKey(null, null, { client });
+ } catch {
+ expect(client.readQuery).toHaveBeenCalledWith({ query: confirmRemoveKeyQuery });
+ expect(client.readFragment).toHaveBeenCalledWith(
+ expect.objectContaining({ id: `LocalDeployKey:${key.id}` }),
+ );
+ expect(client.cache.evict).not.toHaveBeenCalled();
+ expect(client.cache.gc).not.toHaveBeenCalled();
+ }
+ });
+ });
+
+ describe('enableKey', () => {
+ it("calls the key's enable path", async () => {
+ const key = { id: 1, title: 'hello', enablePath: '/enable', __typename: 'LocalDeployKey' };
+ client.readQuery.mockReturnValue({ deployKeyToRemove: key });
+ client.readFragment.mockReturnValue(key);
+ mock.onPut(key.enablePath).reply(HTTP_STATUS_OK);
+ await mockResolvers.Mutation.enableKey(null, key, { client });
+
+ expect(client.readFragment).toHaveBeenCalledWith(
+ expect.objectContaining({ id: `LocalDeployKey:${key.id}` }),
+ );
+ expect(client.cache.evict).toHaveBeenCalledWith({ id: `LocalDeployKey:${key.id}` });
+ expect(client.cache.gc).toHaveBeenCalled();
+ });
+
+ it('does not remove the key from the cache on failure', async () => {
+ const key = { id: 1, title: 'hello', enablePath: '/enable', __typename: 'LocalDeployKey' };
+ client.readQuery.mockReturnValue({ deployKeyToRemove: key });
+ client.readFragment.mockReturnValue(key);
+ mock.onPut(key.enablePath).reply(HTTP_STATUS_NOT_FOUND);
+ try {
+ await mockResolvers.Mutation.enableKey(null, key, { client });
+ } catch {
+ expect(client.readFragment).toHaveBeenCalledWith(
+ expect.objectContaining({ id: `LocalDeployKey:${key.id}` }),
+ );
+ expect(client.cache.evict).not.toHaveBeenCalled();
+ expect(client.cache.gc).not.toHaveBeenCalled();
+ }
+ });
+ });
+
+ describe('confirmDisable', () => {
+ it('sets the key to disable', () => {
+ const key = { id: 1, title: 'hello', enablePath: '/enable', __typename: 'LocalDeployKey' };
+ mockResolvers.Mutation.confirmDisable(null, key, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: confirmRemoveKeyQuery,
+ data: { deployKeyToRemove: { id: key.id, __type: 'LocalDeployKey' } },
+ });
+ });
+ it('clears the value when null id is passed', () => {
+ mockResolvers.Mutation.confirmDisable(null, { id: null }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: confirmRemoveKeyQuery,
+ data: { deployKeyToRemove: null },
+ });
+ });
+ });
+});
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index a05b3baecd3..6624c90a146 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -48,7 +48,7 @@ exports[`Design management list item component with notes renders item with mult
Updated
<timeago-stub
cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
+ datetimeformat="asDateTime"
time="01-01-2019"
tooltipplacement="bottom"
/>
@@ -113,7 +113,7 @@ exports[`Design management list item component with notes renders item with sing
Updated
<timeago-stub
cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
+ datetimeformat="asDateTime"
time="01-01-2019"
tooltipplacement="bottom"
/>
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 34af3d72b04..a9fbf4632ac 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -23,7 +23,7 @@ import eventHub from '~/diffs/event_hub';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
import axios from '~/lib/utils/axios_utils';
-import { scrollToElement } from '~/lib/utils/common_utils';
+import { scrollToElement, isElementStuck } from '~/lib/utils/common_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import createNotesStore from '~/notes/stores/modules';
import diffsModule from '~/diffs/store/modules';
@@ -399,6 +399,27 @@ describe('DiffFile', () => {
});
});
+ describe('automatically collapsed generated file', () => {
+ beforeEach(() => {
+ makeFileAutomaticallyCollapsed(store);
+ const file = store.state.diffs.diffFiles[0];
+ Object.assign(store.state.diffs.diffFiles[0], {
+ ...file,
+ viewer: {
+ ...file.viewer,
+ generated: true,
+ },
+ });
+ });
+
+ it('should show the generated file warning with expansion button', () => {
+ expect(findDiffContentArea(wrapper).html()).toContain(
+ 'Generated files are collapsed by default. This behavior can be overriden via .gitattributes file if required.',
+ );
+ expect(findToggleButton(wrapper).exists()).toBe(true);
+ });
+ });
+
describe('not collapsed', () => {
beforeEach(() => {
makeFileOpenByDefault(store);
@@ -429,6 +450,7 @@ describe('DiffFile', () => {
describe('scoll-to-top of file after collapse', () => {
beforeEach(() => {
jest.spyOn(wrapper.vm.$store, 'dispatch').mockImplementation(() => {});
+ isElementStuck.mockReturnValueOnce(true);
});
it("scrolls to the top when the file is open, the users initiates the collapse, and there's a content block to scroll to", async () => {
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index 30510958704..e9fbde11211 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -112,6 +112,8 @@ describe('DiffRow', () => {
});
const getCommentButton = (side) => wrapper.find(`[data-testid="${side}-comment-button"]`);
+ const findRightCommentButton = () => wrapper.find('[data-testid="right-comment-button"]');
+ const findLeftCommentButton = () => wrapper.find('[data-testid="left-comment-button"]');
describe.each`
side
@@ -135,6 +137,10 @@ describe('DiffRow', () => {
it('renders', () => {
wrapper = createWrapper({ props: { line, inline: false } });
+ expect(findRightCommentButton().attributes('draggable')).toBe('true');
+ expect(findLeftCommentButton().attributes('draggable')).toBe(
+ side === 'left' ? 'true' : 'false',
+ );
expect(getCommentButton(side).exists()).toBe(true);
});
diff --git a/spec/frontend/diffs/components/merge_conflict_warning_spec.js b/spec/frontend/diffs/components/merge_conflict_warning_spec.js
deleted file mode 100644
index 715912b361f..00000000000
--- a/spec/frontend/diffs/components/merge_conflict_warning_spec.js
+++ /dev/null
@@ -1,58 +0,0 @@
-import { shallowMount, mount } from '@vue/test-utils';
-import MergeConflictWarning from '~/diffs/components/merge_conflict_warning.vue';
-
-const propsData = {
- limited: true,
- mergeable: true,
- resolutionPath: 'a-path',
-};
-
-function findResolveButton(wrapper) {
- return wrapper.find('.gl-alert-actions a.gl-button:first-child');
-}
-function findLocalMergeButton(wrapper) {
- return wrapper.find('.gl-alert-actions button.gl-button:last-child');
-}
-
-describe('MergeConflictWarning', () => {
- let wrapper;
-
- const createComponent = (props = {}, { full } = { full: false }) => {
- const mounter = full ? mount : shallowMount;
-
- wrapper = mounter(MergeConflictWarning, {
- propsData: { ...propsData, ...props },
- });
- };
-
- it.each`
- present | resolutionPath
- ${false} | ${''}
- ${true} | ${'some-path'}
- `(
- 'toggles the resolve conflicts button based on the provided resolutionPath "$resolutionPath"',
- ({ present, resolutionPath }) => {
- createComponent({ resolutionPath }, { full: true });
- const resolveButton = findResolveButton(wrapper);
-
- expect(resolveButton.exists()).toBe(present);
- if (present) {
- expect(resolveButton.attributes('href')).toBe(resolutionPath);
- }
- },
- );
-
- it.each`
- present | mergeable
- ${false} | ${false}
- ${true} | ${true}
- `(
- 'toggles the local merge button based on the provided mergeable property "$mergable"',
- ({ present, mergeable }) => {
- createComponent({ mergeable }, { full: true });
- const localMerge = findLocalMergeButton(wrapper);
-
- expect(localMerge.exists()).toBe(present);
- },
- );
-});
diff --git a/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap b/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
index cfc34bd2f25..33a268c06cc 100644
--- a/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
+++ b/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`FindingsDrawer matches the snapshot 1`] = `
+exports[`FindingsDrawer General Rendering matches the snapshot with detected badge 1`] = `
<transition-stub
class="findings-drawer"
name="gl-drawer"
@@ -16,7 +16,7 @@ exports[`FindingsDrawer matches the snapshot 1`] = `
class="gl-drawer-title"
>
<h2
- class="drawer-heading gl-font-base gl-mb-0 gl-mt-0"
+ class="drawer-heading gl-font-base gl-mb-0 gl-mt-0 gl-w-28"
>
<svg
aria-hidden="true"
@@ -61,6 +61,227 @@ exports[`FindingsDrawer matches the snapshot 1`] = `
>
<li
class="gl-mb-4"
+ data-testid="findings-drawer-title"
+ >
+ <p
+ class="gl-line-height-20"
+ >
+ <span
+ class="gl-display-block gl-font-weight-bold gl-mb-1"
+ data-testid="findings-drawer-item-description"
+ >
+ Name
+ </span>
+ <span
+ data-testid="findings-drawer-item-value-prop"
+ >
+ mockedtitle
+ </span>
+ </p>
+ </li>
+ <li
+ class="gl-mb-4"
+ >
+ <p
+ class="gl-line-height-20"
+ >
+ <span
+ class="gl-display-block gl-font-weight-bold gl-mb-1"
+ data-testid="findings-drawer-item-description"
+ >
+ Status
+ </span>
+ <span
+ class="badge badge-pill badge-warning gl-badge md text-capitalize"
+ >
+ detected
+ </span>
+ </p>
+ </li>
+ <li
+ class="gl-mb-4"
+ >
+ <p
+ class="gl-line-height-20"
+ >
+ <span
+ class="gl-display-block gl-font-weight-bold gl-mb-1"
+ data-testid="findings-drawer-item-description"
+ >
+ Description
+ </span>
+ <span
+ data-testid="findings-drawer-item-value-prop"
+ >
+ fakedesc
+ </span>
+ </p>
+ </li>
+ <li
+ class="gl-mb-4"
+ >
+ <p
+ class="gl-line-height-20"
+ >
+ <span
+ class="gl-display-block gl-font-weight-bold gl-mb-1"
+ data-testid="findings-drawer-item-description"
+ >
+ Project
+ </span>
+ <a
+ class="gl-link"
+ href="/testpath"
+ >
+ testname
+ </a>
+ </p>
+ </li>
+ <li
+ class="gl-mb-4"
+ >
+ <p
+ class="gl-line-height-20"
+ >
+ <span
+ class="gl-display-block gl-font-weight-bold gl-mb-1"
+ data-testid="findings-drawer-item-description"
+ >
+ File
+ </span>
+ <span
+ data-testid="findings-drawer-item-value-prop"
+ />
+ </p>
+ </li>
+ <li
+ class="gl-mb-4"
+ >
+ <p
+ class="gl-line-height-20"
+ >
+ <span
+ class="gl-display-block gl-font-weight-bold gl-mb-1"
+ data-testid="findings-drawer-item-description"
+ >
+ Identifiers
+ </span>
+ <span>
+ <a
+ class="gl-link"
+ href="https://semgrep.dev/r/gitlab.eslint.detect-disable-mustache-escape"
+ >
+ eslint.detect-disable-mustache-escape
+ </a>
+ </span>
+ </p>
+ </li>
+ <li
+ class="gl-mb-4"
+ >
+ <p
+ class="gl-line-height-20"
+ >
+ <span
+ class="gl-display-block gl-font-weight-bold gl-mb-1"
+ data-testid="findings-drawer-item-description"
+ >
+ Tool
+ </span>
+ <span
+ data-testid="findings-drawer-item-value-prop"
+ >
+ SAST
+ </span>
+ </p>
+ </li>
+ <li
+ class="gl-mb-4"
+ >
+ <p
+ class="gl-line-height-20"
+ >
+ <span
+ class="gl-display-block gl-font-weight-bold gl-mb-1"
+ data-testid="findings-drawer-item-description"
+ >
+ Engine
+ </span>
+ <span
+ data-testid="findings-drawer-item-value-prop"
+ >
+ testengine name
+ </span>
+ </p>
+ </li>
+ </ul>
+ </div>
+ </aside>
+</transition-stub>
+`;
+
+exports[`FindingsDrawer General Rendering matches the snapshot with dismissed badge 1`] = `
+<transition-stub
+ class="findings-drawer"
+ name="gl-drawer"
+>
+ <aside
+ class="gl-drawer gl-drawer-default"
+ style="top: 0px; z-index: 252;"
+ >
+ <div
+ class="gl-drawer-header"
+ >
+ <div
+ class="gl-drawer-title"
+ >
+ <h2
+ class="drawer-heading gl-font-base gl-mb-0 gl-mt-0 gl-w-28"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-icon gl-text-orange-300 gl-vertical-align-baseline! inline-findings-severity-icon s12"
+ data-testid="severity-low-icon"
+ role="img"
+ >
+ <use
+ href="file-mock#severity-low"
+ />
+ </svg>
+ <span
+ class="drawer-heading-severity"
+ >
+ low
+ </span>
+ SAST Finding
+ </h2>
+ <button
+ aria-label="Close drawer"
+ class="btn btn-default btn-default-tertiary btn-icon btn-sm gl-button gl-drawer-close-button"
+ type="button"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-button-icon gl-icon s16"
+ data-testid="close-icon"
+ role="img"
+ >
+ <use
+ href="file-mock#close"
+ />
+ </svg>
+ </button>
+ </div>
+ </div>
+ <div
+ class="gl-drawer-body gl-drawer-body-scrim"
+ >
+ <ul
+ class="gl-border-b-initial gl-list-style-none gl-mb-0 gl-pb-0!"
+ >
+ <li
+ class="gl-mb-4"
+ data-testid="findings-drawer-title"
>
<p
class="gl-line-height-20"
diff --git a/spec/frontend/diffs/components/shared/findings_drawer_spec.js b/spec/frontend/diffs/components/shared/findings_drawer_spec.js
index 62d875ed9b7..00b4ca262be 100644
--- a/spec/frontend/diffs/components/shared/findings_drawer_spec.js
+++ b/spec/frontend/diffs/components/shared/findings_drawer_spec.js
@@ -1,36 +1,106 @@
+import { nextTick } from 'vue';
import { GlDrawer } from '@gitlab/ui';
import FindingsDrawer from '~/diffs/components/shared/findings_drawer.vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { mockFinding, mockProject } from '../../mock_data/findings_drawer';
-
-let wrapper;
-const getDrawer = () => wrapper.findComponent(GlDrawer);
-const closeEvent = 'close';
-
-const createWrapper = () => {
- return mountExtended(FindingsDrawer, {
- propsData: {
- drawer: mockFinding,
- project: mockProject,
- },
- });
-};
+import {
+ mockFindingDismissed,
+ mockFindingDetected,
+ mockProject,
+ mockFindingsMultiple,
+} from '../../mock_data/findings_drawer';
describe('FindingsDrawer', () => {
- it('renders without errors', () => {
- wrapper = createWrapper();
- expect(wrapper.exists()).toBe(true);
+ let wrapper;
+
+ const findPreviousButton = () => wrapper.findByTestId('findings-drawer-prev-button');
+ const findNextButton = () => wrapper.findByTestId('findings-drawer-next-button');
+ const findTitle = () => wrapper.findByTestId('findings-drawer-title');
+ const createWrapper = (
+ drawer = { findings: [mockFindingDetected], index: 0 },
+ project = mockProject,
+ ) => {
+ return mountExtended(FindingsDrawer, {
+ propsData: {
+ drawer,
+ project,
+ },
+ });
+ };
+
+ describe('General Rendering', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+ it('renders without errors', () => {
+ expect(wrapper.exists()).toBe(true);
+ });
+
+ it('emits close event when gl-drawer emits close event', () => {
+ wrapper.findComponent(GlDrawer).vm.$emit('close');
+ expect(wrapper.emitted('close')).toHaveLength(1);
+ });
+
+ it('matches the snapshot with dismissed badge', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('matches the snapshot with detected badge', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
});
- it('emits close event when gl-drawer emits close event', () => {
- wrapper = createWrapper();
+ describe('Prev/Next Buttons with Multiple Items', () => {
+ it('renders prev/next buttons when there are multiple items', () => {
+ wrapper = createWrapper({ findings: mockFindingsMultiple, index: 0 });
+ expect(findPreviousButton().exists()).toBe(true);
+ expect(findNextButton().exists()).toBe(true);
+ });
+
+ it('does not render prev/next buttons when there is only one item', () => {
+ wrapper = createWrapper({ findings: [mockFindingDismissed], index: 0 });
+ expect(findPreviousButton().exists()).toBe(false);
+ expect(findNextButton().exists()).toBe(false);
+ });
- getDrawer().vm.$emit(closeEvent);
- expect(wrapper.emitted(closeEvent)).toHaveLength(1);
+ it('calls prev method on prev button click and loops correct activeIndex', async () => {
+ wrapper = createWrapper({ findings: mockFindingsMultiple, index: 0 });
+ expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[0].title}`);
+
+ await findPreviousButton().trigger('click');
+ await nextTick();
+ expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[2].title}`);
+
+ await findPreviousButton().trigger('click');
+ await nextTick();
+ expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[1].title}`);
+ });
+
+ it('calls next method on next button click', async () => {
+ wrapper = createWrapper({ findings: mockFindingsMultiple, index: 0 });
+ expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[0].title}`);
+
+ await findNextButton().trigger('click');
+ await nextTick();
+ expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[1].title}`);
+
+ await findNextButton().trigger('click');
+ await nextTick();
+ expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[2].title}`);
+
+ await findNextButton().trigger('click');
+ await nextTick();
+ expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[0].title}`);
+ });
});
- it('matches the snapshot', () => {
- wrapper = createWrapper();
- expect(wrapper.element).toMatchSnapshot();
+ describe('Active Index Handling', () => {
+ it('watcher sets active index on drawer prop change', async () => {
+ wrapper = createWrapper();
+ const newFinding = { findings: mockFindingsMultiple, index: 2 };
+
+ await wrapper.setProps({ drawer: newFinding });
+ await nextTick();
+ expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[2].title}`);
+ });
});
});
diff --git a/spec/frontend/diffs/mock_data/findings_drawer.js b/spec/frontend/diffs/mock_data/findings_drawer.js
index 4823a18b267..257a3b3e499 100644
--- a/spec/frontend/diffs/mock_data/findings_drawer.js
+++ b/spec/frontend/diffs/mock_data/findings_drawer.js
@@ -1,6 +1,6 @@
-export const mockFinding = {
+export const mockFindingDismissed = {
title: 'mockedtitle',
- state: 'detected',
+ state: 'dismissed',
scale: 'sast',
line: 7,
description: 'fakedesc',
@@ -22,7 +22,54 @@ export const mockFinding = {
],
};
+export const mockFindingDetected = {
+ ...mockFindingDismissed,
+ state: 'detected',
+};
+
export const mockProject = {
nameWithNamespace: 'testname',
fullPath: 'testpath',
};
+
+export const mockFindingsMultiple = [
+ {
+ ...mockFindingDismissed,
+ title: 'Finding 1',
+ severity: 'critical',
+ engineName: 'Engine 1',
+ identifiers: [
+ {
+ ...mockFindingDismissed.identifiers[0],
+ name: 'identifier 1',
+ url: 'https://example.com/identifier1',
+ },
+ ],
+ },
+ {
+ ...mockFindingDetected,
+ title: 'Finding 2',
+ severity: 'medium',
+ engineName: 'Engine 2',
+ identifiers: [
+ {
+ ...mockFindingDetected.identifiers[0],
+ name: 'identifier 2',
+ url: 'https://example.com/identifier2',
+ },
+ ],
+ },
+ {
+ ...mockFindingDetected,
+ title: 'Finding 3',
+ severity: 'medium',
+ engineName: 'Engine 3',
+ identifiers: [
+ {
+ ...mockFindingDetected.identifiers[0],
+ name: 'identifier 3',
+ url: 'https://example.com/identifier3',
+ },
+ ],
+ },
+];
diff --git a/spec/frontend/diffs/mock_data/inline_findings.js b/spec/frontend/diffs/mock_data/inline_findings.js
index ae1ae909238..6307c2c7343 100644
--- a/spec/frontend/diffs/mock_data/inline_findings.js
+++ b/spec/frontend/diffs/mock_data/inline_findings.js
@@ -45,36 +45,43 @@ export const multipleFindingsArrSastScale = [
line: 2,
scale: 'sast',
text: 'mocked low Issue',
+ state: 'detected',
},
{
severity: 'medium',
description: 'mocked medium Issue',
line: 3,
scale: 'sast',
+ text: 'mocked medium Issue',
+ state: 'dismissed',
},
{
severity: 'info',
description: 'mocked info Issue',
line: 3,
scale: 'sast',
+ state: 'detected',
},
{
severity: 'high',
description: 'mocked high Issue',
line: 3,
scale: 'sast',
+ state: 'dismissed',
},
{
severity: 'critical',
description: 'mocked critical Issue',
line: 3,
scale: 'sast',
+ state: 'detected',
},
{
severity: 'unknown',
description: 'mocked unknown Issue',
line: 3,
scale: 'sast',
+ state: 'dismissed',
},
];
@@ -114,6 +121,9 @@ export const diffCodeQuality = {
export const singularCodeQualityFinding = [multipleFindingsArrCodeQualityScale[0]];
export const singularSastFinding = [multipleFindingsArrSastScale[0]];
+export const singularSastFindingDetected = [multipleFindingsArrSastScale[0]];
+export const singularSastFindingDismissed = [multipleFindingsArrSastScale[1]];
+
export const twoSastFindings = multipleFindingsArrSastScale.slice(0, 2);
export const fiveCodeQualityFindings = multipleFindingsArrCodeQualityScale.slice(0, 5);
export const threeCodeQualityFindings = multipleFindingsArrCodeQualityScale.slice(0, 3);
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 8cf376b13e3..be3b30e8e7a 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -631,7 +631,7 @@ describe('DiffsStoreActions', () => {
describe('prefetchFileNeighbors', () => {
it('dispatches two requests to prefetch the next/previous files', () => {
- testAction(
+ return testAction(
diffActions.prefetchFileNeighbors,
{},
{
@@ -1327,8 +1327,13 @@ describe('DiffsStoreActions', () => {
await waitForPromises();
expect(dispatch).toHaveBeenCalledWith('fetchFileByFile');
- expect(dispatch).toHaveBeenCalledWith('scrollToFile', file);
- expect(dispatch).toHaveBeenCalledTimes(2);
+ expect(commonUtils.historyPushState).toHaveBeenCalledWith(new URL(`${TEST_HOST}/#test`), {
+ skipScrolling: true,
+ });
+ expect(commonUtils.scrollToElement).toHaveBeenCalledWith('.diff-files-holder', {
+ duration: 0,
+ });
+ expect(dispatch).toHaveBeenCalledTimes(1);
});
it('shows an alert when there was an error fetching the file', async () => {
@@ -2057,11 +2062,48 @@ describe('DiffsStoreActions', () => {
describe('toggleFileCommentForm', () => {
it('commits TOGGLE_FILE_COMMENT_FORM', () => {
+ const file = getDiffFileMock();
return testAction(
diffActions.toggleFileCommentForm,
- 'path',
- {},
- [{ type: types.TOGGLE_FILE_COMMENT_FORM, payload: 'path' }],
+ file.file_path,
+ {
+ diffFiles: [file],
+ },
+ [
+ { type: types.TOGGLE_FILE_COMMENT_FORM, payload: file.file_path },
+ {
+ type: types.SET_FILE_COLLAPSED,
+ payload: { filePath: file.file_path, collapsed: false },
+ },
+ ],
+ [],
+ );
+ });
+
+ it('always opens if file is collapsed', () => {
+ const file = {
+ ...getDiffFileMock(),
+ viewer: {
+ ...getDiffFileMock().viewer,
+ manuallyCollapsed: true,
+ },
+ };
+ return testAction(
+ diffActions.toggleFileCommentForm,
+ file.file_path,
+ {
+ diffFiles: [file],
+ },
+ [
+ {
+ type: types.SET_FILE_COMMENT_FORM,
+ payload: { filePath: file.file_path, expanded: true },
+ },
+ {
+ type: types.SET_FILE_COLLAPSED,
+ payload: { filePath: file.file_path, collapsed: false },
+ },
+ ],
[],
);
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index fdcf7c3eeab..a5be41aa69f 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -1045,6 +1045,17 @@ describe('DiffsStoreMutations', () => {
});
});
+ describe('SET_FILE_COMMENT_FORM', () => {
+ it('toggles diff files hasCommentForm', () => {
+ const state = { diffFiles: [{ file_path: 'path', hasCommentForm: false }] };
+ const expanded = true;
+
+ mutations[types.SET_FILE_COMMENT_FORM](state, { filePath: 'path', expanded });
+
+ expect(state.diffFiles[0].hasCommentForm).toEqual(expanded);
+ });
+ });
+
describe('ADD_DRAFT_TO_FILE', () => {
it('adds draft to diff file', () => {
const state = { diffFiles: [{ file_path: 'path', drafts: [] }] };
diff --git a/spec/frontend/dropzone_input_spec.js b/spec/frontend/dropzone_input_spec.js
index ba4d838e44b..bde84d3b603 100644
--- a/spec/frontend/dropzone_input_spec.js
+++ b/spec/frontend/dropzone_input_spec.js
@@ -21,6 +21,10 @@ const TEMPLATE = `<form class="gfm-form" data-uploads-path="${TEST_UPLOAD_PATH}"
</form>`;
describe('dropzone_input', () => {
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
it('returns null when failed to initialize', () => {
const dropzone = dropzoneInput($('<form class="gfm-form"></form>'));
@@ -58,8 +62,6 @@ describe('dropzone_input', () => {
afterEach(() => {
form = null;
-
- resetHTMLFixture();
});
it('pastes Markdown tables', () => {
@@ -154,8 +156,6 @@ describe('dropzone_input', () => {
mock.teardown();
});
- beforeEach(() => {});
-
it.each`
responseType | responseBody
${'application/json'} | ${JSON.stringify({ message: TEST_ERROR_MESSAGE })}
@@ -174,4 +174,36 @@ describe('dropzone_input', () => {
});
});
});
+
+ describe('clickable element', () => {
+ let form;
+
+ beforeEach(() => {
+ jest.spyOn($.fn, 'dropzone');
+ setHTMLFixture(TEMPLATE);
+ form = $('form');
+ });
+
+ describe('if attach file button exists', () => {
+ let attachFileButton;
+
+ beforeEach(() => {
+ attachFileButton = document.createElement('button');
+ attachFileButton.dataset.buttonType = 'attach-file';
+ document.body.querySelector('form').appendChild(attachFileButton);
+ });
+
+ it('passes attach file button as `clickable` to dropzone', () => {
+ dropzoneInput(form);
+ expect($.fn.dropzone.mock.calls[0][0]).toMatchObject({ clickable: attachFileButton });
+ });
+ });
+
+ describe('if attach file button does not exist', () => {
+ it('passes attach file button as `clickable`, if it exists', () => {
+ dropzoneInput(form);
+ expect($.fn.dropzone.mock.calls[0][0]).toMatchObject({ clickable: true });
+ });
+ });
+ });
});
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index 0f380f13679..7986509074e 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -23,6 +23,7 @@ import RetryUnknownWhenJson from './json_tests/negative_tests/retry_unknown_when
// YAML POSITIVE TEST
import ArtifactsYaml from './yaml_tests/positive_tests/artifacts.yml';
+import ImageYaml from './yaml_tests/positive_tests/image.yml';
import CacheYaml from './yaml_tests/positive_tests/cache.yml';
import FilterYaml from './yaml_tests/positive_tests/filter.yml';
import IncludeYaml from './yaml_tests/positive_tests/include.yml';
@@ -37,9 +38,12 @@ import SecretsYaml from './yaml_tests/positive_tests/secrets.yml';
import ServicesYaml from './yaml_tests/positive_tests/services.yml';
import NeedsParallelMatrixYaml from './yaml_tests/positive_tests/needs_parallel_matrix.yml';
import ScriptYaml from './yaml_tests/positive_tests/script.yml';
+import AutoCancelPipelineOnJobFailureAllYaml from './yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml';
+import AutoCancelPipelineOnJobFailureNoneYaml from './yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml';
// YAML NEGATIVE TEST
import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
+import ImageNegativeYaml from './yaml_tests/negative_tests/image.yml';
import CacheKeyNeative from './yaml_tests/negative_tests/cache.yml';
import IncludeNegativeYaml from './yaml_tests/negative_tests/include.yml';
import JobWhenNegativeYaml from './yaml_tests/negative_tests/job_when.yml';
@@ -62,6 +66,7 @@ import NeedsParallelMatrixNumericYaml from './yaml_tests/negative_tests/needs/pa
import NeedsParallelMatrixWrongParallelValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml';
import NeedsParallelMatrixWrongMatrixValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml';
import ScriptNegativeYaml from './yaml_tests/negative_tests/script.yml';
+import AutoCancelPipelineNegativeYaml from './yaml_tests/negative_tests/auto_cancel_pipeline.yml';
const ajv = new Ajv({
strictTypes: false,
@@ -90,6 +95,7 @@ describe('positive tests', () => {
// YAML
ArtifactsYaml,
+ ImageYaml,
CacheYaml,
FilterYaml,
IncludeYaml,
@@ -104,6 +110,8 @@ describe('positive tests', () => {
SecretsYaml,
NeedsParallelMatrixYaml,
ScriptYaml,
+ AutoCancelPipelineOnJobFailureAllYaml,
+ AutoCancelPipelineOnJobFailureNoneYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
@@ -126,6 +134,7 @@ describe('negative tests', () => {
// YAML
ArtifactsNegativeYaml,
+ ImageNegativeYaml,
CacheKeyNeative,
HooksNegative,
IdTokensNegativeYaml,
@@ -148,6 +157,7 @@ describe('negative tests', () => {
NeedsParallelMatrixWrongParallelValueYaml,
NeedsParallelMatrixWrongMatrixValueYaml,
ScriptNegativeYaml,
+ AutoCancelPipelineNegativeYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml
new file mode 100644
index 00000000000..0ba3e5632e3
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml
@@ -0,0 +1,4 @@
+# invalid workflow:auto-cancel:on-job-failure
+workflow:
+ auto_cancel:
+ on_job_failure: unexpected_value
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml
new file mode 100644
index 00000000000..ad37cd6c3ba
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml
@@ -0,0 +1,38 @@
+empty_image:
+ image:
+
+multi_image_array:
+ image:
+ - alpine:latest
+ - ubuntu:latest
+
+image_without_name:
+ image:
+ entrypoint: ["/bin/sh", "-c"]
+
+image_with_invalid_entrypoint:
+ image:
+ name: my-postgres:11.7
+ entrypoint: "/usr/local/bin/db-postgres" # must be array
+
+image_with_empty_pull_policy:
+ image:
+ name: postgres:11.6
+ pull_policy: []
+
+invalid_image_platform:
+ image:
+ name: alpine:latest
+ docker:
+ platform: ["arm64"] # The expected value is a string, not an array
+
+invalid_image_executor_opts:
+ image:
+ name: alpine:latest
+ docker:
+ unknown_key: test
+
+image_with_empty_executor_opts:
+ image:
+ name: alpine:latest
+ docker:
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml
index 6761a603a0a..e14ac9ca86e 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml
@@ -36,3 +36,17 @@ empty_pull_policy:
services:
- name: postgres:11.6
pull_policy: []
+
+invalid_service_executor_opts:
+ script: echo "Specifying platform."
+ services:
+ - name: mysql:5.7
+ docker:
+ unknown_key: test
+
+invalid_service_platform:
+ script: echo "Specifying platform."
+ services:
+ - name: mysql:5.7
+ docker:
+ platform: ["arm64"] # The expected value is a string, not an array
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml
new file mode 100644
index 00000000000..bf84ff16f42
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml
@@ -0,0 +1,4 @@
+# valid workflow:auto-cancel:on-job-failure
+workflow:
+ auto_cancel:
+ on_job_failure: all
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml
new file mode 100644
index 00000000000..b99eb50e962
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml
@@ -0,0 +1,4 @@
+# valid workflow:auto-cancel:on-job-failure
+workflow:
+ auto_cancel:
+ on_job_failure: none
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml
new file mode 100644
index 00000000000..4c2559d0800
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml
@@ -0,0 +1,41 @@
+valid_image:
+ image: alpine:latest
+
+valid_image_basic:
+ image:
+ name: alpine:latest
+
+valid_image_with_entrypoint:
+ image:
+ name: alpine:latest
+ entrypoint:
+ - /bin/sh
+ - -c
+
+valid_image_with_pull_policy:
+ image:
+ name: alpine:latest
+ pull_policy: always
+
+valid_image_with_pull_policies:
+ image:
+ name: alpine:latest
+ pull_policy:
+ - always
+ - if-not-present
+
+valid_image_with_docker:
+ image:
+ name: alpine:latest
+ docker:
+ platform: linux/amd64
+
+valid_image_full:
+ image:
+ name: alpine:latest
+ entrypoint:
+ - /bin/sh
+ - -c
+ docker:
+ platform: linux/amd64
+ pull_policy: if-not-present
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml
index 8a0f59d1dfd..1d19ee52cc3 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml
@@ -29,3 +29,10 @@ pull_policy_array:
services:
- name: postgres:11.6
pull_policy: [always, if-not-present]
+
+services_platform_string:
+ script: echo "Specifying platform."
+ services:
+ - name: mysql:5.7
+ docker:
+ platform: arm64
diff --git a/spec/frontend/editor/source_editor_security_policy_schema_ext_spec.js b/spec/frontend/editor/source_editor_security_policy_schema_ext_spec.js
new file mode 100644
index 00000000000..96c876b27c9
--- /dev/null
+++ b/spec/frontend/editor/source_editor_security_policy_schema_ext_spec.js
@@ -0,0 +1,181 @@
+import MockAdapter from 'axios-mock-adapter';
+import { registerSchema } from '~/ide/utils';
+import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import { TEST_HOST } from 'helpers/test_constants';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import {
+ getSecurityPolicyListUrl,
+ getSecurityPolicySchemaUrl,
+ getSinglePolicySchema,
+ SecurityPolicySchemaExtension,
+} from '~/editor/extensions/source_editor_security_policy_schema_ext';
+import SourceEditor from '~/editor/source_editor';
+
+jest.mock('~/ide/utils');
+
+const mockNamespacePath = 'mock-namespace';
+
+const mockSchema = {
+ $id: 1,
+ title: 'mockSchema',
+ description: 'mockDescriptions',
+ type: 'Object',
+ properties: {
+ scan_execution_policy: { items: { properties: { foo: 'bar' } } },
+ scan_result_policy: { items: { properties: { fizz: 'buzz' } } },
+ },
+};
+
+const createMockOutput = (policyType) => ({
+ $id: mockSchema.$id,
+ title: mockSchema.title,
+ description: mockSchema.description,
+ type: mockSchema.type,
+ properties: {
+ type: {
+ type: 'string',
+ description: 'Specifies the type of policy to be enforced.',
+ enum: policyType,
+ },
+ ...mockSchema.properties[policyType].items.properties,
+ },
+});
+
+describe('getSecurityPolicyListUrl', () => {
+ it.each`
+ input | output
+ ${{ namespacePath: '' }} | ${`${TEST_HOST}/groups/-/security/policies`}
+ ${{ namespacePath: 'test', namespaceType: 'group' }} | ${`${TEST_HOST}/groups/test/-/security/policies`}
+ ${{ namespacePath: '', namespaceType: 'project' }} | ${`${TEST_HOST}/-/security/policies`}
+ ${{ namespacePath: 'test', namespaceType: 'project' }} | ${`${TEST_HOST}/test/-/security/policies`}
+ ${{ namespacePath: undefined, namespaceType: 'project' }} | ${`${TEST_HOST}/-/security/policies`}
+ ${{ namespacePath: undefined, namespaceType: 'group' }} | ${`${TEST_HOST}/groups/-/security/policies`}
+ ${{ namespacePath: null, namespaceType: 'project' }} | ${`${TEST_HOST}/-/security/policies`}
+ ${{ namespacePath: null, namespaceType: 'group' }} | ${`${TEST_HOST}/groups/-/security/policies`}
+ `('returns `$output` when passed `$input`', ({ input, output }) => {
+ expect(getSecurityPolicyListUrl(input)).toBe(output);
+ });
+});
+
+describe('getSecurityPolicySchemaUrl', () => {
+ it.each`
+ namespacePath | namespaceType | output
+ ${'test'} | ${'project'} | ${`${TEST_HOST}/test/-/security/policies/schema`}
+ ${'test'} | ${'group'} | ${`${TEST_HOST}/groups/test/-/security/policies/schema`}
+ `(
+ 'returns $output when passed $namespacePath and $namespaceType',
+ ({ namespacePath, namespaceType, output }) => {
+ expect(getSecurityPolicySchemaUrl({ namespacePath, namespaceType })).toBe(output);
+ },
+ );
+});
+
+describe('getSinglePolicySchema', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it.each`
+ policyType
+ ${'scan_execution_policy'}
+ ${'scan_result_policy'}
+ `('returns the appropriate schema on request success for $policyType', async ({ policyType }) => {
+ mock.onGet().reply(HTTP_STATUS_OK, mockSchema);
+
+ await expect(
+ getSinglePolicySchema({
+ namespacePath: mockNamespacePath,
+ namespaceType: 'project',
+ policyType,
+ }),
+ ).resolves.toStrictEqual(createMockOutput(policyType));
+ });
+
+ it('returns an empty schema on request failure', async () => {
+ await expect(
+ getSinglePolicySchema({
+ namespacePath: mockNamespacePath,
+ namespaceType: 'project',
+ policyType: 'scan_execution_policy',
+ }),
+ ).resolves.toStrictEqual({});
+ });
+
+ it('returns an empty schema on non-existing policy type', async () => {
+ await expect(
+ getSinglePolicySchema({
+ namespacePath: mockNamespacePath,
+ namespaceType: 'project',
+ policyType: 'non_existent_policy',
+ }),
+ ).resolves.toStrictEqual({});
+ });
+});
+
+describe('SecurityPolicySchemaExtension', () => {
+ let mock;
+ let editor;
+ let instance;
+ let editorEl;
+
+ const createMockEditor = ({ blobPath = '.gitlab/security-policies/policy.yml' } = {}) => {
+ setHTMLFixture('<div id="editor"></div>');
+ editorEl = document.getElementById('editor');
+ editor = new SourceEditor();
+ instance = editor.createInstance({ el: editorEl, blobPath, blobContent: '' });
+ instance.use({ definition: SecurityPolicySchemaExtension });
+ };
+
+ beforeEach(() => {
+ createMockEditor();
+ mock = new MockAdapter(axios);
+ mock.onGet().reply(HTTP_STATUS_OK, mockSchema);
+ });
+
+ afterEach(() => {
+ instance.dispose();
+ editorEl.remove();
+ resetHTMLFixture();
+ mock.restore();
+ });
+
+ describe('registerSecurityPolicyEditorSchema', () => {
+ describe('register validations options with monaco for yaml language', () => {
+ it('registers the schema', async () => {
+ const policyType = 'scan_execution_policy';
+ await instance.registerSecurityPolicyEditorSchema({
+ namespacePath: mockNamespacePath,
+ namespaceType: 'project',
+ policyType,
+ });
+
+ expect(registerSchema).toHaveBeenCalledTimes(1);
+ expect(registerSchema).toHaveBeenCalledWith({
+ uri: `${TEST_HOST}/${mockNamespacePath}/-/security/policies/schema`,
+ schema: createMockOutput(policyType),
+ fileMatch: ['policy.yml'],
+ });
+ });
+ });
+ });
+
+ describe('registerSecurityPolicySchema', () => {
+ describe('register validations options with monaco for yaml language', () => {
+ it('registers the schema', async () => {
+ await instance.registerSecurityPolicySchema(mockNamespacePath);
+ expect(registerSchema).toHaveBeenCalledTimes(1);
+ expect(registerSchema).toHaveBeenCalledWith({
+ uri: `${TEST_HOST}/${mockNamespacePath}/-/security/policies/schema`,
+ fileMatch: ['policy.yml'],
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/emoji/components/emoji_group_spec.js b/spec/frontend/emoji/components/emoji_group_spec.js
index 75397ce25ff..a2a46bedd7b 100644
--- a/spec/frontend/emoji/components/emoji_group_spec.js
+++ b/spec/frontend/emoji/components/emoji_group_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
+import { GlButton } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import EmojiGroup from '~/emoji/components/emoji_group.vue';
@@ -10,6 +11,9 @@ function factory(propsData = {}) {
wrapper = extendedWrapper(
shallowMount(EmojiGroup, {
propsData,
+ stubs: {
+ GlButton,
+ },
}),
);
}
@@ -19,7 +23,6 @@ describe('Emoji group component', () => {
factory({
emojis: [],
renderGroup: false,
- clickEmoji: jest.fn(),
});
expect(wrapper.findByTestId('emoji-button').exists()).toBe(false);
@@ -29,24 +32,20 @@ describe('Emoji group component', () => {
factory({
emojis: ['thumbsup', 'thumbsdown'],
renderGroup: true,
- clickEmoji: jest.fn(),
});
expect(wrapper.findAllByTestId('emoji-button').exists()).toBe(true);
expect(wrapper.findAllByTestId('emoji-button').length).toBe(2);
});
- it('calls clickEmoji', () => {
- const clickEmoji = jest.fn();
-
+ it('emits emoji-click', () => {
factory({
emojis: ['thumbsup', 'thumbsdown'],
renderGroup: true,
- clickEmoji,
});
- wrapper.findByTestId('emoji-button').trigger('click');
+ wrapper.findComponent(GlButton).vm.$emit('click');
- expect(clickEmoji).toHaveBeenCalledWith('thumbsup');
+ expect(wrapper.emitted('emoji-click')).toStrictEqual([['thumbsup']]);
});
});
diff --git a/spec/frontend/emoji/index_spec.js b/spec/frontend/emoji/index_spec.js
index 7d6a45fbf30..577b7bc726e 100644
--- a/spec/frontend/emoji/index_spec.js
+++ b/spec/frontend/emoji/index_spec.js
@@ -925,7 +925,7 @@ describe('emoji', () => {
window.gon = {};
});
- it('returns empty object', async () => {
+ it('returns empty emoji data', async () => {
const result = await loadCustomEmojiWithNames();
expect(result).toEqual({ emojis: {}, names: [] });
@@ -937,7 +937,28 @@ describe('emoji', () => {
delete document.body.dataset.groupFullPath;
});
- it('returns empty object', async () => {
+ it('returns empty emoji data', async () => {
+ const result = await loadCustomEmojiWithNames();
+
+ expect(result).toEqual({ emojis: {}, names: [] });
+ });
+ });
+
+ describe('when GraphQL request returns null data', () => {
+ beforeEach(() => {
+ mockClient = createMockClient([
+ [
+ customEmojiQuery,
+ jest.fn().mockResolvedValue({
+ data: {
+ group: null,
+ },
+ }),
+ ],
+ ]);
+ });
+
+ it('returns empty emoji data', async () => {
const result = await loadCustomEmojiWithNames();
expect(result).toEqual({ emojis: {}, names: [] });
@@ -945,7 +966,7 @@ describe('emoji', () => {
});
describe('when in a group with flag enabled', () => {
- it('returns empty object', async () => {
+ it('returns emoji data', async () => {
const result = await loadCustomEmojiWithNames();
expect(result).toEqual({
diff --git a/spec/frontend/environments/deploy_board_wrapper_spec.js b/spec/frontend/environments/deploy_board_wrapper_spec.js
index 49eed68fa11..fec5032e31b 100644
--- a/spec/frontend/environments/deploy_board_wrapper_spec.js
+++ b/spec/frontend/environments/deploy_board_wrapper_spec.js
@@ -56,7 +56,7 @@ describe('~/environments/components/deploy_board_wrapper.vue', () => {
});
it('is collapsed by default', () => {
- expect(collapse.attributes('visible')).toBeUndefined();
+ expect(collapse.props('visible')).toBe(false);
expect(icon.props('name')).toBe('chevron-lg-right');
});
@@ -64,7 +64,7 @@ describe('~/environments/components/deploy_board_wrapper.vue', () => {
const button = await expandCollapsedSection();
expect(button.attributes('aria-label')).toBe(__('Collapse'));
- expect(collapse.attributes('visible')).toBe('visible');
+ expect(collapse.props('visible')).toBe(true);
expect(icon.props('name')).toBe('chevron-lg-down');
const deployBoard = findDeployBoard();
diff --git a/spec/frontend/environments/deployment_spec.js b/spec/frontend/environments/deployment_spec.js
index 4cbbb60b74c..bc0f1c58e7d 100644
--- a/spec/frontend/environments/deployment_spec.js
+++ b/spec/frontend/environments/deployment_spec.js
@@ -4,7 +4,7 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { useFakeDate } from 'helpers/fake_date';
import { stubTransition } from 'helpers/stub_transition';
-import { formatDate } from '~/lib/utils/datetime_utility';
+import { localeDateFormat } from '~/lib/utils/datetime_utility';
import { __, s__ } from '~/locale';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import Deployment from '~/environments/components/deployment.vue';
@@ -158,7 +158,9 @@ describe('~/environments/components/deployment.vue', () => {
describe('is present', () => {
it('shows the timestamp the deployment was deployed at', () => {
wrapper = createWrapper();
- const date = wrapper.findByTitle(formatDate(deployment.createdAt));
+ const date = wrapper.findByTitle(
+ localeDateFormat.asDateTimeFull.format(deployment.createdAt),
+ );
expect(date.text()).toBe('1 day ago');
});
@@ -166,7 +168,9 @@ describe('~/environments/components/deployment.vue', () => {
describe('is not present', () => {
it('does not show the timestamp', () => {
wrapper = createWrapper({ propsData: { deployment: { ...deployment, createdAt: null } } });
- const date = wrapper.findByTitle(formatDate(deployment.createdAt));
+ const date = wrapper.findByTitle(
+ localeDateFormat.asDateTimeFull.format(deployment.createdAt),
+ );
expect(date.exists()).toBe(false);
});
diff --git a/spec/frontend/environments/environment_flux_resource_selector_spec.js b/spec/frontend/environments/environment_flux_resource_selector_spec.js
index ba3375c731f..8dab8fdd96a 100644
--- a/spec/frontend/environments/environment_flux_resource_selector_spec.js
+++ b/spec/frontend/environments/environment_flux_resource_selector_spec.js
@@ -25,7 +25,7 @@ const DEFAULT_PROPS = {
fluxResourcePath: '',
};
-describe('~/environments/components/form.vue', () => {
+describe('~/environments/components/flux_resource_selector.vue', () => {
let wrapper;
const kustomizationItem = {
diff --git a/spec/frontend/environments/environment_folder_spec.js b/spec/frontend/environments/environment_folder_spec.js
index 1973613897d..e21e0f280ec 100644
--- a/spec/frontend/environments/environment_folder_spec.js
+++ b/spec/frontend/environments/environment_folder_spec.js
@@ -79,7 +79,7 @@ describe('~/environments/components/environments_folder.vue', () => {
it('is collapsed by default', () => {
const link = findLink();
- expect(collapse.attributes('visible')).toBeUndefined();
+ expect(collapse.props('visible')).toBe(false);
const iconNames = icons.wrappers.map((i) => i.props('name')).slice(0, 2);
expect(iconNames).toEqual(['chevron-lg-right', 'folder-o']);
expect(folderName.classes('gl-font-weight-bold')).toBe(false);
@@ -96,7 +96,7 @@ describe('~/environments/components/environments_folder.vue', () => {
const link = findLink();
expect(button.attributes('aria-label')).toBe(__('Collapse'));
- expect(collapse.attributes('visible')).toBe('visible');
+ expect(collapse.props('visible')).toBe(true);
const iconNames = icons.wrappers.map((i) => i.props('name')).slice(0, 2);
expect(iconNames).toEqual(['chevron-lg-down', 'folder-open']);
expect(folderName.classes('gl-font-weight-bold')).toBe(true);
diff --git a/spec/frontend/environments/environment_form_spec.js b/spec/frontend/environments/environment_form_spec.js
index 478ac8d6e0e..f3dfc7a72f2 100644
--- a/spec/frontend/environments/environment_form_spec.js
+++ b/spec/frontend/environments/environment_form_spec.js
@@ -1,11 +1,12 @@
-import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
-import Vue from 'vue';
+import { GlLoadingIcon } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import EnvironmentForm from '~/environments/components/environment_form.vue';
import getUserAuthorizedAgents from '~/environments/graphql/queries/user_authorized_agents.query.graphql';
import EnvironmentFluxResourceSelector from '~/environments/components/environment_flux_resource_selector.vue';
+import EnvironmentNamespaceSelector from '~/environments/components/environment_namespace_selector.vue';
import createMockApollo from '../__helpers__/mock_apollo_helper';
import { mockKasTunnelUrl } from './mock_data';
@@ -36,13 +37,16 @@ const configuration = {
credentials: 'include',
};
+const environmentWithAgentAndNamespace = {
+ ...DEFAULT_PROPS.environment,
+ clusterAgent: { id: '12', name: 'agent-2' },
+ clusterAgentId: '2',
+ kubernetesNamespace: 'agent',
+};
+
describe('~/environments/components/form.vue', () => {
let wrapper;
- const getNamespacesQueryResult = jest
- .fn()
- .mockReturnValue([{ metadata: { name: 'default' } }, { metadata: { name: 'agent' } }]);
-
const createWrapper = (propsData = {}, options = {}) =>
mountExtended(EnvironmentForm, {
provide: PROVIDE,
@@ -53,7 +57,7 @@ describe('~/environments/components/form.vue', () => {
},
});
- const createWrapperWithApollo = ({ propsData = {}, queryResult = null } = {}) => {
+ const createWrapperWithApollo = (propsData = {}) => {
Vue.use(VueApollo);
const requestHandlers = [
@@ -70,12 +74,6 @@ describe('~/environments/components/form.vue', () => {
],
];
- const mockResolvers = {
- Query: {
- k8sNamespaces: queryResult || getNamespacesQueryResult,
- },
- };
-
return mountExtended(EnvironmentForm, {
provide: {
...PROVIDE,
@@ -84,13 +82,12 @@ describe('~/environments/components/form.vue', () => {
...DEFAULT_PROPS,
...propsData,
},
- apolloProvider: createMockApollo(requestHandlers, mockResolvers),
+ apolloProvider: createMockApollo(requestHandlers, []),
});
};
const findAgentSelector = () => wrapper.findByTestId('agent-selector');
- const findNamespaceSelector = () => wrapper.findByTestId('namespace-selector');
- const findAlert = () => wrapper.findComponent(GlAlert);
+ const findNamespaceSelector = () => wrapper.findComponent(EnvironmentNamespaceSelector);
const findFluxResourceSelector = () => wrapper.findComponent(EnvironmentFluxResourceSelector);
const selectAgent = async () => {
@@ -326,91 +323,15 @@ describe('~/environments/components/form.vue', () => {
expect(findNamespaceSelector().exists()).toBe(true);
});
- it('requests the kubernetes namespaces with the correct configuration', async () => {
- await waitForPromises();
-
- expect(getNamespacesQueryResult).toHaveBeenCalledWith(
- {},
- { configuration },
- expect.anything(),
- expect.anything(),
- );
- });
-
- it('sets the loading prop while fetching the list', async () => {
- expect(findNamespaceSelector().props('loading')).toBe(true);
-
- await waitForPromises();
-
- expect(findNamespaceSelector().props('loading')).toBe(false);
- });
-
- it('renders a list of available namespaces', async () => {
- await waitForPromises();
-
- expect(findNamespaceSelector().props('items')).toEqual([
- { text: 'default', value: 'default' },
- { text: 'agent', value: 'agent' },
- ]);
- });
-
- it('filters the namespaces list on user search', async () => {
- await waitForPromises();
- await findNamespaceSelector().vm.$emit('search', 'default');
-
- expect(findNamespaceSelector().props('items')).toEqual([
- { value: 'default', text: 'default' },
- ]);
- });
-
- it('updates namespace selector field with the name of selected namespace', async () => {
- await waitForPromises();
- await findNamespaceSelector().vm.$emit('select', 'agent');
-
- expect(findNamespaceSelector().props('toggleText')).toBe('agent');
- });
-
it('emits changes to the kubernetesNamespace', async () => {
await waitForPromises();
- await findNamespaceSelector().vm.$emit('select', 'agent');
+ findNamespaceSelector().vm.$emit('change', 'agent');
+ await nextTick();
expect(wrapper.emitted('change')[1]).toEqual([
{ name: '', externalUrl: '', kubernetesNamespace: 'agent', fluxResourcePath: null },
]);
});
-
- it('clears namespace selector when another agent was selected', async () => {
- await waitForPromises();
- await findNamespaceSelector().vm.$emit('select', 'agent');
-
- expect(findNamespaceSelector().props('toggleText')).toBe('agent');
-
- await findAgentSelector().vm.$emit('select', '1');
- expect(findNamespaceSelector().props('toggleText')).toBe(
- EnvironmentForm.i18n.namespaceHelpText,
- );
- });
- });
-
- describe('when cannot connect to the cluster', () => {
- const error = new Error('Error from the cluster_client API');
-
- beforeEach(async () => {
- wrapper = createWrapperWithApollo({
- queryResult: jest.fn().mockRejectedValueOnce(error),
- });
-
- await selectAgent();
- await waitForPromises();
- });
-
- it("doesn't render the namespace selector", () => {
- expect(findNamespaceSelector().exists()).toBe(false);
- });
-
- it('renders an alert', () => {
- expect(findAlert().text()).toBe('Error from the cluster_client API');
- });
});
});
@@ -431,16 +352,6 @@ describe('~/environments/components/form.vue', () => {
it("doesn't render flux resource selector", () => {
expect(findFluxResourceSelector().exists()).toBe(false);
});
-
- it('renders the flux resource selector when the namespace is selected', async () => {
- await findNamespaceSelector().vm.$emit('select', 'agent');
-
- expect(findFluxResourceSelector().props()).toEqual({
- namespace: 'agent',
- fluxResourcePath: '',
- configuration,
- });
- });
});
});
@@ -451,9 +362,7 @@ describe('~/environments/components/form.vue', () => {
clusterAgentId: '1',
};
beforeEach(() => {
- wrapper = createWrapperWithApollo({
- propsData: { environment: environmentWithAgent },
- });
+ wrapper = createWrapperWithApollo({ environment: environmentWithAgent });
});
it('updates agent selector field with the name of the associated agent', () => {
@@ -468,45 +377,46 @@ describe('~/environments/components/form.vue', () => {
it('renders a list of available namespaces', async () => {
await waitForPromises();
- expect(findNamespaceSelector().props('items')).toEqual([
- { text: 'default', value: 'default' },
- { text: 'agent', value: 'agent' },
- ]);
+ expect(findNamespaceSelector().exists()).toBe(true);
});
});
describe('when environment has an associated kubernetes namespace', () => {
- const environmentWithAgentAndNamespace = {
- ...DEFAULT_PROPS.environment,
- clusterAgent: { id: '1', name: 'agent-1' },
- clusterAgentId: '1',
- kubernetesNamespace: 'default',
- };
beforeEach(() => {
- wrapper = createWrapperWithApollo({
- propsData: { environment: environmentWithAgentAndNamespace },
- });
+ wrapper = createWrapperWithApollo({ environment: environmentWithAgentAndNamespace });
});
it('updates namespace selector with the name of the associated namespace', async () => {
await waitForPromises();
- expect(findNamespaceSelector().props('toggleText')).toBe('default');
+ expect(findNamespaceSelector().props('namespace')).toBe('agent');
+ });
+
+ it('clears namespace selector when another agent was selected', async () => {
+ expect(findNamespaceSelector().props('namespace')).toBe('agent');
+
+ findAgentSelector().vm.$emit('select', '1');
+ await nextTick();
+
+ expect(findNamespaceSelector().props('namespace')).toBe(null);
+ });
+
+ it('renders the flux resource selector when the namespace is selected', () => {
+ expect(findFluxResourceSelector().props()).toEqual({
+ namespace: 'agent',
+ fluxResourcePath: '',
+ configuration,
+ });
});
});
describe('when environment has an associated flux resource', () => {
const fluxResourcePath = 'path/to/flux/resource';
- const environmentWithAgentAndNamespace = {
- ...DEFAULT_PROPS.environment,
- clusterAgent: { id: '1', name: 'agent-1' },
- clusterAgentId: '1',
- kubernetesNamespace: 'default',
+ const environmentWithFluxResource = {
+ ...environmentWithAgentAndNamespace,
fluxResourcePath,
};
beforeEach(() => {
- wrapper = createWrapperWithApollo({
- propsData: { environment: environmentWithAgentAndNamespace },
- });
+ wrapper = createWrapperWithApollo({ environment: environmentWithFluxResource });
});
it('provides flux resource path to the flux resource selector component', () => {
diff --git a/spec/frontend/environments/environment_namespace_selector_spec.js b/spec/frontend/environments/environment_namespace_selector_spec.js
new file mode 100644
index 00000000000..53e4f807751
--- /dev/null
+++ b/spec/frontend/environments/environment_namespace_selector_spec.js
@@ -0,0 +1,217 @@
+import { GlAlert, GlCollapsibleListbox, GlButton } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import EnvironmentNamespaceSelector from '~/environments/components/environment_namespace_selector.vue';
+import { stubComponent } from 'helpers/stub_component';
+import createMockApollo from '../__helpers__/mock_apollo_helper';
+import { mockKasTunnelUrl } from './mock_data';
+
+const configuration = {
+ basePath: mockKasTunnelUrl.replace(/\/$/, ''),
+ headers: {
+ 'GitLab-Agent-Id': 2,
+ 'Content-Type': 'application/json',
+ Accept: 'application/json',
+ },
+ credentials: 'include',
+};
+
+const DEFAULT_PROPS = {
+ namespace: '',
+ configuration,
+};
+
+describe('~/environments/components/namespace_selector.vue', () => {
+ let wrapper;
+
+ const getNamespacesQueryResult = jest
+ .fn()
+ .mockReturnValue([
+ { metadata: { name: 'default' } },
+ { metadata: { name: 'agent' } },
+ { metadata: { name: 'test-agent' } },
+ ]);
+
+ const closeMock = jest.fn();
+
+ const createWrapper = ({ propsData = {}, queryResult = null } = {}) => {
+ Vue.use(VueApollo);
+
+ const mockResolvers = {
+ Query: {
+ k8sNamespaces: queryResult || getNamespacesQueryResult,
+ },
+ };
+
+ return shallowMount(EnvironmentNamespaceSelector, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...propsData,
+ },
+ stubs: {
+ GlCollapsibleListbox: stubComponent(GlCollapsibleListbox, {
+ template: `<div><slot name="footer"></slot></div>`,
+ methods: {
+ close: closeMock,
+ },
+ }),
+ },
+ apolloProvider: createMockApollo([], mockResolvers),
+ });
+ };
+
+ const findNamespaceSelector = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findSelectButton = () => wrapper.findComponent(GlButton);
+
+ const searchNamespace = async (searchTerm = 'test') => {
+ findNamespaceSelector().vm.$emit('search', searchTerm);
+ await nextTick();
+ };
+
+ describe('default', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('renders namespace selector', () => {
+ expect(findNamespaceSelector().exists()).toBe(true);
+ });
+
+ it('requests the namespaces', async () => {
+ await waitForPromises();
+
+ expect(getNamespacesQueryResult).toHaveBeenCalled();
+ });
+
+ it('sets the loading prop while fetching the list', async () => {
+ expect(findNamespaceSelector().props('loading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findNamespaceSelector().props('loading')).toBe(false);
+ });
+
+ it('renders a list of available namespaces', async () => {
+ await waitForPromises();
+
+ expect(findNamespaceSelector().props('items')).toMatchObject([
+ {
+ text: 'default',
+ value: 'default',
+ },
+ {
+ text: 'agent',
+ value: 'agent',
+ },
+ {
+ text: 'test-agent',
+ value: 'test-agent',
+ },
+ ]);
+ });
+
+ it('filters the namespaces list on user search', async () => {
+ await waitForPromises();
+ await searchNamespace('agent');
+
+ expect(findNamespaceSelector().props('items')).toMatchObject([
+ {
+ text: 'agent',
+ value: 'agent',
+ },
+ {
+ text: 'test-agent',
+ value: 'test-agent',
+ },
+ ]);
+ });
+
+ it('emits changes to the namespace', () => {
+ findNamespaceSelector().vm.$emit('select', 'agent');
+
+ expect(wrapper.emitted('change')).toEqual([['agent']]);
+ });
+ });
+
+ describe('custom select button', () => {
+ beforeEach(async () => {
+ wrapper = createWrapper();
+ await waitForPromises();
+ });
+
+ it("doesn't render custom select button before searching", () => {
+ expect(findSelectButton().exists()).toBe(false);
+ });
+
+ it("doesn't render custom select button when the search is found in the namespaces list", async () => {
+ await searchNamespace('test-agent');
+ expect(findSelectButton().exists()).toBe(false);
+ });
+
+ it('renders custom select button when the namespace searched for is not found in the namespaces list', async () => {
+ await searchNamespace();
+ expect(findSelectButton().exists()).toBe(true);
+ });
+
+ it('emits custom filled namespace name to the `change` event', async () => {
+ await searchNamespace();
+ findSelectButton().vm.$emit('click');
+
+ expect(wrapper.emitted('change')).toEqual([['test']]);
+ });
+
+ it('closes the listbox after the custom value for the namespace was selected', async () => {
+ await searchNamespace();
+ findSelectButton().vm.$emit('click');
+
+ expect(closeMock).toHaveBeenCalled();
+ });
+ });
+
+ describe('when environment has an associated namespace', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({
+ propsData: { namespace: 'existing-namespace' },
+ });
+ });
+
+ it('updates namespace selector with the name of the associated namespace', () => {
+ expect(findNamespaceSelector().props('toggleText')).toBe('existing-namespace');
+ });
+ });
+
+ describe('on error', () => {
+ const error = new Error('Error from the cluster_client API');
+
+ beforeEach(async () => {
+ wrapper = createWrapper({
+ queryResult: jest.fn().mockRejectedValueOnce(error),
+ });
+ await waitForPromises();
+ });
+
+ it('renders an alert with the error text', () => {
+ expect(findAlert().text()).toContain(error.message);
+ });
+
+ it('renders an empty namespace selector', () => {
+ expect(findNamespaceSelector().props('items')).toMatchObject([]);
+ });
+
+ it('renders custom select button when the user performs search', async () => {
+ await searchNamespace();
+
+ expect(findSelectButton().exists()).toBe(true);
+ });
+
+ it('emits custom filled namespace name to the `change` event', async () => {
+ await searchNamespace();
+ findSelectButton().vm.$emit('click');
+
+ expect(wrapper.emitted('change')).toEqual([['test']]);
+ });
+ });
+});
diff --git a/spec/frontend/environments/folder/environments_folder_app_spec.js b/spec/frontend/environments/folder/environments_folder_app_spec.js
new file mode 100644
index 00000000000..0b76a74e3a0
--- /dev/null
+++ b/spec/frontend/environments/folder/environments_folder_app_spec.js
@@ -0,0 +1,131 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlSkeletonLoader, GlTab, GlPagination } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import EnvironmentsFolderAppComponent from '~/environments/folder/environments_folder_app.vue';
+import EnvironmentItem from '~/environments/components/new_environment_item.vue';
+import StopEnvironmentModal from '~/environments/components/stop_environment_modal.vue';
+import ConfirmRollbackModal from '~/environments/components/confirm_rollback_modal.vue';
+import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue';
+import CanaryUpdateModal from '~/environments/components/canary_update_modal.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import {
+ resolvedFolder,
+ resolvedEnvironment,
+ resolvedEnvironmentToDelete,
+ resolvedEnvironmentToRollback,
+} from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('EnvironmentsFolderAppComponent', () => {
+ let wrapper;
+ const mockFolderName = 'folders';
+
+ let environmentFolderMock;
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ folder: environmentFolderMock,
+ environmentToDelete: jest.fn().mockReturnValue(resolvedEnvironmentToDelete),
+ environmentToRollback: jest.fn().mockReturnValue(resolvedEnvironment),
+ environmentToChangeCanary: jest.fn().mockReturnValue(resolvedEnvironment),
+ environmentToStop: jest.fn().mockReturnValue(resolvedEnvironment),
+ weight: jest.fn().mockReturnValue(1),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(() => {
+ environmentFolderMock = jest.fn();
+ });
+
+ const emptyFolderData = {
+ environments: [],
+ activeCount: 0,
+ stoppedCount: 0,
+ __typename: 'LocalEnvironmentFolder',
+ };
+
+ const createWrapper = ({ folderData } = {}) => {
+ environmentFolderMock.mockReturnValue(folderData || emptyFolderData);
+
+ const apolloProvider = createApolloProvider();
+
+ wrapper = shallowMountExtended(EnvironmentsFolderAppComponent, {
+ apolloProvider,
+ propsData: {
+ folderName: mockFolderName,
+ folderPath: '/gitlab-org/test-project/-/environments/folder/dev',
+ scope: 'active',
+ page: 1,
+ },
+ });
+ };
+
+ const findHeader = () => wrapper.findByTestId('folder-name');
+ const findEnvironmentItems = () => wrapper.findAllComponents(EnvironmentItem);
+ const findSkeletonLoaders = () => wrapper.findAllComponents(GlSkeletonLoader);
+ const findTabs = () => wrapper.findAllComponents(GlTab);
+
+ it('should render a header with the folder name', () => {
+ createWrapper();
+
+ expect(findHeader().text()).toMatchInterpolatedText(`Environments / ${mockFolderName}`);
+ });
+
+ it('should show skeletons while loading', () => {
+ createWrapper();
+ expect(findSkeletonLoaders().length).toBe(3);
+ });
+
+ describe('when environments are loaded', () => {
+ beforeEach(async () => {
+ createWrapper({ folderData: resolvedFolder });
+ await waitForPromises();
+ });
+
+ it('should list environmnets in folder', () => {
+ const items = findEnvironmentItems();
+ expect(items.length).toBe(resolvedFolder.environments.length);
+ });
+
+ it('should render active and stopped tabs', () => {
+ const tabs = findTabs();
+ expect(tabs.length).toBe(2);
+ });
+
+ [
+ [StopEnvironmentModal, resolvedEnvironment],
+ [DeleteEnvironmentModal, resolvedEnvironmentToDelete],
+ [ConfirmRollbackModal, resolvedEnvironmentToRollback],
+ ].forEach(([Component, expectedEnvironment]) =>
+ it(`should render ${Component.name} component`, () => {
+ const modal = wrapper.findComponent(Component);
+
+ expect(modal.exists()).toBe(true);
+ expect(modal.props().environment).toEqual(expectedEnvironment);
+ expect(modal.props().graphql).toBe(true);
+ }),
+ );
+
+ it(`should render CanaryUpdateModal component`, () => {
+ const modal = wrapper.findComponent(CanaryUpdateModal);
+
+ expect(modal.exists()).toBe(true);
+ expect(modal.props().environment).toEqual(resolvedEnvironment);
+ expect(modal.props().weight).toBe(1);
+ });
+
+ it('should render pagination component', () => {
+ const pagination = wrapper.findComponent(GlPagination);
+
+ expect(pagination.props().perPage).toBe(20);
+ expect(pagination.props().totalItems).toBe(2);
+ });
+ });
+});
diff --git a/spec/frontend/environments/folder/environments_folder_view_spec.js b/spec/frontend/environments/folder/environments_folder_view_spec.js
index 6a40c68397b..34eef1e89ab 100644
--- a/spec/frontend/environments/folder/environments_folder_view_spec.js
+++ b/spec/frontend/environments/folder/environments_folder_view_spec.js
@@ -3,6 +3,7 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { removeBreakLine, removeWhitespace } from 'helpers/text_helper';
import EnvironmentTable from '~/environments/components/environments_table.vue';
+import ConfirmRollbackModal from '~/environments/components/confirm_rollback_modal.vue';
import EnvironmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
@@ -91,6 +92,10 @@ describe('Environments Folder View', () => {
).toContain('Environments / review');
});
+ it('should render the confirm rollback modal', () => {
+ expect(wrapper.findComponent(ConfirmRollbackModal).exists()).toBe(true);
+ });
+
describe('pagination', () => {
it('should render pagination', () => {
expect(wrapper.findComponent(GlPagination).exists()).toBe(true);
diff --git a/spec/frontend/environments/graphql/mock_data.js b/spec/frontend/environments/graphql/mock_data.js
index 7d354566761..efc63a80e89 100644
--- a/spec/frontend/environments/graphql/mock_data.js
+++ b/spec/frontend/environments/graphql/mock_data.js
@@ -886,6 +886,11 @@ const failedDeployment = {
],
},
};
+const pendingDeployment = {
+ status: {
+ conditions: [],
+ },
+};
const readyDaemonSet = {
status: { numberReady: 1, desiredNumberScheduled: 1, numberMisscheduled: 0 },
};
@@ -904,7 +909,7 @@ const suspendedCronJob = { spec: { suspend: 1 }, status: { active: 0, lastSchedu
const failedCronJob = { spec: { suspend: 0 }, status: { active: 2, lastScheduleTime: '' } };
export const k8sWorkloadsMock = {
- DeploymentList: [readyDeployment, failedDeployment],
+ DeploymentList: [readyDeployment, failedDeployment, pendingDeployment],
DaemonSetList: [readyDaemonSet, failedDaemonSet, failedDaemonSet],
StatefulSetList: [readySet, readySet, failedSet],
ReplicaSetList: [readySet, failedSet],
@@ -925,3 +930,153 @@ export const fluxKustomizationsMock = [
];
export const fluxResourcePathMock = 'path/to/flux/resource';
+
+export const resolvedEnvironmentToDelete = {
+ __typename: 'LocalEnvironment',
+ id: 41,
+ name: 'review/hello',
+ deletePath: '/api/v4/projects/8/environments/41',
+};
+
+export const resolvedEnvironmentToRollback = {
+ __typename: 'LocalEnvironment',
+ id: 41,
+ name: 'review/hello',
+ lastDeployment: {
+ id: 78,
+ iid: 24,
+ sha: 'f3ba6dd84f8f891373e9b869135622b954852db1',
+ ref: { name: 'main', refPath: '/h5bp/html5-boilerplate/-/tree/main' },
+ status: 'success',
+ createdAt: '2022-01-07T15:47:27.415Z',
+ deployedAt: '2022-01-07T15:47:32.450Z',
+ tierInYaml: 'staging',
+ tag: false,
+ isLast: true,
+ user: {
+ id: 1,
+ username: 'root',
+ name: 'Administrator',
+ state: 'active',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ webUrl: 'http://gck.test:3000/root',
+ showStatus: false,
+ path: '/root',
+ },
+ deployable: {
+ id: 1014,
+ name: 'deploy-prod',
+ started: '2022-01-07T15:47:31.037Z',
+ complete: true,
+ archived: false,
+ buildPath: '/h5bp/html5-boilerplate/-/jobs/1014',
+ retryPath: '/h5bp/html5-boilerplate/-/jobs/1014/retry',
+ playable: false,
+ scheduled: false,
+ createdAt: '2022-01-07T15:47:27.404Z',
+ updatedAt: '2022-01-07T15:47:32.341Z',
+ status: {
+ action: {
+ buttonTitle: 'Retry this job',
+ icon: 'retry',
+ method: 'post',
+ path: '/h5bp/html5-boilerplate/-/jobs/1014/retry',
+ title: 'Retry',
+ },
+ detailsPath: '/h5bp/html5-boilerplate/-/jobs/1014',
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ group: 'success',
+ hasDetails: true,
+ icon: 'status_success',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-29a8a37d8a61d1b6f68cf3484f9024e53cd6eb95e28eae3554f8011a1146bf27.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ label: 'passed',
+ text: 'passed',
+ tooltip: 'passed',
+ },
+ },
+ commit: {
+ id: 'f3ba6dd84f8f891373e9b869135622b954852db1',
+ shortId: 'f3ba6dd8',
+ createdAt: '2022-01-07T15:47:26.000+00:00',
+ parentIds: ['3213b6ac17afab99be37d5d38f38c6c8407387cc'],
+ title: 'Update .gitlab-ci.yml file',
+ message: 'Update .gitlab-ci.yml file',
+ authorName: 'Administrator',
+ authorEmail: 'admin@example.com',
+ authoredDate: '2022-01-07T15:47:26.000+00:00',
+ committerName: 'Administrator',
+ committerEmail: 'admin@example.com',
+ committedDate: '2022-01-07T15:47:26.000+00:00',
+ trailers: {},
+ webUrl:
+ 'http://gck.test:3000/h5bp/html5-boilerplate/-/commit/f3ba6dd84f8f891373e9b869135622b954852db1',
+ author: {
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 1,
+ name: 'Administrator',
+ path: '/root',
+ showStatus: false,
+ state: 'active',
+ username: 'root',
+ webUrl: 'http://gck.test:3000/root',
+ },
+ authorGravatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ commitUrl:
+ 'http://gck.test:3000/h5bp/html5-boilerplate/-/commit/f3ba6dd84f8f891373e9b869135622b954852db1',
+ commitPath: '/h5bp/html5-boilerplate/-/commit/f3ba6dd84f8f891373e9b869135622b954852db1',
+ },
+ manualActions: [
+ {
+ id: 1015,
+ name: 'deploy-staging',
+ started: null,
+ complete: false,
+ archived: false,
+ buildPath: '/h5bp/html5-boilerplate/-/jobs/1015',
+ playPath: '/h5bp/html5-boilerplate/-/jobs/1015/play',
+ playable: true,
+ scheduled: false,
+ createdAt: '2022-01-07T15:47:27.422Z',
+ updatedAt: '2022-01-07T15:47:28.557Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ hasDetails: true,
+ detailsPath: '/h5bp/html5-boilerplate/-/jobs/1015',
+ illustration: {
+ image:
+ '/assets/illustrations/manual_action-c55aee2c5f9ebe9f72751480af8bb307be1a6f35552f344cc6d1bf979d3422f6.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job requires manual intervention to start. Before starting this job, you can add variables below for last-minute configuration changes.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/h5bp/html5-boilerplate/-/jobs/1015/play',
+ method: 'post',
+ buttonTitle: 'Run job',
+ },
+ },
+ },
+ ],
+ scheduledActions: [],
+ cluster: null,
+ },
+ retryUrl: '/h5bp/html5-boilerplate/-/jobs/1014/retry',
+};
diff --git a/spec/frontend/environments/graphql/resolvers/base_spec.js b/spec/frontend/environments/graphql/resolvers/base_spec.js
index e01cf18c40d..939ccc0780c 100644
--- a/spec/frontend/environments/graphql/resolvers/base_spec.js
+++ b/spec/frontend/environments/graphql/resolvers/base_spec.js
@@ -9,7 +9,7 @@ import environmentToStopQuery from '~/environments/graphql/queries/environment_t
import createMockApollo from 'helpers/mock_apollo_helper';
import pollIntervalQuery from '~/environments/graphql/queries/poll_interval.query.graphql';
import isEnvironmentStoppingQuery from '~/environments/graphql/queries/is_environment_stopping.query.graphql';
-import pageInfoQuery from '~/environments/graphql/queries/page_info.query.graphql';
+import pageInfoQuery from '~/graphql_shared/client/page_info.query.graphql';
import { TEST_HOST } from 'helpers/test_constants';
import {
environmentsApp,
@@ -131,13 +131,14 @@ describe('~/frontend/environments/graphql/resolvers', () => {
describe('folder', () => {
it('should fetch the folder url passed to it', async () => {
mock
- .onGet(ENDPOINT, { params: { per_page: 3, scope: 'available', search: '' } })
+ .onGet(ENDPOINT, { params: { per_page: 3, scope: 'available', search: '', page: 1 } })
.reply(HTTP_STATUS_OK, folder);
const environmentFolder = await mockResolvers.Query.folder(null, {
environment: { folderPath: ENDPOINT },
scope: 'available',
search: '',
+ page: 1,
});
expect(environmentFolder).toEqual(resolvedFolder);
@@ -147,10 +148,10 @@ describe('~/frontend/environments/graphql/resolvers', () => {
describe('stopEnvironmentREST', () => {
it('should post to the stop environment path', async () => {
mock.onPost(ENDPOINT).reply(HTTP_STATUS_OK);
-
+ const cache = { evict: jest.fn() };
const client = { writeQuery: jest.fn() };
const environment = { stopPath: ENDPOINT };
- await mockResolvers.Mutation.stopEnvironmentREST(null, { environment }, { client });
+ await mockResolvers.Mutation.stopEnvironmentREST(null, { environment }, { client, cache });
expect(mock.history.post).toContainEqual(
expect.objectContaining({ url: ENDPOINT, method: 'post' }),
@@ -161,6 +162,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
variables: { environment },
data: { isEnvironmentStopping: true },
});
+ expect(cache.evict).toHaveBeenCalledWith({ fieldName: 'folder' });
});
it('should set is stopping to false if stop fails', async () => {
mock.onPost(ENDPOINT).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
@@ -183,27 +185,39 @@ describe('~/frontend/environments/graphql/resolvers', () => {
describe('rollbackEnvironment', () => {
it('should post to the retry environment path', async () => {
mock.onPost(ENDPOINT).reply(HTTP_STATUS_OK);
+ const cache = { evict: jest.fn() };
- await mockResolvers.Mutation.rollbackEnvironment(null, {
- environment: { retryUrl: ENDPOINT },
- });
+ await mockResolvers.Mutation.rollbackEnvironment(
+ null,
+ {
+ environment: { retryUrl: ENDPOINT },
+ },
+ { cache },
+ );
expect(mock.history.post).toContainEqual(
expect.objectContaining({ url: ENDPOINT, method: 'post' }),
);
+ expect(cache.evict).toHaveBeenCalledWith({ fieldName: 'folder' });
});
});
describe('deleteEnvironment', () => {
it('should DELETE to the delete environment path', async () => {
mock.onDelete(ENDPOINT).reply(HTTP_STATUS_OK);
+ const cache = { evict: jest.fn() };
- await mockResolvers.Mutation.deleteEnvironment(null, {
- environment: { deletePath: ENDPOINT },
- });
+ await mockResolvers.Mutation.deleteEnvironment(
+ null,
+ {
+ environment: { deletePath: ENDPOINT },
+ },
+ { cache },
+ );
expect(mock.history.delete).toContainEqual(
expect.objectContaining({ url: ENDPOINT, method: 'delete' }),
);
+ expect(cache.evict).toHaveBeenCalledWith({ fieldName: 'folder' });
});
});
describe('cancelAutoStop', () => {
diff --git a/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js b/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js
index f244ddb01b5..4f3295442b5 100644
--- a/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js
+++ b/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js
@@ -4,6 +4,8 @@ import axios from '~/lib/utils/axios_utils';
import { resolvers } from '~/environments/graphql/resolvers';
import { CLUSTER_AGENT_ERROR_MESSAGES } from '~/environments/constants';
import k8sPodsQuery from '~/environments/graphql/queries/k8s_pods.query.graphql';
+import k8sWorkloadsQuery from '~/environments/graphql/queries/k8s_workloads.query.graphql';
+import k8sServicesQuery from '~/environments/graphql/queries/k8s_services.query.graphql';
import { k8sPodsMock, k8sServicesMock, k8sNamespacesMock } from '../mock_data';
describe('~/frontend/environments/graphql/resolvers', () => {
@@ -157,6 +159,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
});
});
describe('k8sServices', () => {
+ const client = { writeQuery: jest.fn() };
const mockServicesListFn = jest.fn().mockImplementation(() => {
return Promise.resolve({
items: k8sServicesMock,
@@ -166,49 +169,130 @@ describe('~/frontend/environments/graphql/resolvers', () => {
const mockNamespacedServicesListFn = jest.fn().mockImplementation(mockServicesListFn);
const mockAllServicesListFn = jest.fn().mockImplementation(mockServicesListFn);
- beforeEach(() => {
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
- .mockImplementation(mockServicesListFn);
+ describe('when k8sWatchApi feature is disabled', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1NamespacedService')
+ .mockImplementation(mockNamespacedServicesListFn);
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
+ .mockImplementation(mockAllServicesListFn);
+ });
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1NamespacedService')
- .mockImplementation(mockNamespacedServicesListFn);
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
- .mockImplementation(mockAllServicesListFn);
- });
+ it('should request namespaced services from the cluster_client library if namespace is specified', async () => {
+ const services = await mockResolvers.Query.k8sServices(
+ null,
+ { configuration, namespace },
+ { client },
+ );
- it('should request namespaced services from the cluster_client library if namespace is specified', async () => {
- const services = await mockResolvers.Query.k8sServices(null, { configuration, namespace });
+ expect(mockNamespacedServicesListFn).toHaveBeenCalledWith({ namespace });
+ expect(mockAllServicesListFn).not.toHaveBeenCalled();
- expect(mockNamespacedServicesListFn).toHaveBeenCalledWith({ namespace });
- expect(mockAllServicesListFn).not.toHaveBeenCalled();
+ expect(services).toEqual(k8sServicesMock);
+ });
+ it('should request all services from the cluster_client library if namespace is not specified', async () => {
+ const services = await mockResolvers.Query.k8sServices(
+ null,
+ {
+ configuration,
+ namespace: '',
+ },
+ { client },
+ );
+
+ expect(mockServicesListFn).toHaveBeenCalled();
+ expect(mockNamespacedServicesListFn).not.toHaveBeenCalled();
+
+ expect(services).toEqual(k8sServicesMock);
+ });
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
- expect(services).toEqual(k8sServicesMock);
+ await expect(
+ mockResolvers.Query.k8sServices(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
});
- it('should request all services from the cluster_client library if namespace is not specified', async () => {
- const services = await mockResolvers.Query.k8sServices(null, {
- configuration,
- namespace: '',
+
+ describe('when k8sWatchApi feature is enabled', () => {
+ const mockWatcher = WatchApi.prototype;
+ const mockServicesListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
});
- expect(mockServicesListFn).toHaveBeenCalled();
- expect(mockNamespacedServicesListFn).not.toHaveBeenCalled();
+ describe('when the services data is present', () => {
+ beforeEach(() => {
+ gon.features = { k8sWatchApi: true };
- expect(services).toEqual(k8sServicesMock);
- });
- it('should throw an error if the API call fails', async () => {
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
- .mockRejectedValue(new Error('API error'));
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1NamespacedService')
+ .mockImplementation(mockNamespacedServicesListFn);
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
+ .mockImplementation(mockAllServicesListFn);
+ jest
+ .spyOn(mockWatcher, 'subscribeToStream')
+ .mockImplementation(mockServicesListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request namespaced services from the cluster_client library if namespace is specified', async () => {
+ await mockResolvers.Query.k8sServices(null, { configuration, namespace }, { client });
+
+ expect(mockServicesListWatcherFn).toHaveBeenCalledWith(
+ `/api/v1/namespaces/${namespace}/services`,
+ {
+ watch: true,
+ },
+ );
+ });
+ it('should request all services from the cluster_client library if namespace is not specified', async () => {
+ await mockResolvers.Query.k8sServices(null, { configuration, namespace: '' }, { client });
+
+ expect(mockServicesListWatcherFn).toHaveBeenCalledWith(`/api/v1/services`, {
+ watch: true,
+ });
+ });
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sServices(null, { configuration, namespace: '' }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sServicesQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sServices: [] },
+ });
+ });
+ });
+
+ it('should not watch pods from the cluster_client library when the services data is not present', async () => {
+ jest.spyOn(CoreV1Api.prototype, 'listCoreV1NamespacedService').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
- await expect(mockResolvers.Query.k8sServices(null, { configuration })).rejects.toThrow(
- 'API error',
- );
+ await mockResolvers.Query.k8sServices(null, { configuration, namespace }, { client });
+
+ expect(mockServicesListWatcherFn).not.toHaveBeenCalled();
+ });
});
});
describe('k8sWorkloads', () => {
+ const client = {
+ readQuery: jest.fn(() => ({ k8sWorkloads: {} })),
+ writeQuery: jest.fn(),
+ };
const emptyImplementation = jest.fn().mockImplementation(() => {
return Promise.resolve({
data: {
@@ -250,48 +334,137 @@ describe('~/frontend/environments/graphql/resolvers', () => {
{ method: 'listBatchV1CronJobForAllNamespaces', api: BatchV1Api, spy: mockAllCronJob },
];
- beforeEach(() => {
- [...namespacedMocks, ...allMocks].forEach((workloadMock) => {
- jest
- .spyOn(workloadMock.api.prototype, workloadMock.method)
- .mockImplementation(workloadMock.spy);
+ describe('when k8sWatchApi feature is disabled', () => {
+ beforeEach(() => {
+ [...namespacedMocks, ...allMocks].forEach((workloadMock) => {
+ jest
+ .spyOn(workloadMock.api.prototype, workloadMock.method)
+ .mockImplementation(workloadMock.spy);
+ });
});
- });
- it('should request namespaced workload types from the cluster_client library if namespace is specified', async () => {
- await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace });
+ it('should request namespaced workload types from the cluster_client library if namespace is specified', async () => {
+ await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace }, { client });
- namespacedMocks.forEach((workloadMock) => {
- expect(workloadMock.spy).toHaveBeenCalledWith({ namespace });
+ namespacedMocks.forEach((workloadMock) => {
+ expect(workloadMock.spy).toHaveBeenCalledWith({ namespace });
+ });
});
- });
- it('should request all workload types from the cluster_client library if namespace is not specified', async () => {
- await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace: '' });
+ it('should request all workload types from the cluster_client library if namespace is not specified', async () => {
+ await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace: '' }, { client });
- allMocks.forEach((workloadMock) => {
- expect(workloadMock.spy).toHaveBeenCalled();
+ allMocks.forEach((workloadMock) => {
+ expect(workloadMock.spy).toHaveBeenCalled();
+ });
});
- });
- it('should pass fulfilled calls data if one of the API calls fail', async () => {
- jest
- .spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces')
- .mockRejectedValue(new Error('API error'));
-
- await expect(
- mockResolvers.Query.k8sWorkloads(null, { configuration }),
- ).resolves.toBeDefined();
- });
- it('should throw an error if all the API calls fail', async () => {
- [...allMocks].forEach((workloadMock) => {
+ it('should pass fulfilled calls data if one of the API calls fail', async () => {
jest
- .spyOn(workloadMock.api.prototype, workloadMock.method)
+ .spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces')
.mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sWorkloads(null, { configuration }, { client }),
+ ).resolves.toBeDefined();
+ });
+ it('should throw an error if all the API calls fail', async () => {
+ [...allMocks].forEach((workloadMock) => {
+ jest
+ .spyOn(workloadMock.api.prototype, workloadMock.method)
+ .mockRejectedValue(new Error('API error'));
+ });
+
+ await expect(
+ mockResolvers.Query.k8sWorkloads(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
+ describe('when k8sWatchApi feature is enabled', () => {
+ const mockDeployment = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ kind: 'DeploymentList',
+ apiVersion: 'apps/v1',
+ items: [
+ {
+ status: {
+ conditions: [],
+ },
+ },
+ ],
+ });
+ });
+ const mockWatcher = WatchApi.prototype;
+ const mockDeploymentsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
});
- await expect(mockResolvers.Query.k8sWorkloads(null, { configuration })).rejects.toThrow(
- 'API error',
- );
+ describe('when the deployments data is present', () => {
+ beforeEach(() => {
+ gon.features = { k8sWatchApi: true };
+
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1NamespacedDeployment')
+ .mockImplementation(mockDeployment);
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces')
+ .mockImplementation(mockDeployment);
+ jest
+ .spyOn(mockWatcher, 'subscribeToStream')
+ .mockImplementation(mockDeploymentsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request namespaced deployments from the cluster_client library if namespace is specified', async () => {
+ await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace }, { client });
+
+ expect(mockDeploymentsListWatcherFn).toHaveBeenCalledWith(
+ `/apis/apps/v1/namespaces/${namespace}/deployments`,
+ {
+ watch: true,
+ },
+ );
+ });
+ it('should request all deployments from the cluster_client library if namespace is not specified', async () => {
+ await mockResolvers.Query.k8sWorkloads(
+ null,
+ { configuration, namespace: '' },
+ { client },
+ );
+
+ expect(mockDeploymentsListWatcherFn).toHaveBeenCalledWith(`/apis/apps/v1/deployments`, {
+ watch: true,
+ });
+ });
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sWorkloadsQuery,
+ variables: { configuration, namespace },
+ data: { k8sWorkloads: { DeploymentList: [] } },
+ });
+ });
+ });
+
+ it('should not watch deployments from the cluster_client library when the deployments data is not present', async () => {
+ jest.spyOn(AppsV1Api.prototype, 'listAppsV1NamespacedDeployment').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace }, { client });
+
+ expect(mockDeploymentsListWatcherFn).not.toHaveBeenCalled();
+ });
});
});
describe('k8sNamespaces', () => {
diff --git a/spec/frontend/environments/helpers/k8s_integration_helper_spec.js b/spec/frontend/environments/helpers/k8s_integration_helper_spec.js
new file mode 100644
index 00000000000..97100557ef3
--- /dev/null
+++ b/spec/frontend/environments/helpers/k8s_integration_helper_spec.js
@@ -0,0 +1,225 @@
+import {
+ generateServicePortsString,
+ getDeploymentsStatuses,
+ getDaemonSetStatuses,
+ getStatefulSetStatuses,
+ getReplicaSetStatuses,
+ getJobsStatuses,
+ getCronJobsStatuses,
+ humanizeClusterErrors,
+} from '~/environments/helpers/k8s_integration_helper';
+
+import { CLUSTER_AGENT_ERROR_MESSAGES } from '~/environments/constants';
+
+describe('k8s_integration_helper', () => {
+ describe('generateServicePortsString', () => {
+ const port = '8080';
+ const protocol = 'TCP';
+ const nodePort = '31732';
+
+ it('returns empty string if no ports provided', () => {
+ expect(generateServicePortsString([])).toBe('');
+ });
+
+ it('returns port and protocol when provided', () => {
+ expect(generateServicePortsString([{ port, protocol }])).toBe(`${port}/${protocol}`);
+ });
+
+ it('returns port, protocol and nodePort when provided', () => {
+ expect(generateServicePortsString([{ port, protocol, nodePort }])).toBe(
+ `${port}:${nodePort}/${protocol}`,
+ );
+ });
+
+ it('returns joined strings of ports if multiple are provided', () => {
+ expect(
+ generateServicePortsString([
+ { port, protocol },
+ { port, protocol, nodePort },
+ ]),
+ ).toBe(`${port}/${protocol}, ${port}:${nodePort}/${protocol}`);
+ });
+ });
+
+ describe('getDeploymentsStatuses', () => {
+ const pending = {
+ status: {
+ conditions: [
+ { type: 'Available', status: 'False' },
+ { type: 'Progressing', status: 'True' },
+ ],
+ },
+ };
+ const ready = {
+ status: {
+ conditions: [
+ { type: 'Available', status: 'True' },
+ { type: 'Progressing', status: 'False' },
+ ],
+ },
+ };
+ const failed = {
+ status: {
+ conditions: [
+ { type: 'Available', status: 'False' },
+ { type: 'Progressing', status: 'False' },
+ ],
+ },
+ };
+
+ it.each`
+ condition | items | expected
+ ${'there are only pending items'} | ${[pending]} | ${{ pending: [pending] }}
+ ${'there are pending and ready items'} | ${[pending, ready]} | ${{ pending: [pending], ready: [ready] }}
+ ${'there are all kind of items'} | ${[failed, ready, ready, pending]} | ${{ pending: [pending], failed: [failed], ready: [ready, ready] }}
+ `('returns correct object of statuses when $condition', ({ items, expected }) => {
+ expect(getDeploymentsStatuses(items)).toEqual(expected);
+ });
+ });
+
+ describe('getDaemonSetStatuses', () => {
+ const ready = {
+ status: {
+ numberMisscheduled: 0,
+ numberReady: 1,
+ desiredNumberScheduled: 1,
+ },
+ };
+ const failed = {
+ status: {
+ numberReady: 0,
+ desiredNumberScheduled: 1,
+ },
+ };
+ const anotherFailed = {
+ status: {
+ numberReady: 0,
+ desiredNumberScheduled: 0,
+ numberMisscheduled: 1,
+ },
+ };
+
+ it.each`
+ condition | items | expected
+ ${'there are only failed items'} | ${[failed, anotherFailed]} | ${{ failed: [failed, anotherFailed] }}
+ ${'there are only ready items'} | ${[ready]} | ${{ ready: [ready] }}
+ ${'there are all kind of items'} | ${[failed, ready, anotherFailed]} | ${{ failed: [failed, anotherFailed], ready: [ready] }}
+ `('returns correct object of statuses when $condition', ({ items, expected }) => {
+ expect(getDaemonSetStatuses(items)).toEqual(expected);
+ });
+ });
+
+ describe('getStatefulSetStatuses', () => {
+ const ready = {
+ status: {
+ readyReplicas: 1,
+ },
+ spec: { replicas: 1 },
+ };
+ const failed = {
+ status: {
+ readyReplicas: 1,
+ },
+ spec: { replicas: 3 },
+ };
+
+ it.each`
+ condition | items | expected
+ ${'there are only failed items'} | ${[failed, failed]} | ${{ failed: [failed, failed] }}
+ ${'there are only ready items'} | ${[ready]} | ${{ ready: [ready] }}
+ ${'there are all kind of items'} | ${[failed, failed, ready]} | ${{ failed: [failed, failed], ready: [ready] }}
+ `('returns correct object of statuses when $condition', ({ items, expected }) => {
+ expect(getStatefulSetStatuses(items)).toEqual(expected);
+ });
+ });
+
+ describe('getReplicaSetStatuses', () => {
+ const ready = {
+ status: {
+ readyReplicas: 1,
+ },
+ spec: { replicas: 1 },
+ };
+ const failed = {
+ status: {
+ readyReplicas: 1,
+ },
+ spec: { replicas: 3 },
+ };
+
+ it.each`
+ condition | items | expected
+ ${'there are only failed items'} | ${[failed, failed]} | ${{ failed: [failed, failed] }}
+ ${'there are only ready items'} | ${[ready]} | ${{ ready: [ready] }}
+ ${'there are all kind of items'} | ${[failed, failed, ready]} | ${{ failed: [failed, failed], ready: [ready] }}
+ `('returns correct object of statuses when $condition', ({ items, expected }) => {
+ expect(getReplicaSetStatuses(items)).toEqual(expected);
+ });
+ });
+
+ describe('getJobsStatuses', () => {
+ const completed = {
+ status: {
+ succeeded: 1,
+ },
+ spec: { completions: 1 },
+ };
+ const failed = {
+ status: {
+ failed: 1,
+ },
+ spec: { completions: 2 },
+ };
+
+ const anotherFailed = {
+ status: {
+ succeeded: 1,
+ },
+ spec: { completions: 2 },
+ };
+
+ it.each`
+ condition | items | expected
+ ${'there are only failed items'} | ${[failed, anotherFailed]} | ${{ failed: [failed, anotherFailed] }}
+ ${'there are only completed items'} | ${[completed]} | ${{ completed: [completed] }}
+ ${'there are all kind of items'} | ${[failed, completed, anotherFailed]} | ${{ failed: [failed, anotherFailed], completed: [completed] }}
+ `('returns correct object of statuses when $condition', ({ items, expected }) => {
+ expect(getJobsStatuses(items)).toEqual(expected);
+ });
+ });
+
+ describe('getCronJobsStatuses', () => {
+ const suspended = {
+ spec: { suspend: true },
+ };
+ const ready = {
+ status: {
+ active: 2,
+ lastScheduleTime: new Date(),
+ },
+ };
+ const failed = {
+ status: {
+ active: 2,
+ },
+ };
+
+ it.each`
+ condition | items | expected
+ ${'there are only suspended items'} | ${[suspended]} | ${{ suspended: [suspended] }}
+ ${'there are suspended and ready items'} | ${[suspended, ready]} | ${{ suspended: [suspended], ready: [ready] }}
+ ${'there are all kind of items'} | ${[failed, ready, ready, suspended]} | ${{ suspended: [suspended], failed: [failed], ready: [ready, ready] }}
+ `('returns correct object of statuses when $condition', ({ items, expected }) => {
+ expect(getCronJobsStatuses(items)).toEqual(expected);
+ });
+ });
+
+ describe('humanizeClusterErrors', () => {
+ it.each(['unauthorized', 'forbidden', 'not found', 'other'])(
+ 'returns correct object of statuses when error reason is %s',
+ (reason) => {
+ expect(humanizeClusterErrors(reason)).toEqual(CLUSTER_AGENT_ERROR_MESSAGES[reason]);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/environments/kubernetes_overview_spec.js b/spec/frontend/environments/kubernetes_overview_spec.js
index 12689df586f..e00cabd1066 100644
--- a/spec/frontend/environments/kubernetes_overview_spec.js
+++ b/spec/frontend/environments/kubernetes_overview_spec.js
@@ -74,7 +74,7 @@ describe('~/environments/components/kubernetes_overview.vue', () => {
});
it('is collapsed by default', () => {
- expect(findCollapse().props('visible')).toBeUndefined();
+ expect(findCollapse().props('visible')).toBe(false);
expect(findCollapseButton().attributes('aria-label')).toBe(KubernetesOverview.i18n.expand);
expect(findCollapseButton().props('icon')).toBe('chevron-right');
});
@@ -88,7 +88,7 @@ describe('~/environments/components/kubernetes_overview.vue', () => {
findCollapseButton().vm.$emit('click');
await nextTick();
- expect(findCollapse().attributes('visible')).toBe('true');
+ expect(findCollapse().props('visible')).toBe(true);
expect(findCollapseButton().attributes('aria-label')).toBe(KubernetesOverview.i18n.collapse);
expect(findCollapseButton().props('icon')).toBe('chevron-down');
});
@@ -149,14 +149,14 @@ describe('~/environments/components/kubernetes_overview.vue', () => {
});
it('sets `clusterHealthStatus` as error when pods emitted a failure', async () => {
- findKubernetesPods().vm.$emit('failed');
+ findKubernetesPods().vm.$emit('update-failed-state', { pods: true });
await nextTick();
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
});
it('sets `clusterHealthStatus` as error when workload types emitted a failure', async () => {
- findKubernetesTabs().vm.$emit('failed');
+ findKubernetesTabs().vm.$emit('update-failed-state', { summary: true });
await nextTick();
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
@@ -165,6 +165,21 @@ describe('~/environments/components/kubernetes_overview.vue', () => {
it('sets `clusterHealthStatus` as success when data is loaded and no failures where emitted', () => {
expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('success');
});
+
+ it('sets `clusterHealthStatus` as success after state update if there are no failures', async () => {
+ findKubernetesTabs().vm.$emit('update-failed-state', { summary: true });
+ findKubernetesTabs().vm.$emit('update-failed-state', { pods: true });
+ await nextTick();
+ expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
+
+ findKubernetesTabs().vm.$emit('update-failed-state', { summary: false });
+ await nextTick();
+ expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
+
+ findKubernetesTabs().vm.$emit('update-failed-state', { pods: false });
+ await nextTick();
+ expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('success');
+ });
});
describe('on cluster error', () => {
diff --git a/spec/frontend/environments/kubernetes_pods_spec.js b/spec/frontend/environments/kubernetes_pods_spec.js
index a51c85468b4..6c3e49e4d8a 100644
--- a/spec/frontend/environments/kubernetes_pods_spec.js
+++ b/spec/frontend/environments/kubernetes_pods_spec.js
@@ -2,10 +2,10 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
-import { GlSingleStat } from '@gitlab/ui/dist/charts';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import KubernetesPods from '~/environments/components/kubernetes_pods.vue';
+import WorkloadStats from '~/kubernetes_dashboard/components/workload_stats.vue';
import { mockKasTunnelUrl } from './mock_data';
import { k8sPodsMock } from './graphql/mock_data';
@@ -23,8 +23,7 @@ describe('~/environments/components/kubernetes_pods.vue', () => {
};
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findAllStats = () => wrapper.findAllComponents(GlSingleStat);
- const findSingleStat = (at) => findAllStats().at(at);
+ const findWorkloadStats = () => wrapper.findComponent(WorkloadStats);
const createApolloProvider = () => {
const mockResolvers = {
@@ -67,37 +66,41 @@ describe('~/environments/components/kubernetes_pods.vue', () => {
});
describe('when gets pods data', () => {
- it('renders stats', async () => {
+ it('renders workload stats with the correct data', async () => {
createWrapper();
await waitForPromises();
- expect(findAllStats()).toHaveLength(4);
+ expect(findWorkloadStats().props('stats')).toEqual([
+ {
+ value: 2,
+ title: 'Running',
+ },
+ {
+ value: 1,
+ title: 'Pending',
+ },
+ {
+ value: 1,
+ title: 'Succeeded',
+ },
+ {
+ value: 2,
+ title: 'Failed',
+ },
+ ]);
});
- it.each`
- count | title | index
- ${2} | ${KubernetesPods.i18n.runningPods} | ${0}
- ${1} | ${KubernetesPods.i18n.pendingPods} | ${1}
- ${1} | ${KubernetesPods.i18n.succeededPods} | ${2}
- ${2} | ${KubernetesPods.i18n.failedPods} | ${3}
- `(
- 'renders stat with title "$title" and count "$count" at index $index',
- async ({ count, title, index }) => {
- createWrapper();
- await waitForPromises();
-
- expect(findSingleStat(index).props()).toMatchObject({
- value: count,
- title,
- });
- },
- );
-
- it('emits a failed event when there are failed pods', async () => {
+ it('emits a update-failed-state event for each pod', async () => {
createWrapper();
await waitForPromises();
- expect(wrapper.emitted('failed')).toHaveLength(1);
+ expect(wrapper.emitted('update-failed-state')).toHaveLength(4);
+ expect(wrapper.emitted('update-failed-state')).toEqual([
+ [{ pods: false }],
+ [{ pods: false }],
+ [{ pods: false }],
+ [{ pods: true }],
+ ]);
});
});
@@ -119,7 +122,7 @@ describe('~/environments/components/kubernetes_pods.vue', () => {
});
it("doesn't show pods stats", () => {
- expect(findAllStats()).toHaveLength(0);
+ expect(findWorkloadStats().exists()).toBe(false);
});
it('emits an error message', () => {
diff --git a/spec/frontend/environments/kubernetes_summary_spec.js b/spec/frontend/environments/kubernetes_summary_spec.js
index 457d1a37c1d..0d448d0b6af 100644
--- a/spec/frontend/environments/kubernetes_summary_spec.js
+++ b/spec/frontend/environments/kubernetes_summary_spec.js
@@ -80,16 +80,16 @@ describe('~/environments/components/kubernetes_summary.vue', () => {
});
it.each`
- type | successText | successCount | failedCount | suspendedCount | index
- ${'Deployments'} | ${'ready'} | ${1} | ${1} | ${0} | ${0}
- ${'DaemonSets'} | ${'ready'} | ${1} | ${2} | ${0} | ${1}
- ${'StatefulSets'} | ${'ready'} | ${2} | ${1} | ${0} | ${2}
- ${'ReplicaSets'} | ${'ready'} | ${1} | ${1} | ${0} | ${3}
- ${'Jobs'} | ${'completed'} | ${2} | ${1} | ${0} | ${4}
- ${'CronJobs'} | ${'ready'} | ${1} | ${1} | ${1} | ${5}
+ type | successText | successCount | failedCount | suspendedCount | pendingCount | index
+ ${'Deployments'} | ${'ready'} | ${1} | ${1} | ${0} | ${1} | ${0}
+ ${'DaemonSets'} | ${'ready'} | ${1} | ${2} | ${0} | ${0} | ${1}
+ ${'StatefulSets'} | ${'ready'} | ${2} | ${1} | ${0} | ${0} | ${2}
+ ${'ReplicaSets'} | ${'ready'} | ${1} | ${1} | ${0} | ${0} | ${3}
+ ${'Jobs'} | ${'completed'} | ${2} | ${1} | ${0} | ${0} | ${4}
+ ${'CronJobs'} | ${'ready'} | ${1} | ${1} | ${1} | ${0} | ${5}
`(
'populates view with the correct badges for workload type $type',
- ({ type, successText, successCount, failedCount, suspendedCount, index }) => {
+ ({ type, successText, successCount, failedCount, suspendedCount, pendingCount, index }) => {
const findAllBadges = () => findSummaryListItem(index).findAllComponents(GlBadge);
const findBadgeByVariant = (variant) =>
findAllBadges().wrappers.find((badge) => badge.props('variant') === variant);
@@ -100,12 +100,15 @@ describe('~/environments/components/kubernetes_summary.vue', () => {
if (suspendedCount > 0) {
expect(findBadgeByVariant('neutral').text()).toBe(`${suspendedCount} suspended`);
}
+ if (pendingCount > 0) {
+ expect(findBadgeByVariant('info').text()).toBe(`${pendingCount} pending`);
+ }
},
);
});
- it('emits a failed event when there are failed workload types', () => {
- expect(wrapper.emitted('failed')).toHaveLength(1);
+ it('emits a update-failed-state event when there are failed workload types', () => {
+ expect(wrapper.emitted('update-failed-state')).toEqual([[{ summary: true }]]);
});
it('emits an error message when gets an error from the cluster_client API', async () => {
diff --git a/spec/frontend/environments/kubernetes_tabs_spec.js b/spec/frontend/environments/kubernetes_tabs_spec.js
index fecd6d2a8ee..bf029ad6a81 100644
--- a/spec/frontend/environments/kubernetes_tabs_spec.js
+++ b/spec/frontend/environments/kubernetes_tabs_spec.js
@@ -179,9 +179,10 @@ describe('~/environments/components/kubernetes_tabs.vue', () => {
expect(wrapper.emitted('loading')[1]).toEqual([false]);
});
- it('emits a failed event when gets it from the component', () => {
- findKubernetesSummary().vm.$emit('failed');
- expect(wrapper.emitted('failed')).toHaveLength(1);
+ it('emits a state update event when gets it from the component', () => {
+ const eventData = { summary: true };
+ findKubernetesSummary().vm.$emit('update-failed-state', eventData);
+ expect(wrapper.emitted('update-failed-state')).toEqual([[eventData]]);
});
});
});
diff --git a/spec/frontend/environments/new_environment_item_spec.js b/spec/frontend/environments/new_environment_item_spec.js
index 7ee31bf2c62..552c44fe197 100644
--- a/spec/frontend/environments/new_environment_item_spec.js
+++ b/spec/frontend/environments/new_environment_item_spec.js
@@ -5,7 +5,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { stubTransition } from 'helpers/stub_transition';
-import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
+import { getTimeago, localeDateFormat } from '~/lib/utils/datetime_utility';
import { __, s__, sprintf } from '~/locale';
import EnvironmentItem from '~/environments/components/new_environment_item.vue';
import EnvironmentActions from '~/environments/components/environment_actions.vue';
@@ -253,7 +253,9 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
it('shows when the environment auto stops', () => {
- const autoStop = wrapper.findByTitle(formatDate(environment.autoStopAt));
+ const autoStop = wrapper.findByTitle(
+ localeDateFormat.asDateTimeFull.format(environment.autoStopAt),
+ );
expect(autoStop.text()).toBe('in 1 minute');
});
@@ -380,7 +382,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
it('is collapsed by default', () => {
- expect(collapse.attributes('visible')).toBeUndefined();
+ expect(collapse.props('visible')).toBe(false);
expect(icon.props('name')).toBe('chevron-lg-right');
expect(environmentName.classes('gl-font-weight-bold')).toBe(false);
});
@@ -392,7 +394,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
expect(button.attributes('aria-label')).toBe(__('Collapse'));
expect(button.props('category')).toBe('secondary');
- expect(collapse.attributes('visible')).toBe('visible');
+ expect(collapse.props('visible')).toBe(true);
expect(icon.props('name')).toBe('chevron-lg-down');
expect(environmentName.classes('gl-font-weight-bold')).toBe(true);
expect(findDeployment().isVisible()).toBe(true);
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 977e0a55a99..f43d6a2b025 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -463,7 +463,7 @@ describe('ErrorDetails', () => {
const gitlabIssuePath = 'https://gitlab.example.com/issues/1';
const findGitLabLink = () => wrapper.find(`[href="${gitlabIssuePath}"]`);
const findCreateIssueButton = () => wrapper.find('[data-testid="create-issue-button"]');
- const findViewIssueButton = () => wrapper.find('[data-qa-selector="view_issue_button"]');
+ const findViewIssueButton = () => wrapper.find('[data-testid="view-issue-button"]');
describe('is present', () => {
beforeEach(() => {
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index a9cd407f758..823f7132fdd 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -43,6 +43,8 @@ describe('ErrorTrackingList', () => {
userCanEnableErrorTracking = true,
showIntegratedTrackingDisabledAlert = false,
integratedErrorTrackingEnabled = false,
+ listPath = '/error_tracking',
+
stubs = {},
} = {}) {
wrapper = extendedWrapper(
@@ -50,7 +52,7 @@ describe('ErrorTrackingList', () => {
store,
propsData: {
indexPath: '/path',
- listPath: '/error_tracking',
+ listPath,
projectPath: 'project/test',
enableErrorTrackingLink: '/link',
userCanEnableErrorTracking,
@@ -144,13 +146,27 @@ describe('ErrorTrackingList', () => {
expect(findErrorListRows().length).toEqual(store.state.list.errors.length);
});
- it('each error in a list should have a link to the error page', () => {
- const errorTitle = wrapper.findAll('tbody tr a');
+ describe.each([
+ ['/test-project/-/error_tracking'],
+ ['/test-project/-/error_tracking/'], // handles leading '/' https://gitlab.com/gitlab-org/gitlab/-/issues/430211
+ ])('details link', (url) => {
+ beforeEach(() => {
+ mountComponent({
+ listPath: url,
+ stubs: {
+ GlTable: false,
+ GlLink: false,
+ },
+ });
+ });
+ it('each error in a list should have a link to the error page', () => {
+ const errorTitle = wrapper.findAll('tbody tr a');
- errorTitle.wrappers.forEach((_, index) => {
- expect(errorTitle.at(index).attributes('href')).toEqual(
- expect.stringMatching(/error_tracking\/\d+\/details$/),
- );
+ errorTitle.wrappers.forEach((_, index) => {
+ expect(errorTitle.at(index).attributes('href')).toEqual(
+ `/test-project/-/error_tracking/${errorsList[index].id}/details`,
+ );
+ });
});
});
diff --git a/spec/frontend/error_tracking/store/list/actions_spec.js b/spec/frontend/error_tracking/store/list/actions_spec.js
index 24a26476455..622195defa1 100644
--- a/spec/frontend/error_tracking/store/list/actions_spec.js
+++ b/spec/frontend/error_tracking/store/list/actions_spec.js
@@ -57,7 +57,7 @@ describe('error tracking actions', () => {
describe('restartPolling', () => {
it('should restart polling', () => {
- testAction(
+ return testAction(
actions.restartPolling,
{},
{},
@@ -74,7 +74,7 @@ describe('error tracking actions', () => {
it('should search by query', () => {
const query = 'search';
- testAction(
+ return testAction(
actions.searchByQuery,
query,
{},
@@ -92,7 +92,7 @@ describe('error tracking actions', () => {
it('should search errors by status', () => {
const status = 'ignored';
- testAction(
+ return testAction(
actions.filterByStatus,
status,
{},
@@ -106,7 +106,7 @@ describe('error tracking actions', () => {
it('should search by query', () => {
const field = 'frequency';
- testAction(
+ return testAction(
actions.sortByField,
field,
{},
@@ -123,7 +123,7 @@ describe('error tracking actions', () => {
it('should set search endpoint', () => {
const endpoint = 'https://sentry.io';
- testAction(
+ return testAction(
actions.setEndpoint,
{ endpoint },
{},
@@ -136,7 +136,7 @@ describe('error tracking actions', () => {
describe('fetchPaginatedResults', () => {
it('should start polling the selected page cursor', () => {
const cursor = '1576637570000:1:1';
- testAction(
+ return testAction(
actions.fetchPaginatedResults,
cursor,
{},
diff --git a/spec/frontend/feature_flags/mock_data.js b/spec/frontend/feature_flags/mock_data.js
index 4c40c2acf01..61e96057017 100644
--- a/spec/frontend/feature_flags/mock_data.js
+++ b/spec/frontend/feature_flags/mock_data.js
@@ -56,7 +56,7 @@ export const userList = {
iid: 2,
project_id: 1,
created_at: '2020-02-04T08:13:10.507Z',
- updated_at: '2020-02-04T08:13:10.507Z',
+ updated_at: '2020-02-05T08:14:10.507Z',
path: '/path/to/user/list',
edit_path: '/path/to/user/list/edit',
};
diff --git a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
deleted file mode 100644
index 4609bfc23d7..00000000000
--- a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
+++ /dev/null
@@ -1,42 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import { dismiss } from '~/feature_highlight/feature_highlight_helper';
-import { createAlert } from '~/alert';
-import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_CREATED, HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
-
-jest.mock('~/alert');
-
-describe('feature highlight helper', () => {
- describe('dismiss', () => {
- let mockAxios;
- const endpoint = '/-/callouts/dismiss';
- const highlightId = '123';
-
- beforeEach(() => {
- mockAxios = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mockAxios.reset();
- });
-
- it('calls persistent dismissal endpoint with highlightId', async () => {
- mockAxios.onPost(endpoint, { feature_name: highlightId }).replyOnce(HTTP_STATUS_CREATED);
-
- await expect(dismiss(endpoint, highlightId)).resolves.toEqual(expect.anything());
- });
-
- it('triggers an alert when dismiss request fails', async () => {
- mockAxios
- .onPost(endpoint, { feature_name: highlightId })
- .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
-
- await dismiss(endpoint, highlightId);
-
- expect(createAlert).toHaveBeenCalledWith({
- message:
- 'An error occurred while dismissing the feature highlight. Refresh the page and try dismissing again.',
- });
- });
- });
-});
diff --git a/spec/frontend/feature_highlight/feature_highlight_popover_spec.js b/spec/frontend/feature_highlight/feature_highlight_popover_spec.js
deleted file mode 100644
index 66ea22cece3..00000000000
--- a/spec/frontend/feature_highlight/feature_highlight_popover_spec.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { GlPopover, GlLink, GlButton } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { POPOVER_TARGET_ID } from '~/feature_highlight/constants';
-import { dismiss } from '~/feature_highlight/feature_highlight_helper';
-import FeatureHighlightPopover from '~/feature_highlight/feature_highlight_popover.vue';
-
-jest.mock('~/feature_highlight/feature_highlight_helper');
-
-describe('feature_highlight/feature_highlight_popover', () => {
- let wrapper;
- const props = {
- autoDevopsHelpPath: '/help/autodevops',
- highlightId: '123',
- dismissEndpoint: '/api/dismiss',
- };
-
- const buildWrapper = (propsData = props) => {
- wrapper = mount(FeatureHighlightPopover, {
- propsData,
- });
- };
- const findPopoverTarget = () => wrapper.find(`#${POPOVER_TARGET_ID}`);
- const findPopover = () => wrapper.findComponent(GlPopover);
- const findAutoDevopsHelpLink = () => wrapper.findComponent(GlLink);
- const findDismissButton = () => wrapper.findComponent(GlButton);
-
- beforeEach(() => {
- buildWrapper();
- });
-
- it('renders popover target', () => {
- expect(findPopoverTarget().exists()).toBe(true);
- });
-
- it('renders popover', () => {
- expect(findPopover().props()).toMatchObject({
- target: POPOVER_TARGET_ID,
- cssClasses: ['feature-highlight-popover'],
- container: 'body',
- placement: 'right',
- boundary: 'viewport',
- });
- });
-
- it('renders link that points to the autodevops help page', () => {
- expect(findAutoDevopsHelpLink().attributes().href).toBe(props.autoDevopsHelpPath);
- expect(findAutoDevopsHelpLink().text()).toBe('Auto DevOps');
- });
-
- it('renders dismiss button', () => {
- expect(findDismissButton().props()).toMatchObject({
- size: 'small',
- icon: 'thumb-up',
- variant: 'confirm',
- });
- });
-
- it('dismisses popover when dismiss button is clicked', async () => {
- await findDismissButton().trigger('click');
-
- expect(findPopover().emitted('close')).toHaveLength(1);
- expect(dismiss).toHaveBeenCalledWith(props.dismissEndpoint, props.highlightId);
- });
-
- describe('when popover is dismissed and hidden', () => {
- it('hides the popover target', async () => {
- await findDismissButton().trigger('click');
- findPopover().vm.$emit('hidden');
- await nextTick();
-
- expect(findPopoverTarget().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/filtered_search/add_extra_tokens_for_merge_requests_spec.js b/spec/frontend/filtered_search/add_extra_tokens_for_merge_requests_spec.js
new file mode 100644
index 00000000000..6b3490122c3
--- /dev/null
+++ b/spec/frontend/filtered_search/add_extra_tokens_for_merge_requests_spec.js
@@ -0,0 +1,30 @@
+import addExtraTokensForMergeRequests from 'ee_else_ce/filtered_search/add_extra_tokens_for_merge_requests';
+import { createFilteredSearchTokenKeys } from '~/filtered_search/issuable_filtered_search_token_keys';
+
+describe('app/assets/javascripts/pages/dashboard/merge_requests/index.js', () => {
+ let IssuableFilteredSearchTokenKeys;
+
+ beforeEach(() => {
+ IssuableFilteredSearchTokenKeys = createFilteredSearchTokenKeys();
+ window.gon = {
+ ...window.gon,
+ features: {
+ mrApprovedFilter: true,
+ },
+ };
+ });
+
+ describe.each(['Branch', 'Environment'])('when $filter is disabled', (filter) => {
+ beforeEach(() => {
+ addExtraTokensForMergeRequests(IssuableFilteredSearchTokenKeys, {
+ [`disable${filter}Filter`]: true,
+ });
+ });
+
+ it('excludes the filter', () => {
+ expect(IssuableFilteredSearchTokenKeys.tokenKeys).not.toContainEqual(
+ expect.objectContaining({ tag: filter.toLowerCase() }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js b/spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js
index 2041bc3d959..35fdb02e208 100644
--- a/spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js
+++ b/spec/frontend/filtered_search/issues_filtered_search_token_keys_spec.js
@@ -1,4 +1,6 @@
-import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
+import IssuableFilteredSearchTokenKeys, {
+ createFilteredSearchTokenKeys,
+} from '~/filtered_search/issuable_filtered_search_token_keys';
describe('Issues Filtered Search Token Keys', () => {
describe('get', () => {
@@ -167,3 +169,21 @@ describe('Issues Filtered Search Token Keys', () => {
});
});
});
+
+describe('createFilteredSearchTokenKeys', () => {
+ describe.each(['Release'])('when $filter is disabled', (filter) => {
+ let tokens;
+
+ beforeEach(() => {
+ tokens = createFilteredSearchTokenKeys({
+ [`disable${filter}Filter`]: true,
+ });
+ });
+
+ it('excludes the filter', () => {
+ expect(tokens.tokenKeys).not.toContainEqual(
+ expect.objectContaining({ tag: filter.toLowerCase() }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/fixtures/deploy_keys.rb b/spec/frontend/fixtures/deploy_keys.rb
index 05fca368fd5..8c371827594 100644
--- a/spec/frontend/fixtures/deploy_keys.rb
+++ b/spec/frontend/fixtures/deploy_keys.rb
@@ -12,12 +12,19 @@ RSpec.describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :c
let(:project2) { create(:project, :internal) }
let(:project3) { create(:project, :internal) }
let(:project4) { create(:project, :internal) }
+ let(:project_key) { create(:deploy_key) }
+ let(:internal_key) { create(:deploy_key) }
before do
# Using an admin for these fixtures because they are used for verifying a frontend
# component that would normally get its data from `Admin::DeployKeysController`
sign_in(admin)
enable_admin_mode!(admin)
+ create(:rsa_deploy_key_5120, public: true)
+ create(:deploy_keys_project, project: project, deploy_key: project_key)
+ create(:deploy_keys_project, project: project2, deploy_key: internal_key)
+ create(:deploy_keys_project, project: project3, deploy_key: project_key)
+ create(:deploy_keys_project, project: project4, deploy_key: project_key)
end
after do
@@ -27,14 +34,6 @@ RSpec.describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :c
render_views
it 'deploy_keys/keys.json' do
- create(:rsa_deploy_key_5120, public: true)
- project_key = create(:deploy_key)
- internal_key = create(:deploy_key)
- create(:deploy_keys_project, project: project, deploy_key: project_key)
- create(:deploy_keys_project, project: project2, deploy_key: internal_key)
- create(:deploy_keys_project, project: project3, deploy_key: project_key)
- create(:deploy_keys_project, project: project4, deploy_key: project_key)
-
get :index, params: {
namespace_id: project.namespace.to_param,
project_id: project
@@ -42,4 +41,31 @@ RSpec.describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :c
expect(response).to be_successful
end
+
+ it 'deploy_keys/enabled_keys.json' do
+ get :enabled_keys, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project
+ }, format: :json
+
+ expect(response).to be_successful
+ end
+
+ it 'deploy_keys/available_project_keys.json' do
+ get :available_project_keys, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project
+ }, format: :json
+
+ expect(response).to be_successful
+ end
+
+ it 'deploy_keys/available_public_keys.json' do
+ get :available_public_keys, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project
+ }, format: :json
+
+ expect(response).to be_successful
+ end
end
diff --git a/spec/frontend/fixtures/pipeline_header.rb b/spec/frontend/fixtures/pipeline_header.rb
index 744df18a403..77d626100ad 100644
--- a/spec/frontend/fixtures/pipeline_header.rb
+++ b/spec/frontend/fixtures/pipeline_header.rb
@@ -18,18 +18,23 @@ RSpec.describe "GraphQL Pipeline Header", '(JavaScript fixtures)', type: :reques
let_it_be(:pipeline) do
create(
:ci_pipeline,
+ :merged_result_pipeline,
project: project,
sha: commit.id,
ref: 'master',
user: user,
+ name: 'Build pipeline',
status: :success,
duration: 7210,
created_at: 2.hours.ago,
started_at: 1.hour.ago,
- finished_at: Time.current
+ finished_at: Time.current,
+ source: :schedule
)
end
+ let_it_be(:builds) { create_list(:ci_build, 3, :success, pipeline: pipeline, ref: 'master') }
+
it "graphql/pipelines/pipeline_header_success.json" do
query = get_graphql_query_as_string(query_path)
@@ -64,6 +69,34 @@ RSpec.describe "GraphQL Pipeline Header", '(JavaScript fixtures)', type: :reques
end
end
+ context 'with running pipeline and no permissions' do
+ let_it_be(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: commit.id,
+ ref: 'master',
+ user: user,
+ status: :running,
+ created_at: 2.hours.ago,
+ started_at: 1.hour.ago
+ )
+ end
+
+ let_it_be(:build) { create(:ci_build, :running, pipeline: pipeline, ref: 'master') }
+
+ it "graphql/pipelines/pipeline_header_running_no_permissions.json" do
+ guest = create(:user)
+ project.add_guest(guest)
+
+ query = get_graphql_query_as_string(query_path)
+
+ post_graphql(query, current_user: guest, variables: { fullPath: project.full_path, iid: pipeline.iid })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+
context 'with running pipeline and duration' do
let_it_be(:pipeline) do
create(
diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb
index a73a0dcbdd1..3b03a03cb96 100644
--- a/spec/frontend/fixtures/runner.rb
+++ b/spec/frontend/fixtures/runner.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Runner (JavaScript fixtures)', feature_category: :runner_fleet do
+RSpec.describe 'Runner (JavaScript fixtures)', feature_category: :fleet_visibility do
include AdminModeHelper
include ApiHelpers
include JavaScriptFixturesHelpers
diff --git a/spec/frontend/fixtures/static/whats_new_notification.html b/spec/frontend/fixtures/static/whats_new_notification.html
deleted file mode 100644
index bc8a27c779f..00000000000
--- a/spec/frontend/fixtures/static/whats_new_notification.html
+++ /dev/null
@@ -1,7 +0,0 @@
-<div class='whats-new-notification-fixture-root'>
- <div class='app' data-version-digest='version-digest'></div>
- <div data-testid='without-digest'></div>
- <div class='header-help'>
- <div class='js-whats-new-notification-count'></div>
- </div>
-</div>
diff --git a/spec/frontend/frequent_items/components/app_spec.js b/spec/frontend/frequent_items/components/app_spec.js
deleted file mode 100644
index 122155a5d3f..00000000000
--- a/spec/frontend/frequent_items/components/app_spec.js
+++ /dev/null
@@ -1,286 +0,0 @@
-import { GlButton, GlIcon } from '@gitlab/ui';
-import MockAdapter from 'axios-mock-adapter';
-import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import App from '~/frequent_items/components/app.vue';
-import FrequentItemsList from '~/frequent_items/components/frequent_items_list.vue';
-import { FREQUENT_ITEMS, FIFTEEN_MINUTES_IN_MS } from '~/frequent_items/constants';
-import eventHub from '~/frequent_items/event_hub';
-import { createStore } from '~/frequent_items/store';
-import { getTopFrequentItems } from '~/frequent_items/utils';
-import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import { currentSession, mockFrequentProjects, mockSearchedProjects } from '../mock_data';
-
-Vue.use(Vuex);
-
-useLocalStorageSpy();
-
-const TEST_NAMESPACE = 'projects';
-const TEST_VUEX_MODULE = 'frequentProjects';
-const TEST_PROJECT = currentSession[TEST_NAMESPACE].project;
-const TEST_STORAGE_KEY = currentSession[TEST_NAMESPACE].storageKey;
-const TEST_SEARCH_CLASS = 'test-search-class';
-
-describe('Frequent Items App Component', () => {
- let wrapper;
- let mock;
- let store;
-
- const createComponent = (props = {}) => {
- const session = currentSession[TEST_NAMESPACE];
- gon.api_version = session.apiVersion;
-
- wrapper = mountExtended(App, {
- store,
- propsData: {
- namespace: TEST_NAMESPACE,
- currentUserName: session.username,
- currentItem: session.project,
- ...props,
- },
- provide: {
- vuexModule: TEST_VUEX_MODULE,
- },
- });
- };
-
- const triggerDropdownOpen = () => eventHub.$emit(`${TEST_NAMESPACE}-dropdownOpen`);
- const getStoredProjects = () => JSON.parse(localStorage.getItem(TEST_STORAGE_KEY));
- const findSearchInput = () => wrapper.findByTestId('frequent-items-search-input');
- const findLoading = () => wrapper.findByTestId('loading');
- const findSectionHeader = () => wrapper.findByTestId('header');
- const findFrequentItemsList = () => wrapper.findComponent(FrequentItemsList);
- const findFrequentItems = () => findFrequentItemsList().findAll('li');
- const setSearch = (search) => {
- const searchInput = wrapper.find('input');
-
- searchInput.setValue(search);
- };
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- store = createStore();
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- describe('default', () => {
- beforeEach(() => {
- jest.spyOn(store, 'dispatch');
-
- createComponent();
- });
-
- it('should fetch frequent items', () => {
- triggerDropdownOpen();
-
- expect(store.dispatch).toHaveBeenCalledWith(`${TEST_VUEX_MODULE}/fetchFrequentItems`);
- });
-
- it('should not fetch frequent items if detroyed', () => {
- wrapper.destroy();
- triggerDropdownOpen();
-
- expect(store.dispatch).not.toHaveBeenCalledWith(`${TEST_VUEX_MODULE}/fetchFrequentItems`);
- });
-
- it('should render search input', () => {
- expect(findSearchInput().classes()).toEqual(['search-input-container']);
- });
-
- it('should render loading animation', async () => {
- triggerDropdownOpen();
- store.state[TEST_VUEX_MODULE].isLoadingItems = true;
-
- await nextTick();
-
- const loading = findLoading();
-
- expect(loading.exists()).toBe(true);
- expect(loading.find('[aria-label="Loading projects"]').exists()).toBe(true);
- expect(findSectionHeader().exists()).toBe(false);
- });
-
- it('should render frequent projects list header', () => {
- const sectionHeader = findSectionHeader();
-
- expect(sectionHeader.exists()).toBe(true);
- expect(sectionHeader.text()).toBe('Frequently visited');
- });
-
- it('should render searched projects list', async () => {
- mock
- .onGet(/\/api\/v4\/projects.json(.*)$/)
- .replyOnce(HTTP_STATUS_OK, mockSearchedProjects.data);
-
- setSearch('gitlab');
- await nextTick();
-
- expect(findLoading().exists()).toBe(true);
-
- await waitForPromises();
-
- expect(findFrequentItems().length).toBe(mockSearchedProjects.data.length);
- expect(findFrequentItemsList().props()).toEqual(
- expect.objectContaining({
- items: mockSearchedProjects.data.map(
- ({
- avatar_url: avatarUrl,
- web_url: webUrl,
- name_with_namespace: namespace,
- ...item
- }) => ({
- ...item,
- avatarUrl,
- webUrl,
- namespace,
- }),
- ),
- namespace: TEST_NAMESPACE,
- hasSearchQuery: true,
- isFetchFailed: false,
- matcher: 'gitlab',
- }),
- );
- });
-
- describe('with frequent items list', () => {
- const expectedResult = getTopFrequentItems(mockFrequentProjects);
-
- beforeEach(async () => {
- localStorage.setItem(TEST_STORAGE_KEY, JSON.stringify(mockFrequentProjects));
- triggerDropdownOpen();
- await nextTick();
- });
-
- it('should render edit button within header', () => {
- const itemEditButton = findSectionHeader().findComponent(GlButton);
-
- expect(itemEditButton.exists()).toBe(true);
- expect(itemEditButton.attributes('title')).toBe('Toggle edit mode');
- expect(itemEditButton.findComponent(GlIcon).props('name')).toBe('pencil');
- });
-
- it('should render frequent projects list', () => {
- expect(findFrequentItems().length).toBe(expectedResult.length);
- expect(findFrequentItemsList().props()).toEqual({
- items: expectedResult,
- namespace: TEST_NAMESPACE,
- hasSearchQuery: false,
- isFetchFailed: false,
- isItemRemovalFailed: false,
- matcher: '',
- });
- });
-
- it('dispatches action `toggleItemsListEditablity` when edit button is clicked', async () => {
- const itemEditButton = findSectionHeader().findComponent(GlButton);
- itemEditButton.vm.$emit('click');
-
- await nextTick();
-
- expect(store.dispatch).toHaveBeenCalledWith(
- `${TEST_VUEX_MODULE}/toggleItemsListEditablity`,
- );
- });
- });
- });
-
- describe('with searchClass', () => {
- beforeEach(() => {
- createComponent({ searchClass: TEST_SEARCH_CLASS });
- });
-
- it('should render search input with searchClass', () => {
- expect(findSearchInput().classes()).toEqual(['search-input-container', TEST_SEARCH_CLASS]);
- });
- });
-
- describe('logging', () => {
- it('when created, it should create a project storage entry and adds a project', () => {
- createComponent();
-
- expect(getStoredProjects()).toEqual([
- expect.objectContaining({
- frequency: 1,
- lastAccessedOn: Date.now(),
- }),
- ]);
- });
-
- describe('when created multiple times', () => {
- beforeEach(() => {
- createComponent();
- wrapper.destroy();
- createComponent();
- wrapper.destroy();
- });
-
- it('should only log once', () => {
- expect(getStoredProjects()).toEqual([
- expect.objectContaining({
- lastAccessedOn: Date.now(),
- frequency: 1,
- }),
- ]);
- });
-
- it('should increase frequency, when created 15 minutes later', () => {
- const fifteenMinutesLater = Date.now() + FIFTEEN_MINUTES_IN_MS + 1;
-
- jest.spyOn(Date, 'now').mockReturnValue(fifteenMinutesLater);
- createComponent({ currentItem: { ...TEST_PROJECT, lastAccessedOn: fifteenMinutesLater } });
-
- expect(getStoredProjects()).toEqual([
- expect.objectContaining({
- lastAccessedOn: fifteenMinutesLater,
- frequency: 2,
- }),
- ]);
- });
- });
-
- it('should always update project metadata', () => {
- const oldProject = {
- ...TEST_PROJECT,
- };
-
- const newProject = {
- ...oldProject,
- name: 'New Name',
- avatarUrl: 'new/avatar.png',
- namespace: 'New / Namespace',
- webUrl: 'http://localhost/new/web/url',
- };
-
- createComponent({ currentItem: oldProject });
- wrapper.destroy();
- expect(getStoredProjects()).toEqual([expect.objectContaining(oldProject)]);
-
- createComponent({ currentItem: newProject });
- wrapper.destroy();
-
- expect(getStoredProjects()).toEqual([expect.objectContaining(newProject)]);
- });
-
- it('should not add more than 20 projects in store', () => {
- for (let id = 0; id < FREQUENT_ITEMS.MAX_COUNT + 10; id += 1) {
- const project = {
- ...TEST_PROJECT,
- id,
- };
- createComponent({ currentItem: project });
- wrapper.destroy();
- }
-
- expect(getStoredProjects().length).toBe(FREQUENT_ITEMS.MAX_COUNT);
- });
- });
-});
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
deleted file mode 100644
index 55d20ad603c..00000000000
--- a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
+++ /dev/null
@@ -1,161 +0,0 @@
-import { GlIcon } from '@gitlab/ui';
-import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { trimText } from 'helpers/text_helper';
-import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
-import { createStore } from '~/frequent_items/store';
-import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
-import { mockProject } from '../mock_data';
-
-Vue.use(Vuex);
-
-describe('FrequentItemsListItemComponent', () => {
- const TEST_VUEX_MODULE = 'frequentProjects';
- let wrapper;
- let trackingSpy;
- let store;
-
- const findTitle = () => wrapper.findByTestId('frequent-items-item-title');
- const findAvatar = () => wrapper.findComponent(ProjectAvatar);
- const findAllTitles = () => wrapper.findAllByTestId('frequent-items-item-title');
- const findNamespace = () => wrapper.findByTestId('frequent-items-item-namespace');
- const findAllFrequentItems = () => wrapper.findAllByTestId('frequent-item-link');
- const findAllNamespace = () => wrapper.findAllByTestId('frequent-items-item-namespace');
- const findAllAvatars = () => wrapper.findAllComponents(ProjectAvatar);
- const findAllMetadataContainers = () =>
- wrapper.findAllByTestId('frequent-items-item-metadata-container');
- const findRemoveButton = () => wrapper.findByTestId('item-remove');
-
- const toggleItemsListEditablity = async () => {
- store.dispatch(`${TEST_VUEX_MODULE}/toggleItemsListEditablity`);
-
- await nextTick();
- };
-
- const createComponent = (props = {}) => {
- wrapper = shallowMountExtended(frequentItemsListItemComponent, {
- store,
- propsData: {
- itemId: mockProject.id,
- itemName: mockProject.name,
- namespace: mockProject.namespace,
- webUrl: mockProject.webUrl,
- avatarUrl: mockProject.avatarUrl,
- ...props,
- },
- provide: {
- vuexModule: TEST_VUEX_MODULE,
- },
- });
- };
-
- beforeEach(() => {
- store = createStore();
- trackingSpy = mockTracking('_category_', document, jest.spyOn);
- trackingSpy.mockImplementation(() => {});
- });
-
- afterEach(() => {
- unmockTracking();
- });
-
- describe('computed', () => {
- describe('highlightedItemName', () => {
- it('should enclose part of project name in <b> & </b> which matches with `matcher` prop', () => {
- createComponent({ matcher: 'lab' });
-
- expect(findTitle().element.innerHTML).toContain('<b>L</b><b>a</b><b>b</b>');
- });
-
- it('should return project name as it is if `matcher` is not available', () => {
- createComponent({ matcher: null });
-
- expect(trimText(findTitle().text())).toBe(mockProject.name);
- });
- });
-
- describe('truncatedNamespace', () => {
- it('should truncate project name from namespace string', () => {
- createComponent({ namespace: 'platform / nokia-3310' });
-
- expect(trimText(findNamespace().text())).toBe('platform');
- });
-
- it('should truncate namespace string from the middle if it includes more than two groups in path', () => {
- createComponent({
- namespace: 'platform / hardware / broadcom / Wifi Group / Mobile Chipset / nokia-3310',
- });
-
- expect(trimText(findNamespace().text())).toBe('platform / ... / Mobile Chipset');
- });
- });
- });
-
- describe('template', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders avatar', () => {
- expect(findAvatar().exists()).toBe(true);
- });
-
- it('renders root element with the right classes', () => {
- expect(wrapper.classes('frequent-items-list-item-container')).toBe(true);
- });
-
- it.each`
- name | selector | expected
- ${'list item'} | ${findAllFrequentItems} | ${1}
- ${'avatar container'} | ${findAllAvatars} | ${1}
- ${'metadata container'} | ${findAllMetadataContainers} | ${1}
- ${'title'} | ${findAllTitles} | ${1}
- ${'namespace'} | ${findAllNamespace} | ${1}
- `('should render $expected $name', ({ selector, expected }) => {
- expect(selector()).toHaveLength(expected);
- });
-
- it('renders remove button within item when `isItemsListEditable` is true', async () => {
- await toggleItemsListEditablity();
-
- const removeButton = findRemoveButton();
- expect(removeButton.exists()).toBe(true);
- expect(removeButton.attributes('title')).toBe('Remove');
- expect(removeButton.findComponent(GlIcon).props('name')).toBe('close');
- });
-
- it('dispatches action `removeFrequentItem` when remove button is clicked', async () => {
- await toggleItemsListEditablity();
-
- jest.spyOn(store, 'dispatch');
-
- const removeButton = findRemoveButton();
- removeButton.vm.$emit(
- 'click',
- { stopPropagation: jest.fn(), preventDefault: jest.fn() },
- mockProject.id,
- );
-
- await nextTick();
-
- expect(store.dispatch).toHaveBeenCalledWith(
- `${TEST_VUEX_MODULE}/removeFrequentItem`,
- mockProject.id,
- );
- });
-
- it('tracks when item link is clicked', () => {
- const link = wrapper.findByTestId('frequent-item-link');
-
- link.vm.$emit('click');
-
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_link', {
- label: 'projects_dropdown_frequent_items_list_item',
- property: 'navigation_top',
- });
- });
- });
-});
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
deleted file mode 100644
index 8055b7a9c13..00000000000
--- a/spec/frontend/frequent_items/components/frequent_items_list_spec.js
+++ /dev/null
@@ -1,121 +0,0 @@
-import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import frequentItemsListComponent from '~/frequent_items/components/frequent_items_list.vue';
-import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
-import { createStore } from '~/frequent_items/store';
-import { mockFrequentProjects } from '../mock_data';
-
-Vue.use(Vuex);
-
-describe('FrequentItemsListComponent', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = mountExtended(frequentItemsListComponent, {
- store: createStore(),
- propsData: {
- namespace: 'projects',
- items: mockFrequentProjects,
- isFetchFailed: false,
- isItemRemovalFailed: false,
- hasSearchQuery: false,
- matcher: 'lab',
- ...props,
- },
- provide: {
- vuexModule: 'frequentProjects',
- },
- });
- };
-
- describe('computed', () => {
- describe('isListEmpty', () => {
- it('should return `true` or `false` representing whether if `items` is empty or not with projects', async () => {
- createComponent({
- items: [],
- });
-
- expect(wrapper.vm.isListEmpty).toBe(true);
-
- wrapper.setProps({
- items: mockFrequentProjects,
- });
- await nextTick();
-
- expect(wrapper.vm.isListEmpty).toBe(false);
- });
- });
-
- describe('fetched item messages', () => {
- it('should show default empty list message', () => {
- createComponent({
- items: [],
- });
-
- expect(wrapper.findByTestId('frequent-items-list-empty').text()).toContain(
- 'Projects you visit often will appear here',
- );
- });
-
- it.each`
- isFetchFailed | isItemRemovalFailed
- ${true} | ${false}
- ${false} | ${true}
- `(
- 'should show failure message when `isFetchFailed` is $isFetchFailed or `isItemRemovalFailed` is $isItemRemovalFailed',
- ({ isFetchFailed, isItemRemovalFailed }) => {
- createComponent({
- items: [],
- isFetchFailed,
- isItemRemovalFailed,
- });
-
- expect(wrapper.findByTestId('frequent-items-list-empty').text()).toContain(
- 'This feature requires browser localStorage support',
- );
- },
- );
- });
-
- describe('searched item messages', () => {
- it('should return appropriate empty list message based on value of `searchFailed` prop with projects', async () => {
- createComponent({
- hasSearchQuery: true,
- isFetchFailed: true,
- });
-
- expect(wrapper.vm.listEmptyMessage).toBe('Something went wrong on our end.');
-
- wrapper.setProps({
- isFetchFailed: false,
- });
- await nextTick();
-
- expect(wrapper.vm.listEmptyMessage).toBe('Sorry, no projects matched your search');
- });
- });
- });
-
- describe('template', () => {
- it('should render component element with list of projects', async () => {
- createComponent();
-
- await nextTick();
- expect(wrapper.classes('frequent-items-list-container')).toBe(true);
- expect(wrapper.findAllByTestId('frequent-items-list')).toHaveLength(1);
- expect(wrapper.findAllComponents(frequentItemsListItemComponent)).toHaveLength(5);
- });
-
- it('should render component element with empty message', async () => {
- createComponent({
- items: [],
- });
-
- await nextTick();
- expect(wrapper.vm.$el.querySelectorAll('li.section-empty')).toHaveLength(1);
- expect(wrapper.findAllComponents(frequentItemsListItemComponent)).toHaveLength(0);
- });
- });
-});
diff --git a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
deleted file mode 100644
index d6aa0f4e221..00000000000
--- a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
+++ /dev/null
@@ -1,74 +0,0 @@
-import { GlSearchBoxByType } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import searchComponent from '~/frequent_items/components/frequent_items_search_input.vue';
-import { createStore } from '~/frequent_items/store';
-
-Vue.use(Vuex);
-
-describe('FrequentItemsSearchInputComponent', () => {
- let wrapper;
- let trackingSpy;
- let vm;
- let store;
-
- const createComponent = (namespace = 'projects') =>
- shallowMount(searchComponent, {
- store,
- propsData: { namespace },
- provide: {
- vuexModule: 'frequentProjects',
- },
- });
-
- const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
-
- beforeEach(() => {
- store = createStore();
- jest.spyOn(store, 'dispatch').mockImplementation(() => {});
-
- trackingSpy = mockTracking('_category_', document, jest.spyOn);
- trackingSpy.mockImplementation(() => {});
-
- wrapper = createComponent();
-
- ({ vm } = wrapper);
- });
-
- afterEach(() => {
- unmockTracking();
- vm.$destroy();
- });
-
- describe('template', () => {
- it('should render component element', () => {
- expect(wrapper.classes()).toContain('search-input-container');
- expect(findSearchBoxByType().exists()).toBe(true);
- expect(findSearchBoxByType().attributes()).toMatchObject({
- placeholder: 'Search your projects',
- });
- });
- });
-
- describe('tracking', () => {
- it('tracks when search query is entered', async () => {
- expect(trackingSpy).not.toHaveBeenCalled();
- expect(store.dispatch).not.toHaveBeenCalled();
-
- const value = 'my project';
-
- findSearchBoxByType().vm.$emit('input', value);
-
- await nextTick();
-
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'type_search_query', {
- label: 'projects_dropdown_frequent_items_search_input',
- property: 'navigation_top',
- });
- expect(store.dispatch).toHaveBeenCalledWith('frequentProjects/setSearchQuery', value);
- });
- });
-});
diff --git a/spec/frontend/frequent_items/mock_data.js b/spec/frontend/frequent_items/mock_data.js
deleted file mode 100644
index 6563daee6c3..00000000000
--- a/spec/frontend/frequent_items/mock_data.js
+++ /dev/null
@@ -1,169 +0,0 @@
-import { TEST_HOST } from 'helpers/test_constants';
-
-export const currentSession = {
- groups: {
- username: 'root',
- storageKey: 'root/frequent-groups',
- apiVersion: 'v4',
- group: {
- id: 1,
- name: 'dummy-group',
- full_name: 'dummy-parent-group',
- webUrl: `${TEST_HOST}/dummy-group`,
- avatarUrl: null,
- lastAccessedOn: Date.now(),
- },
- },
- projects: {
- username: 'root',
- storageKey: 'root/frequent-projects',
- apiVersion: 'v4',
- project: {
- id: 1,
- name: 'dummy-project',
- namespace: 'SampleGroup / Dummy-Project',
- webUrl: `${TEST_HOST}/samplegroup/dummy-project`,
- avatarUrl: null,
- lastAccessedOn: Date.now(),
- },
- },
-};
-
-export const mockNamespace = 'projects';
-export const mockStorageKey = 'test-user/frequent-projects';
-
-export const mockGroup = {
- id: 1,
- name: 'Sub451',
- namespace: 'Commit451 / Sub451',
- webUrl: `${TEST_HOST}/Commit451/Sub451`,
- avatarUrl: null,
-};
-
-export const mockRawGroup = {
- id: 1,
- name: 'Sub451',
- full_name: 'Commit451 / Sub451',
- web_url: `${TEST_HOST}/Commit451/Sub451`,
- avatar_url: null,
-};
-
-export const mockFrequentGroups = [
- {
- id: 3,
- name: 'Subgroup451',
- full_name: 'Commit451 / Subgroup451',
- webUrl: '/Commit451/Subgroup451',
- avatarUrl: null,
- frequency: 7,
- lastAccessedOn: 1497979281815,
- },
- {
- id: 1,
- name: 'Commit451',
- full_name: 'Commit451',
- webUrl: '/Commit451',
- avatarUrl: null,
- frequency: 3,
- lastAccessedOn: 1497979281815,
- },
-];
-
-export const mockSearchedGroups = { data: [mockRawGroup] };
-export const mockProcessedSearchedGroups = [mockGroup];
-
-export const mockProject = {
- id: 1,
- name: 'GitLab Community Edition',
- namespace: 'gitlab-org / gitlab-ce',
- webUrl: `${TEST_HOST}/gitlab-org/gitlab-foss`,
- avatarUrl: null,
-};
-
-export const mockRawProject = {
- id: 1,
- name: 'GitLab Community Edition',
- name_with_namespace: 'gitlab-org / gitlab-ce',
- web_url: `${TEST_HOST}/gitlab-org/gitlab-foss`,
- avatar_url: null,
-};
-
-export const mockFrequentProjects = [
- {
- id: 1,
- name: 'GitLab Community Edition',
- namespace: 'gitlab-org / gitlab-ce',
- webUrl: `${TEST_HOST}/gitlab-org/gitlab-foss`,
- avatarUrl: null,
- frequency: 1,
- lastAccessedOn: Date.now(),
- },
- {
- id: 2,
- name: 'GitLab CI',
- namespace: 'gitlab-org / gitlab-ci',
- webUrl: `${TEST_HOST}/gitlab-org/gitlab-ci`,
- avatarUrl: null,
- frequency: 9,
- lastAccessedOn: Date.now(),
- },
- {
- id: 3,
- name: 'Typeahead.Js',
- namespace: 'twitter / typeahead-js',
- webUrl: `${TEST_HOST}/twitter/typeahead-js`,
- avatarUrl: '/uploads/-/system/project/avatar/7/TWBS.png',
- frequency: 2,
- lastAccessedOn: Date.now(),
- },
- {
- id: 4,
- name: 'Intel',
- namespace: 'platform / hardware / bsp / intel',
- webUrl: `${TEST_HOST}/platform/hardware/bsp/intel`,
- avatarUrl: null,
- frequency: 3,
- lastAccessedOn: Date.now(),
- },
- {
- id: 5,
- name: 'v4.4',
- namespace: 'platform / hardware / bsp / kernel / common / v4.4',
- webUrl: `${TEST_HOST}/platform/hardware/bsp/kernel/common/v4.4`,
- avatarUrl: null,
- frequency: 8,
- lastAccessedOn: Date.now(),
- },
-];
-
-export const mockSearchedProjects = { data: [mockRawProject] };
-export const mockProcessedSearchedProjects = [mockProject];
-
-export const unsortedFrequentItems = [
- { id: 1, frequency: 12, lastAccessedOn: 1491400843391 },
- { id: 2, frequency: 14, lastAccessedOn: 1488240890738 },
- { id: 3, frequency: 44, lastAccessedOn: 1497675908472 },
- { id: 4, frequency: 8, lastAccessedOn: 1497979281815 },
- { id: 5, frequency: 34, lastAccessedOn: 1488089211943 },
- { id: 6, frequency: 14, lastAccessedOn: 1493517292488 },
- { id: 7, frequency: 42, lastAccessedOn: 1486815299875 },
- { id: 8, frequency: 33, lastAccessedOn: 1500762279114 },
- { id: 10, frequency: 46, lastAccessedOn: 1483251641543 },
-];
-
-/**
- * This const has a specific order which tests authenticity
- * of `getTopFrequentItems` method so
- * DO NOT change order of items in this const.
- */
-export const sortedFrequentItems = [
- { id: 10, frequency: 46, lastAccessedOn: 1483251641543 },
- { id: 3, frequency: 44, lastAccessedOn: 1497675908472 },
- { id: 7, frequency: 42, lastAccessedOn: 1486815299875 },
- { id: 5, frequency: 34, lastAccessedOn: 1488089211943 },
- { id: 8, frequency: 33, lastAccessedOn: 1500762279114 },
- { id: 6, frequency: 14, lastAccessedOn: 1493517292488 },
- { id: 2, frequency: 14, lastAccessedOn: 1488240890738 },
- { id: 1, frequency: 12, lastAccessedOn: 1491400843391 },
- { id: 4, frequency: 8, lastAccessedOn: 1497979281815 },
-];
diff --git a/spec/frontend/frequent_items/store/actions_spec.js b/spec/frontend/frequent_items/store/actions_spec.js
deleted file mode 100644
index 2feb488da2c..00000000000
--- a/spec/frontend/frequent_items/store/actions_spec.js
+++ /dev/null
@@ -1,304 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import testAction from 'helpers/vuex_action_helper';
-import * as actions from '~/frequent_items/store/actions';
-import * as types from '~/frequent_items/store/mutation_types';
-import state from '~/frequent_items/store/state';
-import AccessorUtilities from '~/lib/utils/accessor';
-import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import {
- mockNamespace,
- mockStorageKey,
- mockFrequentProjects,
- mockSearchedProjects,
-} from '../mock_data';
-
-describe('Frequent Items Dropdown Store Actions', () => {
- useLocalStorageSpy();
- let mockedState;
- let mock;
-
- beforeEach(() => {
- mockedState = state();
- mock = new MockAdapter(axios);
-
- mockedState.namespace = mockNamespace;
- mockedState.storageKey = mockStorageKey;
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- describe('setNamespace', () => {
- it('should set namespace', () => {
- return testAction(
- actions.setNamespace,
- mockNamespace,
- mockedState,
- [{ type: types.SET_NAMESPACE, payload: mockNamespace }],
- [],
- );
- });
- });
-
- describe('setStorageKey', () => {
- it('should set storage key', () => {
- return testAction(
- actions.setStorageKey,
- mockStorageKey,
- mockedState,
- [{ type: types.SET_STORAGE_KEY, payload: mockStorageKey }],
- [],
- );
- });
- });
-
- describe('toggleItemsListEditablity', () => {
- it('should toggle items list editablity', () => {
- return testAction(
- actions.toggleItemsListEditablity,
- null,
- mockedState,
- [{ type: types.TOGGLE_ITEMS_LIST_EDITABILITY }],
- [],
- );
- });
- });
-
- describe('requestFrequentItems', () => {
- it('should request frequent items', () => {
- return testAction(
- actions.requestFrequentItems,
- null,
- mockedState,
- [{ type: types.REQUEST_FREQUENT_ITEMS }],
- [],
- );
- });
- });
-
- describe('receiveFrequentItemsSuccess', () => {
- it('should set frequent items', () => {
- return testAction(
- actions.receiveFrequentItemsSuccess,
- mockFrequentProjects,
- mockedState,
- [{ type: types.RECEIVE_FREQUENT_ITEMS_SUCCESS, payload: mockFrequentProjects }],
- [],
- );
- });
- });
-
- describe('receiveFrequentItemsError', () => {
- it('should set frequent items error state', () => {
- return testAction(
- actions.receiveFrequentItemsError,
- null,
- mockedState,
- [{ type: types.RECEIVE_FREQUENT_ITEMS_ERROR }],
- [],
- );
- });
- });
-
- describe('fetchFrequentItems', () => {
- it('should dispatch `receiveFrequentItemsSuccess`', () => {
- mockedState.namespace = mockNamespace;
- mockedState.storageKey = mockStorageKey;
-
- return testAction(
- actions.fetchFrequentItems,
- null,
- mockedState,
- [],
- [{ type: 'requestFrequentItems' }, { type: 'receiveFrequentItemsSuccess', payload: [] }],
- );
- });
-
- it('should dispatch `receiveFrequentItemsError`', () => {
- jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
- mockedState.namespace = mockNamespace;
- mockedState.storageKey = mockStorageKey;
-
- return testAction(
- actions.fetchFrequentItems,
- null,
- mockedState,
- [],
- [{ type: 'requestFrequentItems' }, { type: 'receiveFrequentItemsError' }],
- );
- });
- });
-
- describe('requestSearchedItems', () => {
- it('should request searched items', () => {
- return testAction(
- actions.requestSearchedItems,
- null,
- mockedState,
- [{ type: types.REQUEST_SEARCHED_ITEMS }],
- [],
- );
- });
- });
-
- describe('receiveSearchedItemsSuccess', () => {
- it('should set searched items', () => {
- return testAction(
- actions.receiveSearchedItemsSuccess,
- mockSearchedProjects,
- mockedState,
- [{ type: types.RECEIVE_SEARCHED_ITEMS_SUCCESS, payload: mockSearchedProjects }],
- [],
- );
- });
- });
-
- describe('receiveSearchedItemsError', () => {
- it('should set searched items error state', () => {
- return testAction(
- actions.receiveSearchedItemsError,
- null,
- mockedState,
- [{ type: types.RECEIVE_SEARCHED_ITEMS_ERROR }],
- [],
- );
- });
- });
-
- describe('fetchSearchedItems', () => {
- beforeEach(() => {
- gon.api_version = 'v4';
- });
-
- it('should dispatch `receiveSearchedItemsSuccess`', () => {
- mock
- .onGet(/\/api\/v4\/projects.json(.*)$/)
- .replyOnce(HTTP_STATUS_OK, mockSearchedProjects, {});
-
- return testAction(
- actions.fetchSearchedItems,
- null,
- mockedState,
- [],
- [
- { type: 'requestSearchedItems' },
- {
- type: 'receiveSearchedItemsSuccess',
- payload: { data: mockSearchedProjects, headers: {} },
- },
- ],
- );
- });
-
- it('should dispatch `receiveSearchedItemsError`', () => {
- gon.api_version = 'v4';
- mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
-
- return testAction(
- actions.fetchSearchedItems,
- null,
- mockedState,
- [],
- [{ type: 'requestSearchedItems' }, { type: 'receiveSearchedItemsError' }],
- );
- });
- });
-
- describe('setSearchQuery', () => {
- it('should commit query and dispatch `fetchSearchedItems` when query is present', () => {
- return testAction(
- actions.setSearchQuery,
- { query: 'test' },
- mockedState,
- [{ type: types.SET_SEARCH_QUERY, payload: { query: 'test' } }],
- [{ type: 'fetchSearchedItems', payload: { query: 'test' } }],
- );
- });
-
- it('should commit query and dispatch `fetchFrequentItems` when query is empty', () => {
- return testAction(
- actions.setSearchQuery,
- null,
- mockedState,
- [{ type: types.SET_SEARCH_QUERY, payload: null }],
- [{ type: 'fetchFrequentItems' }],
- );
- });
- });
-
- describe('removeFrequentItemSuccess', () => {
- it('should remove frequent item on success', () => {
- return testAction(
- actions.removeFrequentItemSuccess,
- { itemId: 1 },
- mockedState,
- [
- {
- type: types.RECEIVE_REMOVE_FREQUENT_ITEM_SUCCESS,
- payload: { itemId: 1 },
- },
- ],
- [],
- );
- });
- });
-
- describe('removeFrequentItemError', () => {
- it('should should not remove frequent item on failure', () => {
- return testAction(
- actions.removeFrequentItemError,
- null,
- mockedState,
- [{ type: types.RECEIVE_REMOVE_FREQUENT_ITEM_ERROR }],
- [],
- );
- });
- });
-
- describe('removeFrequentItem', () => {
- beforeEach(() => {
- mockedState.items = [...mockFrequentProjects];
- window.localStorage.setItem(mockStorageKey, JSON.stringify(mockFrequentProjects));
- });
-
- it('should remove provided itemId from localStorage', () => {
- jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(true);
-
- actions.removeFrequentItem(
- { commit: jest.fn(), dispatch: jest.fn(), state: mockedState },
- mockFrequentProjects[0].id,
- );
-
- expect(window.localStorage.getItem(mockStorageKey)).toBe(
- JSON.stringify(mockFrequentProjects.slice(1)), // First item was removed
- );
- });
-
- it('should dispatch `removeFrequentItemSuccess` on localStorage update success', () => {
- jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(true);
-
- return testAction(
- actions.removeFrequentItem,
- mockFrequentProjects[0].id,
- mockedState,
- [],
- [{ type: 'removeFrequentItemSuccess', payload: mockFrequentProjects[0].id }],
- );
- });
-
- it('should dispatch `removeFrequentItemError` on localStorage update failure', () => {
- jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
-
- return testAction(
- actions.removeFrequentItem,
- mockFrequentProjects[0].id,
- mockedState,
- [],
- [{ type: 'removeFrequentItemError' }],
- );
- });
- });
-});
diff --git a/spec/frontend/frequent_items/store/getters_spec.js b/spec/frontend/frequent_items/store/getters_spec.js
deleted file mode 100644
index 97732cd95fc..00000000000
--- a/spec/frontend/frequent_items/store/getters_spec.js
+++ /dev/null
@@ -1,24 +0,0 @@
-import * as getters from '~/frequent_items/store/getters';
-import state from '~/frequent_items/store/state';
-
-describe('Frequent Items Dropdown Store Getters', () => {
- let mockedState;
-
- beforeEach(() => {
- mockedState = state();
- });
-
- describe('hasSearchQuery', () => {
- it('should return `true` when search query is present', () => {
- mockedState.searchQuery = 'test';
-
- expect(getters.hasSearchQuery(mockedState)).toBe(true);
- });
-
- it('should return `false` when search query is empty', () => {
- mockedState.searchQuery = '';
-
- expect(getters.hasSearchQuery(mockedState)).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/frequent_items/store/mutations_spec.js b/spec/frontend/frequent_items/store/mutations_spec.js
deleted file mode 100644
index 1e1878c3377..00000000000
--- a/spec/frontend/frequent_items/store/mutations_spec.js
+++ /dev/null
@@ -1,152 +0,0 @@
-import * as types from '~/frequent_items/store/mutation_types';
-import mutations from '~/frequent_items/store/mutations';
-import state from '~/frequent_items/store/state';
-import {
- mockNamespace,
- mockStorageKey,
- mockFrequentProjects,
- mockSearchedProjects,
- mockProcessedSearchedProjects,
- mockSearchedGroups,
- mockProcessedSearchedGroups,
-} from '../mock_data';
-
-describe('Frequent Items dropdown mutations', () => {
- let stateCopy;
-
- beforeEach(() => {
- stateCopy = state();
- });
-
- describe('SET_NAMESPACE', () => {
- it('should set namespace', () => {
- mutations[types.SET_NAMESPACE](stateCopy, mockNamespace);
-
- expect(stateCopy.namespace).toEqual(mockNamespace);
- });
- });
-
- describe('SET_STORAGE_KEY', () => {
- it('should set storage key', () => {
- mutations[types.SET_STORAGE_KEY](stateCopy, mockStorageKey);
-
- expect(stateCopy.storageKey).toEqual(mockStorageKey);
- });
- });
-
- describe('SET_SEARCH_QUERY', () => {
- it('should set search query', () => {
- const searchQuery = 'gitlab-ce';
-
- mutations[types.SET_SEARCH_QUERY](stateCopy, searchQuery);
-
- expect(stateCopy.searchQuery).toEqual(searchQuery);
- });
- });
-
- describe('TOGGLE_ITEMS_LIST_EDITABILITY', () => {
- it('should toggle items list editablity', () => {
- mutations[types.TOGGLE_ITEMS_LIST_EDITABILITY](stateCopy);
-
- expect(stateCopy.isItemsListEditable).toEqual(true);
-
- mutations[types.TOGGLE_ITEMS_LIST_EDITABILITY](stateCopy);
-
- expect(stateCopy.isItemsListEditable).toEqual(false);
- });
- });
-
- describe('REQUEST_FREQUENT_ITEMS', () => {
- it('should set view states when requesting frequent items', () => {
- mutations[types.REQUEST_FREQUENT_ITEMS](stateCopy);
-
- expect(stateCopy.isLoadingItems).toEqual(true);
- expect(stateCopy.hasSearchQuery).toEqual(false);
- });
- });
-
- describe('RECEIVE_FREQUENT_ITEMS_SUCCESS', () => {
- it('should set view states when receiving frequent items', () => {
- mutations[types.RECEIVE_FREQUENT_ITEMS_SUCCESS](stateCopy, mockFrequentProjects);
-
- expect(stateCopy.items).toEqual(mockFrequentProjects);
- expect(stateCopy.isLoadingItems).toEqual(false);
- expect(stateCopy.hasSearchQuery).toEqual(false);
- expect(stateCopy.isFetchFailed).toEqual(false);
- });
- });
-
- describe('RECEIVE_FREQUENT_ITEMS_ERROR', () => {
- it('should set items and view states when error occurs retrieving frequent items', () => {
- mutations[types.RECEIVE_FREQUENT_ITEMS_ERROR](stateCopy);
-
- expect(stateCopy.items).toEqual([]);
- expect(stateCopy.isLoadingItems).toEqual(false);
- expect(stateCopy.hasSearchQuery).toEqual(false);
- expect(stateCopy.isFetchFailed).toEqual(true);
- });
- });
-
- describe('REQUEST_SEARCHED_ITEMS', () => {
- it('should set view states when requesting searched items', () => {
- mutations[types.REQUEST_SEARCHED_ITEMS](stateCopy);
-
- expect(stateCopy.isLoadingItems).toEqual(true);
- expect(stateCopy.hasSearchQuery).toEqual(true);
- });
- });
-
- describe('RECEIVE_SEARCHED_ITEMS_SUCCESS', () => {
- it('should set items and view states when receiving searched items', () => {
- mutations[types.RECEIVE_SEARCHED_ITEMS_SUCCESS](stateCopy, mockSearchedProjects);
-
- expect(stateCopy.items).toEqual(mockProcessedSearchedProjects);
- expect(stateCopy.isLoadingItems).toEqual(false);
- expect(stateCopy.hasSearchQuery).toEqual(true);
- expect(stateCopy.isFetchFailed).toEqual(false);
- });
-
- it('should also handle the different `full_name` key for namespace in groups payload', () => {
- mutations[types.RECEIVE_SEARCHED_ITEMS_SUCCESS](stateCopy, mockSearchedGroups);
-
- expect(stateCopy.items).toEqual(mockProcessedSearchedGroups);
- expect(stateCopy.isLoadingItems).toEqual(false);
- expect(stateCopy.hasSearchQuery).toEqual(true);
- expect(stateCopy.isFetchFailed).toEqual(false);
- });
- });
-
- describe('RECEIVE_SEARCHED_ITEMS_ERROR', () => {
- it('should set view states when error occurs retrieving searched items', () => {
- mutations[types.RECEIVE_SEARCHED_ITEMS_ERROR](stateCopy);
-
- expect(stateCopy.items).toEqual([]);
- expect(stateCopy.isLoadingItems).toEqual(false);
- expect(stateCopy.hasSearchQuery).toEqual(true);
- expect(stateCopy.isFetchFailed).toEqual(true);
- });
- });
-
- describe('RECEIVE_REMOVE_FREQUENT_ITEM_SUCCESS', () => {
- it('should remove item with provided itemId from the items', () => {
- stateCopy.isItemRemovalFailed = true;
- stateCopy.items = mockFrequentProjects;
-
- mutations[types.RECEIVE_REMOVE_FREQUENT_ITEM_SUCCESS](stateCopy, mockFrequentProjects[0].id);
-
- expect(stateCopy.items).toHaveLength(mockFrequentProjects.length - 1);
- expect(stateCopy.items).toEqual([...mockFrequentProjects.slice(1)]);
- expect(stateCopy.isItemRemovalFailed).toBe(false);
- });
- });
-
- describe('RECEIVE_REMOVE_FREQUENT_ITEM_ERROR', () => {
- it('should remove item with provided itemId from the items', () => {
- stateCopy.isItemRemovalFailed = false;
-
- mutations[types.RECEIVE_REMOVE_FREQUENT_ITEM_ERROR](stateCopy);
-
- expect(stateCopy.isItemRemovalFailed).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/frequent_items/utils_spec.js b/spec/frontend/frequent_items/utils_spec.js
deleted file mode 100644
index 8d4c89bd48f..00000000000
--- a/spec/frontend/frequent_items/utils_spec.js
+++ /dev/null
@@ -1,131 +0,0 @@
-import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
-import { FIFTEEN_MINUTES_IN_MS, FREQUENT_ITEMS } from '~/frequent_items/constants';
-import {
- isMobile,
- getTopFrequentItems,
- updateExistingFrequentItem,
- sanitizeItem,
-} from '~/frequent_items/utils';
-import { mockProject, unsortedFrequentItems, sortedFrequentItems } from './mock_data';
-
-describe('Frequent Items utils spec', () => {
- describe('isMobile', () => {
- it('returns true when the screen is medium', () => {
- jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('md');
-
- expect(isMobile()).toBe(true);
- });
-
- it('returns true when the screen is small', () => {
- jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('sm');
-
- expect(isMobile()).toBe(true);
- });
-
- it('returns true when the screen is extra-small', () => {
- jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('xs');
-
- expect(isMobile()).toBe(true);
- });
-
- it('returns false when the screen is larger than medium', () => {
- jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('lg');
-
- expect(isMobile()).toBe(false);
- });
- });
-
- describe('getTopFrequentItems', () => {
- it('returns empty array if no items provided', () => {
- const result = getTopFrequentItems();
-
- expect(result.length).toBe(0);
- });
-
- it('returns correct amount of items for mobile', () => {
- jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('md');
- const result = getTopFrequentItems(unsortedFrequentItems);
-
- expect(result.length).toBe(FREQUENT_ITEMS.LIST_COUNT_MOBILE);
- });
-
- it('returns correct amount of items for desktop', () => {
- jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('xl');
- const result = getTopFrequentItems(unsortedFrequentItems);
-
- expect(result.length).toBe(FREQUENT_ITEMS.LIST_COUNT_DESKTOP);
- });
-
- it('sorts frequent items in order of frequency and lastAccessedOn', () => {
- jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('xl');
- const result = getTopFrequentItems(unsortedFrequentItems);
- const expectedResult = sortedFrequentItems.slice(0, FREQUENT_ITEMS.LIST_COUNT_DESKTOP);
-
- expect(result).toEqual(expectedResult);
- });
- });
-
- describe('updateExistingFrequentItem', () => {
- const LAST_ACCESSED = 1497979281815;
- const WITHIN_FIFTEEN_MINUTES = LAST_ACCESSED + FIFTEEN_MINUTES_IN_MS;
- const OVER_FIFTEEN_MINUTES = WITHIN_FIFTEEN_MINUTES + 1;
- const EXISTING_ITEM = Object.freeze({
- ...mockProject,
- frequency: 1,
- lastAccessedOn: 1497979281815,
- });
-
- it.each`
- desc | existingProps | newProps | expected
- ${'updates item if accessed over 15 minutes ago'} | ${{}} | ${{ lastAccessedOn: OVER_FIFTEEN_MINUTES }} | ${{ lastAccessedOn: Date.now(), frequency: 2 }}
- ${'does not update is accessed with 15 minutes'} | ${{}} | ${{ lastAccessedOn: WITHIN_FIFTEEN_MINUTES }} | ${{ lastAccessedOn: EXISTING_ITEM.lastAccessedOn, frequency: 1 }}
- ${'updates if lastAccessedOn not found'} | ${{ lastAccessedOn: undefined }} | ${{ lastAccessedOn: WITHIN_FIFTEEN_MINUTES }} | ${{ lastAccessedOn: Date.now(), frequency: 2 }}
- `('$desc', ({ existingProps, newProps, expected }) => {
- const newItem = {
- ...EXISTING_ITEM,
- ...newProps,
- };
- const existingItem = {
- ...EXISTING_ITEM,
- ...existingProps,
- };
-
- const result = updateExistingFrequentItem(existingItem, newItem);
-
- expect(result).toEqual({
- ...newItem,
- ...expected,
- });
- });
- });
-
- describe('sanitizeItem', () => {
- it('strips HTML tags for name and namespace', () => {
- const input = {
- name: '<br><b>test</b>',
- namespace: '<br>test',
- id: 1,
- };
-
- expect(sanitizeItem(input)).toEqual({ name: 'test', namespace: 'test', id: 1 });
- });
-
- it("skips `name` key if it doesn't exist on the item", () => {
- const input = {
- namespace: '<br>test',
- id: 1,
- };
-
- expect(sanitizeItem(input)).toEqual({ namespace: 'test', id: 1 });
- });
-
- it("skips `namespace` key if it doesn't exist on the item", () => {
- const input = {
- name: '<br><b>test</b>',
- id: 1,
- };
-
- expect(sanitizeItem(input)).toEqual({ name: 'test', id: 1 });
- });
- });
-});
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index da465552db3..2d7841771a1 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -45,6 +45,16 @@ describe('GfmAutoComplete', () => {
let sorterValue;
let filterValue;
+ const triggerDropdown = ($textarea, text) => {
+ $textarea
+ .trigger('focus')
+ .val($textarea.val() + text)
+ .caret('pos', -1);
+ $textarea.trigger('keyup');
+
+ jest.runOnlyPendingTimers();
+ };
+
describe('DefaultOptions.filter', () => {
let items;
@@ -537,7 +547,7 @@ describe('GfmAutoComplete', () => {
expect(membersBeforeSave([{ ...mockGroup, avatar_url: null }])).toEqual([
{
username: 'my-group',
- avatarTag: '<div class="avatar rect-avatar center avatar-inline s26">M</div>',
+ avatarTag: '<div class="avatar rect-avatar avatar-inline s24 gl-mr-2">M</div>',
title: 'My Group (2)',
search: 'MyGroup my-group',
icon: '',
@@ -550,7 +560,7 @@ describe('GfmAutoComplete', () => {
{
username: 'my-group',
avatarTag:
- '<img src="./group.jpg" alt="my-group" class="avatar rect-avatar avatar-inline center s26"/>',
+ '<img src="./group.jpg" alt="my-group" class="avatar rect-avatar avatar-inline s24 gl-mr-2"/>',
title: 'My Group (2)',
search: 'MyGroup my-group',
icon: '',
@@ -563,7 +573,7 @@ describe('GfmAutoComplete', () => {
{
username: 'my-group',
avatarTag:
- '<img src="./group.jpg" alt="my-group" class="avatar rect-avatar avatar-inline center s26"/>',
+ '<img src="./group.jpg" alt="my-group" class="avatar rect-avatar avatar-inline s24 gl-mr-2"/>',
title: 'My Group',
search: 'MyGroup my-group',
icon:
@@ -581,7 +591,7 @@ describe('GfmAutoComplete', () => {
{
username: 'my-user',
avatarTag:
- '<img src="./users.jpg" alt="my-user" class="avatar avatar-inline center s26"/>',
+ '<img src="./users.jpg" alt="my-user" class="avatar avatar-inline s24 gl-mr-2"/>',
title: 'My User',
search: 'MyUser my-user',
icon: '',
@@ -786,13 +796,6 @@ describe('GfmAutoComplete', () => {
resetHTMLFixture();
});
- const triggerDropdown = (text) => {
- $textarea.trigger('focus').val(text).caret('pos', -1);
- $textarea.trigger('keyup');
-
- jest.runOnlyPendingTimers();
- };
-
const getDropdownItems = () => {
const dropdown = document.getElementById('at-view-labels');
const items = dropdown.getElementsByTagName('li');
@@ -800,7 +803,7 @@ describe('GfmAutoComplete', () => {
};
const expectLabels = ({ input, output }) => {
- triggerDropdown(input);
+ triggerDropdown($textarea, input);
expect(getDropdownItems()).toEqual(output.map((label) => label.title));
};
@@ -860,6 +863,50 @@ describe('GfmAutoComplete', () => {
});
});
+ describe('submit_review', () => {
+ let autocomplete;
+ let $textarea;
+
+ const getDropdownItems = () => {
+ const dropdown = document.getElementById('at-view-submit_review');
+
+ return dropdown.getElementsByTagName('li');
+ };
+
+ beforeEach(() => {
+ jest
+ .spyOn(AjaxCache, 'retrieve')
+ .mockReturnValue(Promise.resolve([{ name: 'submit_review' }]));
+
+ window.gon = { features: { mrRequestChanges: true } };
+
+ setHTMLFixture('<textarea data-supports-quick-actions="true"></textarea>');
+ autocomplete = new GfmAutoComplete({
+ commands: `${TEST_HOST}/autocomplete_sources/commands`,
+ });
+ $textarea = $('textarea');
+ autocomplete.setup($textarea, {});
+ });
+
+ afterEach(() => {
+ autocomplete.destroy();
+ resetHTMLFixture();
+ });
+
+ it('renders submit review options', async () => {
+ triggerDropdown($textarea, '/');
+
+ await waitForPromises();
+
+ triggerDropdown($textarea, 'submit_review ');
+
+ expect(getDropdownItems()).toHaveLength(3);
+ expect(getDropdownItems()[0].textContent).toContain('Comment');
+ expect(getDropdownItems()[1].textContent).toContain('Approve');
+ expect(getDropdownItems()[2].textContent).toContain('Request changes');
+ });
+ });
+
describe('emoji', () => {
const mockItem = {
'atwho-at': ':',
@@ -951,13 +998,6 @@ describe('GfmAutoComplete', () => {
resetHTMLFixture();
});
- const triggerDropdown = (text) => {
- $textarea.trigger('focus').val(text).caret('pos', -1);
- $textarea.trigger('keyup');
-
- jest.runOnlyPendingTimers();
- };
-
const getDropdownItems = () => {
const dropdown = document.getElementById('at-view-contacts');
const items = dropdown.getElementsByTagName('li');
@@ -965,7 +1005,7 @@ describe('GfmAutoComplete', () => {
};
const expectContacts = ({ input, output }) => {
- triggerDropdown(input);
+ triggerDropdown($textarea, input);
expect(getDropdownItems()).toEqual(
output.map((contact) => `${contact.first_name} ${contact.last_name} ${contact.email}`),
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index e32c50db8bf..8ac410c87b1 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -1,12 +1,10 @@
import { GlModal, GlLoadingIcon } from '@gitlab/ui';
import AxiosMockAdapter from 'axios-mock-adapter';
-import Vue, { nextTick } from 'vue';
+import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import appComponent from '~/groups/components/app.vue';
-import groupFolderComponent from '~/groups/components/group_folder.vue';
-import groupItemComponent from 'jh_else_ce/groups/components/group_item.vue';
import eventHub from '~/groups/event_hub';
import GroupsService from '~/groups/service/groups_service';
import GroupsStore from '~/groups/store/groups_store';
@@ -67,8 +65,6 @@ describe('AppComponent', () => {
beforeEach(async () => {
mock = new AxiosMockAdapter(axios);
mock.onGet('/dashboard/groups.json').reply(HTTP_STATUS_OK, mockGroups);
- Vue.component('GroupFolder', groupFolderComponent);
- Vue.component('GroupItem', groupItemComponent);
setWindowLocation('?filter=foobar');
document.body.innerHTML = `
diff --git a/spec/frontend/groups/components/groups_spec.js b/spec/frontend/groups/components/groups_spec.js
index 3cdbd3e38be..33fd2681766 100644
--- a/spec/frontend/groups/components/groups_spec.js
+++ b/spec/frontend/groups/components/groups_spec.js
@@ -1,9 +1,7 @@
import Vue from 'vue';
import { GlEmptyState } from '@gitlab/ui';
-
-import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMount } from '@vue/test-utils';
import GroupFolderComponent from '~/groups/components/group_folder.vue';
-import GroupItemComponent from 'jh_else_ce/groups/components/group_item.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import GroupsComponent from '~/groups/components/groups.vue';
import eventHub from '~/groups/event_hub';
@@ -19,7 +17,7 @@ describe('GroupsComponent', () => {
};
const createComponent = ({ propsData } = {}) => {
- wrapper = mountExtended(GroupsComponent, {
+ wrapper = shallowMount(GroupsComponent, {
propsData: {
...defaultPropsData,
...propsData,
@@ -32,11 +30,6 @@ describe('GroupsComponent', () => {
const findPaginationLinks = () => wrapper.findComponent(PaginationLinks);
- beforeEach(() => {
- Vue.component('GroupFolder', GroupFolderComponent);
- Vue.component('GroupItem', GroupItemComponent);
- });
-
describe('methods', () => {
describe('change', () => {
it('should emit `fetchPage` event when page is changed via pagination', () => {
@@ -57,6 +50,8 @@ describe('GroupsComponent', () => {
});
describe('template', () => {
+ Vue.component('GroupFolder', GroupFolderComponent);
+
it('should render component template correctly', () => {
createComponent();
diff --git a/spec/frontend/groups/service/archived_projects_service_spec.js b/spec/frontend/groups/service/archived_projects_service_spec.js
index 6bc46e4799c..988fb5553ba 100644
--- a/spec/frontend/groups/service/archived_projects_service_spec.js
+++ b/spec/frontend/groups/service/archived_projects_service_spec.js
@@ -30,7 +30,7 @@ describe('ArchivedProjectsService', () => {
markdown_description: project.description_html,
visibility: project.visibility,
avatar_url: project.avatar_url,
- relative_path: `/${project.path_with_namespace}`,
+ relative_path: `${gon.relative_url_root}/${project.path_with_namespace}`,
edit_path: null,
leave_path: null,
can_edit: false,
diff --git a/spec/frontend/groups_projects/components/more_actions_dropdown_spec.js b/spec/frontend/groups_projects/components/more_actions_dropdown_spec.js
new file mode 100644
index 00000000000..1bcff8a44be
--- /dev/null
+++ b/spec/frontend/groups_projects/components/more_actions_dropdown_spec.js
@@ -0,0 +1,173 @@
+import { GlDisclosureDropdownItem, GlDisclosureDropdown } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import moreActionsDropdown from '~/groups_projects/components/more_actions_dropdown.vue';
+
+describe('moreActionsDropdown', () => {
+ let wrapper;
+
+ const createComponent = ({ provideData = {}, propsData = {} } = {}) => {
+ wrapper = shallowMountExtended(moreActionsDropdown, {
+ provide: {
+ isGroup: false,
+ id: 1,
+ leavePath: '',
+ leaveConfirmMessage: '',
+ withdrawPath: '',
+ withdrawConfirmMessage: '',
+ requestAccessPath: '',
+ ...provideData,
+ },
+ propsData,
+ stubs: {
+ GlDisclosureDropdownItem,
+ },
+ });
+ };
+
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const showDropdown = () => {
+ findDropdown().vm.$emit('show');
+ };
+
+ describe('copy id', () => {
+ describe('project namespace type', () => {
+ beforeEach(async () => {
+ createComponent({
+ provideData: {
+ id: 22,
+ },
+ });
+ await showDropdown();
+ });
+
+ it('has correct test id `copy-project-id`', () => {
+ expect(wrapper.findByTestId('copy-project-id').exists()).toBe(true);
+ expect(wrapper.findByTestId('copy-group-id').exists()).toBe(false);
+ });
+
+ it('renders copy project id with correct id', () => {
+ expect(wrapper.findByTestId('copy-project-id').text()).toBe('Copy project ID: 22');
+ });
+ });
+
+ describe('group namespace type', () => {
+ beforeEach(async () => {
+ createComponent({
+ provideData: {
+ isGroup: true,
+ id: 11,
+ },
+ });
+ await showDropdown();
+ });
+
+ it('has correct test id `copy-group-id`', () => {
+ expect(wrapper.findByTestId('copy-project-id').exists()).toBe(false);
+ expect(wrapper.findByTestId('copy-group-id').exists()).toBe(true);
+ });
+
+ it('renders copy group id with correct id', () => {
+ expect(wrapper.findByTestId('copy-group-id').text()).toBe('Copy group ID: 11');
+ });
+ });
+ });
+
+ describe('request access', () => {
+ it('does not render request access link', async () => {
+ createComponent();
+ await showDropdown();
+
+ expect(wrapper.findByTestId('request-access-link').exists()).toBe(false);
+ });
+
+ it('renders request access link', async () => {
+ createComponent({
+ provideData: {
+ requestAccessPath: 'http://request.path/path',
+ },
+ });
+ await showDropdown();
+
+ expect(wrapper.findByTestId('request-access-link').text()).toBe('Request Access');
+ expect(wrapper.findByTestId('request-access-link').attributes('href')).toBe(
+ 'http://request.path/path',
+ );
+ });
+ });
+
+ describe('withdraw access', () => {
+ it('does not render withdraw access link', async () => {
+ createComponent();
+ await showDropdown();
+
+ expect(wrapper.findByTestId('withdraw-access-link').exists()).toBe(false);
+ });
+
+ it('renders withdraw access link', async () => {
+ createComponent({
+ provideData: {
+ withdrawPath: 'http://withdraw.path/path',
+ },
+ });
+ await showDropdown();
+
+ expect(wrapper.findByTestId('withdraw-access-link').text()).toBe('Withdraw Access Request');
+ expect(wrapper.findByTestId('withdraw-access-link').attributes('href')).toBe(
+ 'http://withdraw.path/path',
+ );
+ });
+ });
+
+ describe('leave access', () => {
+ it('does not render leave link', async () => {
+ createComponent();
+ await showDropdown();
+
+ expect(wrapper.findByTestId('leave-project-link').exists()).toBe(false);
+ });
+
+ it('renders leave link', async () => {
+ createComponent({
+ provideData: {
+ leavePath: 'http://leave.path/path',
+ },
+ });
+ await showDropdown();
+
+ expect(wrapper.findByTestId('leave-project-link').exists()).toBe(true);
+ expect(wrapper.findByTestId('leave-project-link').text()).toBe('Leave project');
+ expect(wrapper.findByTestId('leave-project-link').attributes('href')).toBe(
+ 'http://leave.path/path',
+ );
+ });
+
+ describe('when `isGroup` is set to `false`', () => {
+ it('use testid `leave-project-link`', async () => {
+ createComponent({
+ provideData: {
+ leavePath: 'http://leave.path/path',
+ },
+ });
+ await showDropdown();
+
+ expect(wrapper.findByTestId('leave-project-link').exists()).toBe(true);
+ expect(wrapper.findByTestId('leave-group-link').exists()).toBe(false);
+ });
+ });
+
+ describe('when `isGroup` is set to `true`', () => {
+ it('use testid `leave-group-link`', async () => {
+ createComponent({
+ provideData: {
+ isGroup: true,
+ leavePath: 'http://leave.path/path',
+ },
+ });
+ await showDropdown();
+
+ expect(wrapper.findByTestId('leave-project-link').exists()).toBe(false);
+ expect(wrapper.findByTestId('leave-group-link').exists()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/header_search/components/app_spec.js b/spec/frontend/header_search/components/app_spec.js
deleted file mode 100644
index 0d0b6628bdf..00000000000
--- a/spec/frontend/header_search/components/app_spec.js
+++ /dev/null
@@ -1,517 +0,0 @@
-import { GlSearchBoxByType, GlToken, GlIcon } from '@gitlab/ui';
-import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { mockTracking } from 'helpers/tracking_helper';
-import { s__, sprintf } from '~/locale';
-import HeaderSearchApp from '~/header_search/components/app.vue';
-import HeaderSearchAutocompleteItems from '~/header_search/components/header_search_autocomplete_items.vue';
-import HeaderSearchDefaultItems from '~/header_search/components/header_search_default_items.vue';
-import HeaderSearchScopedItems from '~/header_search/components/header_search_scoped_items.vue';
-import {
- SEARCH_INPUT_DESCRIPTION,
- SEARCH_RESULTS_DESCRIPTION,
- SEARCH_BOX_INDEX,
- ICON_PROJECT,
- ICON_GROUP,
- ICON_SUBGROUP,
- SCOPE_TOKEN_MAX_LENGTH,
- IS_SEARCHING,
- IS_NOT_FOCUSED,
- IS_FOCUSED,
- SEARCH_SHORTCUTS_MIN_CHARACTERS,
- DROPDOWN_CLOSE_TIMEOUT,
-} from '~/header_search/constants';
-import DropdownKeyboardNavigation from '~/vue_shared/components/dropdown_keyboard_navigation.vue';
-import { ENTER_KEY } from '~/lib/utils/keys';
-import { visitUrl } from '~/lib/utils/url_utility';
-import { truncate } from '~/lib/utils/text_utility';
-import {
- MOCK_SEARCH,
- MOCK_SEARCH_QUERY,
- MOCK_USERNAME,
- MOCK_DEFAULT_SEARCH_OPTIONS,
- MOCK_SCOPED_SEARCH_OPTIONS,
- MOCK_SEARCH_CONTEXT_FULL,
-} from '../mock_data';
-
-Vue.use(Vuex);
-
-jest.mock('~/lib/utils/url_utility', () => ({
- visitUrl: jest.fn(),
-}));
-
-describe('HeaderSearchApp', () => {
- let wrapper;
-
- jest.useFakeTimers();
- jest.spyOn(global, 'setTimeout');
-
- const actionSpies = {
- setSearch: jest.fn(),
- fetchAutocompleteOptions: jest.fn(),
- clearAutocomplete: jest.fn(),
- };
-
- const createComponent = (initialState, mockGetters) => {
- const store = new Vuex.Store({
- state: {
- ...initialState,
- },
- actions: actionSpies,
- getters: {
- searchQuery: () => MOCK_SEARCH_QUERY,
- searchOptions: () => MOCK_DEFAULT_SEARCH_OPTIONS,
- ...mockGetters,
- },
- });
-
- wrapper = shallowMountExtended(HeaderSearchApp, {
- store,
- });
- };
-
- const formatScopeName = (scopeName) => {
- if (!scopeName) {
- return false;
- }
- const searchResultsScope = s__('GlobalSearch|in %{scope}');
- return truncate(
- sprintf(searchResultsScope, {
- scope: scopeName,
- }),
- SCOPE_TOKEN_MAX_LENGTH,
- );
- };
-
- const findHeaderSearchForm = () => wrapper.findByTestId('header-search-form');
- const findHeaderSearchInput = () => wrapper.findComponent(GlSearchBoxByType);
- const findScopeToken = () => wrapper.findComponent(GlToken);
- const findHeaderSearchInputKBD = () => wrapper.find('.keyboard-shortcut-helper');
- const findHeaderSearchDropdown = () => wrapper.findByTestId('header-search-dropdown-menu');
- const findHeaderSearchDefaultItems = () => wrapper.findComponent(HeaderSearchDefaultItems);
- const findHeaderSearchScopedItems = () => wrapper.findComponent(HeaderSearchScopedItems);
- const findHeaderSearchAutocompleteItems = () =>
- wrapper.findComponent(HeaderSearchAutocompleteItems);
- const findDropdownKeyboardNavigation = () => wrapper.findComponent(DropdownKeyboardNavigation);
- const findSearchInputDescription = () => wrapper.find(`#${SEARCH_INPUT_DESCRIPTION}`);
- const findSearchResultsDescription = () => wrapper.findByTestId(SEARCH_RESULTS_DESCRIPTION);
-
- describe('template', () => {
- describe('always renders', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('Header Search Input', () => {
- expect(findHeaderSearchInput().exists()).toBe(true);
- });
-
- it('Header Search Input KBD hint', () => {
- expect(findHeaderSearchInputKBD().exists()).toBe(true);
- expect(findHeaderSearchInputKBD().text()).toContain('/');
- expect(findHeaderSearchInputKBD().attributes('title')).toContain(
- 'Use the shortcut key <kbd>/</kbd> to start a search',
- );
- });
-
- it('Search Input Description', () => {
- expect(findSearchInputDescription().exists()).toBe(true);
- });
-
- it('Search Results Description', () => {
- expect(findSearchResultsDescription().exists()).toBe(true);
- });
- });
-
- describe.each`
- showDropdown | username | showSearchDropdown
- ${false} | ${null} | ${false}
- ${false} | ${MOCK_USERNAME} | ${false}
- ${true} | ${null} | ${false}
- ${true} | ${MOCK_USERNAME} | ${true}
- `('Header Search Dropdown', ({ showDropdown, username, showSearchDropdown }) => {
- describe(`when showDropdown is ${showDropdown} and current_username is ${username}`, () => {
- beforeEach(() => {
- window.gon.current_username = username;
- createComponent();
- findHeaderSearchInput().vm.$emit(showDropdown ? 'focusin' : '');
- });
-
- it(`should${showSearchDropdown ? '' : ' not'} render`, () => {
- expect(findHeaderSearchDropdown().exists()).toBe(showSearchDropdown);
- });
- });
- });
-
- describe.each`
- search | showDefault | showScoped | showAutocomplete
- ${null} | ${true} | ${false} | ${false}
- ${''} | ${true} | ${false} | ${false}
- ${'t'} | ${false} | ${false} | ${true}
- ${'te'} | ${false} | ${false} | ${true}
- ${'tes'} | ${false} | ${true} | ${true}
- ${MOCK_SEARCH} | ${false} | ${true} | ${true}
- `('Header Search Dropdown Items', ({ search, showDefault, showScoped, showAutocomplete }) => {
- describe(`when search is ${search}`, () => {
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent({ search }, {});
- findHeaderSearchInput().vm.$emit('focusin');
- });
-
- it(`should${showDefault ? '' : ' not'} render the Default Dropdown Items`, () => {
- expect(findHeaderSearchDefaultItems().exists()).toBe(showDefault);
- });
-
- it(`should${showScoped ? '' : ' not'} render the Scoped Dropdown Items`, () => {
- expect(findHeaderSearchScopedItems().exists()).toBe(showScoped);
- });
-
- it(`should${showAutocomplete ? '' : ' not'} render the Autocomplete Dropdown Items`, () => {
- expect(findHeaderSearchAutocompleteItems().exists()).toBe(showAutocomplete);
- });
-
- it(`should render the Dropdown Navigation Component`, () => {
- expect(findDropdownKeyboardNavigation().exists()).toBe(true);
- });
-
- it(`should close the dropdown when press escape key`, async () => {
- findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: 27 }));
- jest.runAllTimers();
- await nextTick();
- expect(findHeaderSearchDropdown().exists()).toBe(false);
- expect(wrapper.emitted().expandSearchBar.length).toBe(1);
- });
- });
- });
-
- describe.each`
- username | showDropdown | expectedDesc
- ${null} | ${false} | ${HeaderSearchApp.i18n.SEARCH_INPUT_DESCRIBE_BY_NO_DROPDOWN}
- ${null} | ${true} | ${HeaderSearchApp.i18n.SEARCH_INPUT_DESCRIBE_BY_NO_DROPDOWN}
- ${MOCK_USERNAME} | ${false} | ${HeaderSearchApp.i18n.SEARCH_INPUT_DESCRIBE_BY_WITH_DROPDOWN}
- ${MOCK_USERNAME} | ${true} | ${HeaderSearchApp.i18n.SEARCH_INPUT_DESCRIBE_BY_WITH_DROPDOWN}
- `('Search Input Description', ({ username, showDropdown, expectedDesc }) => {
- describe(`current_username is ${username} and showDropdown is ${showDropdown}`, () => {
- beforeEach(() => {
- window.gon.current_username = username;
- createComponent();
- findHeaderSearchInput().vm.$emit(showDropdown ? 'focusin' : '');
- });
-
- it(`sets description to ${expectedDesc}`, () => {
- expect(findSearchInputDescription().text()).toBe(expectedDesc);
- });
- });
- });
-
- describe.each`
- username | showDropdown | search | loading | searchOptions | expectedDesc
- ${null} | ${true} | ${''} | ${false} | ${[]} | ${''}
- ${MOCK_USERNAME} | ${false} | ${''} | ${false} | ${[]} | ${''}
- ${MOCK_USERNAME} | ${true} | ${''} | ${false} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${`${MOCK_DEFAULT_SEARCH_OPTIONS.length} default results provided. Use the up and down arrow keys to navigate search results list.`}
- ${MOCK_USERNAME} | ${true} | ${''} | ${true} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${`${MOCK_DEFAULT_SEARCH_OPTIONS.length} default results provided. Use the up and down arrow keys to navigate search results list.`}
- ${MOCK_USERNAME} | ${true} | ${MOCK_SEARCH} | ${false} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${`Results updated. ${MOCK_SCOPED_SEARCH_OPTIONS.length} results available. Use the up and down arrow keys to navigate search results list, or ENTER to submit.`}
- ${MOCK_USERNAME} | ${true} | ${MOCK_SEARCH} | ${true} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${HeaderSearchApp.i18n.SEARCH_RESULTS_LOADING}
- `(
- 'Search Results Description',
- ({ username, showDropdown, search, loading, searchOptions, expectedDesc }) => {
- describe(`search is "${search}", loading is ${loading}, and showSearchDropdown is ${showDropdown}`, () => {
- beforeEach(() => {
- window.gon.current_username = username;
- createComponent(
- {
- search,
- loading,
- },
- {
- searchOptions: () => searchOptions,
- },
- );
- findHeaderSearchInput().vm.$emit(showDropdown ? 'focusin' : '');
- });
-
- it(`sets description to ${expectedDesc}`, () => {
- expect(findSearchResultsDescription().text()).toBe(expectedDesc);
- });
- });
- },
- );
-
- describe('input box', () => {
- describe.each`
- search | searchOptions | hasToken
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[0]]} | ${true}
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[1]]} | ${true}
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[2]]} | ${true}
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[3]]} | ${true}
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[4]]} | ${true}
- ${'te'} | ${[MOCK_SCOPED_SEARCH_OPTIONS[5]]} | ${false}
- ${'x'} | ${[]} | ${false}
- `('token', ({ search, searchOptions, hasToken }) => {
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent(
- { search },
- {
- searchOptions: () => searchOptions,
- },
- );
- findHeaderSearchInput().vm.$emit('focusin');
- });
-
- it(`${hasToken ? 'is' : 'is NOT'} rendered when data set has type "${
- searchOptions[0]?.html_id
- }"`, () => {
- expect(findScopeToken().exists()).toBe(hasToken);
- });
-
- it(`text ${hasToken ? 'is correctly' : 'is NOT'} rendered when text is "${
- searchOptions[0]?.scope || searchOptions[0]?.description
- }"`, () => {
- expect(findScopeToken().exists() && findScopeToken().text()).toBe(
- formatScopeName(searchOptions[0]?.scope || searchOptions[0]?.description),
- );
- });
- });
- });
-
- describe('form', () => {
- describe.each`
- searchContext | search | searchOptions | isFocused
- ${MOCK_SEARCH_CONTEXT_FULL} | ${null} | ${[]} | ${true}
- ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${[]} | ${true}
- ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${true}
- ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${false}
- ${null} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${true}
- ${null} | ${null} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${true}
- ${null} | ${null} | ${[]} | ${true}
- `('wrapper', ({ searchContext, search, searchOptions, isFocused }) => {
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent({ search, searchContext }, { searchOptions: () => searchOptions });
- if (isFocused) {
- findHeaderSearchInput().vm.$emit('focusin');
- }
- });
-
- const isSearching = search?.length > SEARCH_SHORTCUTS_MIN_CHARACTERS;
-
- it(`classes ${isSearching ? 'contain' : 'do not contain'} "${IS_SEARCHING}"`, () => {
- if (isSearching) {
- expect(findHeaderSearchForm().classes()).toContain(IS_SEARCHING);
- return;
- }
- if (!isSearching) {
- expect(findHeaderSearchForm().classes()).not.toContain(IS_SEARCHING);
- }
- });
-
- it(`classes ${isSearching ? 'contain' : 'do not contain'} "${
- isFocused ? IS_FOCUSED : IS_NOT_FOCUSED
- }"`, () => {
- expect(findHeaderSearchForm().classes()).toContain(
- isFocused ? IS_FOCUSED : IS_NOT_FOCUSED,
- );
- });
- });
- });
-
- describe.each`
- search | searchOptions | hasIcon | iconName
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[0]]} | ${true} | ${ICON_PROJECT}
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[2]]} | ${true} | ${ICON_GROUP}
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[3]]} | ${true} | ${ICON_SUBGROUP}
- ${MOCK_SEARCH} | ${[MOCK_SCOPED_SEARCH_OPTIONS[4]]} | ${false} | ${false}
- `('token', ({ search, searchOptions, hasIcon, iconName }) => {
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent(
- { search },
- {
- searchOptions: () => searchOptions,
- },
- );
- findHeaderSearchInput().vm.$emit('focusin');
- });
-
- it(`icon for data set type "${searchOptions[0]?.html_id}" ${
- hasIcon ? 'is' : 'is NOT'
- } rendered`, () => {
- expect(findScopeToken().findComponent(GlIcon).exists()).toBe(hasIcon);
- });
-
- it(`render ${iconName ? `"${iconName}"` : 'NO'} icon for data set type "${
- searchOptions[0]?.html_id
- }"`, () => {
- expect(
- findScopeToken().findComponent(GlIcon).exists() &&
- findScopeToken().findComponent(GlIcon).attributes('name'),
- ).toBe(iconName);
- });
- });
- });
-
- describe('events', () => {
- describe('Header Search Input', () => {
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent();
- });
-
- describe('when dropdown is closed', () => {
- let trackingSpy;
-
- beforeEach(() => {
- trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- });
-
- it('onFocusin opens dropdown and triggers snowplow event', async () => {
- expect(findHeaderSearchDropdown().exists()).toBe(false);
- findHeaderSearchInput().vm.$emit('focusin');
-
- await nextTick();
-
- expect(findHeaderSearchDropdown().exists()).toBe(true);
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'focus_input', {
- label: 'global_search',
- property: 'navigation_top',
- });
- });
-
- it('onFocusout closes dropdown and triggers snowplow event', async () => {
- expect(findHeaderSearchDropdown().exists()).toBe(false);
-
- findHeaderSearchInput().vm.$emit('focusout');
- jest.runAllTimers();
- await nextTick();
-
- expect(findHeaderSearchDropdown().exists()).toBe(false);
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'blur_input', {
- label: 'global_search',
- property: 'navigation_top',
- });
- });
- });
-
- describe('onInput', () => {
- describe('when search has text', () => {
- beforeEach(() => {
- findHeaderSearchInput().vm.$emit('input', MOCK_SEARCH);
- });
-
- it('calls setSearch with search term', () => {
- expect(actionSpies.setSearch).toHaveBeenCalledWith(expect.any(Object), MOCK_SEARCH);
- });
-
- it('calls fetchAutocompleteOptions', () => {
- expect(actionSpies.fetchAutocompleteOptions).toHaveBeenCalled();
- });
-
- it('does not call clearAutocomplete', () => {
- expect(actionSpies.clearAutocomplete).not.toHaveBeenCalled();
- });
- });
-
- describe('when search is emptied', () => {
- beforeEach(() => {
- findHeaderSearchInput().vm.$emit('input', '');
- });
-
- it('calls setSearch with empty term', () => {
- expect(actionSpies.setSearch).toHaveBeenCalledWith(expect.any(Object), '');
- });
-
- it('does not call fetchAutocompleteOptions', () => {
- expect(actionSpies.fetchAutocompleteOptions).not.toHaveBeenCalled();
- });
-
- it('calls clearAutocomplete', () => {
- expect(actionSpies.clearAutocomplete).toHaveBeenCalled();
- });
- });
- });
- });
-
- describe('onFocusout dropdown', () => {
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent({ search: 'tes' }, {});
- findHeaderSearchInput().vm.$emit('focusin');
- });
-
- it('closes with timeout so click event gets emited', () => {
- findHeaderSearchInput().vm.$emit('focusout');
-
- expect(setTimeout).toHaveBeenCalledTimes(1);
- expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), DROPDOWN_CLOSE_TIMEOUT);
- });
- });
- });
-
- describe('computed', () => {
- describe.each`
- MOCK_INDEX | search
- ${1} | ${null}
- ${SEARCH_BOX_INDEX} | ${'test'}
- ${2} | ${'test1'}
- `('currentFocusedOption', ({ MOCK_INDEX, search }) => {
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent({ search });
- findHeaderSearchInput().vm.$emit('focusin');
- });
-
- it(`when currentFocusIndex changes to ${MOCK_INDEX} updates the data to searchOptions[${MOCK_INDEX}]`, () => {
- findDropdownKeyboardNavigation().vm.$emit('change', MOCK_INDEX);
- expect(wrapper.vm.currentFocusedOption).toBe(MOCK_DEFAULT_SEARCH_OPTIONS[MOCK_INDEX]);
- });
- });
- });
-
- describe('Submitting a search', () => {
- describe('with no currentFocusedOption', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('onKey-enter submits a search', () => {
- findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
-
- expect(visitUrl).toHaveBeenCalledWith(MOCK_SEARCH_QUERY);
- });
- });
-
- describe('with less than min characters and no dropdown results', () => {
- beforeEach(() => {
- createComponent({ search: 'x' });
- });
-
- it('onKey-enter will NOT submit a search', () => {
- findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
-
- expect(visitUrl).not.toHaveBeenCalledWith(MOCK_SEARCH_QUERY);
- });
- });
-
- describe('with currentFocusedOption', () => {
- const MOCK_INDEX = 1;
-
- beforeEach(() => {
- window.gon.current_username = MOCK_USERNAME;
- createComponent();
- findHeaderSearchInput().vm.$emit('focusin');
- });
-
- it('onKey-enter clicks the selected dropdown item rather than submitting a search', async () => {
- await nextTick();
- findDropdownKeyboardNavigation().vm.$emit('change', MOCK_INDEX);
-
- findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
- expect(visitUrl).toHaveBeenCalledWith(MOCK_DEFAULT_SEARCH_OPTIONS[MOCK_INDEX].url);
- });
- });
- });
-});
diff --git a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
deleted file mode 100644
index 868edb3e651..00000000000
--- a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
+++ /dev/null
@@ -1,236 +0,0 @@
-import { GlDropdownItem, GlLoadingIcon, GlAvatar, GlAlert, GlDropdownDivider } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import HeaderSearchAutocompleteItems from '~/header_search/components/header_search_autocomplete_items.vue';
-import { LARGE_AVATAR_PX, SMALL_AVATAR_PX } from '~/header_search/constants';
-import {
- PROJECTS_CATEGORY,
- GROUPS_CATEGORY,
- ISSUES_CATEGORY,
- MERGE_REQUEST_CATEGORY,
- RECENT_EPICS_CATEGORY,
-} from '~/vue_shared/global_search/constants';
-import {
- MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
- MOCK_SORTED_AUTOCOMPLETE_OPTIONS,
- MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_SETTINGS_HELP,
- MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_HELP,
- MOCK_SEARCH,
- MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_2,
-} from '../mock_data';
-
-Vue.use(Vuex);
-
-describe('HeaderSearchAutocompleteItems', () => {
- let wrapper;
-
- const createComponent = (initialState, mockGetters, props) => {
- const store = new Vuex.Store({
- state: {
- loading: false,
- ...initialState,
- },
- getters: {
- autocompleteGroupedSearchOptions: () => MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
- ...mockGetters,
- },
- });
-
- wrapper = shallowMount(HeaderSearchAutocompleteItems, {
- store,
- propsData: {
- ...props,
- },
- });
- };
-
- const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findGlDropdownDividers = () => wrapper.findAllComponents(GlDropdownDivider);
- const findFirstDropdownItem = () => findDropdownItems().at(0);
- const findDropdownItemTitles = () =>
- findDropdownItems().wrappers.map((w) => w.findAll('span').at(1).text());
- const findDropdownItemSubTitles = () =>
- findDropdownItems()
- .wrappers.filter((w) => w.findAll('span').length > 2)
- .map((w) => w.findAll('span').at(2).text());
- const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href'));
- const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findGlAvatar = () => wrapper.findComponent(GlAvatar);
- const findGlAlert = () => wrapper.findComponent(GlAlert);
-
- describe('template', () => {
- describe('when loading is true', () => {
- beforeEach(() => {
- createComponent({ loading: true });
- });
-
- it('renders GlLoadingIcon', () => {
- expect(findGlLoadingIcon().exists()).toBe(true);
- });
-
- it('does not render autocomplete options', () => {
- expect(findDropdownItems()).toHaveLength(0);
- });
- });
-
- describe('when api returns error', () => {
- beforeEach(() => {
- createComponent({ autocompleteError: true });
- });
-
- it('renders Alert', () => {
- expect(findGlAlert().exists()).toBe(true);
- });
- });
- describe('when loading is false', () => {
- beforeEach(() => {
- createComponent({ loading: false });
- });
-
- it('does not render GlLoadingIcon', () => {
- expect(findGlLoadingIcon().exists()).toBe(false);
- });
-
- describe('Dropdown items', () => {
- it('renders item for each option in autocomplete option', () => {
- expect(findDropdownItems()).toHaveLength(MOCK_SORTED_AUTOCOMPLETE_OPTIONS.length);
- });
-
- it('renders titles correctly', () => {
- const expectedTitles = MOCK_SORTED_AUTOCOMPLETE_OPTIONS.map((o) => o.value || o.label);
- expect(findDropdownItemTitles()).toStrictEqual(expectedTitles);
- });
-
- it('renders sub-titles correctly', () => {
- const expectedSubTitles = MOCK_SORTED_AUTOCOMPLETE_OPTIONS.filter((o) => o.value).map(
- (o) => o.label,
- );
- expect(findDropdownItemSubTitles()).toStrictEqual(expectedSubTitles);
- });
-
- it('renders links correctly', () => {
- const expectedLinks = MOCK_SORTED_AUTOCOMPLETE_OPTIONS.map((o) => o.url);
- expect(findDropdownItemLinks()).toStrictEqual(expectedLinks);
- });
- });
-
- describe.each`
- item | showAvatar | avatarSize | searchContext | entityId | entityName
- ${{ data: [{ category: PROJECTS_CATEGORY, avatar_url: null }] }} | ${true} | ${String(LARGE_AVATAR_PX)} | ${{ project: { id: 29 } }} | ${'29'} | ${''}
- ${{ data: [{ category: GROUPS_CATEGORY, avatar_url: '/123' }] }} | ${true} | ${String(LARGE_AVATAR_PX)} | ${{ group: { id: 12 } }} | ${'12'} | ${''}
- ${{ data: [{ category: 'Help', avatar_url: '' }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${null} | ${'0'} | ${''}
- ${{ data: [{ category: 'Settings' }] }} | ${false} | ${false} | ${null} | ${false} | ${false}
- ${{ data: [{ category: GROUPS_CATEGORY, avatar_url: null }] }} | ${true} | ${String(LARGE_AVATAR_PX)} | ${{ group: { id: 1, name: 'test1' } }} | ${'1'} | ${'test1'}
- ${{ data: [{ category: PROJECTS_CATEGORY, avatar_url: null }] }} | ${true} | ${String(LARGE_AVATAR_PX)} | ${{ project: { id: 2, name: 'test2' } }} | ${'2'} | ${'test2'}
- ${{ data: [{ category: ISSUES_CATEGORY, avatar_url: null }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${{ project: { id: 3, name: 'test3' } }} | ${'3'} | ${'test3'}
- ${{ data: [{ category: MERGE_REQUEST_CATEGORY, avatar_url: null }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${{ project: { id: 4, name: 'test4' } }} | ${'4'} | ${'test4'}
- ${{ data: [{ category: RECENT_EPICS_CATEGORY, avatar_url: null }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${{ group: { id: 5, name: 'test5' } }} | ${'5'} | ${'test5'}
- ${{ data: [{ category: GROUPS_CATEGORY, avatar_url: null, group_id: 6, group_name: 'test6' }] }} | ${true} | ${String(LARGE_AVATAR_PX)} | ${null} | ${'6'} | ${'test6'}
- ${{ data: [{ category: PROJECTS_CATEGORY, avatar_url: null, project_id: 7, project_name: 'test7' }] }} | ${true} | ${String(LARGE_AVATAR_PX)} | ${null} | ${'7'} | ${'test7'}
- ${{ data: [{ category: ISSUES_CATEGORY, avatar_url: null, project_id: 8, project_name: 'test8' }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${null} | ${'8'} | ${'test8'}
- ${{ data: [{ category: MERGE_REQUEST_CATEGORY, avatar_url: null, project_id: 9, project_name: 'test9' }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${null} | ${'9'} | ${'test9'}
- ${{ data: [{ category: RECENT_EPICS_CATEGORY, avatar_url: null, group_id: 10, group_name: 'test10' }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${null} | ${'10'} | ${'test10'}
- ${{ data: [{ category: GROUPS_CATEGORY, avatar_url: null, group_id: 11, group_name: 'test11' }] }} | ${true} | ${String(LARGE_AVATAR_PX)} | ${{ group: { id: 1, name: 'test1' } }} | ${'11'} | ${'test11'}
- ${{ data: [{ category: PROJECTS_CATEGORY, avatar_url: null, project_id: 12, project_name: 'test12' }] }} | ${true} | ${String(LARGE_AVATAR_PX)} | ${{ project: { id: 2, name: 'test2' } }} | ${'12'} | ${'test12'}
- ${{ data: [{ category: ISSUES_CATEGORY, avatar_url: null, project_id: 13, project_name: 'test13' }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${{ project: { id: 3, name: 'test3' } }} | ${'13'} | ${'test13'}
- ${{ data: [{ category: MERGE_REQUEST_CATEGORY, avatar_url: null, project_id: 14, project_name: 'test14' }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${{ project: { id: 4, name: 'test4' } }} | ${'14'} | ${'test14'}
- ${{ data: [{ category: RECENT_EPICS_CATEGORY, avatar_url: null, group_id: 15, group_name: 'test15' }] }} | ${true} | ${String(SMALL_AVATAR_PX)} | ${{ group: { id: 5, name: 'test5' } }} | ${'15'} | ${'test15'}
- `('GlAvatar', ({ item, showAvatar, avatarSize, searchContext, entityId, entityName }) => {
- describe(`when category is ${item.data[0].category} and avatar_url is ${item.data[0].avatar_url}`, () => {
- beforeEach(() => {
- createComponent({ searchContext }, { autocompleteGroupedSearchOptions: () => [item] });
- });
-
- it(`should${showAvatar ? '' : ' not'} render`, () => {
- expect(findGlAvatar().exists()).toBe(showAvatar);
- });
-
- it(`should set avatarSize to ${avatarSize}`, () => {
- expect(findGlAvatar().exists() && findGlAvatar().attributes('size')).toBe(avatarSize);
- });
-
- it(`should set avatar entityId to ${entityId}`, () => {
- expect(findGlAvatar().exists() && findGlAvatar().attributes('entityid')).toBe(entityId);
- });
-
- it(`should set avatar entityName to ${entityName}`, () => {
- expect(findGlAvatar().exists() && findGlAvatar().attributes('entityname')).toBe(
- entityName,
- );
- });
- });
- });
- });
-
- describe.each`
- currentFocusedOption | isFocused | ariaSelected
- ${null} | ${false} | ${undefined}
- ${{ html_id: 'not-a-match' }} | ${false} | ${undefined}
- ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS[0]} | ${true} | ${'true'}
- `('isOptionFocused', ({ currentFocusedOption, isFocused, ariaSelected }) => {
- describe(`when currentFocusedOption.html_id is ${currentFocusedOption?.html_id}`, () => {
- beforeEach(() => {
- createComponent({}, {}, { currentFocusedOption });
- });
-
- it(`should${isFocused ? '' : ' not'} have gl-bg-gray-50 applied`, () => {
- expect(findFirstDropdownItem().classes('gl-bg-gray-50')).toBe(isFocused);
- });
-
- it(`sets "aria-selected to ${ariaSelected}`, () => {
- expect(findFirstDropdownItem().attributes('aria-selected')).toBe(ariaSelected);
- });
- });
- });
-
- describe.each`
- search | items | dividerCount
- ${null} | ${[]} | ${0}
- ${''} | ${[]} | ${0}
- ${'1'} | ${[]} | ${0}
- ${')'} | ${[]} | ${0}
- ${'t'} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_SETTINGS_HELP} | ${1}
- ${'te'} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_HELP} | ${0}
- ${'tes'} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_2} | ${1}
- ${MOCK_SEARCH} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_2} | ${1}
- `('Header Search Dropdown Dividers', ({ search, items, dividerCount }) => {
- describe(`when search is ${search}`, () => {
- beforeEach(() => {
- createComponent(
- { search },
- {
- autocompleteGroupedSearchOptions: () => items,
- },
- {},
- );
- });
-
- it(`component should have ${dividerCount} dividers`, () => {
- expect(findGlDropdownDividers()).toHaveLength(dividerCount);
- });
- });
- });
- });
-
- describe('watchers', () => {
- describe('currentFocusedOption', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('when focused changes to existing element calls scroll into view on the newly focused element', async () => {
- const focusedElement = findFirstDropdownItem().element;
- const scrollSpy = jest.spyOn(focusedElement, 'scrollIntoView');
-
- wrapper.setProps({ currentFocusedOption: MOCK_SORTED_AUTOCOMPLETE_OPTIONS[0] });
-
- await nextTick();
-
- expect(scrollSpy).toHaveBeenCalledWith(false);
- scrollSpy.mockRestore();
- });
- });
- });
-});
diff --git a/spec/frontend/header_search/components/header_search_default_items_spec.js b/spec/frontend/header_search/components/header_search_default_items_spec.js
deleted file mode 100644
index acaad251bec..00000000000
--- a/spec/frontend/header_search/components/header_search_default_items_spec.js
+++ /dev/null
@@ -1,103 +0,0 @@
-import { GlDropdownItem, GlDropdownSectionHeader } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import HeaderSearchDefaultItems from '~/header_search/components/header_search_default_items.vue';
-import { MOCK_SEARCH_CONTEXT, MOCK_DEFAULT_SEARCH_OPTIONS } from '../mock_data';
-
-Vue.use(Vuex);
-
-describe('HeaderSearchDefaultItems', () => {
- let wrapper;
-
- const createComponent = (initialState, props) => {
- const store = new Vuex.Store({
- state: {
- searchContext: MOCK_SEARCH_CONTEXT,
- ...initialState,
- },
- getters: {
- defaultSearchOptions: () => MOCK_DEFAULT_SEARCH_OPTIONS,
- },
- });
-
- wrapper = shallowMount(HeaderSearchDefaultItems, {
- store,
- propsData: {
- ...props,
- },
- });
- };
-
- const findDropdownHeader = () => wrapper.findComponent(GlDropdownSectionHeader);
- const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findFirstDropdownItem = () => findDropdownItems().at(0);
- const findDropdownItemTitles = () => findDropdownItems().wrappers.map((w) => w.text());
- const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href'));
-
- describe('template', () => {
- describe('Dropdown items', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders item for each option in defaultSearchOptions', () => {
- expect(findDropdownItems()).toHaveLength(MOCK_DEFAULT_SEARCH_OPTIONS.length);
- });
-
- it('renders titles correctly', () => {
- const expectedTitles = MOCK_DEFAULT_SEARCH_OPTIONS.map((o) => o.title);
- expect(findDropdownItemTitles()).toStrictEqual(expectedTitles);
- });
-
- it('renders links correctly', () => {
- const expectedLinks = MOCK_DEFAULT_SEARCH_OPTIONS.map((o) => o.url);
- expect(findDropdownItemLinks()).toStrictEqual(expectedLinks);
- });
- });
-
- describe.each`
- group | project | dropdownTitle
- ${null} | ${null} | ${'All GitLab'}
- ${{ name: 'Test Group' }} | ${null} | ${'Test Group'}
- ${{ name: 'Test Group' }} | ${{ name: 'Test Project' }} | ${'Test Project'}
- `('Dropdown Header', ({ group, project, dropdownTitle }) => {
- describe(`when group is ${group?.name} and project is ${project?.name}`, () => {
- beforeEach(() => {
- createComponent({
- searchContext: {
- group,
- project,
- },
- });
- });
-
- it(`should render as ${dropdownTitle}`, () => {
- expect(findDropdownHeader().text()).toBe(dropdownTitle);
- });
- });
- });
-
- describe.each`
- currentFocusedOption | isFocused | ariaSelected
- ${null} | ${false} | ${undefined}
- ${{ html_id: 'not-a-match' }} | ${false} | ${undefined}
- ${MOCK_DEFAULT_SEARCH_OPTIONS[0]} | ${true} | ${'true'}
- `('isOptionFocused', ({ currentFocusedOption, isFocused, ariaSelected }) => {
- describe(`when currentFocusedOption.html_id is ${currentFocusedOption?.html_id}`, () => {
- beforeEach(() => {
- createComponent({}, { currentFocusedOption });
- });
-
- it(`should${isFocused ? '' : ' not'} have gl-bg-gray-50 applied`, () => {
- expect(findFirstDropdownItem().classes('gl-bg-gray-50')).toBe(isFocused);
- });
-
- it(`sets "aria-selected to ${ariaSelected}`, () => {
- expect(findFirstDropdownItem().attributes('aria-selected')).toBe(ariaSelected);
- });
- });
- });
- });
-});
diff --git a/spec/frontend/header_search/components/header_search_scoped_items_spec.js b/spec/frontend/header_search/components/header_search_scoped_items_spec.js
deleted file mode 100644
index 78ea148caac..00000000000
--- a/spec/frontend/header_search/components/header_search_scoped_items_spec.js
+++ /dev/null
@@ -1,121 +0,0 @@
-import { GlDropdownItem, GlToken, GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import { trimText } from 'helpers/text_helper';
-import HeaderSearchScopedItems from '~/header_search/components/header_search_scoped_items.vue';
-import { truncate } from '~/lib/utils/text_utility';
-import { SCOPE_TOKEN_MAX_LENGTH } from '~/header_search/constants';
-import { MSG_IN_ALL_GITLAB } from '~/vue_shared/global_search/constants';
-import {
- MOCK_SEARCH,
- MOCK_SCOPED_SEARCH_OPTIONS,
- MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
-} from '../mock_data';
-
-Vue.use(Vuex);
-
-describe('HeaderSearchScopedItems', () => {
- let wrapper;
-
- const createComponent = (initialState, mockGetters, props) => {
- const store = new Vuex.Store({
- state: {
- search: MOCK_SEARCH,
- ...initialState,
- },
- getters: {
- scopedSearchOptions: () => MOCK_SCOPED_SEARCH_OPTIONS,
- autocompleteGroupedSearchOptions: () => MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
- ...mockGetters,
- },
- });
-
- wrapper = shallowMount(HeaderSearchScopedItems, {
- store,
- propsData: {
- ...props,
- },
- });
- };
-
- const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findFirstDropdownItem = () => findDropdownItems().at(0);
- const findDropdownItemTitles = () => findDropdownItems().wrappers.map((w) => trimText(w.text()));
- const findScopeTokens = () => wrapper.findAllComponents(GlToken);
- const findScopeTokensText = () => findScopeTokens().wrappers.map((w) => trimText(w.text()));
- const findScopeTokensIcons = () =>
- findScopeTokens().wrappers.map((w) => w.findAllComponents(GlIcon));
- const findDropdownItemAriaLabels = () =>
- findDropdownItems().wrappers.map((w) => trimText(w.attributes('aria-label')));
- const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href'));
-
- describe('template', () => {
- describe('Dropdown items', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders item for each option in scopedSearchOptions', () => {
- expect(findDropdownItems()).toHaveLength(MOCK_SCOPED_SEARCH_OPTIONS.length);
- });
-
- it('renders titles correctly', () => {
- findDropdownItemTitles().forEach((title) => expect(title).toContain(MOCK_SEARCH));
- });
-
- it('renders scope names correctly', () => {
- const expectedTitles = MOCK_SCOPED_SEARCH_OPTIONS.map((o) =>
- truncate(trimText(`in ${o.description || o.scope}`), SCOPE_TOKEN_MAX_LENGTH),
- );
-
- expect(findScopeTokensText()).toStrictEqual(expectedTitles);
- });
-
- it('renders scope icons correctly', () => {
- findScopeTokensIcons().forEach((icon, i) => {
- const w = icon.wrappers[0];
- expect(w?.attributes('name')).toBe(MOCK_SCOPED_SEARCH_OPTIONS[i].icon);
- });
- });
-
- it(`renders scope ${MSG_IN_ALL_GITLAB} correctly`, () => {
- expect(findScopeTokens().at(-1).findComponent(GlIcon).exists()).toBe(false);
- });
-
- it('renders aria-labels correctly', () => {
- const expectedLabels = MOCK_SCOPED_SEARCH_OPTIONS.map((o) =>
- trimText(`${MOCK_SEARCH} ${o.description || o.icon} ${o.scope || ''}`),
- );
- expect(findDropdownItemAriaLabels()).toStrictEqual(expectedLabels);
- });
-
- it('renders links correctly', () => {
- const expectedLinks = MOCK_SCOPED_SEARCH_OPTIONS.map((o) => o.url);
- expect(findDropdownItemLinks()).toStrictEqual(expectedLinks);
- });
- });
-
- describe.each`
- currentFocusedOption | isFocused | ariaSelected
- ${null} | ${false} | ${undefined}
- ${{ html_id: 'not-a-match' }} | ${false} | ${undefined}
- ${MOCK_SCOPED_SEARCH_OPTIONS[0]} | ${true} | ${'true'}
- `('isOptionFocused', ({ currentFocusedOption, isFocused, ariaSelected }) => {
- describe(`when currentFocusedOption.html_id is ${currentFocusedOption?.html_id}`, () => {
- beforeEach(() => {
- createComponent({}, {}, { currentFocusedOption });
- });
-
- it(`should${isFocused ? '' : ' not'} have gl-bg-gray-50 applied`, () => {
- expect(findFirstDropdownItem().classes('gl-bg-gray-50')).toBe(isFocused);
- });
-
- it(`sets "aria-selected to ${ariaSelected}`, () => {
- expect(findFirstDropdownItem().attributes('aria-selected')).toBe(ariaSelected);
- });
- });
- });
- });
-});
diff --git a/spec/frontend/header_search/init_spec.js b/spec/frontend/header_search/init_spec.js
deleted file mode 100644
index 459ca33ee66..00000000000
--- a/spec/frontend/header_search/init_spec.js
+++ /dev/null
@@ -1,54 +0,0 @@
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-
-import initHeaderSearch, { eventHandler, cleanEventListeners } from '~/header_search/init';
-
-describe('Header Search EventListener', () => {
- beforeEach(() => {
- jest.resetModules();
- setHTMLFixture(`
- <div class="js-header-content">
- <div class="header-search-form" id="js-header-search" data-autocomplete-path="/search/autocomplete" data-issues-path="/dashboard/issues" data-mr-path="/dashboard/merge_requests" data-search-context="{}" data-search-path="/search">
- <input autocomplete="off" class="form-control gl-form-input gl-search-box-by-type-input" data-qa-selector="search_box" id="search" name="search" placeholder="Search GitLab" type="text">
- </div>
- </div>`);
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('attached event listener', () => {
- const searchInputBox = document?.querySelector('#search');
- const addEventListenerSpy = jest.spyOn(searchInputBox, 'addEventListener');
- initHeaderSearch();
-
- expect(addEventListenerSpy).toHaveBeenCalledTimes(2);
- });
-
- it('removes event listener', async () => {
- const searchInputBox = document?.querySelector('#search');
- const removeEventListenerSpy = jest.spyOn(searchInputBox, 'removeEventListener');
- jest.mock('~/header_search', () => ({ initHeaderSearchApp: jest.fn() }));
- await eventHandler.apply(
- {
- searchInputBox: document.querySelector('#search'),
- },
- [cleanEventListeners],
- );
-
- expect(removeEventListenerSpy).toHaveBeenCalledTimes(2);
- });
-
- it('attaches new vue dropdown when feature flag is enabled', async () => {
- const mockVueApp = jest.fn();
- jest.mock('~/header_search', () => ({ initHeaderSearchApp: mockVueApp }));
- await eventHandler.apply(
- {
- searchInputBox: document.querySelector('#search'),
- },
- () => {},
- );
-
- expect(mockVueApp).toHaveBeenCalled();
- });
-});
diff --git a/spec/frontend/header_search/mock_data.js b/spec/frontend/header_search/mock_data.js
deleted file mode 100644
index 2218c81efc3..00000000000
--- a/spec/frontend/header_search/mock_data.js
+++ /dev/null
@@ -1,400 +0,0 @@
-import { ICON_PROJECT, ICON_GROUP, ICON_SUBGROUP } from '~/header_search/constants';
-import {
- PROJECTS_CATEGORY,
- GROUPS_CATEGORY,
- MSG_ISSUES_ASSIGNED_TO_ME,
- MSG_ISSUES_IVE_CREATED,
- MSG_MR_ASSIGNED_TO_ME,
- MSG_MR_IM_REVIEWER,
- MSG_MR_IVE_CREATED,
- MSG_IN_ALL_GITLAB,
-} from '~/vue_shared/global_search/constants';
-
-export const MOCK_USERNAME = 'anyone';
-
-export const MOCK_SEARCH_PATH = '/search';
-
-export const MOCK_ISSUE_PATH = '/dashboard/issues';
-
-export const MOCK_MR_PATH = '/dashboard/merge_requests';
-
-export const MOCK_ALL_PATH = '/';
-
-export const MOCK_AUTOCOMPLETE_PATH = '/autocomplete';
-
-export const MOCK_PROJECT = {
- id: 123,
- name: 'MockProject',
- path: '/mock-project',
-};
-
-export const MOCK_PROJECT_LONG = {
- id: 124,
- name: 'Mock Project Name That Is Ridiculously Long And It Goes Forever',
- path: '/mock-project-name-that-is-ridiculously-long-and-it-goes-forever',
-};
-
-export const MOCK_GROUP = {
- id: 321,
- name: 'MockGroup',
- path: '/mock-group',
-};
-
-export const MOCK_SUBGROUP = {
- id: 322,
- name: 'MockSubGroup',
- path: `${MOCK_GROUP}/mock-subgroup`,
-};
-
-export const MOCK_SEARCH_QUERY = 'http://gitlab.com/search?search=test';
-
-export const MOCK_SEARCH = 'test';
-
-export const MOCK_SEARCH_CONTEXT = {
- project: null,
- project_metadata: {},
- group: null,
- group_metadata: {},
-};
-
-export const MOCK_SEARCH_CONTEXT_FULL = {
- group: {
- id: 31,
- name: 'testGroup',
- full_name: 'testGroup',
- },
- group_metadata: {
- group_path: 'testGroup',
- name: 'testGroup',
- issues_path: '/groups/testGroup/-/issues',
- mr_path: '/groups/testGroup/-/merge_requests',
- },
-};
-
-export const MOCK_DEFAULT_SEARCH_OPTIONS = [
- {
- html_id: 'default-issues-assigned',
- title: MSG_ISSUES_ASSIGNED_TO_ME,
- url: `${MOCK_ISSUE_PATH}/?assignee_username=${MOCK_USERNAME}`,
- },
- {
- html_id: 'default-issues-created',
- title: MSG_ISSUES_IVE_CREATED,
- url: `${MOCK_ISSUE_PATH}/?author_username=${MOCK_USERNAME}`,
- },
- {
- html_id: 'default-mrs-assigned',
- title: MSG_MR_ASSIGNED_TO_ME,
- url: `${MOCK_MR_PATH}/?assignee_username=${MOCK_USERNAME}`,
- },
- {
- html_id: 'default-mrs-reviewer',
- title: MSG_MR_IM_REVIEWER,
- url: `${MOCK_MR_PATH}/?reviewer_username=${MOCK_USERNAME}`,
- },
- {
- html_id: 'default-mrs-created',
- title: MSG_MR_IVE_CREATED,
- url: `${MOCK_MR_PATH}/?author_username=${MOCK_USERNAME}`,
- },
-];
-
-export const MOCK_SCOPED_SEARCH_OPTIONS = [
- {
- html_id: 'scoped-in-project',
- scope: MOCK_PROJECT.name,
- scopeCategory: PROJECTS_CATEGORY,
- icon: ICON_PROJECT,
- url: MOCK_PROJECT.path,
- },
- {
- html_id: 'scoped-in-project-long',
- scope: MOCK_PROJECT_LONG.name,
- scopeCategory: PROJECTS_CATEGORY,
- icon: ICON_PROJECT,
- url: MOCK_PROJECT_LONG.path,
- },
- {
- html_id: 'scoped-in-group',
- scope: MOCK_GROUP.name,
- scopeCategory: GROUPS_CATEGORY,
- icon: ICON_GROUP,
- url: MOCK_GROUP.path,
- },
- {
- html_id: 'scoped-in-subgroup',
- scope: MOCK_SUBGROUP.name,
- scopeCategory: GROUPS_CATEGORY,
- icon: ICON_SUBGROUP,
- url: MOCK_SUBGROUP.path,
- },
- {
- html_id: 'scoped-in-all',
- description: MSG_IN_ALL_GITLAB,
- url: MOCK_ALL_PATH,
- },
-];
-
-export const MOCK_SCOPED_SEARCH_OPTIONS_DEF = [
- {
- html_id: 'scoped-in-project',
- scope: MOCK_PROJECT.name,
- scopeCategory: PROJECTS_CATEGORY,
- icon: ICON_PROJECT,
- url: MOCK_PROJECT.path,
- },
- {
- html_id: 'scoped-in-group',
- scope: MOCK_GROUP.name,
- scopeCategory: GROUPS_CATEGORY,
- icon: ICON_GROUP,
- url: MOCK_GROUP.path,
- },
- {
- html_id: 'scoped-in-all',
- description: MSG_IN_ALL_GITLAB,
- url: MOCK_ALL_PATH,
- },
-];
-
-export const MOCK_AUTOCOMPLETE_OPTIONS_RES = [
- {
- category: 'Projects',
- id: 1,
- label: 'Gitlab Org / MockProject1',
- value: 'MockProject1',
- url: 'project/1',
- },
- {
- category: 'Groups',
- id: 1,
- label: 'Gitlab Org / MockGroup1',
- value: 'MockGroup1',
- url: 'group/1',
- },
- {
- category: 'Projects',
- id: 2,
- label: 'Gitlab Org / MockProject2',
- value: 'MockProject2',
- url: 'project/2',
- },
- {
- category: 'Help',
- label: 'GitLab Help',
- url: 'help/gitlab',
- },
-];
-
-export const MOCK_AUTOCOMPLETE_OPTIONS = [
- {
- category: 'Projects',
- html_id: 'autocomplete-Projects-0',
- id: 1,
- label: 'Gitlab Org / MockProject1',
- value: 'MockProject1',
- url: 'project/1',
- },
- {
- category: 'Groups',
- html_id: 'autocomplete-Groups-1',
- id: 1,
- label: 'Gitlab Org / MockGroup1',
- value: 'MockGroup1',
- url: 'group/1',
- },
- {
- category: 'Projects',
- html_id: 'autocomplete-Projects-2',
- id: 2,
- label: 'Gitlab Org / MockProject2',
- value: 'MockProject2',
- url: 'project/2',
- },
- {
- category: 'Help',
- html_id: 'autocomplete-Help-3',
- label: 'GitLab Help',
- url: 'help/gitlab',
- },
-];
-
-export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
- {
- category: 'Groups',
- data: [
- {
- category: 'Groups',
- html_id: 'autocomplete-Groups-1',
-
- id: 1,
- label: 'Gitlab Org / MockGroup1',
- value: 'MockGroup1',
- url: 'group/1',
- },
- ],
- },
- {
- category: 'Projects',
- data: [
- {
- category: 'Projects',
- html_id: 'autocomplete-Projects-0',
-
- id: 1,
- label: 'Gitlab Org / MockProject1',
- value: 'MockProject1',
- url: 'project/1',
- },
- {
- category: 'Projects',
- html_id: 'autocomplete-Projects-2',
-
- id: 2,
- label: 'Gitlab Org / MockProject2',
- value: 'MockProject2',
- url: 'project/2',
- },
- ],
- },
- {
- category: 'Help',
- data: [
- {
- category: 'Help',
- html_id: 'autocomplete-Help-3',
-
- label: 'GitLab Help',
- url: 'help/gitlab',
- },
- ],
- },
-];
-
-export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
- {
- category: 'Groups',
- html_id: 'autocomplete-Groups-1',
- id: 1,
- label: 'Gitlab Org / MockGroup1',
- value: 'MockGroup1',
- url: 'group/1',
- },
- {
- category: 'Projects',
- html_id: 'autocomplete-Projects-0',
- id: 1,
- label: 'Gitlab Org / MockProject1',
- value: 'MockProject1',
- url: 'project/1',
- },
- {
- category: 'Projects',
- html_id: 'autocomplete-Projects-2',
- id: 2,
- label: 'Gitlab Org / MockProject2',
- value: 'MockProject2',
- url: 'project/2',
- },
- {
- category: 'Help',
- html_id: 'autocomplete-Help-3',
- label: 'GitLab Help',
- url: 'help/gitlab',
- },
-];
-
-export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_HELP = [
- {
- category: 'Help',
- data: [
- {
- html_id: 'autocomplete-Help-1',
- category: 'Help',
- label: 'Rake Tasks Help',
- url: '/help/raketasks/index',
- },
- {
- html_id: 'autocomplete-Help-2',
- category: 'Help',
- label: 'System Hooks Help',
- url: '/help/system_hooks/system_hooks',
- },
- ],
- },
-];
-
-export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_SETTINGS_HELP = [
- {
- category: 'Settings',
- data: [
- {
- html_id: 'autocomplete-Settings-0',
- category: 'Settings',
- label: 'User settings',
- url: '/-/profile',
- },
- {
- html_id: 'autocomplete-Settings-3',
- category: 'Settings',
- label: 'Admin Section',
- url: '/admin',
- },
- ],
- },
- {
- category: 'Help',
- data: [
- {
- html_id: 'autocomplete-Help-1',
- category: 'Help',
- label: 'Rake Tasks Help',
- url: '/help/raketasks/index',
- },
- {
- html_id: 'autocomplete-Help-2',
- category: 'Help',
- label: 'System Hooks Help',
- url: '/help/system_hooks/system_hooks',
- },
- ],
- },
-];
-
-export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS_2 = [
- {
- category: 'Groups',
- data: [
- {
- html_id: 'autocomplete-Groups-0',
- category: 'Groups',
- id: 148,
- label: 'Jashkenas / Test Subgroup / test-subgroup',
- url: '/jashkenas/test-subgroup/test-subgroup',
- avatar_url: '',
- },
- {
- html_id: 'autocomplete-Groups-1',
- category: 'Groups',
- id: 147,
- label: 'Jashkenas / Test Subgroup',
- url: '/jashkenas/test-subgroup',
- avatar_url: '',
- },
- ],
- },
- {
- category: 'Projects',
- data: [
- {
- html_id: 'autocomplete-Projects-2',
- category: 'Projects',
- id: 1,
- value: 'Gitlab Test',
- label: 'Gitlab Org / Gitlab Test',
- url: '/gitlab-org/gitlab-test',
- avatar_url: '/uploads/-/system/project/avatar/1/icons8-gitlab-512.png',
- },
- ],
- },
-];
diff --git a/spec/frontend/header_search/store/actions_spec.js b/spec/frontend/header_search/store/actions_spec.js
deleted file mode 100644
index 95a619ebeca..00000000000
--- a/spec/frontend/header_search/store/actions_spec.js
+++ /dev/null
@@ -1,113 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import testAction from 'helpers/vuex_action_helper';
-import * as actions from '~/header_search/store/actions';
-import * as types from '~/header_search/store/mutation_types';
-import initState from '~/header_search/store/state';
-import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import {
- MOCK_SEARCH,
- MOCK_AUTOCOMPLETE_OPTIONS_RES,
- MOCK_AUTOCOMPLETE_PATH,
- MOCK_PROJECT,
- MOCK_SEARCH_CONTEXT,
- MOCK_SEARCH_PATH,
- MOCK_MR_PATH,
- MOCK_ISSUE_PATH,
-} from '../mock_data';
-
-jest.mock('~/alert');
-
-describe('Header Search Store Actions', () => {
- let state;
- let mock;
-
- const createState = (initialState) =>
- initState({
- searchPath: MOCK_SEARCH_PATH,
- issuesPath: MOCK_ISSUE_PATH,
- mrPath: MOCK_MR_PATH,
- autocompletePath: MOCK_AUTOCOMPLETE_PATH,
- searchContext: MOCK_SEARCH_CONTEXT,
- ...initialState,
- });
-
- afterEach(() => {
- state = null;
- mock.restore();
- });
-
- describe.each`
- axiosMock | type | expectedMutations
- ${{ method: 'onGet', code: HTTP_STATUS_OK, res: MOCK_AUTOCOMPLETE_OPTIONS_RES }} | ${'success'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_SUCCESS, payload: MOCK_AUTOCOMPLETE_OPTIONS_RES }, { type: types.RECEIVE_AUTOCOMPLETE_SUCCESS, payload: MOCK_AUTOCOMPLETE_OPTIONS_RES }]}
- ${{ method: 'onGet', code: HTTP_STATUS_INTERNAL_SERVER_ERROR, res: null }} | ${'error'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_ERROR }, { type: types.RECEIVE_AUTOCOMPLETE_ERROR }]}
- `('fetchAutocompleteOptions', ({ axiosMock, type, expectedMutations }) => {
- describe(`on ${type}`, () => {
- beforeEach(() => {
- state = createState({});
- mock = new MockAdapter(axios);
- mock[axiosMock.method]().reply(axiosMock.code, axiosMock.res);
- });
- it(`should dispatch the correct mutations`, () => {
- return testAction({
- action: actions.fetchAutocompleteOptions,
- state,
- expectedMutations,
- });
- });
- });
- });
-
- describe.each`
- project | ref | fetchType | expectedPath
- ${null} | ${null} | ${null} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}`}
- ${MOCK_PROJECT} | ${null} | ${'generic'} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=${MOCK_PROJECT.id}&filter=generic`}
- ${null} | ${MOCK_PROJECT.id} | ${'generic'} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_ref=${MOCK_PROJECT.id}&filter=generic`}
- ${MOCK_PROJECT} | ${MOCK_PROJECT.id} | ${'search'} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=${MOCK_PROJECT.id}&project_ref=${MOCK_PROJECT.id}&filter=search`}
- `('autocompleteQuery', ({ project, ref, fetchType, expectedPath }) => {
- describe(`when project is ${project?.name} and project ref is ${ref}`, () => {
- beforeEach(() => {
- state = createState({
- search: MOCK_SEARCH,
- searchContext: {
- project,
- ref,
- },
- });
- });
-
- it(`should return ${expectedPath}`, () => {
- expect(actions.autocompleteQuery({ state, fetchType })).toBe(expectedPath);
- });
- });
- });
-
- describe('clearAutocomplete', () => {
- beforeEach(() => {
- state = createState({});
- });
-
- it('calls the CLEAR_AUTOCOMPLETE mutation', () => {
- return testAction({
- action: actions.clearAutocomplete,
- state,
- expectedMutations: [{ type: types.CLEAR_AUTOCOMPLETE }],
- });
- });
- });
-
- describe('setSearch', () => {
- beforeEach(() => {
- state = createState({});
- });
-
- it('calls the SET_SEARCH mutation', () => {
- return testAction({
- action: actions.setSearch,
- payload: MOCK_SEARCH,
- state,
- expectedMutations: [{ type: types.SET_SEARCH, payload: MOCK_SEARCH }],
- });
- });
- });
-});
diff --git a/spec/frontend/header_search/store/getters_spec.js b/spec/frontend/header_search/store/getters_spec.js
deleted file mode 100644
index 7a7a00178f1..00000000000
--- a/spec/frontend/header_search/store/getters_spec.js
+++ /dev/null
@@ -1,333 +0,0 @@
-import * as getters from '~/header_search/store/getters';
-import initState from '~/header_search/store/state';
-import {
- MOCK_USERNAME,
- MOCK_SEARCH_PATH,
- MOCK_ISSUE_PATH,
- MOCK_MR_PATH,
- MOCK_AUTOCOMPLETE_PATH,
- MOCK_SEARCH_CONTEXT,
- MOCK_DEFAULT_SEARCH_OPTIONS,
- MOCK_SCOPED_SEARCH_OPTIONS,
- MOCK_SCOPED_SEARCH_OPTIONS_DEF,
- MOCK_PROJECT,
- MOCK_GROUP,
- MOCK_ALL_PATH,
- MOCK_SEARCH,
- MOCK_AUTOCOMPLETE_OPTIONS,
- MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
- MOCK_SORTED_AUTOCOMPLETE_OPTIONS,
-} from '../mock_data';
-
-describe('Header Search Store Getters', () => {
- let state;
-
- const createState = (initialState) => {
- state = initState({
- searchPath: MOCK_SEARCH_PATH,
- issuesPath: MOCK_ISSUE_PATH,
- mrPath: MOCK_MR_PATH,
- autocompletePath: MOCK_AUTOCOMPLETE_PATH,
- searchContext: MOCK_SEARCH_CONTEXT,
- ...initialState,
- });
- };
-
- afterEach(() => {
- state = null;
- });
-
- describe.each`
- group | project | scope | forSnippets | codeSearch | ref | expectedPath
- ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
- ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
- ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
- ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
- `('searchQuery', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
- beforeEach(() => {
- createState({
- searchContext: {
- group,
- project,
- scope,
- for_snippets: forSnippets,
- code_search: codeSearch,
- ref,
- },
- });
- state.search = MOCK_SEARCH;
- });
-
- it(`should return ${expectedPath}`, () => {
- expect(getters.searchQuery(state)).toBe(expectedPath);
- });
- });
- });
-
- describe.each`
- group | group_metadata | project | project_metadata | expectedPath
- ${null} | ${null} | ${null} | ${null} | ${MOCK_ISSUE_PATH}
- ${{ name: 'Test Group' }} | ${{ issues_path: 'group/path' }} | ${null} | ${null} | ${'group/path'}
- ${{ name: 'Test Group' }} | ${{ issues_path: 'group/path' }} | ${{ name: 'Test Project' }} | ${{ issues_path: 'project/path' }} | ${'project/path'}
- `('scopedIssuesPath', ({ group, group_metadata, project, project_metadata, expectedPath }) => {
- describe(`when group is ${group?.name} and project is ${project?.name}`, () => {
- beforeEach(() => {
- createState({
- searchContext: {
- group,
- group_metadata,
- project,
- project_metadata,
- },
- });
- });
-
- it(`should return ${expectedPath}`, () => {
- expect(getters.scopedIssuesPath(state)).toBe(expectedPath);
- });
- });
- });
-
- describe.each`
- group | group_metadata | project | project_metadata | expectedPath
- ${null} | ${null} | ${null} | ${null} | ${MOCK_MR_PATH}
- ${{ name: 'Test Group' }} | ${{ mr_path: 'group/path' }} | ${null} | ${null} | ${'group/path'}
- ${{ name: 'Test Group' }} | ${{ mr_path: 'group/path' }} | ${{ name: 'Test Project' }} | ${{ mr_path: 'project/path' }} | ${'project/path'}
- `('scopedMRPath', ({ group, group_metadata, project, project_metadata, expectedPath }) => {
- describe(`when group is ${group?.name} and project is ${project?.name}`, () => {
- beforeEach(() => {
- createState({
- searchContext: {
- group,
- group_metadata,
- project,
- project_metadata,
- },
- });
- });
-
- it(`should return ${expectedPath}`, () => {
- expect(getters.scopedMRPath(state)).toBe(expectedPath);
- });
- });
- });
-
- describe.each`
- group | project | scope | forSnippets | codeSearch | ref | expectedPath
- ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
- ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
- ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
- ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
- `('projectUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
- beforeEach(() => {
- createState({
- searchContext: {
- group,
- project,
- scope,
- for_snippets: forSnippets,
- code_search: codeSearch,
- ref,
- },
- });
- state.search = MOCK_SEARCH;
- });
-
- it(`should return ${expectedPath}`, () => {
- expect(getters.projectUrl(state)).toBe(expectedPath);
- });
- });
- });
-
- describe.each`
- group | project | scope | forSnippets | codeSearch | ref | expectedPath
- ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
- ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
- ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
- ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
- `('groupUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
- beforeEach(() => {
- createState({
- searchContext: {
- group,
- project,
- scope,
- for_snippets: forSnippets,
- code_search: codeSearch,
- ref,
- },
- });
- state.search = MOCK_SEARCH;
- });
-
- it(`should return ${expectedPath}`, () => {
- expect(getters.groupUrl(state)).toBe(expectedPath);
- });
- });
- });
-
- describe.each`
- group | project | scope | forSnippets | codeSearch | ref | expectedPath
- ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
- ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
- ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
- ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true&search_code=true`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
- `('allUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
- beforeEach(() => {
- createState({
- searchContext: {
- group,
- project,
- scope,
- for_snippets: forSnippets,
- code_search: codeSearch,
- ref,
- },
- });
- state.search = MOCK_SEARCH;
- });
-
- it(`should return ${expectedPath}`, () => {
- expect(getters.allUrl(state)).toBe(expectedPath);
- });
- });
- });
-
- describe('defaultSearchOptions', () => {
- const mockGetters = {
- scopedIssuesPath: MOCK_ISSUE_PATH,
- scopedMRPath: MOCK_MR_PATH,
- };
-
- beforeEach(() => {
- createState();
- window.gon.current_username = MOCK_USERNAME;
- });
-
- it('returns the correct array', () => {
- expect(getters.defaultSearchOptions(state, mockGetters)).toStrictEqual(
- MOCK_DEFAULT_SEARCH_OPTIONS,
- );
- });
-
- it('returns the correct array if issues path is false', () => {
- mockGetters.scopedIssuesPath = undefined;
- expect(getters.defaultSearchOptions(state, mockGetters)).toStrictEqual(
- MOCK_DEFAULT_SEARCH_OPTIONS.slice(2, MOCK_DEFAULT_SEARCH_OPTIONS.length),
- );
- });
- });
-
- describe('scopedSearchOptions', () => {
- const mockGetters = {
- projectUrl: MOCK_PROJECT.path,
- groupUrl: MOCK_GROUP.path,
- allUrl: MOCK_ALL_PATH,
- };
-
- beforeEach(() => {
- createState({
- searchContext: {
- project: MOCK_PROJECT,
- group: MOCK_GROUP,
- },
- });
- });
-
- it('returns the correct array', () => {
- expect(getters.scopedSearchOptions(state, mockGetters)).toStrictEqual(
- MOCK_SCOPED_SEARCH_OPTIONS_DEF,
- );
- });
- });
-
- describe('autocompleteGroupedSearchOptions', () => {
- beforeEach(() => {
- createState();
- state.autocompleteOptions = MOCK_AUTOCOMPLETE_OPTIONS;
- });
-
- it('returns the correct grouped array', () => {
- expect(getters.autocompleteGroupedSearchOptions(state)).toStrictEqual(
- MOCK_GROUPED_AUTOCOMPLETE_OPTIONS,
- );
- });
- });
-
- describe.each`
- search | defaultSearchOptions | scopedSearchOptions | autocompleteGroupedSearchOptions | expectedArray
- ${null} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_DEFAULT_SEARCH_OPTIONS}
- ${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${[]} | ${MOCK_SCOPED_SEARCH_OPTIONS}
- ${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${[]} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS}
- ${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS.concat(MOCK_SORTED_AUTOCOMPLETE_OPTIONS)}
- ${1} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${[]} | ${[]} | ${[]}
- ${'('} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${[]} | ${[]} | ${[]}
- ${'t'} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS}
- ${'te'} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS}
- ${'tes'} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS.concat(MOCK_SORTED_AUTOCOMPLETE_OPTIONS)}
- `(
- 'searchOptions',
- ({
- search,
- defaultSearchOptions,
- scopedSearchOptions,
- autocompleteGroupedSearchOptions,
- expectedArray,
- }) => {
- describe(`when search is ${search} and the defaultSearchOptions${
- defaultSearchOptions.length ? '' : ' do not'
- } exist, scopedSearchOptions${
- scopedSearchOptions.length ? '' : ' do not'
- } exist, and autocompleteGroupedSearchOptions${
- autocompleteGroupedSearchOptions.length ? '' : ' do not'
- } exist`, () => {
- const mockGetters = {
- defaultSearchOptions,
- scopedSearchOptions,
- autocompleteGroupedSearchOptions,
- };
-
- beforeEach(() => {
- createState();
- state.search = search;
- });
-
- it(`should return the correct combined array`, () => {
- expect(getters.searchOptions(state, mockGetters)).toStrictEqual(expectedArray);
- });
- });
- },
- );
-});
diff --git a/spec/frontend/header_search/store/mutations_spec.js b/spec/frontend/header_search/store/mutations_spec.js
deleted file mode 100644
index e3c15ded948..00000000000
--- a/spec/frontend/header_search/store/mutations_spec.js
+++ /dev/null
@@ -1,63 +0,0 @@
-import * as types from '~/header_search/store/mutation_types';
-import mutations from '~/header_search/store/mutations';
-import createState from '~/header_search/store/state';
-import {
- MOCK_SEARCH,
- MOCK_AUTOCOMPLETE_OPTIONS_RES,
- MOCK_AUTOCOMPLETE_OPTIONS,
-} from '../mock_data';
-
-describe('Header Search Store Mutations', () => {
- let state;
-
- beforeEach(() => {
- state = createState({});
- });
-
- describe('REQUEST_AUTOCOMPLETE', () => {
- it('sets loading to true and empties autocompleteOptions array', () => {
- mutations[types.REQUEST_AUTOCOMPLETE](state);
-
- expect(state.loading).toBe(true);
- expect(state.autocompleteOptions).toStrictEqual([]);
- expect(state.autocompleteError).toBe(false);
- });
- });
-
- describe('RECEIVE_AUTOCOMPLETE_SUCCESS', () => {
- it('sets loading to false and then formats and sets the autocompleteOptions array', () => {
- mutations[types.RECEIVE_AUTOCOMPLETE_SUCCESS](state, MOCK_AUTOCOMPLETE_OPTIONS_RES);
-
- expect(state.loading).toBe(false);
- expect(state.autocompleteOptions).toStrictEqual(MOCK_AUTOCOMPLETE_OPTIONS);
- expect(state.autocompleteError).toBe(false);
- });
- });
-
- describe('RECEIVE_AUTOCOMPLETE_ERROR', () => {
- it('sets loading to false and empties autocompleteOptions array', () => {
- mutations[types.RECEIVE_AUTOCOMPLETE_ERROR](state);
-
- expect(state.loading).toBe(false);
- expect(state.autocompleteOptions).toStrictEqual([]);
- expect(state.autocompleteError).toBe(true);
- });
- });
-
- describe('CLEAR_AUTOCOMPLETE', () => {
- it('empties autocompleteOptions array', () => {
- mutations[types.CLEAR_AUTOCOMPLETE](state);
-
- expect(state.autocompleteOptions).toStrictEqual([]);
- expect(state.autocompleteError).toBe(false);
- });
- });
-
- describe('SET_SEARCH', () => {
- it('sets search to value', () => {
- mutations[types.SET_SEARCH](state, MOCK_SEARCH);
-
- expect(state.search).toBe(MOCK_SEARCH);
- });
- });
-});
diff --git a/spec/frontend/header_spec.js b/spec/frontend/header_spec.js
deleted file mode 100644
index 13c11863443..00000000000
--- a/spec/frontend/header_spec.js
+++ /dev/null
@@ -1,107 +0,0 @@
-import htmlOpenIssue from 'test_fixtures/issues/open-issue.html';
-import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import initTodoToggle, { initNavUserDropdownTracking } from '~/header';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-
-// TODO: Remove this with the removal of the old navigation.
-// See https://gitlab.com/groups/gitlab-org/-/epics/11875.
-//
-// This and ~/header will be removed. These tests no longer work due to the
-// corresponding fixtures changing for
-// https://gitlab.com/gitlab-org/gitlab/-/issues/420121.
-// eslint-disable-next-line jest/no-disabled-tests
-describe.skip('Header', () => {
- describe('Todos notification', () => {
- const todosPendingCount = '.js-todos-count';
-
- function isTodosCountHidden() {
- return document.querySelector(todosPendingCount).classList.contains('hidden');
- }
-
- function triggerToggle(newCount) {
- const event = new CustomEvent('todo:toggle', {
- detail: {
- count: newCount,
- },
- });
-
- document.dispatchEvent(event);
- }
-
- beforeEach(() => {
- initTodoToggle();
- setHTMLFixture(htmlOpenIssue);
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('should update todos-count after receiving the todo:toggle event', () => {
- triggerToggle(5);
-
- expect(document.querySelector(todosPendingCount).textContent).toEqual('5');
- });
-
- it('should hide todos-count when it is 0', () => {
- triggerToggle(0);
-
- expect(isTodosCountHidden()).toEqual(true);
- });
-
- it('should show todos-count when it is more than 0', () => {
- triggerToggle(10);
-
- expect(isTodosCountHidden()).toEqual(false);
- });
-
- describe('when todos-count is 1000', () => {
- beforeEach(() => {
- triggerToggle(1000);
- });
-
- it('should show todos-count', () => {
- expect(isTodosCountHidden()).toEqual(false);
- });
-
- it('should show 99+ for todos-count', () => {
- expect(document.querySelector(todosPendingCount).textContent).toEqual('99+');
- });
- });
- });
-
- describe('Track user dropdown open', () => {
- let trackingSpy;
-
- beforeEach(() => {
- setHTMLFixture(`
- <li class="js-nav-user-dropdown">
- <a class="js-buy-pipeline-minutes-link" data-track-action="click_buy_ci_minutes" data-track-label="free" data-track-property="user_dropdown">Buy Pipeline minutes</a>
- </li>`);
-
- trackingSpy = mockTracking(
- '_category_',
- document.querySelector('.js-nav-user-dropdown').element,
- jest.spyOn,
- );
- document.body.dataset.page = 'some:page';
-
- initNavUserDropdownTracking();
- });
-
- afterEach(() => {
- unmockTracking();
- resetHTMLFixture();
- });
-
- it('sends a tracking event when the dropdown is opened and contains Buy Pipeline minutes link', () => {
- const event = new CustomEvent('shown.bs.dropdown');
- document.querySelector('.js-nav-user-dropdown').dispatchEvent(event);
-
- expect(trackingSpy).toHaveBeenCalledWith('some:page', 'show_buy_ci_minutes', {
- label: 'free',
- property: 'user_dropdown',
- });
- });
- });
-});
diff --git a/spec/frontend/ide/components/ide_sidebar_nav_spec.js b/spec/frontend/ide/components/ide_sidebar_nav_spec.js
index 4ee24f63f76..d89891bdd41 100644
--- a/spec/frontend/ide/components/ide_sidebar_nav_spec.js
+++ b/spec/frontend/ide/components/ide_sidebar_nav_spec.js
@@ -45,7 +45,6 @@ describe('ide/components/ide_sidebar_nav', () => {
title: button.attributes('title'),
ariaLabel: button.attributes('aria-label'),
classes: button.classes(),
- qaSelector: button.attributes('data-qa-selector'),
icon: button.findComponent(GlIcon).props('name'),
tooltip: getBinding(button.element, 'tooltip').value,
};
@@ -75,7 +74,6 @@ describe('ide/components/ide_sidebar_nav', () => {
title: tab.title,
ariaLabel: tab.title,
classes: ['ide-sidebar-link', ...classes, ...(classesObj[index] || [])],
- qaSelector: `${tab.title.toLowerCase()}_tab_button`,
icon: tab.icon,
tooltip: {
container: 'body',
diff --git a/spec/frontend/ide/components/ide_status_bar_spec.js b/spec/frontend/ide/components/ide_status_bar_spec.js
index fe392a64013..eb51faaaa16 100644
--- a/spec/frontend/ide/components/ide_status_bar_spec.js
+++ b/spec/frontend/ide/components/ide_status_bar_spec.js
@@ -1,4 +1,4 @@
-import _ from 'lodash';
+import { clone } from 'lodash';
import { TEST_HOST } from 'helpers/test_constants';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import IdeStatusBar from '~/ide/components/ide_status_bar.vue';
@@ -28,7 +28,7 @@ describe('IdeStatusBar component', () => {
currentProjectId: TEST_PROJECT_ID,
projects: {
...store.state.projects,
- [TEST_PROJECT_ID]: _.clone(projectData),
+ [TEST_PROJECT_ID]: clone(projectData),
},
...state,
});
@@ -100,7 +100,7 @@ describe('IdeStatusBar component', () => {
currentMergeRequestId: TEST_MERGE_REQUEST_ID,
projects: {
[TEST_PROJECT_ID]: {
- ..._.clone(projectData),
+ ...clone(projectData),
mergeRequests: {
[TEST_MERGE_REQUEST_ID]: {
web_url: TEST_MERGE_REQUEST_URL,
diff --git a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
index 56e62829971..174e62550d5 100644
--- a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
+++ b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import IdeSidebarNav from '~/ide/components/ide_sidebar_nav.vue';
@@ -37,31 +37,26 @@ describe('ide/components/panes/collapsible_sidebar.vue', () => {
});
describe('with a tab', () => {
- let fakeView;
- let extensionTabs;
-
- beforeEach(() => {
- const FakeComponent = Vue.component(fakeComponentName, {
- render: () => null,
- });
-
- fakeView = {
- name: fakeComponentName,
- keepAlive: true,
- component: FakeComponent,
- };
-
- extensionTabs = [
- {
- show: true,
- title: fakeComponentName,
- views: [fakeView],
- icon: 'text-description',
- buttonClasses: ['button-class-1', 'button-class-2'],
- },
- ];
+ const FakeComponent = Vue.component(fakeComponentName, {
+ render: () => null,
});
+ const fakeView = {
+ name: fakeComponentName,
+ keepAlive: true,
+ component: FakeComponent,
+ };
+
+ const extensionTabs = [
+ {
+ show: true,
+ title: fakeComponentName,
+ views: [fakeView],
+ icon: 'text-description',
+ buttonClasses: ['button-class-1', 'button-class-2'],
+ },
+ ];
+
describe.each`
side
${'left'}
@@ -79,10 +74,6 @@ describe('ide/components/panes/collapsible_sidebar.vue', () => {
expect(findSidebarNav().props('side')).toBe(side);
});
- it('nothing is dispatched', () => {
- expect(store.dispatch).not.toHaveBeenCalled();
- });
-
it('when sidebar emits open, dispatch open', () => {
const view = 'lorem-view';
@@ -98,6 +89,13 @@ describe('ide/components/panes/collapsible_sidebar.vue', () => {
});
});
+ describe('when side bar is rendered initially', () => {
+ it('nothing is dispatched', () => {
+ createComponent({ extensionTabs });
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+ });
+
describe.each`
isOpen
${true}
@@ -125,25 +123,15 @@ describe('ide/components/panes/collapsible_sidebar.vue', () => {
});
describe('with initOpenView that does not exist', () => {
- beforeEach(async () => {
- createComponent({ extensionTabs, initOpenView: 'does-not-exist' });
-
- await nextTick();
- });
-
it('nothing is dispatched', () => {
+ createComponent({ extensionTabs, initOpenView: 'does-not-exist' });
expect(store.dispatch).not.toHaveBeenCalled();
});
});
describe('with initOpenView that does exist', () => {
- beforeEach(async () => {
- createComponent({ extensionTabs, initOpenView: fakeView.name });
-
- await nextTick();
- });
-
it('dispatches open with view on create', () => {
+ createComponent({ extensionTabs, initOpenView: fakeView.name });
expect(store.dispatch).toHaveBeenCalledWith('rightPane/open', fakeView);
});
});
diff --git a/spec/frontend/ide/components/pipelines/list_spec.js b/spec/frontend/ide/components/pipelines/list_spec.js
index 9c11ae9334b..9d8b3b1d32a 100644
--- a/spec/frontend/ide/components/pipelines/list_spec.js
+++ b/spec/frontend/ide/components/pipelines/list_spec.js
@@ -8,7 +8,7 @@ import JobsList from '~/ide/components/jobs/list.vue';
import List from '~/ide/components/pipelines/list.vue';
import EmptyState from '~/ide/components/pipelines/empty_state.vue';
import IDEServices from '~/ide/services';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
Vue.use(Vuex);
diff --git a/spec/frontend/ide/components/repo_commit_section_spec.js b/spec/frontend/ide/components/repo_commit_section_spec.js
index ead609421b7..3dd9ae1285d 100644
--- a/spec/frontend/ide/components/repo_commit_section_spec.js
+++ b/spec/frontend/ide/components/repo_commit_section_spec.js
@@ -71,11 +71,8 @@ describe('RepoCommitSection', () => {
createComponent();
});
- it('renders no changes text', () => {
- expect(wrapper.findComponent(EmptyState).text().trim()).toContain('No changes');
- expect(wrapper.findComponent(EmptyState).find('img').attributes('src')).toBe(
- TEST_NO_CHANGES_SVG,
- );
+ it('renders empty state component', () => {
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(true);
});
});
diff --git a/spec/frontend/ide/init_gitlab_web_ide_spec.js b/spec/frontend/ide/init_gitlab_web_ide_spec.js
index 6a5bedb0bbb..d7a16bec1c3 100644
--- a/spec/frontend/ide/init_gitlab_web_ide_spec.js
+++ b/spec/frontend/ide/init_gitlab_web_ide_spec.js
@@ -40,6 +40,9 @@ const TEST_EDITOR_FONT_SRC_URL = 'http://gitlab.test/assets/gitlab-mono/GitLabMo
const TEST_EDITOR_FONT_FORMAT = 'woff2';
const TEST_EDITOR_FONT_FAMILY = 'GitLab Mono';
+const TEST_OAUTH_CLIENT_ID = 'oauth-client-id-123abc';
+const TEST_OAUTH_CALLBACK_URL = 'https://example.com/oauth_callback';
+
describe('ide/init_gitlab_web_ide', () => {
let resolveConfirm;
@@ -231,4 +234,29 @@ describe('ide/init_gitlab_web_ide', () => {
);
});
});
+
+ describe('when oauth info is in dataset', () => {
+ beforeEach(() => {
+ findRootElement().dataset.clientId = TEST_OAUTH_CLIENT_ID;
+ findRootElement().dataset.callbackUrl = TEST_OAUTH_CALLBACK_URL;
+
+ createSubject();
+ });
+
+ it('calls start with element', () => {
+ expect(start).toHaveBeenCalledTimes(1);
+ expect(start).toHaveBeenCalledWith(
+ findRootElement(),
+ expect.objectContaining({
+ auth: {
+ type: 'oauth',
+ clientId: TEST_OAUTH_CLIENT_ID,
+ callbackUrl: TEST_OAUTH_CALLBACK_URL,
+ protectRefreshToken: true,
+ },
+ httpHeaders: undefined,
+ }),
+ );
+ });
+ });
});
diff --git a/spec/frontend/ide/lib/gitlab_web_ide/get_oauth_config_spec.js b/spec/frontend/ide/lib/gitlab_web_ide/get_oauth_config_spec.js
new file mode 100644
index 00000000000..3431068937f
--- /dev/null
+++ b/spec/frontend/ide/lib/gitlab_web_ide/get_oauth_config_spec.js
@@ -0,0 +1,16 @@
+import { getOAuthConfig } from '~/ide/lib/gitlab_web_ide/get_oauth_config';
+
+describe('~/ide/lib/gitlab_web_ide/get_oauth_config', () => {
+ it('returns undefined if no clientId found', () => {
+ expect(getOAuthConfig({})).toBeUndefined();
+ });
+
+ it('returns auth config from dataset', () => {
+ expect(getOAuthConfig({ clientId: 'test-clientId', callbackUrl: 'test-callbackUrl' })).toEqual({
+ type: 'oauth',
+ clientId: 'test-clientId',
+ callbackUrl: 'test-callbackUrl',
+ protectRefreshToken: true,
+ });
+ });
+});
diff --git a/spec/frontend/ide/mock_data.js b/spec/frontend/ide/mock_data.js
index b1f192e1d98..722f15db87d 100644
--- a/spec/frontend/ide/mock_data.js
+++ b/spec/frontend/ide/mock_data.js
@@ -14,6 +14,7 @@ export const projectData = {
commit: {
id: '123',
short_id: 'abc123de',
+ committed_date: '2019-09-13T15:37:30+0300',
},
},
},
diff --git a/spec/frontend/ide/mount_oauth_callback_spec.js b/spec/frontend/ide/mount_oauth_callback_spec.js
new file mode 100644
index 00000000000..6ac0b4e4615
--- /dev/null
+++ b/spec/frontend/ide/mount_oauth_callback_spec.js
@@ -0,0 +1,53 @@
+import { oauthCallback } from '@gitlab/web-ide';
+import { TEST_HOST } from 'helpers/test_constants';
+import { mountOAuthCallback } from '~/ide/mount_oauth_callback';
+
+jest.mock('@gitlab/web-ide');
+
+const TEST_USERNAME = 'gandalf.the.grey';
+const TEST_GITLAB_WEB_IDE_PUBLIC_PATH = 'test/webpack/assets/gitlab-web-ide/public/path';
+
+const TEST_OAUTH_CLIENT_ID = 'oauth-client-id-123abc';
+const TEST_OAUTH_CALLBACK_URL = 'https://example.com/oauth_callback';
+
+describe('~/ide/mount_oauth_callback', () => {
+ const createRootElement = () => {
+ const el = document.createElement('div');
+
+ el.id = 'ide';
+ el.dataset.clientId = TEST_OAUTH_CLIENT_ID;
+ el.dataset.callbackUrl = TEST_OAUTH_CALLBACK_URL;
+
+ document.body.append(el);
+ };
+
+ beforeEach(() => {
+ gon.current_username = TEST_USERNAME;
+ process.env.GITLAB_WEB_IDE_PUBLIC_PATH = TEST_GITLAB_WEB_IDE_PUBLIC_PATH;
+
+ createRootElement();
+ });
+
+ afterEach(() => {
+ document.body.innerHTML = '';
+ });
+
+ it('calls oauthCallback', () => {
+ expect(oauthCallback).not.toHaveBeenCalled();
+
+ mountOAuthCallback();
+
+ expect(oauthCallback).toHaveBeenCalledTimes(1);
+ expect(oauthCallback).toHaveBeenCalledWith({
+ auth: {
+ type: 'oauth',
+ callbackUrl: TEST_OAUTH_CALLBACK_URL,
+ clientId: TEST_OAUTH_CLIENT_ID,
+ protectRefreshToken: true,
+ },
+ gitlabUrl: TEST_HOST,
+ baseUrl: `${TEST_HOST}/${TEST_GITLAB_WEB_IDE_PUBLIC_PATH}`,
+ username: TEST_USERNAME,
+ });
+ });
+});
diff --git a/spec/frontend/ide/stores/modules/editor/actions_spec.js b/spec/frontend/ide/stores/modules/editor/actions_spec.js
index f006018364b..e24d54ef6da 100644
--- a/spec/frontend/ide/stores/modules/editor/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/editor/actions_spec.js
@@ -8,7 +8,7 @@ describe('~/ide/stores/modules/editor/actions', () => {
it('commits with payload', () => {
const payload = {};
- testAction(actions.updateFileEditor, payload, {}, [
+ return testAction(actions.updateFileEditor, payload, {}, [
{ type: types.UPDATE_FILE_EDITOR, payload },
]);
});
@@ -18,7 +18,7 @@ describe('~/ide/stores/modules/editor/actions', () => {
it('commits with payload', () => {
const payload = 'path/to/file.txt';
- testAction(actions.removeFileEditor, payload, {}, [
+ return testAction(actions.removeFileEditor, payload, {}, [
{ type: types.REMOVE_FILE_EDITOR, payload },
]);
});
@@ -28,7 +28,7 @@ describe('~/ide/stores/modules/editor/actions', () => {
it('commits with payload', () => {
const payload = createTriggerRenamePayload('test', 'test123');
- testAction(actions.renameFileEditor, payload, {}, [
+ return testAction(actions.renameFileEditor, payload, {}, [
{ type: types.RENAME_FILE_EDITOR, payload },
]);
});
diff --git a/spec/frontend/import/details/components/bulk_import_details_app_spec.js b/spec/frontend/import/details/components/bulk_import_details_app_spec.js
index d32afb7ddcb..18b03ed9802 100644
--- a/spec/frontend/import/details/components/bulk_import_details_app_spec.js
+++ b/spec/frontend/import/details/components/bulk_import_details_app_spec.js
@@ -1,18 +1,30 @@
import { shallowMount } from '@vue/test-utils';
+import { getParameterValues } from '~/lib/utils/url_utility';
+
import BulkImportDetailsApp from '~/import/details/components/bulk_import_details_app.vue';
+jest.mock('~/lib/utils/url_utility');
+
describe('Bulk import details app', () => {
let wrapper;
+ const mockId = 151;
+
const createComponent = () => {
wrapper = shallowMount(BulkImportDetailsApp);
};
+ beforeEach(() => {
+ getParameterValues.mockReturnValueOnce([mockId]);
+ });
+
describe('template', () => {
it('renders heading', () => {
createComponent();
- expect(wrapper.find('h1').text()).toBe('GitLab Migration details');
+ const headingText = wrapper.find('h1').text();
+
+ expect(headingText).toBe(`Items that failed to be imported for ${mockId}`);
});
});
});
diff --git a/spec/frontend/import_entities/import_groups/components/import_history_link_spec.js b/spec/frontend/import_entities/import_groups/components/import_history_link_spec.js
new file mode 100644
index 00000000000..5f530f2c3be
--- /dev/null
+++ b/spec/frontend/import_entities/import_groups/components/import_history_link_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
+
+import ImportHistoryLink from '~/import_entities/import_groups/components/import_history_link.vue';
+
+describe('import history link', () => {
+ let wrapper;
+
+ const mockHistoryPath = '/import/history';
+
+ const createComponent = ({ props } = {}) => {
+ wrapper = shallowMount(ImportHistoryLink, {
+ propsData: {
+ historyPath: mockHistoryPath,
+ ...props,
+ },
+ });
+ };
+
+ const findGlLink = () => wrapper.findComponent(GlLink);
+
+ it('renders link with href', () => {
+ const mockId = 174;
+
+ createComponent({
+ props: {
+ id: mockId,
+ },
+ });
+
+ expect(findGlLink().text()).toBe('View details');
+ expect(findGlLink().attributes('href')).toBe('/import/history?bulk_import_id=174');
+ });
+});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index 4fab22e316a..84f149b4dd5 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -9,9 +9,12 @@ import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { createAlert } from '~/alert';
import { HTTP_STATUS_OK, HTTP_STATUS_TOO_MANY_REQUESTS } from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
+
import { STATUSES } from '~/import_entities/constants';
-import { i18n, ROOT_NAMESPACE } from '~/import_entities/import_groups/constants';
+import { ROOT_NAMESPACE } from '~/import_entities/import_groups/constants';
import ImportTable from '~/import_entities/import_groups/components/import_table.vue';
+import ImportStatus from '~/import_entities/import_groups/components/import_status.vue';
+import ImportHistoryLink from '~/import_entities/import_groups/components//import_history_link.vue';
import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
@@ -39,6 +42,7 @@ describe('import table', () => {
generateFakeEntry({ id: 1, status: STATUSES.NONE }),
generateFakeEntry({ id: 2, status: STATUSES.FINISHED }),
generateFakeEntry({ id: 3, status: STATUSES.NONE }),
+ generateFakeEntry({ id: 4, status: STATUSES.FINISHED, hasFailures: true }),
];
const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
@@ -64,6 +68,7 @@ describe('import table', () => {
const findSelectionCount = () => wrapper.find('[data-test-id="selection-count"]');
const findNewPathCol = () => wrapper.find('[data-test-id="new-path-col"]');
const findUnavailableFeaturesWarning = () => wrapper.findByTestId('unavailable-features-alert');
+ const findAllImportStatuses = () => wrapper.findAllComponents(ImportStatus);
const triggerSelectAllCheckbox = (checked = true) =>
wrapper.find('thead input[type=checkbox]').setChecked(checked);
@@ -144,7 +149,7 @@ describe('import table', () => {
});
await waitForPromises();
- expect(wrapper.findComponent(GlEmptyState).props().title).toBe(i18n.NO_GROUPS_FOUND);
+ expect(wrapper.findComponent(GlEmptyState).props().title).toBe('No groups found');
});
});
@@ -161,6 +166,38 @@ describe('import table', () => {
expect(wrapper.findAll('tbody tr')).toHaveLength(FAKE_GROUPS.length);
});
+ it('renders correct import status for each group', async () => {
+ const expectedStatuses = ['Not started', 'Complete', 'Not started', 'Partially completed'];
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: FAKE_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ versionValidation: FAKE_VERSION_VALIDATION,
+ }),
+ });
+ await waitForPromises();
+
+ expect(findAllImportStatuses().wrappers.map((w) => w.text())).toEqual(expectedStatuses);
+ });
+
+ it('renders import history link for imports with id', async () => {
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: FAKE_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ versionValidation: FAKE_VERSION_VALIDATION,
+ }),
+ });
+ await waitForPromises();
+
+ const importHistoryLinks = wrapper.findAllComponents(ImportHistoryLink);
+
+ expect(importHistoryLinks).toHaveLength(2);
+ expect(importHistoryLinks.at(0).props('id')).toBe(FAKE_GROUPS[1].id);
+ expect(importHistoryLinks.at(1).props('id')).toBe(FAKE_GROUPS[3].id);
+ });
+
it('correctly maintains root namespace as last import target', async () => {
createComponent({
bulkImportSourceGroups: () => ({
@@ -260,6 +297,42 @@ describe('import table', () => {
});
});
+ describe('when importGroup query is using stale data from LocalStorageCache', () => {
+ it('displays error', async () => {
+ const mockMutationWithProgressInvalid = jest.fn().mockResolvedValue({
+ __typename: 'ClientBulkImportSourceGroup',
+ id: 1,
+ lastImportTarget: { id: 1, targetNamespace: 'root', newName: 'group1' },
+ progress: {
+ __typename: 'ClientBulkImportProgress',
+ id: null,
+ status: 'failed',
+ message: '',
+ },
+ });
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: [FAKE_GROUP],
+ pageInfo: FAKE_PAGE_INFO,
+ versionValidation: FAKE_VERSION_VALIDATION,
+ }),
+ importGroups: mockMutationWithProgressInvalid,
+ });
+
+ await waitForPromises();
+ await findRowImportDropdownAtIndex(0).trigger('click');
+ await waitForPromises();
+
+ expect(mockMutationWithProgressInvalid).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'Importing the group failed.',
+ captureError: true,
+ error: expect.any(Error),
+ });
+ });
+ });
+
it('displays error if importing group fails', async () => {
createComponent({
bulkImportSourceGroups: () => ({
@@ -276,11 +349,11 @@ describe('import table', () => {
await findRowImportDropdownAtIndex(0).trigger('click');
await waitForPromises();
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: i18n.ERROR_IMPORT,
- }),
- );
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'Importing the group failed.',
+ captureError: true,
+ error: expect.any(Error),
+ });
});
it('displays inline error if importing group reports rate limit', async () => {
@@ -302,7 +375,9 @@ describe('import table', () => {
await waitForPromises();
expect(createAlert).not.toHaveBeenCalled();
- expect(wrapper.find('tbody tr').text()).toContain(i18n.ERROR_TOO_MANY_REQUESTS);
+ expect(wrapper.find('tbody tr').text()).toContain(
+ 'Over six imports in one minute were attempted. Wait at least one minute and try again.',
+ );
});
it('displays inline error if backend returns validation error', async () => {
@@ -316,6 +391,7 @@ describe('import table', () => {
__typename: 'ClientBulkImportProgress',
id: null,
status: 'failed',
+ hasFailures: true,
message: mockValidationError,
},
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
index 540c42a2854..0976a3294c2 100644
--- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
@@ -72,6 +72,7 @@ describe('Bulk import resolvers', () => {
progress: {
id: 'DEMO',
status: 'cached',
+ hasFailures: true,
},
};
localStorageCache.get.mockReturnValueOnce(CACHED_DATA);
@@ -234,7 +235,7 @@ describe('Bulk import resolvers', () => {
data: { updateImportStatus: statusInResponse },
} = await client.mutate({
mutation: updateImportStatusMutation,
- variables: { id, status: NEW_STATUS },
+ variables: { id, status: NEW_STATUS, hasFailures: true },
});
expect(statusInResponse).toStrictEqual({
@@ -242,6 +243,7 @@ describe('Bulk import resolvers', () => {
id,
message: null,
status: NEW_STATUS,
+ hasFailures: true,
});
});
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/fixtures.js b/spec/frontend/import_entities/import_groups/graphql/fixtures.js
index 7530e9fc348..edc2d1a2381 100644
--- a/spec/frontend/import_entities/import_groups/graphql/fixtures.js
+++ b/spec/frontend/import_entities/import_groups/graphql/fixtures.js
@@ -1,7 +1,7 @@
import { STATUSES } from '~/import_entities/constants';
import { clientTypenames } from '~/import_entities/import_groups/graphql/client_factory';
-export const generateFakeEntry = ({ id, status, message, ...rest }) => ({
+export const generateFakeEntry = ({ id, status, hasFailures = false, message, ...rest }) => ({
__typename: clientTypenames.BulkImportSourceGroup,
webUrl: `https://fake.host/${id}`,
fullPath: `fake_group_${id}`,
@@ -19,6 +19,7 @@ export const generateFakeEntry = ({ id, status, message, ...rest }) => ({
__typename: clientTypenames.BulkImportProgress,
id,
status,
+ hasFailures,
message: message || '',
},
...rest,
diff --git a/spec/frontend/import_entities/import_groups/graphql/services/local_storage_cache_spec.js b/spec/frontend/import_entities/import_groups/graphql/services/local_storage_cache_spec.js
index b44a2767ad8..d1ecd47b498 100644
--- a/spec/frontend/import_entities/import_groups/graphql/services/local_storage_cache_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/services/local_storage_cache_spec.js
@@ -40,10 +40,11 @@ describe('Local storage cache', () => {
progress: {
id: JOB_ID,
status: 'original',
+ hasFailures: false,
},
});
- cache.updateStatusByJobId(JOB_ID, CHANGED_STATUS);
+ cache.updateStatusByJobId(JOB_ID, CHANGED_STATUS, true);
expect(storage.setItem).toHaveBeenCalledWith(
KEY,
@@ -52,6 +53,7 @@ describe('Local storage cache', () => {
progress: {
id: JOB_ID,
status: CHANGED_STATUS,
+ hasFailures: true,
},
},
}),
diff --git a/spec/frontend/import_entities/import_groups/utils_spec.js b/spec/frontend/import_entities/import_groups/utils_spec.js
index 2892c5c217b..3db57170ed3 100644
--- a/spec/frontend/import_entities/import_groups/utils_spec.js
+++ b/spec/frontend/import_entities/import_groups/utils_spec.js
@@ -5,7 +5,7 @@ const FINISHED_STATUSES = [STATUSES.FINISHED, STATUSES.FAILED, STATUSES.TIMEOUT]
const OTHER_STATUSES = Object.values(STATUSES).filter(
(status) => !FINISHED_STATUSES.includes(status),
);
-describe('gitlab migration status utils', () => {
+describe('Direct transfer status utils', () => {
describe('isFinished', () => {
it.each(FINISHED_STATUSES.map((s) => [s]))(
'reports group as finished when import status is %s',
diff --git a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
index 92d064846bd..056155a560f 100644
--- a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
@@ -36,6 +36,7 @@ describe('ImportProjectsTable', () => {
.filter((w) => w.props().variant === 'confirm')
.at(0);
const findImportAllModal = () => wrapper.findComponent({ ref: 'importAllModal' });
+ const findIntersectionObserver = () => wrapper.findComponent(GlIntersectionObserver);
const importAllFn = jest.fn();
const importAllModalShowFn = jest.fn();
@@ -203,13 +204,13 @@ describe('ImportProjectsTable', () => {
describe('when paginatable is set to true', () => {
const initState = {
namespaces: [{ fullPath: 'path' }],
- pageInfo: { page: 1, hasNextPage: true },
+ pageInfo: { page: 1, hasNextPage: false },
repositories: [
{ importSource: { id: 1 }, importedProject: null, importStatus: STATUSES.NONE },
],
};
- describe('with hasNextPage true', () => {
+ describe('with hasNextPage false', () => {
beforeEach(() => {
createComponent({
state: initState,
@@ -217,26 +218,14 @@ describe('ImportProjectsTable', () => {
});
});
- it('does not call fetchRepos on mount', () => {
- expect(fetchReposFn).not.toHaveBeenCalled();
- });
-
- it('renders intersection observer component', () => {
- expect(wrapper.findComponent(GlIntersectionObserver).exists()).toBe(true);
- });
-
- it('calls fetchRepos when intersection observer appears', async () => {
- wrapper.findComponent(GlIntersectionObserver).vm.$emit('appear');
-
- await nextTick();
-
- expect(fetchReposFn).toHaveBeenCalled();
+ it('does not render intersection observer component', () => {
+ expect(findIntersectionObserver().exists()).toBe(false);
});
});
- describe('with hasNextPage false', () => {
+ describe('with hasNextPage true', () => {
beforeEach(() => {
- initState.pageInfo.hasNextPage = false;
+ initState.pageInfo.hasNextPage = true;
createComponent({
state: initState,
@@ -244,8 +233,16 @@ describe('ImportProjectsTable', () => {
});
});
- it('does not render intersection observer component', () => {
- expect(wrapper.findComponent(GlIntersectionObserver).exists()).toBe(false);
+ it('renders intersection observer component', () => {
+ expect(findIntersectionObserver().exists()).toBe(true);
+ });
+
+ it('calls fetchRepos again when intersection observer appears', async () => {
+ findIntersectionObserver().vm.$emit('appear');
+
+ await nextTick();
+
+ expect(fetchReposFn).toHaveBeenCalledTimes(2);
});
});
});
diff --git a/spec/frontend/import_entities/import_projects/store/actions_spec.js b/spec/frontend/import_entities/import_projects/store/actions_spec.js
index 3b94db37801..918821dfa59 100644
--- a/spec/frontend/import_entities/import_projects/store/actions_spec.js
+++ b/spec/frontend/import_entities/import_projects/store/actions_spec.js
@@ -17,6 +17,7 @@ import {
SET_PAGE,
SET_FILTER,
SET_PAGE_CURSORS,
+ SET_HAS_NEXT_PAGE,
} from '~/import_entities/import_projects/store/mutation_types';
import state from '~/import_entities/import_projects/store/state';
import axios from '~/lib/utils/axios_utils';
@@ -143,6 +144,44 @@ describe('import_projects store actions', () => {
);
});
});
+
+ describe('when provider is BITBUCKET_SERVER', () => {
+ beforeEach(() => {
+ localState.provider = PROVIDERS.BITBUCKET_SERVER;
+ });
+
+ describe.each`
+ reposLength | expectedHasNextPage
+ ${0} | ${false}
+ ${12} | ${false}
+ ${20} | ${false}
+ ${25} | ${true}
+ `('when reposLength is $reposLength', ({ reposLength, expectedHasNextPage }) => {
+ beforeEach(() => {
+ payload.provider_repos = Array(reposLength).fill({});
+
+ mock.onGet(MOCK_ENDPOINT).reply(HTTP_STATUS_OK, payload);
+ });
+
+ it('commits SET_HAS_NEXT_PAGE', () => {
+ return testAction(
+ fetchRepos,
+ null,
+ localState,
+ [
+ { type: REQUEST_REPOS },
+ { type: SET_PAGE, payload: 1 },
+ { type: SET_HAS_NEXT_PAGE, payload: expectedHasNextPage },
+ {
+ type: RECEIVE_REPOS_SUCCESS,
+ payload: convertObjectPropsToCamelCase(payload, { deep: true }),
+ },
+ ],
+ [],
+ );
+ });
+ });
+ });
});
it('commits REQUEST_REPOS, RECEIVE_REPOS_ERROR mutations on an unsuccessful request', () => {
diff --git a/spec/frontend/import_entities/import_projects/store/mutations_spec.js b/spec/frontend/import_entities/import_projects/store/mutations_spec.js
index 07d247630cc..90053f79bdf 100644
--- a/spec/frontend/import_entities/import_projects/store/mutations_spec.js
+++ b/spec/frontend/import_entities/import_projects/store/mutations_spec.js
@@ -332,6 +332,16 @@ describe('import_projects store mutations', () => {
});
});
+ describe(`${types.SET_HAS_NEXT_PAGE}`, () => {
+ it('sets hasNextPage in pageInfo', () => {
+ const NEW_HAS_NEXT_PAGE = true;
+ state = { pageInfo: { hasNextPage: false } };
+
+ mutations[types.SET_HAS_NEXT_PAGE](state, NEW_HAS_NEXT_PAGE);
+ expect(state.pageInfo.hasNextPage).toBe(NEW_HAS_NEXT_PAGE);
+ });
+ });
+
describe(`${types.CANCEL_IMPORT_SUCCESS}`, () => {
const payload = { repoId: 1 };
diff --git a/spec/frontend/integrations/edit/components/dynamic_field_spec.js b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
index 95d15eb2c00..bf9a77074f4 100644
--- a/spec/frontend/integrations/edit/components/dynamic_field_spec.js
+++ b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
@@ -169,7 +169,7 @@ describe('DynamicField', () => {
expect(findGlFormInput().exists()).toBe(true);
expect(findGlFormInput().attributes()).toMatchObject({
type: 'text',
- id: 'service_project_url',
+ id: 'service-project_url',
name: 'service[project_url]',
placeholder: mockField.placeholder,
required: expect.any(String),
diff --git a/spec/frontend/integrations/edit/components/integration_form_actions_spec.js b/spec/frontend/integrations/edit/components/integration_form_actions_spec.js
index e95e30a1899..d7ee31cc857 100644
--- a/spec/frontend/integrations/edit/components/integration_form_actions_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_actions_spec.js
@@ -28,7 +28,7 @@ describe('IntegrationFormActions', () => {
const findConfirmationModal = () => wrapper.findComponent(ConfirmationModal);
const findResetConfirmationModal = () => wrapper.findComponent(ResetConfirmationModal);
const findResetButton = () => wrapper.findByTestId('reset-button');
- const findSaveButton = () => wrapper.findByTestId('save-button');
+ const findSaveButton = () => wrapper.findByTestId('save-changes-button');
const findTestButton = () => wrapper.findByTestId('test-button');
const findCancelButton = () => wrapper.findByTestId('cancel-button');
diff --git a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
index a038b63d28c..08f758c1382 100644
--- a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
@@ -1,11 +1,15 @@
import { GlFormCheckbox } from '@gitlab/ui';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-
import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_fields.vue';
+Vue.use(Vuex);
+
describe('JiraTriggerFields', () => {
let wrapper;
+ let store;
const defaultProps = {
initialTriggerCommit: false,
@@ -14,12 +18,16 @@ describe('JiraTriggerFields', () => {
};
const createComponent = (props, isInheriting = false) => {
- wrapper = mountExtended(JiraTriggerFields, {
- propsData: { ...defaultProps, ...props },
- computed: {
+ store = new Vuex.Store({
+ getters: {
isInheriting: () => isInheriting,
},
});
+
+ wrapper = mountExtended(JiraTriggerFields, {
+ propsData: { ...defaultProps, ...props },
+ store,
+ });
};
const findCommentSettings = () => wrapper.findByTestId('comment-settings');
diff --git a/spec/frontend/integrations/edit/components/trigger_field_spec.js b/spec/frontend/integrations/edit/components/trigger_field_spec.js
index b3d6784959f..1dad3b27618 100644
--- a/spec/frontend/integrations/edit/components/trigger_field_spec.js
+++ b/spec/frontend/integrations/edit/components/trigger_field_spec.js
@@ -1,12 +1,17 @@
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
import { GlFormCheckbox, GlFormInput } from '@gitlab/ui';
import TriggerField from '~/integrations/edit/components/trigger_field.vue';
import { integrationTriggerEventTitles } from '~/integrations/constants';
+Vue.use(Vuex);
+
describe('TriggerField', () => {
let wrapper;
+ let store;
const defaultProps = {
event: { name: 'push_events' },
@@ -15,12 +20,16 @@ describe('TriggerField', () => {
const mockField = { name: 'push_channel' };
const createComponent = ({ props = {}, isInheriting = false } = {}) => {
- wrapper = shallowMount(TriggerField, {
- propsData: { ...defaultProps, ...props },
- computed: {
+ store = new Vuex.Store({
+ getters: {
isInheriting: () => isInheriting,
},
});
+
+ wrapper = shallowMount(TriggerField, {
+ propsData: { ...defaultProps, ...props },
+ store,
+ });
};
const findGlFormCheckbox = () => wrapper.findComponent(GlFormCheckbox);
diff --git a/spec/frontend/integrations/edit/components/trigger_fields_spec.js b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
index defa02aefd2..97ac01e2f26 100644
--- a/spec/frontend/integrations/edit/components/trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
@@ -1,23 +1,32 @@
import { GlFormGroup, GlFormCheckbox, GlFormInput } from '@gitlab/ui';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { placeholderForType } from 'jh_else_ce/integrations/constants';
-
import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
+Vue.use(Vuex);
+
describe('TriggerFields', () => {
let wrapper;
+ let store;
const defaultProps = {
type: 'slack',
};
const createComponent = (props, isInheriting = false) => {
- wrapper = mountExtended(TriggerFields, {
- propsData: { ...defaultProps, ...props },
- computed: {
+ store = new Vuex.Store({
+ getters: {
isInheriting: () => isInheriting,
},
});
+
+ wrapper = mountExtended(TriggerFields, {
+ propsData: { ...defaultProps, ...props },
+ store,
+ });
};
const findTriggerLabel = () => wrapper.findByTestId('trigger-fields-group').find('label');
diff --git a/spec/frontend/invite_members/components/invite_groups_modal_spec.js b/spec/frontend/invite_members/components/invite_groups_modal_spec.js
index 4136de75545..358d70d8117 100644
--- a/spec/frontend/invite_members/components/invite_groups_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_groups_modal_spec.js
@@ -77,6 +77,16 @@ describe('InviteGroupsModal', () => {
const clickInviteButton = emitClickFromModal('invite-modal-submit');
const clickCancelButton = emitClickFromModal('invite-modal-cancel');
+ describe('passes correct props to InviteModalBase', () => {
+ it('set accessLevel', () => {
+ createInviteGroupToProjectWrapper();
+
+ expect(findBase().props('accessLevels')).toMatchObject({
+ validRoles: propsData.accessLevels,
+ });
+ });
+ });
+
describe('displaying the correct introText and form group description', () => {
describe('when inviting to a project', () => {
it('includes the correct type, and formatted intro text', () => {
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 19b7fad5fc8..ad3174b8946 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -128,6 +128,7 @@ describe('InviteMembersModal', () => {
});
const findModal = () => wrapper.findComponent(GlModal);
+ const findBase = () => wrapper.findComponent(InviteModalBase);
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
const findEmptyInvitesAlert = () => wrapper.findByTestId('empty-invites-alert');
const findMemberErrorAlert = () => wrapper.findByTestId('alert-member-error');
@@ -168,6 +169,22 @@ describe('InviteMembersModal', () => {
await nextTick();
};
+ describe('passes correct props to InviteModalBase', () => {
+ it('set defaultMemberRoleId', () => {
+ createInviteMembersToProjectWrapper();
+
+ expect(findBase().props('defaultMemberRoleId')).toBeNull();
+ });
+
+ it('set accessLevel', () => {
+ createInviteMembersToProjectWrapper();
+
+ expect(findBase().props('accessLevels')).toMatchObject({
+ validRoles: propsData.accessLevels,
+ });
+ });
+ });
+
describe('rendering with tracking considerations', () => {
describe('when inviting to a project', () => {
describe('when inviting members', () => {
diff --git a/spec/frontend/invite_members/components/invite_members_trigger_spec.js b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
index 58c40a49b3c..f14d24538d8 100644
--- a/spec/frontend/invite_members/components/invite_members_trigger_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
@@ -4,7 +4,6 @@ import InviteMembersTrigger from '~/invite_members/components/invite_members_tri
import eventHub from '~/invite_members/event_hub';
import {
TRIGGER_ELEMENT_BUTTON,
- TRIGGER_DEFAULT_QA_SELECTOR,
TRIGGER_ELEMENT_WITH_EMOJI,
TRIGGER_ELEMENT_DROPDOWN_WITH_EMOJI,
TRIGGER_ELEMENT_DISCLOSURE_DROPDOWN,
@@ -66,18 +65,6 @@ describe.each(triggerItems)('with triggerElement as %s', (triggerItem) => {
expect(findButton().text()).toBe(displayText);
});
-
- it('uses the default qa selector value', () => {
- createComponent();
-
- expect(findButton().attributes('data-qa-selector')).toBe(TRIGGER_DEFAULT_QA_SELECTOR);
- });
-
- it('sets the qa selector value', () => {
- createComponent({ qaSelector: '_qaSelector_' });
-
- expect(findButton().attributes('data-qa-selector')).toBe('_qaSelector_');
- });
});
describe('clicking the link', () => {
diff --git a/spec/frontend/invite_members/components/invite_modal_base_spec.js b/spec/frontend/invite_members/components/invite_modal_base_spec.js
index e70c83a424e..c26d1d921a5 100644
--- a/spec/frontend/invite_members/components/invite_modal_base_spec.js
+++ b/spec/frontend/invite_members/components/invite_modal_base_spec.js
@@ -1,5 +1,5 @@
import {
- GlFormSelect,
+ GlCollapsibleListbox,
GlDatepicker,
GlFormGroup,
GlLink,
@@ -7,9 +7,14 @@ import {
GlModal,
GlIcon,
} from '@gitlab/ui';
+import { nextTick } from 'vue';
import { stubComponent } from 'helpers/stub_component';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import {
+ mountExtended,
+ shallowMountExtended,
+ extendedWrapper,
+} from 'helpers/vue_test_utils_helper';
import InviteModalBase from '~/invite_members/components/invite_modal_base.vue';
import ContentTransition from '~/vue_shared/components/content_transition.vue';
@@ -31,7 +36,7 @@ describe('InviteModalBase', () => {
? {}
: {
ContentTransition,
- GlFormSelect: true,
+ GlCollapsibleListbox: true,
GlSprintf,
GlFormGroup: stubComponent(GlFormGroup, {
props: ['state', 'invalidFeedback'],
@@ -41,6 +46,7 @@ describe('InviteModalBase', () => {
wrapper = mountFn(InviteModalBase, {
propsData: {
...propsData,
+ accessLevels: { validRoles: propsData.accessLevels },
...props,
},
stubs: {
@@ -54,8 +60,8 @@ describe('InviteModalBase', () => {
});
};
- const findFormSelect = () => wrapper.findComponent(GlFormSelect);
- const findFormSelectOptions = () => findFormSelect().findAllComponents('option');
+ const findCollapsibleListbox = () => extendedWrapper(wrapper.findComponent(GlCollapsibleListbox));
+ const findCollapsibleListboxOptions = () => findCollapsibleListbox().findAllByRole('option');
const findDatepicker = () => wrapper.findComponent(GlDatepicker);
const findLink = () => wrapper.findComponent(GlLink);
const findIcon = () => wrapper.findComponent(GlIcon);
@@ -91,7 +97,6 @@ describe('InviteModalBase', () => {
const actionButton = findActionButton();
expect(actionButton.text()).toBe(INVITE_BUTTON_TEXT);
- expect(actionButton.attributes('data-qa-selector')).toBe('invite_button');
expect(actionButton.props()).toMatchObject({
variant: 'confirm',
@@ -103,17 +108,47 @@ describe('InviteModalBase', () => {
describe('rendering the access levels dropdown', () => {
beforeEach(() => {
createComponent({
+ props: { isLoadingRoles: true },
mountFn: mountExtended,
});
});
+ it('passes `isLoadingRoles` prop to the dropdown', () => {
+ expect(findCollapsibleListbox().props('loading')).toBe(true);
+ });
+
it('sets the default dropdown text to the default access level name', () => {
- expect(findFormSelect().exists()).toBe(true);
- expect(findFormSelect().element.value).toBe('10');
+ expect(findCollapsibleListbox().exists()).toBe(true);
+ const option = findCollapsibleListbox().find('[aria-selected]');
+ expect(option.text()).toBe('Reporter');
+ });
+
+ it('updates the selection base on changes in the dropdown', async () => {
+ wrapper.setProps({ accessLevels: { validRoles: [] } });
+ expect(findCollapsibleListbox().props('selected')).not.toHaveLength(0);
+ await nextTick();
+
+ expect(findCollapsibleListboxOptions()).toHaveLength(0);
+ expect(findCollapsibleListbox().props('selected')).toHaveLength(0);
+ });
+
+ it('reset the dropdown to the default option', async () => {
+ const developerOption = findCollapsibleListboxOptions().at(2);
+ await developerOption.trigger('click');
+
+ let option;
+ option = findCollapsibleListbox().find('[aria-selected]');
+ expect(option.text()).toBe('Developer');
+
+ // Reset the dropdown by clicking cancel button
+ await findCancelButton().trigger('click');
+
+ option = findCollapsibleListbox().find('[aria-selected]');
+ expect(option.text()).toBe('Reporter');
});
it('renders dropdown items for each accessLevel', () => {
- expect(findFormSelectOptions()).toHaveLength(5);
+ expect(findCollapsibleListboxOptions()).toHaveLength(5);
});
});
@@ -211,7 +246,7 @@ describe('InviteModalBase', () => {
it('renders correct blocks', () => {
expect(findIcon().exists()).toBe(false);
expect(findDisabledInput().exists()).toBe(false);
- expect(findFormSelect().exists()).toBe(true);
+ expect(findCollapsibleListbox().exists()).toBe(true);
expect(findDatepicker().exists()).toBe(true);
expect(wrapper.findComponent(GlModal).text()).toMatch(textRegex);
});
diff --git a/spec/frontend/invite_members/components/members_token_select_spec.js b/spec/frontend/invite_members/components/members_token_select_spec.js
index a4b8a8b0197..a2b21367388 100644
--- a/spec/frontend/invite_members/components/members_token_select_spec.js
+++ b/spec/frontend/invite_members/components/members_token_select_spec.js
@@ -6,23 +6,32 @@ import waitForPromises from 'helpers/wait_for_promises';
import * as UserApi from '~/api/user_api';
import MembersTokenSelect from '~/invite_members/components/members_token_select.vue';
import { VALID_TOKEN_BACKGROUND, INVALID_TOKEN_BACKGROUND } from '~/invite_members/constants';
+import * as Sentry from '~/sentry/sentry_browser_wrapper';
const label = 'testgroup';
const placeholder = 'Search for a member';
+const rootGroupId = '31';
const user1 = { id: 1, name: 'John Smith', username: 'one_1', avatar_url: '' };
const user2 = { id: 2, name: 'Jane Doe', username: 'two_2', avatar_url: '' };
const allUsers = [user1, user2];
+const handleEnterSpy = jest.fn();
-const createComponent = (props) => {
+const createComponent = (props = {}, glFeatures = {}) => {
return shallowMount(MembersTokenSelect, {
propsData: {
ariaLabelledby: label,
invalidMembers: {},
placeholder,
+ rootGroupId,
...props,
},
+ provide: { glFeatures },
stubs: {
- GlTokenSelector: stubComponent(GlTokenSelector),
+ GlTokenSelector: stubComponent(GlTokenSelector, {
+ methods: {
+ handleEnter: handleEnterSpy,
+ },
+ }),
},
});
};
@@ -84,23 +93,11 @@ describe('MembersTokenSelect', () => {
wrapper = createComponent();
});
- describe('when input is focused for the first time (modal auto-focus)', () => {
- it('does not call the API', async () => {
- findTokenSelector().vm.$emit('focus');
-
- await waitForPromises();
-
- expect(UserApi.getUsers).not.toHaveBeenCalled();
- });
- });
-
describe('when input is manually focused', () => {
it('calls the API and sets dropdown items as request result', async () => {
const tokenSelector = findTokenSelector();
tokenSelector.vm.$emit('focus');
- tokenSelector.vm.$emit('blur');
- tokenSelector.vm.$emit('focus');
await waitForPromises();
@@ -173,6 +170,29 @@ describe('MembersTokenSelect', () => {
});
});
});
+
+ describe('when API search fails', () => {
+ beforeEach(() => {
+ jest.spyOn(Sentry, 'captureException');
+ jest.spyOn(UserApi, 'getUsers').mockRejectedValue('error');
+ });
+
+ it('reports to sentry', async () => {
+ tokenSelector.vm.$emit('text-input', 'Den');
+
+ await waitForPromises();
+
+ expect(Sentry.captureException).toHaveBeenCalledWith('error');
+ });
+ });
+
+ it('allows tab to function as enter', () => {
+ tokenSelector.vm.$emit('text-input', 'username');
+
+ tokenSelector.vm.$emit('keydown', new KeyboardEvent('keydown', { key: 'Tab' }));
+
+ expect(handleEnterSpy).toHaveBeenCalled();
+ });
});
describe('when user is selected', () => {
@@ -215,31 +235,45 @@ describe('MembersTokenSelect', () => {
});
});
- describe('when component is mounted for a group using a saml provider', () => {
+ describe('when component is mounted for a group using a SAML provider', () => {
const searchParam = 'name';
- const samlProviderId = 123;
- let resolveApiRequest;
beforeEach(() => {
- jest.spyOn(UserApi, 'getUsers').mockImplementation(
- () =>
- new Promise((resolve) => {
- resolveApiRequest = resolve;
- }),
- );
+ jest.spyOn(UserApi, 'getGroupUsers').mockResolvedValue({ data: allUsers });
- wrapper = createComponent({ filterId: samlProviderId, usersFilter: 'saml_provider_id' });
+ wrapper = createComponent({ usersFilter: 'saml_provider_id' }, { groupUserSaml: true });
findTokenSelector().vm.$emit('text-input', searchParam);
});
- it('calls the API with the saml provider ID param', () => {
- resolveApiRequest({ data: allUsers });
-
- expect(UserApi.getUsers).toHaveBeenCalledWith(searchParam, {
+ it('calls the group API with correct parameters', () => {
+ expect(UserApi.getGroupUsers).toHaveBeenCalledWith(searchParam, rootGroupId, {
active: true,
- without_project_bots: true,
- saml_provider_id: samlProviderId,
+ include_saml_users: true,
+ include_service_accounts: true,
+ });
+ });
+ });
+
+ describe('when group_user_saml feature flag is disabled', () => {
+ describe('when component is mounted for a group using a SAML provider', () => {
+ const searchParam = 'name';
+ const samlProviderId = 123;
+
+ beforeEach(() => {
+ jest.spyOn(UserApi, 'getUsers').mockResolvedValue({ data: allUsers });
+
+ wrapper = createComponent({ filterId: samlProviderId, usersFilter: 'saml_provider_id' });
+
+ findTokenSelector().vm.$emit('text-input', searchParam);
+ });
+
+ it('calls the API with the saml provider ID param', () => {
+ expect(UserApi.getUsers).toHaveBeenCalledWith(searchParam, {
+ active: true,
+ without_project_bots: true,
+ saml_provider_id: samlProviderId,
+ });
});
});
});
diff --git a/spec/frontend/invite_members/mock_data/member_modal.js b/spec/frontend/invite_members/mock_data/member_modal.js
index 8cde13bf69c..0c0e669b894 100644
--- a/spec/frontend/invite_members/mock_data/member_modal.js
+++ b/spec/frontend/invite_members/mock_data/member_modal.js
@@ -40,6 +40,7 @@ export const user6 = {
export const postData = {
user_id: `${user1.id},${user2.id}`,
access_level: propsData.defaultAccessLevel,
+ member_role_id: null,
expires_at: undefined,
invite_source: inviteSource,
format: 'json',
@@ -47,6 +48,7 @@ export const postData = {
export const emailPostData = {
access_level: propsData.defaultAccessLevel,
+ member_role_id: null,
expires_at: undefined,
email: `${user3.name}`,
invite_source: inviteSource,
@@ -55,6 +57,7 @@ export const emailPostData = {
export const singleUserPostData = {
access_level: propsData.defaultAccessLevel,
+ member_role_id: null,
expires_at: undefined,
user_id: `${user1.id}`,
email: `${user3.name}`,
diff --git a/spec/frontend/invite_members/mock_data/modal_base.js b/spec/frontend/invite_members/mock_data/modal_base.js
index 565e8d4df1e..c44e890da3d 100644
--- a/spec/frontend/invite_members/mock_data/modal_base.js
+++ b/spec/frontend/invite_members/mock_data/modal_base.js
@@ -3,7 +3,7 @@ export const propsData = {
modalId: '_modal_id_',
name: '_name_',
accessLevels: { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 },
- defaultAccessLevel: 10,
+ defaultAccessLevel: 20,
helpLink: 'https://example.com',
labelIntroText: '_label_intro_text_',
labelSearchField: '_label_search_field_',
diff --git a/spec/frontend/issuable/popover/components/mr_popover_spec.js b/spec/frontend/issuable/popover/components/mr_popover_spec.js
index 4ed783da853..80b04c05524 100644
--- a/spec/frontend/issuable/popover/components/mr_popover_spec.js
+++ b/spec/frontend/issuable/popover/components/mr_popover_spec.js
@@ -6,7 +6,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import MRPopover from '~/issuable/popover/components/mr_popover.vue';
import mergeRequestQuery from '~/issuable/popover/queries/merge_request.query.graphql';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
describe('MR Popover', () => {
let wrapper;
diff --git a/spec/frontend/issues/dashboard/components/index_spec.js b/spec/frontend/issues/dashboard/components/index_spec.js
new file mode 100644
index 00000000000..51cb5c0acf6
--- /dev/null
+++ b/spec/frontend/issues/dashboard/components/index_spec.js
@@ -0,0 +1,18 @@
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { mountIssuesDashboardApp } from '~/issues/dashboard';
+
+describe('IssueDashboardRoot', () => {
+ beforeEach(() => {
+ setHTMLFixture(
+ '<div class="js-issues-dashboard" data-has-issue-date-filter-feature="true"></div>',
+ );
+ });
+
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
+ it('mounts without errors and vue warnings', async () => {
+ await expect(mountIssuesDashboardApp()).resolves.toBeTruthy();
+ });
+});
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index 6bd952cd215..b432a29ee5c 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -19,6 +19,7 @@ import {
getIssuesCountsQueryResponse,
getIssuesQueryEmptyResponse,
getIssuesQueryResponse,
+ groupedFilteredTokens,
locationSearch,
setSortPreferenceMutationResponse,
setSortPreferenceMutationResponseWithErrors,
@@ -507,6 +508,13 @@ describe('CE IssuesListApp component', () => {
});
describe('filter tokens', () => {
+ it('groups url params of assignee and author', () => {
+ setWindowLocation(locationSearch);
+ wrapper = mountComponent({ provide: { glFeatures: { groupMultiSelectTokens: true } } });
+
+ expect(findIssuableList().props('initialFilterValue')).toEqual(groupedFilteredTokens);
+ });
+
it('is set from the url params', () => {
setWindowLocation(locationSearch);
wrapper = mountComponent();
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index b9a8bc171db..e387c924418 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -231,19 +231,33 @@ export const locationSearchWithSpecialValues = [
'health_status=None',
].join('&');
-export const filteredTokens = [
+const makeFilteredTokens = ({ grouped }) => [
{ type: FILTERED_SEARCH_TERM, value: { data: 'find issues', operator: 'undefined' } },
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'homer', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_AUTHOR, value: { data: 'marge', operator: OPERATOR_NOT } },
- { type: TOKEN_TYPE_AUTHOR, value: { data: 'burns', operator: OPERATOR_OR } },
- { type: TOKEN_TYPE_AUTHOR, value: { data: 'smithers', operator: OPERATOR_OR } },
+ ...(grouped
+ ? [
+ { type: TOKEN_TYPE_AUTHOR, value: { data: ['marge'], operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_AUTHOR, value: { data: ['burns', 'smithers'], operator: OPERATOR_OR } },
+ ]
+ : [
+ { type: TOKEN_TYPE_AUTHOR, value: { data: 'marge', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_AUTHOR, value: { data: 'burns', operator: OPERATOR_OR } },
+ { type: TOKEN_TYPE_AUTHOR, value: { data: 'smithers', operator: OPERATOR_OR } },
+ ]),
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'bart', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lisa', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: '5', operator: OPERATOR_IS } },
- { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'patty', operator: OPERATOR_NOT } },
- { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'selma', operator: OPERATOR_NOT } },
- { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'carl', operator: OPERATOR_OR } },
- { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lenny', operator: OPERATOR_OR } },
+ ...(grouped
+ ? [
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: ['patty', 'selma'], operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: ['carl', 'lenny'], operator: OPERATOR_OR } },
+ ]
+ : [
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'patty', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'selma', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'carl', operator: OPERATOR_OR } },
+ { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lenny', operator: OPERATOR_OR } },
+ ]),
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 3', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 4', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 20', operator: OPERATOR_NOT } },
@@ -279,6 +293,9 @@ export const filteredTokens = [
{ type: TOKEN_TYPE_HEALTH, value: { data: 'onTrack', operator: OPERATOR_NOT } },
];
+export const filteredTokens = makeFilteredTokens({ grouped: false });
+export const groupedFilteredTokens = makeFilteredTokens({ grouped: true });
+
export const filteredTokensWithSpecialValues = [
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: '123', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'bart', operator: OPERATOR_IS } },
diff --git a/spec/frontend/issues/list/utils_spec.js b/spec/frontend/issues/list/utils_spec.js
index c14dcf96c98..e13a69b7444 100644
--- a/spec/frontend/issues/list/utils_spec.js
+++ b/spec/frontend/issues/list/utils_spec.js
@@ -5,6 +5,7 @@ import {
apiParamsWithSpecialValues,
filteredTokens,
filteredTokensWithSpecialValues,
+ groupedFilteredTokens,
locationSearch,
locationSearchWithSpecialValues,
urlParams,
@@ -19,6 +20,7 @@ import {
getInitialPageParams,
getSortKey,
getSortOptions,
+ groupMultiSelectFilterTokens,
isSortKey,
} from '~/issues/list/utils';
import { DEFAULT_PAGE_SIZE } from '~/vue_shared/issuable/list/constants';
@@ -163,3 +165,14 @@ describe('convertToSearchQuery', () => {
expect(convertToSearchQuery(filteredTokens)).toBe('find issues');
});
});
+
+describe('groupMultiSelectFilterTokens', () => {
+ it('groups multiSelect filter tokens with || and != operators', () => {
+ expect(
+ groupMultiSelectFilterTokens(filteredTokens, [
+ { type: 'assignee', multiSelect: true },
+ { type: 'author', multiSelect: true },
+ ]),
+ ).toEqual(groupedFilteredTokens);
+ });
+});
diff --git a/spec/frontend/issues/show/components/app_spec.js b/spec/frontend/issues/show/components/app_spec.js
index 8999952c54c..f9ce7c20ad6 100644
--- a/spec/frontend/issues/show/components/app_spec.js
+++ b/spec/frontend/issues/show/components/app_spec.js
@@ -94,6 +94,10 @@ describe('Issuable output', () => {
axiosMock.onPut().reply(HTTP_STATUS_OK, putRequest);
});
+ afterEach(() => {
+ document.body.classList?.remove('issuable-sticky-header-visible');
+ });
+
describe('update', () => {
beforeEach(async () => {
await createComponent();
@@ -334,6 +338,29 @@ describe('Issuable output', () => {
});
},
);
+
+ describe('document body class', () => {
+ beforeEach(async () => {
+ await createComponent({ props: { canUpdate: false } });
+ });
+
+ it('adds the css class to the document body', () => {
+ wrapper.findComponent(StickyHeader).vm.$emit('show');
+ expect(document.body.classList?.contains('issuable-sticky-header-visible')).toBe(true);
+ });
+
+ it('removes the css class from the document body', () => {
+ wrapper.findComponent(StickyHeader).vm.$emit('show');
+ wrapper.findComponent(StickyHeader).vm.$emit('hide');
+ expect(document.body.classList?.contains('issuable-sticky-header-visible')).toBe(false);
+ });
+
+ it('removes the css class from the document body when unmounting', () => {
+ wrapper.findComponent(StickyHeader).vm.$emit('show');
+ wrapper.vm.$destroy();
+ expect(document.body.classList?.contains('issuable-sticky-header-visible')).toBe(false);
+ });
+ });
});
describe('Composable description component', () => {
diff --git a/spec/frontend/issues/show/components/header_actions_spec.js b/spec/frontend/issues/show/components/header_actions_spec.js
index d0c2a1a5f1b..33fd9d39feb 100644
--- a/spec/frontend/issues/show/components/header_actions_spec.js
+++ b/spec/frontend/issues/show/components/header_actions_spec.js
@@ -6,13 +6,13 @@ import {
GlModal,
GlButton,
} from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { mockTracking } from 'helpers/tracking_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { createAlert, VARIANT_SUCCESS } from '~/alert';
import {
STATUS_CLOSED,
@@ -132,11 +132,11 @@ describe('HeaderActions component', () => {
const findDesktopDropdownItems = () =>
findDesktopDropdown().findAllComponents(GlDisclosureDropdownItem);
const findAbuseCategorySelector = () => wrapper.findComponent(AbuseCategorySelector);
- const findReportAbuseButton = () => wrapper.find(`[data-testid="report-abuse-item"]`);
- const findNotificationWidget = () => wrapper.find(`[data-testid="notification-toggle"]`);
- const findLockIssueWidget = () => wrapper.find(`[data-testid="lock-issue-toggle"]`);
- const findCopyRefenceDropdownItem = () => wrapper.find(`[data-testid="copy-reference"]`);
- const findCopyEmailItem = () => wrapper.find(`[data-testid="copy-email"]`);
+ const findReportAbuseButton = () => wrapper.findByTestId('report-abuse-item');
+ const findNotificationWidget = () => wrapper.findByTestId('notification-toggle');
+ const findLockIssueWidget = () => wrapper.findByTestId('lock-issue-toggle');
+ const findCopyRefenceDropdownItem = () => wrapper.findByTestId('copy-reference');
+ const findCopyEmailItem = () => wrapper.findByTestId('copy-email');
const findModal = () => wrapper.findComponent(GlModal);
@@ -176,7 +176,7 @@ describe('HeaderActions component', () => {
window.gon.current_user_id = 1;
}
- return shallowMount(HeaderActions, {
+ return shallowMountExtended(HeaderActions, {
apolloProvider: createMockApollo(handlers),
store,
provide: {
@@ -625,6 +625,10 @@ describe('HeaderActions component', () => {
expect(toast).toHaveBeenCalledWith('Reference copied');
});
+
+ it('contains copy reference class', () => {
+ expect(findCopyRefenceDropdownItem().classes()).toContain('js-copy-reference');
+ });
});
});
diff --git a/spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_gitlab_multiversion/setup_instructions_spec.js b/spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_gitlab_multiversion/setup_instructions_spec.js
index efe89100e90..74c998bfc51 100644
--- a/spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_gitlab_multiversion/setup_instructions_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_gitlab_multiversion/setup_instructions_spec.js
@@ -6,6 +6,7 @@ import {
PREREQUISITES_DOC_LINK,
OAUTH_SELF_MANAGED_DOC_LINK,
SET_UP_INSTANCE_DOC_LINK,
+ JIRA_USER_REQUIREMENTS_DOC_LINK,
} from '~/jira_connect/subscriptions/constants';
import SetupInstructions from '~/jira_connect/subscriptions/pages/sign_in/sign_in_gitlab_multiversion/setup_instructions.vue';
@@ -15,6 +16,7 @@ describe('SetupInstructions', () => {
const findPrerequisitesGlLink = () => wrapper.findAllComponents(GlLink).at(0);
const findOAuthGlLink = () => wrapper.findAllComponents(GlLink).at(1);
const findSetUpInstanceGlLink = () => wrapper.findAllComponents(GlLink).at(2);
+ const findJiraUserRequirementsGlLink = () => wrapper.findAllComponents(GlLink).at(3);
const findBackButton = () => wrapper.findAllComponents(GlButton).at(0);
const findNextButton = () => wrapper.findAllComponents(GlButton).at(1);
const findCheckboxAtIndex = (index) => wrapper.findAllComponents(GlFormCheckbox).at(index);
@@ -40,6 +42,12 @@ describe('SetupInstructions', () => {
expect(findSetUpInstanceGlLink().attributes('href')).toBe(SET_UP_INSTANCE_DOC_LINK);
});
+ it('renders "Jira user requirements" link to documentation', () => {
+ expect(findJiraUserRequirementsGlLink().attributes('href')).toBe(
+ JIRA_USER_REQUIREMENTS_DOC_LINK,
+ );
+ });
+
describe('NextButton', () => {
it('emits next event when clicked and all steps checked', async () => {
createComponent();
@@ -47,6 +55,7 @@ describe('SetupInstructions', () => {
findCheckboxAtIndex(0).vm.$emit('input', true);
findCheckboxAtIndex(1).vm.$emit('input', true);
findCheckboxAtIndex(2).vm.$emit('input', true);
+ findCheckboxAtIndex(3).vm.$emit('input', true);
await nextTick();
diff --git a/spec/frontend/kubernetes_dashboard/components/page_title_spec.js b/spec/frontend/kubernetes_dashboard/components/page_title_spec.js
new file mode 100644
index 00000000000..ee2ac44d6a3
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/components/page_title_spec.js
@@ -0,0 +1,35 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon, GlSprintf } from '@gitlab/ui';
+import PageTitle from '~/kubernetes_dashboard/components/page_title.vue';
+
+const agent = {
+ name: 'my-agent',
+ id: '123',
+};
+
+let wrapper;
+
+const createWrapper = () => {
+ wrapper = shallowMount(PageTitle, {
+ provide: {
+ agent,
+ },
+ stubs: { GlSprintf },
+ });
+};
+
+const findIcon = () => wrapper.findComponent(GlIcon);
+
+describe('Page title component', () => {
+ it('renders Kubernetes agent icon', () => {
+ createWrapper();
+
+ expect(findIcon().props('name')).toBe('kubernetes-agent');
+ });
+
+ it('renders agent information', () => {
+ createWrapper();
+
+ expect(wrapper.text()).toMatchInterpolatedText('Agent my-agent ID #123');
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/components/workload_details_item_spec.js b/spec/frontend/kubernetes_dashboard/components/workload_details_item_spec.js
new file mode 100644
index 00000000000..72af25e72e5
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/components/workload_details_item_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+import WorkloadDetailsItem from '~/kubernetes_dashboard/components/workload_details_item.vue';
+
+let wrapper;
+
+const propsData = {
+ label: 'name',
+};
+const slots = {
+ default: '<b>slot value</b>',
+};
+
+const createWrapper = () => {
+ wrapper = shallowMount(WorkloadDetailsItem, {
+ propsData,
+ slots,
+ });
+};
+
+const findLabel = () => wrapper.findComponent('label');
+
+describe('Workload details item component', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('renders the correct label', () => {
+ expect(findLabel().text()).toBe(propsData.label);
+ });
+
+ it('renders slot content', () => {
+ expect(wrapper.html()).toContain(slots.default);
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/components/workload_details_spec.js b/spec/frontend/kubernetes_dashboard/components/workload_details_spec.js
new file mode 100644
index 00000000000..fc47c658ebe
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/components/workload_details_spec.js
@@ -0,0 +1,53 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlBadge, GlTruncate } from '@gitlab/ui';
+import WorkloadDetails from '~/kubernetes_dashboard/components/workload_details.vue';
+import WorkloadDetailsItem from '~/kubernetes_dashboard/components/workload_details_item.vue';
+import { WORKLOAD_STATUS_BADGE_VARIANTS } from '~/kubernetes_dashboard/constants';
+import { mockPodsTableItems } from '../graphql/mock_data';
+
+let wrapper;
+
+const defaultItem = mockPodsTableItems[0];
+
+const createWrapper = (item = defaultItem) => {
+ wrapper = shallowMount(WorkloadDetails, {
+ propsData: {
+ item,
+ },
+ stubs: { GlTruncate },
+ });
+};
+
+const findAllWorkloadDetailsItems = () => wrapper.findAllComponents(WorkloadDetailsItem);
+const findWorkloadDetailsItem = (at) => findAllWorkloadDetailsItems().at(at);
+const findAllBadges = () => wrapper.findAllComponents(GlBadge);
+const findBadge = (at) => findAllBadges().at(at);
+
+describe('Workload details component', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it.each`
+ label | data | index
+ ${'Name'} | ${defaultItem.name} | ${0}
+ ${'Kind'} | ${defaultItem.kind} | ${1}
+ ${'Labels'} | ${'key=value'} | ${2}
+ ${'Status'} | ${defaultItem.status} | ${3}
+ ${'Annotations'} | ${'annotation: text another: text'} | ${4}
+ `('renders a list item for each not empty value', ({ label, data, index }) => {
+ expect(findWorkloadDetailsItem(index).props('label')).toBe(label);
+ expect(findWorkloadDetailsItem(index).text()).toMatchInterpolatedText(data);
+ });
+
+ it('renders a badge for each of the labels', () => {
+ const label = 'key=value';
+ expect(findBadge(0).text()).toBe(label);
+ });
+
+ it('renders a badge for the status value', () => {
+ const { status } = defaultItem;
+ expect(findBadge(1).text()).toBe(status);
+ expect(findBadge(1).props('variant')).toBe(WORKLOAD_STATUS_BADGE_VARIANTS[status]);
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/components/workload_layout_spec.js b/spec/frontend/kubernetes_dashboard/components/workload_layout_spec.js
new file mode 100644
index 00000000000..1dc5bd4f165
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/components/workload_layout_spec.js
@@ -0,0 +1,141 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon, GlAlert, GlDrawer } from '@gitlab/ui';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import WorkloadStats from '~/kubernetes_dashboard/components/workload_stats.vue';
+import WorkloadTable from '~/kubernetes_dashboard/components/workload_table.vue';
+import WorkloadDetails from '~/kubernetes_dashboard/components/workload_details.vue';
+import { mockPodStats, mockPodsTableItems } from '../graphql/mock_data';
+
+let wrapper;
+
+const defaultProps = {
+ stats: mockPodStats,
+ items: mockPodsTableItems,
+};
+
+const createWrapper = (propsData = {}) => {
+ wrapper = shallowMount(WorkloadLayout, {
+ propsData: {
+ ...defaultProps,
+ ...propsData,
+ },
+ stubs: { GlDrawer },
+ });
+};
+
+const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+const findErrorAlert = () => wrapper.findComponent(GlAlert);
+const findDrawer = () => wrapper.findComponent(GlDrawer);
+const findWorkloadStats = () => wrapper.findComponent(WorkloadStats);
+const findWorkloadTable = () => wrapper.findComponent(WorkloadTable);
+const findWorkloadDetails = () => wrapper.findComponent(WorkloadDetails);
+
+describe('Workload layout component', () => {
+ describe('when loading', () => {
+ beforeEach(() => {
+ createWrapper({ loading: true, errorMessage: 'error' });
+ });
+
+ it('renders a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it("doesn't render an error message", () => {
+ expect(findErrorAlert().exists()).toBe(false);
+ });
+
+ it("doesn't render workload stats", () => {
+ expect(findWorkloadStats().exists()).toBe(false);
+ });
+
+ it("doesn't render workload table", () => {
+ expect(findWorkloadTable().exists()).toBe(false);
+ });
+
+ it("doesn't render details drawer", () => {
+ expect(findDrawer().exists()).toBe(false);
+ });
+ });
+
+ describe('when received an error', () => {
+ beforeEach(() => {
+ createWrapper({ errorMessage: 'error' });
+ });
+
+ it("doesn't render a loading icon", () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('renders an error alert with the correct message and props', () => {
+ expect(findErrorAlert().text()).toBe('error');
+ expect(findErrorAlert().props()).toMatchObject({ variant: 'danger', dismissible: false });
+ });
+
+ it("doesn't render workload stats", () => {
+ expect(findWorkloadStats().exists()).toBe(false);
+ });
+
+ it("doesn't render workload table", () => {
+ expect(findWorkloadTable().exists()).toBe(false);
+ });
+
+ it("doesn't render details drawer", () => {
+ expect(findDrawer().exists()).toBe(false);
+ });
+ });
+
+ describe('when received the data', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it("doesn't render a loading icon", () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it("doesn't render an error message", () => {
+ expect(findErrorAlert().exists()).toBe(false);
+ });
+
+ it('renders workload-stats component with the correct props', () => {
+ expect(findWorkloadStats().props('stats')).toBe(mockPodStats);
+ });
+
+ it('renders workload-table component with the correct props', () => {
+ expect(findWorkloadTable().props('items')).toBe(mockPodsTableItems);
+ });
+
+ it('renders a drawer', () => {
+ expect(findDrawer().exists()).toBe(true);
+ });
+
+ describe('drawer', () => {
+ it('is closed by default', () => {
+ expect(findDrawer().props('open')).toBe(false);
+ });
+
+ it('is opened when an item was selected', async () => {
+ await findWorkloadTable().vm.$emit('select-item', mockPodsTableItems[0]);
+ expect(findDrawer().props('open')).toBe(true);
+ });
+
+ it('is closed when clicked on a cross button', async () => {
+ await findWorkloadTable().vm.$emit('select-item', mockPodsTableItems[0]);
+ expect(findDrawer().props('open')).toBe(true);
+
+ await findDrawer().vm.$emit('close');
+ expect(findDrawer().props('open')).toBe(false);
+ });
+
+ it('renders a title with the selected item name', async () => {
+ await findWorkloadTable().vm.$emit('select-item', mockPodsTableItems[0]);
+ expect(findDrawer().text()).toContain(mockPodsTableItems[0].name);
+ });
+
+ it('renders WorkloadDetails with the correct props', async () => {
+ await findWorkloadTable().vm.$emit('select-item', mockPodsTableItems[0]);
+ expect(findWorkloadDetails().props('item')).toBe(mockPodsTableItems[0]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/components/workload_stats_spec.js b/spec/frontend/kubernetes_dashboard/components/workload_stats_spec.js
new file mode 100644
index 00000000000..d1bee0c0a16
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/components/workload_stats_spec.js
@@ -0,0 +1,43 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlSingleStat } from '@gitlab/ui/dist/charts';
+import WorkloadStats from '~/kubernetes_dashboard/components/workload_stats.vue';
+import { mockPodStats } from '../graphql/mock_data';
+
+let wrapper;
+
+const createWrapper = () => {
+ wrapper = shallowMount(WorkloadStats, {
+ propsData: {
+ stats: mockPodStats,
+ },
+ });
+};
+
+const findAllStats = () => wrapper.findAllComponents(GlSingleStat);
+const findSingleStat = (at) => findAllStats().at(at);
+
+describe('Workload stats component', () => {
+ it('renders GlSingleStat component for each stat', () => {
+ createWrapper();
+
+ expect(findAllStats()).toHaveLength(4);
+ });
+
+ it.each`
+ count | title | index
+ ${2} | ${'Running'} | ${0}
+ ${1} | ${'Pending'} | ${1}
+ ${1} | ${'Succeeded'} | ${2}
+ ${2} | ${'Failed'} | ${3}
+ `(
+ 'renders stat with title "$title" and count "$count" at index $index',
+ ({ count, title, index }) => {
+ createWrapper();
+
+ expect(findSingleStat(index).props()).toMatchObject({
+ value: count,
+ title,
+ });
+ },
+ );
+});
diff --git a/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js b/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js
new file mode 100644
index 00000000000..369b8f32c2d
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js
@@ -0,0 +1,128 @@
+import { mount } from '@vue/test-utils';
+import { GlTable, GlBadge, GlPagination } from '@gitlab/ui';
+import WorkloadTable from '~/kubernetes_dashboard/components/workload_table.vue';
+import { TABLE_HEADING_CLASSES, PAGE_SIZE } from '~/kubernetes_dashboard/constants';
+import { mockPodsTableItems } from '../graphql/mock_data';
+
+let wrapper;
+
+const createWrapper = (propsData = {}) => {
+ wrapper = mount(WorkloadTable, {
+ propsData,
+ });
+};
+
+const findTable = () => wrapper.findComponent(GlTable);
+const findAllRows = () => findTable().find('tbody').findAll('tr');
+const findRow = (at) => findAllRows().at(at);
+const findAllBadges = () => wrapper.findAllComponents(GlBadge);
+const findBadge = (at) => findAllBadges().at(at);
+const findPagination = () => wrapper.findComponent(GlPagination);
+
+describe('Workload table component', () => {
+ it('renders GlTable component with the default fields if no fields specified in props', () => {
+ createWrapper({ items: mockPodsTableItems });
+ const defaultFields = [
+ {
+ key: 'name',
+ label: 'Name',
+ thClass: TABLE_HEADING_CLASSES,
+ sortable: true,
+ },
+ {
+ key: 'status',
+ label: 'Status',
+ thClass: TABLE_HEADING_CLASSES,
+ sortable: true,
+ },
+ {
+ key: 'namespace',
+ label: 'Namespace',
+ thClass: TABLE_HEADING_CLASSES,
+ sortable: true,
+ },
+ {
+ key: 'age',
+ label: 'Age',
+ thClass: TABLE_HEADING_CLASSES,
+ sortable: true,
+ },
+ ];
+
+ expect(findTable().props('fields')).toEqual(defaultFields);
+ });
+
+ it('renders GlTable component fields specified in props', () => {
+ const customFields = [
+ {
+ key: 'field-1',
+ label: 'Field-1',
+ thClass: TABLE_HEADING_CLASSES,
+ sortable: true,
+ },
+ {
+ key: 'field-2',
+ label: 'Field-2',
+ thClass: TABLE_HEADING_CLASSES,
+ sortable: true,
+ },
+ ];
+ createWrapper({ items: mockPodsTableItems, fields: customFields });
+
+ expect(findTable().props('fields')).toEqual(customFields);
+ });
+
+ describe('table rows', () => {
+ beforeEach(() => {
+ createWrapper({ items: mockPodsTableItems });
+ });
+
+ it('displays the correct number of rows', () => {
+ expect(findAllRows()).toHaveLength(mockPodsTableItems.length);
+ });
+
+ it('emits an event on row click', () => {
+ mockPodsTableItems.forEach((data, index) => {
+ findRow(index).trigger('click');
+
+ expect(wrapper.emitted('select-item')[index]).toEqual([data]);
+ });
+ });
+
+ it('renders correct data for each row', () => {
+ mockPodsTableItems.forEach((data, index) => {
+ expect(findRow(index).text()).toContain(data.name);
+ expect(findRow(index).text()).toContain(data.namespace);
+ expect(findRow(index).text()).toContain(data.status);
+ expect(findRow(index).text()).toContain(data.age);
+ });
+ });
+
+ it('renders a badge for the status', () => {
+ expect(findAllBadges()).toHaveLength(mockPodsTableItems.length);
+ });
+
+ it.each`
+ status | variant | index
+ ${'Running'} | ${'info'} | ${0}
+ ${'Running'} | ${'info'} | ${1}
+ ${'Pending'} | ${'warning'} | ${2}
+ ${'Succeeded'} | ${'success'} | ${3}
+ ${'Failed'} | ${'danger'} | ${4}
+ ${'Failed'} | ${'danger'} | ${5}
+ `(
+ 'renders "$variant" badge for status "$status" at index "$index"',
+ ({ status, variant, index }) => {
+ expect(findBadge(index).text()).toBe(status);
+ expect(findBadge(index).props('variant')).toBe(variant);
+ },
+ );
+
+ it('renders pagination', () => {
+ expect(findPagination().props()).toMatchObject({
+ totalItems: mockPodsTableItems.length,
+ perPage: PAGE_SIZE,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/graphql/mock_data.js b/spec/frontend/kubernetes_dashboard/graphql/mock_data.js
new file mode 100644
index 00000000000..674425a5bc9
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/graphql/mock_data.js
@@ -0,0 +1,353 @@
+const runningPod = {
+ status: { phase: 'Running' },
+ metadata: {
+ name: 'pod-1',
+ namespace: 'default',
+ creationTimestamp: '2023-07-31T11:50:17Z',
+ labels: { key: 'value' },
+ annotations: { annotation: 'text', another: 'text' },
+ },
+};
+const pendingPod = {
+ status: { phase: 'Pending' },
+ metadata: {
+ name: 'pod-2',
+ namespace: 'new-namespace',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+const succeededPod = {
+ status: { phase: 'Succeeded' },
+ metadata: {
+ name: 'pod-3',
+ namespace: 'default',
+ creationTimestamp: '2023-07-31T11:50:17Z',
+ labels: {},
+ annotations: {},
+ },
+};
+const failedPod = {
+ status: { phase: 'Failed' },
+ metadata: {
+ name: 'pod-4',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+export const k8sPodsMock = [runningPod, runningPod, pendingPod, succeededPod, failedPod, failedPod];
+
+export const mockPodStats = [
+ {
+ title: 'Running',
+ value: 2,
+ },
+ {
+ title: 'Pending',
+ value: 1,
+ },
+ {
+ title: 'Succeeded',
+ value: 1,
+ },
+ {
+ title: 'Failed',
+ value: 2,
+ },
+];
+
+export const mockPodsTableItems = [
+ {
+ name: 'pod-1',
+ namespace: 'default',
+ status: 'Running',
+ age: '114d',
+ labels: { key: 'value' },
+ annotations: { annotation: 'text', another: 'text' },
+ kind: 'Pod',
+ },
+ {
+ name: 'pod-1',
+ namespace: 'default',
+ status: 'Running',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'Pod',
+ },
+ {
+ name: 'pod-2',
+ namespace: 'new-namespace',
+ status: 'Pending',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'Pod',
+ },
+ {
+ name: 'pod-3',
+ namespace: 'default',
+ status: 'Succeeded',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'Pod',
+ },
+ {
+ name: 'pod-4',
+ namespace: 'default',
+ status: 'Failed',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'Pod',
+ },
+ {
+ name: 'pod-4',
+ namespace: 'default',
+ status: 'Failed',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'Pod',
+ },
+];
+
+const pendingDeployment = {
+ status: {
+ conditions: [
+ { type: 'Available', status: 'False' },
+ { type: 'Progressing', status: 'True' },
+ ],
+ },
+ metadata: {
+ name: 'deployment-1',
+ namespace: 'new-namespace',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+const readyDeployment = {
+ status: {
+ conditions: [
+ { type: 'Available', status: 'True' },
+ { type: 'Progressing', status: 'False' },
+ ],
+ },
+ metadata: {
+ name: 'deployment-2',
+ namespace: 'default',
+ creationTimestamp: '2023-07-31T11:50:17Z',
+ labels: {},
+ annotations: {},
+ },
+};
+const failedDeployment = {
+ status: {
+ conditions: [
+ { type: 'Available', status: 'False' },
+ { type: 'Progressing', status: 'False' },
+ ],
+ },
+ metadata: {
+ name: 'deployment-3',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+export const k8sDeploymentsMock = [
+ pendingDeployment,
+ readyDeployment,
+ readyDeployment,
+ failedDeployment,
+];
+
+export const mockDeploymentsStats = [
+ {
+ title: 'Ready',
+ value: 2,
+ },
+ {
+ title: 'Failed',
+ value: 1,
+ },
+ {
+ title: 'Pending',
+ value: 1,
+ },
+];
+
+export const mockDeploymentsTableItems = [
+ {
+ name: 'deployment-1',
+ namespace: 'new-namespace',
+ status: 'Pending',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'Deployment',
+ },
+ {
+ name: 'deployment-2',
+ namespace: 'default',
+ status: 'Ready',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'Deployment',
+ },
+ {
+ name: 'deployment-2',
+ namespace: 'default',
+ status: 'Ready',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'Deployment',
+ },
+ {
+ name: 'deployment-3',
+ namespace: 'default',
+ status: 'Failed',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'Deployment',
+ },
+];
+
+const readyStatefulSet = {
+ status: { readyReplicas: 2 },
+ spec: { replicas: 2 },
+ metadata: {
+ name: 'statefulSet-2',
+ namespace: 'default',
+ creationTimestamp: '2023-07-31T11:50:17Z',
+ labels: {},
+ annotations: {},
+ },
+};
+const failedStatefulSet = {
+ status: { readyReplicas: 1 },
+ spec: { replicas: 2 },
+ metadata: {
+ name: 'statefulSet-3',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+export const k8sStatefulSetsMock = [readyStatefulSet, readyStatefulSet, failedStatefulSet];
+
+export const mockStatefulSetsStats = [
+ {
+ title: 'Ready',
+ value: 2,
+ },
+ {
+ title: 'Failed',
+ value: 1,
+ },
+];
+
+export const mockStatefulSetsTableItems = [
+ {
+ name: 'statefulSet-2',
+ namespace: 'default',
+ status: 'Ready',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'StatefulSet',
+ },
+ {
+ name: 'statefulSet-2',
+ namespace: 'default',
+ status: 'Ready',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'StatefulSet',
+ },
+ {
+ name: 'statefulSet-3',
+ namespace: 'default',
+ status: 'Failed',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'StatefulSet',
+ },
+];
+
+export const k8sReplicaSetsMock = [readyStatefulSet, readyStatefulSet, failedStatefulSet];
+
+export const mockReplicaSetsTableItems = mockStatefulSetsTableItems.map((item) => {
+ return { ...item, kind: 'ReplicaSet' };
+});
+
+const readyDaemonSet = {
+ status: { numberMisscheduled: 0, numberReady: 2, desiredNumberScheduled: 2 },
+ metadata: {
+ name: 'daemonSet-1',
+ namespace: 'default',
+ creationTimestamp: '2023-07-31T11:50:17Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+const failedDaemonSet = {
+ status: { numberMisscheduled: 1, numberReady: 1, desiredNumberScheduled: 2 },
+ metadata: {
+ name: 'daemonSet-2',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+export const mockDaemonSetsStats = [
+ {
+ title: 'Ready',
+ value: 1,
+ },
+ {
+ title: 'Failed',
+ value: 1,
+ },
+];
+
+export const mockDaemonSetsTableItems = [
+ {
+ name: 'daemonSet-1',
+ namespace: 'default',
+ status: 'Ready',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'DaemonSet',
+ },
+ {
+ name: 'daemonSet-2',
+ namespace: 'default',
+ status: 'Failed',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'DaemonSet',
+ },
+];
+
+export const k8sDaemonSetsMock = [readyDaemonSet, failedDaemonSet];
diff --git a/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js b/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js
new file mode 100644
index 00000000000..516d91af947
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js
@@ -0,0 +1,459 @@
+import { CoreV1Api, WatchApi, AppsV1Api } from '@gitlab/cluster-client';
+import { resolvers } from '~/kubernetes_dashboard/graphql/resolvers';
+import k8sDashboardPodsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_pods.query.graphql';
+import k8sDashboardDeploymentsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_deployments.query.graphql';
+import k8sDashboardStatefulSetsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_stateful_sets.query.graphql';
+import k8sDashboardReplicaSetsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_replica_sets.query.graphql';
+import k8sDashboardDaemonSetsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_daemon_sets.query.graphql';
+import {
+ k8sPodsMock,
+ k8sDeploymentsMock,
+ k8sStatefulSetsMock,
+ k8sReplicaSetsMock,
+ k8sDaemonSetsMock,
+} from '../mock_data';
+
+describe('~/frontend/environments/graphql/resolvers', () => {
+ let mockResolvers;
+
+ const configuration = {
+ basePath: 'kas-proxy/',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ beforeEach(() => {
+ mockResolvers = resolvers;
+ });
+
+ describe('k8sPods', () => {
+ const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockPodsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
+ const mockPodsListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: k8sPodsMock,
+ });
+ });
+
+ const mockAllPodsListFn = jest.fn().mockImplementation(mockPodsListFn);
+
+ describe('when the pods data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
+ .mockImplementation(mockAllPodsListFn);
+ jest.spyOn(mockWatcher, 'subscribeToStream').mockImplementation(mockPodsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all pods from the cluster_client library and watch the events', async () => {
+ const pods = await mockResolvers.Query.k8sPods(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllPodsListFn).toHaveBeenCalled();
+ expect(mockPodsListWatcherFn).toHaveBeenCalled();
+
+ expect(pods).toEqual(k8sPodsMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sPods(null, { configuration, namespace: '' }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardPodsQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sPods: [] },
+ });
+ });
+ });
+
+ it('should not watch pods from the cluster_client library when the pods data is not present', async () => {
+ jest.spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sPods(null, { configuration }, { client });
+
+ expect(mockPodsListWatcherFn).not.toHaveBeenCalled();
+ });
+
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sPods(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
+
+ describe('k8sDeployments', () => {
+ const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockDeploymentsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
+ const mockDeploymentsListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: k8sDeploymentsMock,
+ });
+ });
+
+ const mockAllDeploymentsListFn = jest.fn().mockImplementation(mockDeploymentsListFn);
+
+ describe('when the deployments data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces')
+ .mockImplementation(mockAllDeploymentsListFn);
+ jest
+ .spyOn(mockWatcher, 'subscribeToStream')
+ .mockImplementation(mockDeploymentsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all deployments from the cluster_client library and watch the events', async () => {
+ const deployments = await mockResolvers.Query.k8sDeployments(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllDeploymentsListFn).toHaveBeenCalled();
+ expect(mockDeploymentsListWatcherFn).toHaveBeenCalled();
+
+ expect(deployments).toEqual(k8sDeploymentsMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sDeployments(
+ null,
+ { configuration, namespace: '' },
+ { client },
+ );
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardDeploymentsQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sDeployments: [] },
+ });
+ });
+ });
+
+ it('should not watch deployments from the cluster_client library when the deployments data is not present', async () => {
+ jest.spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sDeployments(null, { configuration }, { client });
+
+ expect(mockDeploymentsListWatcherFn).not.toHaveBeenCalled();
+ });
+
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sDeployments(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
+
+ describe('k8sStatefulSets', () => {
+ const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockStatefulSetsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
+ const mockStatefulSetsListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: k8sStatefulSetsMock,
+ });
+ });
+
+ const mockAllStatefulSetsListFn = jest.fn().mockImplementation(mockStatefulSetsListFn);
+
+ describe('when the StatefulSets data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1StatefulSetForAllNamespaces')
+ .mockImplementation(mockAllStatefulSetsListFn);
+ jest
+ .spyOn(mockWatcher, 'subscribeToStream')
+ .mockImplementation(mockStatefulSetsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all StatefulSets from the cluster_client library and watch the events', async () => {
+ const StatefulSets = await mockResolvers.Query.k8sStatefulSets(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllStatefulSetsListFn).toHaveBeenCalled();
+ expect(mockStatefulSetsListWatcherFn).toHaveBeenCalled();
+
+ expect(StatefulSets).toEqual(k8sStatefulSetsMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sStatefulSets(
+ null,
+ { configuration, namespace: '' },
+ { client },
+ );
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardStatefulSetsQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sStatefulSets: [] },
+ });
+ });
+ });
+
+ it('should not watch StatefulSets from the cluster_client library when the StatefulSets data is not present', async () => {
+ jest.spyOn(AppsV1Api.prototype, 'listAppsV1StatefulSetForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sStatefulSets(null, { configuration }, { client });
+
+ expect(mockStatefulSetsListWatcherFn).not.toHaveBeenCalled();
+ });
+
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1StatefulSetForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sStatefulSets(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
+
+ describe('k8sReplicaSets', () => {
+ const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockReplicaSetsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
+ const mockReplicaSetsListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: k8sReplicaSetsMock,
+ });
+ });
+
+ const mockAllReplicaSetsListFn = jest.fn().mockImplementation(mockReplicaSetsListFn);
+
+ describe('when the ReplicaSets data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1ReplicaSetForAllNamespaces')
+ .mockImplementation(mockAllReplicaSetsListFn);
+ jest
+ .spyOn(mockWatcher, 'subscribeToStream')
+ .mockImplementation(mockReplicaSetsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all ReplicaSets from the cluster_client library and watch the events', async () => {
+ const ReplicaSets = await mockResolvers.Query.k8sReplicaSets(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllReplicaSetsListFn).toHaveBeenCalled();
+ expect(mockReplicaSetsListWatcherFn).toHaveBeenCalled();
+
+ expect(ReplicaSets).toEqual(k8sReplicaSetsMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sReplicaSets(
+ null,
+ { configuration, namespace: '' },
+ { client },
+ );
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardReplicaSetsQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sReplicaSets: [] },
+ });
+ });
+ });
+
+ it('should not watch ReplicaSets from the cluster_client library when the ReplicaSets data is not present', async () => {
+ jest.spyOn(AppsV1Api.prototype, 'listAppsV1ReplicaSetForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sReplicaSets(null, { configuration }, { client });
+
+ expect(mockReplicaSetsListWatcherFn).not.toHaveBeenCalled();
+ });
+
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1ReplicaSetForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sReplicaSets(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
+
+ describe('k8sDaemonSets', () => {
+ const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockDaemonSetsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
+ const mockDaemonSetsListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: k8sDaemonSetsMock,
+ });
+ });
+
+ const mockAllDaemonSetsListFn = jest.fn().mockImplementation(mockDaemonSetsListFn);
+
+ describe('when the DaemonSets data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1DaemonSetForAllNamespaces')
+ .mockImplementation(mockAllDaemonSetsListFn);
+ jest
+ .spyOn(mockWatcher, 'subscribeToStream')
+ .mockImplementation(mockDaemonSetsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all DaemonSets from the cluster_client library and watch the events', async () => {
+ const DaemonSets = await mockResolvers.Query.k8sDaemonSets(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllDaemonSetsListFn).toHaveBeenCalled();
+ expect(mockDaemonSetsListWatcherFn).toHaveBeenCalled();
+
+ expect(DaemonSets).toEqual(k8sDaemonSetsMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sDaemonSets(null, { configuration, namespace: '' }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardDaemonSetsQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sDaemonSets: [] },
+ });
+ });
+ });
+
+ it('should not watch DaemonSets from the cluster_client library when the DaemonSets data is not present', async () => {
+ jest.spyOn(AppsV1Api.prototype, 'listAppsV1DaemonSetForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sDaemonSets(null, { configuration }, { client });
+
+ expect(mockDaemonSetsListWatcherFn).not.toHaveBeenCalled();
+ });
+
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(AppsV1Api.prototype, 'listAppsV1DaemonSetForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sDaemonSets(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js b/spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js
new file mode 100644
index 00000000000..2892d657aea
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js
@@ -0,0 +1,93 @@
+import {
+ getAge,
+ calculateDeploymentStatus,
+ calculateStatefulSetStatus,
+ calculateDaemonSetStatus,
+} from '~/kubernetes_dashboard/helpers/k8s_integration_helper';
+import { useFakeDate } from 'helpers/fake_date';
+
+describe('k8s_integration_helper', () => {
+ describe('getAge', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it.each`
+ condition | measures | timestamp | expected
+ ${'timestamp > 1 day'} | ${'days'} | ${'2023-07-31T11:50:59Z'} | ${'114d'}
+ ${'timestamp = 1 day'} | ${'days'} | ${'2023-11-21T11:50:59Z'} | ${'1d'}
+ ${'1 day > timestamp > 1 hour'} | ${'hours'} | ${'2023-11-22T11:50:59Z'} | ${'22h'}
+ ${'timestamp = 1 hour'} | ${'hours'} | ${'2023-11-23T08:50:59Z'} | ${'1h'}
+ ${'1 hour > timestamp >1 minute'} | ${'minutes'} | ${'2023-11-23T09:50:59Z'} | ${'19m'}
+ ${'timestamp = 1 minute'} | ${'minutes'} | ${'2023-11-23T10:08:59Z'} | ${'1m'}
+ ${'1 minute > timestamp'} | ${'seconds'} | ${'2023-11-23T10:09:17Z'} | ${'43s'}
+ ${'timestamp = 1 second'} | ${'seconds'} | ${'2023-11-23T10:09:59Z'} | ${'1s'}
+ `('returns age in $measures when $condition', ({ timestamp, expected }) => {
+ expect(getAge(timestamp)).toBe(expected);
+ });
+ });
+
+ describe('calculateDeploymentStatus', () => {
+ const pending = {
+ conditions: [
+ { type: 'Available', status: 'False' },
+ { type: 'Progressing', status: 'True' },
+ ],
+ };
+ const ready = {
+ conditions: [
+ { type: 'Available', status: 'True' },
+ { type: 'Progressing', status: 'False' },
+ ],
+ };
+ const failed = {
+ conditions: [
+ { type: 'Available', status: 'False' },
+ { type: 'Progressing', status: 'False' },
+ ],
+ };
+
+ it.each`
+ condition | status | expected
+ ${'Available is false and Progressing is true'} | ${pending} | ${'Pending'}
+ ${'Available is true and Progressing is false'} | ${ready} | ${'Ready'}
+ ${'Available is false and Progressing is false'} | ${failed} | ${'Failed'}
+ `('returns status as $expected when $condition', ({ status, expected }) => {
+ expect(calculateDeploymentStatus({ status })).toBe(expected);
+ });
+ });
+
+ describe('calculateStatefulSetStatus', () => {
+ const ready = {
+ status: { readyReplicas: 2 },
+ spec: { replicas: 2 },
+ };
+ const failed = {
+ status: { readyReplicas: 1 },
+ spec: { replicas: 2 },
+ };
+
+ it.each`
+ condition | item | expected
+ ${'there are less readyReplicas than replicas in spec'} | ${failed} | ${'Failed'}
+ ${'there are the same amount of readyReplicas as in spec'} | ${ready} | ${'Ready'}
+ `('returns status as $expected when $condition', ({ item, expected }) => {
+ expect(calculateStatefulSetStatus(item)).toBe(expected);
+ });
+ });
+
+ describe('calculateDaemonSetStatus', () => {
+ const ready = {
+ status: { numberMisscheduled: 0, numberReady: 2, desiredNumberScheduled: 2 },
+ };
+ const failed = {
+ status: { numberMisscheduled: 1, numberReady: 1, desiredNumberScheduled: 2 },
+ };
+
+ it.each`
+ condition | item | expected
+ ${'there are less numberReady than desiredNumberScheduled or the numberMisscheduled is present'} | ${failed} | ${'Failed'}
+ ${'there are the same amount of numberReady and desiredNumberScheduled'} | ${ready} | ${'Ready'}
+ `('returns status as $expected when $condition', ({ item, expected }) => {
+ expect(calculateDaemonSetStatus(item)).toBe(expected);
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/pages/app_spec.js b/spec/frontend/kubernetes_dashboard/pages/app_spec.js
new file mode 100644
index 00000000000..7d3b9cd2ee6
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/app_spec.js
@@ -0,0 +1,40 @@
+import Vue from 'vue';
+import VueRouter from 'vue-router';
+import { shallowMount } from '@vue/test-utils';
+import createRouter from '~/kubernetes_dashboard/router/index';
+import { PODS_ROUTE_PATH } from '~/kubernetes_dashboard/router/constants';
+import App from '~/kubernetes_dashboard/pages/app.vue';
+import PageTitle from '~/kubernetes_dashboard/components/page_title.vue';
+
+Vue.use(VueRouter);
+
+let wrapper;
+let router;
+const base = 'base/path';
+
+const mountApp = async (route = PODS_ROUTE_PATH) => {
+ await router.push(route);
+
+ wrapper = shallowMount(App, {
+ router,
+ provide: {
+ agent: {},
+ },
+ });
+};
+
+const findPageTitle = () => wrapper.findComponent(PageTitle);
+
+describe('Kubernetes dashboard app component', () => {
+ beforeEach(() => {
+ router = createRouter({
+ base,
+ });
+ });
+
+ it(`sets the correct title for '${PODS_ROUTE_PATH}' path`, async () => {
+ await mountApp();
+
+ expect(findPageTitle().text()).toBe('Pods');
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/pages/daemon_sets_page_spec.js b/spec/frontend/kubernetes_dashboard/pages/daemon_sets_page_spec.js
new file mode 100644
index 00000000000..a987f46fd78
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/daemon_sets_page_spec.js
@@ -0,0 +1,106 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import DaemonSetsPage from '~/kubernetes_dashboard/pages/daemon_sets_page.vue';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import { useFakeDate } from 'helpers/fake_date';
+import {
+ k8sDaemonSetsMock,
+ mockDaemonSetsStats,
+ mockDaemonSetsTableItems,
+} from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Kubernetes dashboard daemonSets page', () => {
+ let wrapper;
+
+ const configuration = {
+ basePath: 'kas/tunnel/url',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sDaemonSets: jest.fn().mockReturnValue(k8sDaemonSetsMock),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (apolloProvider = createApolloProvider()) => {
+ wrapper = shallowMount(DaemonSetsPage, {
+ provide: { configuration },
+ apolloProvider,
+ });
+ };
+
+ describe('mounted', () => {
+ it('renders WorkloadLayout component', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().exists()).toBe(true);
+ });
+
+ it('sets loading prop for the WorkloadLayout', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().props('loading')).toBe(true);
+ });
+
+ it('removes loading prop from the WorkloadLayout when the list of pods loaded', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when gets pods data', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it('sets correct stats object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('stats')).toEqual(mockDaemonSetsStats);
+ });
+
+ it('sets correct table items object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('items')).toMatchObject(mockDaemonSetsTableItems);
+ });
+ });
+
+ describe('when gets an error from the cluster_client API', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createErroredApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sDaemonSets: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper(createErroredApolloProvider());
+ await waitForPromises();
+ });
+
+ it('sets errorMessage prop for the WorkloadLayout', () => {
+ expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/pages/deployments_page_spec.js b/spec/frontend/kubernetes_dashboard/pages/deployments_page_spec.js
new file mode 100644
index 00000000000..371116f0495
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/deployments_page_spec.js
@@ -0,0 +1,106 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import DeploymentsPage from '~/kubernetes_dashboard/pages/deployments_page.vue';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import { useFakeDate } from 'helpers/fake_date';
+import {
+ k8sDeploymentsMock,
+ mockDeploymentsStats,
+ mockDeploymentsTableItems,
+} from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Kubernetes dashboard deployments page', () => {
+ let wrapper;
+
+ const configuration = {
+ basePath: 'kas/tunnel/url',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sDeployments: jest.fn().mockReturnValue(k8sDeploymentsMock),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (apolloProvider = createApolloProvider()) => {
+ wrapper = shallowMount(DeploymentsPage, {
+ provide: { configuration },
+ apolloProvider,
+ });
+ };
+
+ describe('mounted', () => {
+ it('renders WorkloadLayout component', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().exists()).toBe(true);
+ });
+
+ it('sets loading prop for the WorkloadLayout', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().props('loading')).toBe(true);
+ });
+
+ it('removes loading prop from the WorkloadLayout when the list of pods loaded', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when gets pods data', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it('sets correct stats object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('stats')).toEqual(mockDeploymentsStats);
+ });
+
+ it('sets correct table items object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('items')).toMatchObject(mockDeploymentsTableItems);
+ });
+ });
+
+ describe('when gets an error from the cluster_client API', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createErroredApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sDeployments: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper(createErroredApolloProvider());
+ await waitForPromises();
+ });
+
+ it('sets errorMessage prop for the WorkloadLayout', () => {
+ expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/pages/pods_page_spec.js b/spec/frontend/kubernetes_dashboard/pages/pods_page_spec.js
new file mode 100644
index 00000000000..28a98bad211
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/pods_page_spec.js
@@ -0,0 +1,102 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import PodsPage from '~/kubernetes_dashboard/pages/pods_page.vue';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import { useFakeDate } from 'helpers/fake_date';
+import { k8sPodsMock, mockPodStats, mockPodsTableItems } from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Kubernetes dashboard pods page', () => {
+ let wrapper;
+
+ const configuration = {
+ basePath: 'kas/tunnel/url',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sPods: jest.fn().mockReturnValue(k8sPodsMock),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (apolloProvider = createApolloProvider()) => {
+ wrapper = shallowMount(PodsPage, {
+ provide: { configuration },
+ apolloProvider,
+ });
+ };
+
+ describe('mounted', () => {
+ it('renders WorkloadLayout component', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().exists()).toBe(true);
+ });
+
+ it('sets loading prop for the WorkloadLayout', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().props('loading')).toBe(true);
+ });
+
+ it('removes loading prop from the WorkloadLayout when the list of pods loaded', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when gets pods data', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it('sets correct stats object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('stats')).toEqual(mockPodStats);
+ });
+
+ it('sets correct table items object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('items')).toMatchObject(mockPodsTableItems);
+ });
+ });
+
+ describe('when gets an error from the cluster_client API', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createErroredApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sPods: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper(createErroredApolloProvider());
+ await waitForPromises();
+ });
+
+ it('sets errorMessage prop for the WorkloadLayout', () => {
+ expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/pages/replica_sets_page_spec.js b/spec/frontend/kubernetes_dashboard/pages/replica_sets_page_spec.js
new file mode 100644
index 00000000000..0e442ec8328
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/replica_sets_page_spec.js
@@ -0,0 +1,106 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import ReplicaSetsPage from '~/kubernetes_dashboard/pages/replica_sets_page.vue';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import { useFakeDate } from 'helpers/fake_date';
+import {
+ k8sReplicaSetsMock,
+ mockStatefulSetsStats,
+ mockReplicaSetsTableItems,
+} from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Kubernetes dashboard replicaSets page', () => {
+ let wrapper;
+
+ const configuration = {
+ basePath: 'kas/tunnel/url',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sReplicaSets: jest.fn().mockReturnValue(k8sReplicaSetsMock),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (apolloProvider = createApolloProvider()) => {
+ wrapper = shallowMount(ReplicaSetsPage, {
+ provide: { configuration },
+ apolloProvider,
+ });
+ };
+
+ describe('mounted', () => {
+ it('renders WorkloadLayout component', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().exists()).toBe(true);
+ });
+
+ it('sets loading prop for the WorkloadLayout', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().props('loading')).toBe(true);
+ });
+
+ it('removes loading prop from the WorkloadLayout when the list of pods loaded', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when gets pods data', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it('sets correct stats object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('stats')).toEqual(mockStatefulSetsStats);
+ });
+
+ it('sets correct table items object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('items')).toMatchObject(mockReplicaSetsTableItems);
+ });
+ });
+
+ describe('when gets an error from the cluster_client API', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createErroredApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sReplicaSets: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper(createErroredApolloProvider());
+ await waitForPromises();
+ });
+
+ it('sets errorMessage prop for the WorkloadLayout', () => {
+ expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/pages/stateful_sets_page_spec.js b/spec/frontend/kubernetes_dashboard/pages/stateful_sets_page_spec.js
new file mode 100644
index 00000000000..3e9bd9a42de
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/stateful_sets_page_spec.js
@@ -0,0 +1,106 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import StatefulSetsPage from '~/kubernetes_dashboard/pages/stateful_sets_page.vue';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import { useFakeDate } from 'helpers/fake_date';
+import {
+ k8sStatefulSetsMock,
+ mockStatefulSetsStats,
+ mockStatefulSetsTableItems,
+} from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Kubernetes dashboard statefulSets page', () => {
+ let wrapper;
+
+ const configuration = {
+ basePath: 'kas/tunnel/url',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sStatefulSets: jest.fn().mockReturnValue(k8sStatefulSetsMock),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (apolloProvider = createApolloProvider()) => {
+ wrapper = shallowMount(StatefulSetsPage, {
+ provide: { configuration },
+ apolloProvider,
+ });
+ };
+
+ describe('mounted', () => {
+ it('renders WorkloadLayout component', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().exists()).toBe(true);
+ });
+
+ it('sets loading prop for the WorkloadLayout', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().props('loading')).toBe(true);
+ });
+
+ it('removes loading prop from the WorkloadLayout when the list of pods loaded', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when gets pods data', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it('sets correct stats object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('stats')).toEqual(mockStatefulSetsStats);
+ });
+
+ it('sets correct table items object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('items')).toMatchObject(mockStatefulSetsTableItems);
+ });
+ });
+
+ describe('when gets an error from the cluster_client API', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createErroredApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sStatefulSets: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper(createErroredApolloProvider());
+ await waitForPromises();
+ });
+
+ it('sets errorMessage prop for the WorkloadLayout', () => {
+ expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/breadcrumbs_spec.js b/spec/frontend/lib/utils/breadcrumbs_spec.js
index 3c29e3723d3..481e3db521c 100644
--- a/spec/frontend/lib/utils/breadcrumbs_spec.js
+++ b/spec/frontend/lib/utils/breadcrumbs_spec.js
@@ -26,24 +26,20 @@ describe('Breadcrumbs utils', () => {
`;
const mockRouter = jest.fn();
- let MockComponent;
- let mockApolloProvider;
- beforeEach(() => {
- MockComponent = Vue.component('MockComponent', {
- render: (createElement) =>
- createElement('span', {
- attrs: {
- 'data-testid': 'mock-component',
- },
- }),
- });
- mockApolloProvider = createMockApollo();
+ const MockComponent = Vue.component('MockComponent', {
+ render: (createElement) =>
+ createElement('span', {
+ attrs: {
+ 'data-testid': 'mock-component',
+ },
+ }),
});
+ const mockApolloProvider = createMockApollo();
+
afterEach(() => {
resetHTMLFixture();
- MockComponent = null;
});
describe('injectVueAppBreadcrumbs', () => {
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 6295914b127..5c2bcd48f3e 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -151,7 +151,7 @@ describe('common_utils', () => {
jest.spyOn(window, 'scrollBy');
document.body.innerHTML += `
<div id="parent">
- <div class="navbar-gitlab" style="position: fixed; top: 0; height: 50px;"></div>
+ <div class="header-logged-out" style="position: fixed; top: 0; height: 50px;"></div>
<div style="height: 2000px; margin-top: 50px;"></div>
<div id="user-content-test" style="height: 2000px;"></div>
</div>
diff --git a/spec/frontend/lib/utils/datetime/date_format_utility_spec.js b/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
index 65018fe1625..79b09654f00 100644
--- a/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
@@ -122,12 +122,12 @@ describe('date_format_utility.js', () => {
describe('formatTimeAsSummary', () => {
it.each`
unit | value | result
- ${'months'} | ${1.5} | ${'1.5M'}
- ${'weeks'} | ${1.25} | ${'1.5w'}
- ${'days'} | ${2} | ${'2d'}
- ${'hours'} | ${10} | ${'10h'}
- ${'minutes'} | ${20} | ${'20m'}
- ${'seconds'} | ${10} | ${'<1m'}
+ ${'months'} | ${1.5} | ${'1.5 months'}
+ ${'weeks'} | ${1.25} | ${'1.5 weeks'}
+ ${'days'} | ${2} | ${'2 days'}
+ ${'hours'} | ${10} | ${'10 hours'}
+ ${'minutes'} | ${20} | ${'20 minutes'}
+ ${'seconds'} | ${10} | ${'<1 minute'}
${'seconds'} | ${0} | ${'-'}
`('will format $value $unit to $result', ({ unit, value, result }) => {
expect(utils.formatTimeAsSummary({ [unit]: value })).toBe(result);
diff --git a/spec/frontend/lib/utils/datetime/locale_dateformat_spec.js b/spec/frontend/lib/utils/datetime/locale_dateformat_spec.js
new file mode 100644
index 00000000000..3200f0cc7d7
--- /dev/null
+++ b/spec/frontend/lib/utils/datetime/locale_dateformat_spec.js
@@ -0,0 +1,177 @@
+import { DATE_TIME_FORMATS, localeDateFormat } from '~/lib/utils/datetime/locale_dateformat';
+import { setLanguage } from 'jest/__helpers__/locale_helper';
+import * as localeFns from '~/locale';
+
+describe('localeDateFormat (en-US)', () => {
+ const date = new Date('1983-07-09T14:15:23.123Z');
+ const sameDay = new Date('1983-07-09T18:27:09.198Z');
+ const sameMonth = new Date('1983-07-12T12:36:02.654Z');
+ const nextYear = new Date('1984-01-10T07:47:54.947Z');
+
+ beforeEach(() => {
+ setLanguage('en-US');
+ localeDateFormat.reset();
+ });
+
+ /*
+ Depending on the ICU/Intl version, formatted strings might contain
+ characters which aren't a normal space, e.g. U+2009 THIN SPACE in formatRange or
+ U+202F NARROW NO-BREAK SPACE between time and AM/PM.
+
+ In order for the specs to be more portable and easier to read, as git/gitlab aren't
+ great at rendering these other spaces, we replace them U+0020 SPACE
+ */
+ function expectDateString(str) {
+ // eslint-disable-next-line jest/valid-expect
+ return expect(str.replace(/[\s\u2009]+/g, ' '));
+ }
+
+ describe('#asDateTime', () => {
+ it('exposes a working date formatter', () => {
+ expectDateString(localeDateFormat.asDateTime.format(date)).toBe('Jul 9, 1983, 2:15 PM');
+ expectDateString(localeDateFormat.asDateTime.format(nextYear)).toBe('Jan 10, 1984, 7:47 AM');
+ });
+
+ it('exposes a working date range formatter', () => {
+ expectDateString(localeDateFormat.asDateTime.formatRange(date, nextYear)).toBe(
+ 'Jul 9, 1983, 2:15 PM – Jan 10, 1984, 7:47 AM',
+ );
+ expectDateString(localeDateFormat.asDateTime.formatRange(date, sameMonth)).toBe(
+ 'Jul 9, 1983, 2:15 PM – Jul 12, 1983, 12:36 PM',
+ );
+ expectDateString(localeDateFormat.asDateTime.formatRange(date, sameDay)).toBe(
+ 'Jul 9, 1983, 2:15 – 6:27 PM',
+ );
+ });
+
+ it.each([
+ ['automatic', 0, '2:15 PM'],
+ ['h12 preference', 1, '2:15 PM'],
+ ['h24 preference', 2, '14:15'],
+ ])("respects user's hourCycle preference: %s", (_, timeDisplayFormat, result) => {
+ window.gon.time_display_format = timeDisplayFormat;
+ expectDateString(localeDateFormat.asDateTime.format(date)).toContain(result);
+ expectDateString(localeDateFormat.asDateTime.formatRange(date, nextYear)).toContain(result);
+ });
+ });
+
+ describe('#asDateTimeFull', () => {
+ it('exposes a working date formatter', () => {
+ expectDateString(localeDateFormat.asDateTimeFull.format(date)).toBe(
+ 'July 9, 1983 at 2:15:23 PM GMT',
+ );
+ expectDateString(localeDateFormat.asDateTimeFull.format(nextYear)).toBe(
+ 'January 10, 1984 at 7:47:54 AM GMT',
+ );
+ });
+
+ it('exposes a working date range formatter', () => {
+ expectDateString(localeDateFormat.asDateTimeFull.formatRange(date, nextYear)).toBe(
+ 'July 9, 1983 at 2:15:23 PM GMT – January 10, 1984 at 7:47:54 AM GMT',
+ );
+ expectDateString(localeDateFormat.asDateTimeFull.formatRange(date, sameMonth)).toBe(
+ 'July 9, 1983 at 2:15:23 PM GMT – July 12, 1983 at 12:36:02 PM GMT',
+ );
+ expectDateString(localeDateFormat.asDateTimeFull.formatRange(date, sameDay)).toBe(
+ 'July 9, 1983, 2:15:23 PM GMT – 6:27:09 PM GMT',
+ );
+ });
+
+ it.each([
+ ['automatic', 0, '2:15:23 PM'],
+ ['h12 preference', 1, '2:15:23 PM'],
+ ['h24 preference', 2, '14:15:23'],
+ ])("respects user's hourCycle preference: %s", (_, timeDisplayFormat, result) => {
+ window.gon.time_display_format = timeDisplayFormat;
+ expectDateString(localeDateFormat.asDateTimeFull.format(date)).toContain(result);
+ expectDateString(localeDateFormat.asDateTimeFull.formatRange(date, nextYear)).toContain(
+ result,
+ );
+ });
+ });
+
+ describe('#asDate', () => {
+ it('exposes a working date formatter', () => {
+ expectDateString(localeDateFormat.asDate.format(date)).toBe('Jul 9, 1983');
+ expectDateString(localeDateFormat.asDate.format(nextYear)).toBe('Jan 10, 1984');
+ });
+
+ it('exposes a working date range formatter', () => {
+ expectDateString(localeDateFormat.asDate.formatRange(date, nextYear)).toBe(
+ 'Jul 9, 1983 – Jan 10, 1984',
+ );
+ expectDateString(localeDateFormat.asDate.formatRange(date, sameMonth)).toBe(
+ 'Jul 9 – 12, 1983',
+ );
+ expectDateString(localeDateFormat.asDate.formatRange(date, sameDay)).toBe('Jul 9, 1983');
+ });
+ });
+
+ describe('#asTime', () => {
+ it('exposes a working date formatter', () => {
+ expectDateString(localeDateFormat.asTime.format(date)).toBe('2:15 PM');
+ expectDateString(localeDateFormat.asTime.format(nextYear)).toBe('7:47 AM');
+ });
+
+ it('exposes a working date range formatter', () => {
+ expectDateString(localeDateFormat.asTime.formatRange(date, nextYear)).toBe(
+ '7/9/1983, 2:15 PM – 1/10/1984, 7:47 AM',
+ );
+ expectDateString(localeDateFormat.asTime.formatRange(date, sameMonth)).toBe(
+ '7/9/1983, 2:15 PM – 7/12/1983, 12:36 PM',
+ );
+ expectDateString(localeDateFormat.asTime.formatRange(date, sameDay)).toBe('2:15 – 6:27 PM');
+ });
+
+ it.each([
+ ['automatic', 0, '2:15 PM'],
+ ['h12 preference', 1, '2:15 PM'],
+ ['h24 preference', 2, '14:15'],
+ ])("respects user's hourCycle preference: %s", (_, timeDisplayFormat, result) => {
+ window.gon.time_display_format = timeDisplayFormat;
+ expectDateString(localeDateFormat.asTime.format(date)).toContain(result);
+ expectDateString(localeDateFormat.asTime.formatRange(date, nextYear)).toContain(result);
+ });
+ });
+
+ describe('#reset', () => {
+ it('removes the cached formatters', () => {
+ const spy = jest.spyOn(localeFns, 'createDateTimeFormat');
+
+ localeDateFormat.asDate.format(date);
+ localeDateFormat.asDate.format(date);
+ expect(spy).toHaveBeenCalledTimes(1);
+
+ localeDateFormat.reset();
+
+ localeDateFormat.asDate.format(date);
+ localeDateFormat.asDate.format(date);
+ expect(spy).toHaveBeenCalledTimes(2);
+ });
+ });
+
+ describe.each(DATE_TIME_FORMATS)('formatter for %p', (format) => {
+ it('is defined', () => {
+ expect(localeDateFormat[format]).toBeDefined();
+ expect(localeDateFormat[format].format(date)).toBeDefined();
+ expect(localeDateFormat[format].formatRange(date, nextYear)).toBeDefined();
+ });
+
+ it('getting the formatter multiple times, just calls the Intl API once', () => {
+ const spy = jest.spyOn(localeFns, 'createDateTimeFormat');
+
+ localeDateFormat[format].format(date);
+ localeDateFormat[format].format(date);
+
+ expect(spy).toHaveBeenCalledTimes(1);
+ });
+
+ it('getting the formatter memoized the correct formatter', () => {
+ const spy = jest.spyOn(localeFns, 'createDateTimeFormat');
+
+ expect(localeDateFormat[format].format(date)).toBe(localeDateFormat[format].format(date));
+
+ expect(spy).toHaveBeenCalledTimes(1);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/datetime/timeago_utility_spec.js b/spec/frontend/lib/utils/datetime/timeago_utility_spec.js
index 44db4cf88a2..53ed524116e 100644
--- a/spec/frontend/lib/utils/datetime/timeago_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime/timeago_utility_spec.js
@@ -1,5 +1,6 @@
-import { DATE_ONLY_FORMAT } from '~/lib/utils/datetime/constants';
import { getTimeago, localTimeAgo, timeFor, duration } from '~/lib/utils/datetime/timeago_utility';
+import { DATE_ONLY_FORMAT, localeDateFormat } from '~/lib/utils/datetime/locale_dateformat';
+
import { s__ } from '~/locale';
import '~/commons/bootstrap';
@@ -143,7 +144,7 @@ describe('TimeAgo utils', () => {
it.each`
updateTooltip | title
${false} | ${'some time'}
- ${true} | ${'Feb 18, 2020 10:22pm UTC'}
+ ${true} | ${'February 18, 2020 at 10:22:32 PM GMT'}
`(
`has content: '${text}' and tooltip: '$title' with updateTooltip = $updateTooltip`,
({ updateTooltip, title }) => {
@@ -168,6 +169,7 @@ describe('TimeAgo utils', () => {
${1} | ${'12-hour'} | ${'Feb 18, 2020, 10:22 PM'}
${2} | ${'24-hour'} | ${'Feb 18, 2020, 22:22'}
`(`'$display' renders as '$text'`, ({ timeDisplayFormat, text }) => {
+ localeDateFormat.reset();
gon.time_display_relative = false;
gon.time_display_format = timeDisplayFormat;
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index 330bfca7029..73a4af2c85d 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -800,6 +800,21 @@ describe('date addition/subtraction methods', () => {
);
});
+ describe('nYearsBefore', () => {
+ it.each`
+ date | numberOfYears | expected
+ ${'2020-07-06'} | ${4} | ${'2016-07-06'}
+ ${'2020-07-06'} | ${1} | ${'2019-07-06'}
+ `(
+ 'returns $expected for "$numberOfYears year(s) before $date"',
+ ({ date, numberOfYears, expected }) => {
+ expect(datetimeUtility.nYearsBefore(new Date(date), numberOfYears)).toEqual(
+ new Date(expected),
+ );
+ },
+ );
+ });
+
describe('nMonthsBefore', () => {
// The previous month (February) has 28 days
const march2019 = '2019-03-15T00:00:00.000Z';
diff --git a/spec/frontend/lib/utils/secret_detection_spec.js b/spec/frontend/lib/utils/secret_detection_spec.js
index 761062f0340..a8da6e8969f 100644
--- a/spec/frontend/lib/utils/secret_detection_spec.js
+++ b/spec/frontend/lib/utils/secret_detection_spec.js
@@ -31,6 +31,7 @@ describe('containsSensitiveToken', () => {
'token: gloas-a8cc74ccb0de004d09a968705ba49099229b288b3de43f26c473a9d8d7fb7693',
'https://example.com/feed?feed_token=123456789_abcdefghij',
'glpat-1234567890 and feed_token=ABCDEFGHIJKLMNOPQRSTUVWXYZ',
+ 'token: gldt-cgyKc1k_AsnEpmP-5fRL',
];
it.each(sensitiveMessages)('returns true for message: %s', (message) => {
diff --git a/spec/frontend/lib/utils/vuex_module_mappers_spec.js b/spec/frontend/lib/utils/vuex_module_mappers_spec.js
deleted file mode 100644
index 9070903728b..00000000000
--- a/spec/frontend/lib/utils/vuex_module_mappers_spec.js
+++ /dev/null
@@ -1,133 +0,0 @@
-import { mount } from '@vue/test-utils';
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import {
- mapVuexModuleActions,
- mapVuexModuleGetters,
- mapVuexModuleState,
- REQUIRE_STRING_ERROR_MESSAGE,
-} from '~/lib/utils/vuex_module_mappers';
-
-const TEST_MODULE_NAME = 'testModuleName';
-
-Vue.use(Vuex);
-
-// setup test component and store ----------------------------------------------
-//
-// These are used to indirectly test `vuex_module_mappers`.
-const TestComponent = {
- props: {
- vuexModule: {
- type: String,
- required: true,
- },
- },
- computed: {
- ...mapVuexModuleState((vm) => vm.vuexModule, { name: 'name', value: 'count' }),
- ...mapVuexModuleGetters((vm) => vm.vuexModule, ['hasValue', 'hasName']),
- stateJson() {
- return JSON.stringify({
- name: this.name,
- value: this.value,
- });
- },
- gettersJson() {
- return JSON.stringify({
- hasValue: this.hasValue,
- hasName: this.hasName,
- });
- },
- },
- methods: {
- ...mapVuexModuleActions((vm) => vm.vuexModule, ['increment']),
- },
- template: `
-<div>
- <pre data-testid="state">{{ stateJson }}</pre>
- <pre data-testid="getters">{{ gettersJson }}</pre>
-</div>`,
-};
-
-const createTestStore = () => {
- return new Vuex.Store({
- modules: {
- [TEST_MODULE_NAME]: {
- namespaced: true,
- state: {
- name: 'Lorem',
- count: 0,
- },
- mutations: {
- INCREMENT: (state, amount) => {
- state.count += amount;
- },
- },
- actions: {
- increment({ commit }, amount) {
- commit('INCREMENT', amount);
- },
- },
- getters: {
- hasValue: (state) => state.count > 0,
- hasName: (state) => Boolean(state.name.length),
- },
- },
- },
- });
-};
-
-describe('~/lib/utils/vuex_module_mappers', () => {
- let store;
- let wrapper;
-
- const getJsonInTemplate = (testId) =>
- JSON.parse(wrapper.find(`[data-testid="${testId}"]`).text());
- const getMappedState = () => getJsonInTemplate('state');
- const getMappedGetters = () => getJsonInTemplate('getters');
-
- beforeEach(() => {
- store = createTestStore();
-
- wrapper = mount(TestComponent, {
- propsData: {
- vuexModule: TEST_MODULE_NAME,
- },
- store,
- });
- });
-
- describe('from module defined by prop', () => {
- it('maps state', () => {
- expect(getMappedState()).toEqual({
- name: store.state[TEST_MODULE_NAME].name,
- value: store.state[TEST_MODULE_NAME].count,
- });
- });
-
- it('maps getters', () => {
- expect(getMappedGetters()).toEqual({
- hasName: true,
- hasValue: false,
- });
- });
-
- it('maps action', () => {
- jest.spyOn(store, 'dispatch');
-
- expect(store.dispatch).not.toHaveBeenCalled();
-
- wrapper.vm.increment(10);
-
- expect(store.dispatch).toHaveBeenCalledWith(`${TEST_MODULE_NAME}/increment`, 10);
- });
- });
-
- describe('with non-string object value', () => {
- it('throws helpful error', () => {
- expect(() => mapVuexModuleActions((vm) => vm.bogus, { foo: () => {} })).toThrow(
- REQUIRE_STRING_ERROR_MESSAGE,
- );
- });
- });
-});
diff --git a/spec/frontend/loading_icon_for_legacy_js_spec.js b/spec/frontend/loading_icon_for_legacy_js_spec.js
index 46deee555ba..1e4acffdfd0 100644
--- a/spec/frontend/loading_icon_for_legacy_js_spec.js
+++ b/spec/frontend/loading_icon_for_legacy_js_spec.js
@@ -8,7 +8,7 @@ describe('loadingIconForLegacyJS', () => {
expect(el.className).toBe('gl-spinner-container');
expect(el.querySelector('.gl-spinner-sm')).toEqual(expect.any(HTMLElement));
expect(el.querySelector('.gl-spinner-dark')).toEqual(expect.any(HTMLElement));
- expect(el.querySelector('[aria-label="Loading"]')).toEqual(expect.any(HTMLElement));
+ expect(el.getAttribute('aria-label')).toEqual('Loading');
expect(el.getAttribute('role')).toBe('status');
});
@@ -31,7 +31,7 @@ describe('loadingIconForLegacyJS', () => {
it('can render a different aria-label', () => {
const el = loadingIconForLegacyJS({ label: 'Foo' });
- expect(el.querySelector('[aria-label="Foo"]')).toEqual(expect.any(HTMLElement));
+ expect(el.getAttribute('aria-label')).toEqual('Foo');
});
it('can render additional classes', () => {
diff --git a/spec/frontend/logo_spec.js b/spec/frontend/logo_spec.js
new file mode 100644
index 00000000000..8e39e75bd3b
--- /dev/null
+++ b/spec/frontend/logo_spec.js
@@ -0,0 +1,55 @@
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { initPortraitLogoDetection } from '~/logo';
+
+describe('initPortraitLogoDetection', () => {
+ let img;
+
+ const loadImage = () => {
+ const loadEvent = new Event('load');
+ img.dispatchEvent(loadEvent);
+ };
+
+ beforeEach(() => {
+ setHTMLFixture('<img class="gl-visibility-hidden gl-h-9 js-portrait-logo-detection" />');
+ initPortraitLogoDetection();
+ img = document.querySelector('img');
+ });
+
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
+ describe('when logo does not have portrait format', () => {
+ beforeEach(() => {
+ img.height = 10;
+ img.width = 10;
+ });
+
+ it('removes gl-visibility-hidden', () => {
+ expect(img.classList).toContain('gl-visibility-hidden');
+ expect(img.classList).toContain('gl-h-9');
+
+ loadImage();
+
+ expect(img.classList).not.toContain('gl-visibility-hidden');
+ expect(img.classList).toContain('gl-h-9');
+ });
+ });
+
+ describe('when logo has portrait format', () => {
+ beforeEach(() => {
+ img.height = 11;
+ img.width = 10;
+ });
+
+ it('removes gl-visibility-hidden', () => {
+ expect(img.classList).toContain('gl-visibility-hidden');
+ expect(img.classList).toContain('gl-h-9');
+
+ loadImage();
+
+ expect(img.classList).not.toContain('gl-visibility-hidden');
+ expect(img.classList).toContain('gl-w-10');
+ });
+ });
+});
diff --git a/spec/frontend/members/components/table/role_dropdown_spec.js b/spec/frontend/members/components/table/max_role_spec.js
index 62275a05dc5..75e1e05afb1 100644
--- a/spec/frontend/members/components/table/role_dropdown_spec.js
+++ b/spec/frontend/members/components/table/max_role_spec.js
@@ -1,4 +1,4 @@
-import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
+import { GlBadge, GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
@@ -6,16 +6,19 @@ import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import waitForPromises from 'helpers/wait_for_promises';
-import RoleDropdown from '~/members/components/table/role_dropdown.vue';
+import MaxRole from '~/members/components/table/max_role.vue';
import { MEMBER_TYPES } from '~/members/constants';
import { guestOverageConfirmAction } from 'ee_else_ce/members/guest_overage_confirm_action';
import { member } from '../../mock_data';
Vue.use(Vuex);
+
jest.mock('ee_else_ce/members/guest_overage_confirm_action');
jest.mock('~/sentry/sentry_browser_wrapper');
-describe('RoleDropdown', () => {
+guestOverageConfirmAction.mockReturnValue(true);
+
+describe('MaxRole', () => {
let wrapper;
let actions;
const $toast = {
@@ -35,7 +38,7 @@ describe('RoleDropdown', () => {
};
const createComponent = (propsData = {}, store = createStore()) => {
- wrapper = mount(RoleDropdown, {
+ wrapper = mount(MaxRole, {
provide: {
namespace: MEMBER_TYPES.user,
group: {
@@ -45,7 +48,9 @@ describe('RoleDropdown', () => {
},
propsData: {
member,
- permissions: {},
+ permissions: {
+ canUpdate: true,
+ },
...propsData,
},
store,
@@ -55,6 +60,7 @@ describe('RoleDropdown', () => {
});
};
+ const findBadge = () => wrapper.findComponent(GlBadge);
const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const findListboxItems = () => wrapper.findAllComponents(GlListboxItem);
const findListboxItemByText = (text) =>
@@ -64,6 +70,18 @@ describe('RoleDropdown', () => {
gon.features = { showOverageOnRolePromotion: true };
});
+ describe('when member can not be updated', () => {
+ it('renders a badge instead of a collapsible listbox', () => {
+ createComponent({
+ permissions: {
+ canUpdate: false,
+ },
+ });
+
+ expect(findBadge().text()).toBe('Owner');
+ });
+ });
+
it('has correct header text props', () => {
createComponent();
expect(findListbox().props('headerText')).toBe('Change role');
@@ -77,14 +95,12 @@ describe('RoleDropdown', () => {
describe('when listbox is open', () => {
beforeEach(async () => {
- guestOverageConfirmAction.mockReturnValue(true);
createComponent();
await findListbox().vm.$emit('click');
});
it('sets dropdown toggle and checks selected role', () => {
- expect(findListbox().props('toggleText')).toBe('Owner');
expect(findListbox().find('[aria-selected=true]').text()).toBe('Owner');
});
@@ -100,7 +116,8 @@ describe('RoleDropdown', () => {
expect(actions.updateMemberRole).toHaveBeenCalledWith(expect.any(Object), {
memberId: member.id,
- accessLevel: { integerValue: 30, memberRoleId: null },
+ accessLevel: 30,
+ memberRoleId: null,
});
});
@@ -108,7 +125,7 @@ describe('RoleDropdown', () => {
it('displays toast', async () => {
await findListboxItemByText('Developer').trigger('click');
- await nextTick();
+ await waitForPromises();
expect($toast.show).toHaveBeenCalledWith('Role updated successfully.');
});
@@ -146,7 +163,7 @@ describe('RoleDropdown', () => {
it('does not display toast', async () => {
await findListboxItemByText('Developer').trigger('click');
- await nextTick();
+ await waitForPromises();
expect($toast.show).not.toHaveBeenCalled();
});
@@ -176,12 +193,6 @@ describe('RoleDropdown', () => {
});
});
- it("sets initial dropdown toggle value to member's role", () => {
- createComponent();
-
- expect(findListbox().props('toggleText')).toBe('Owner');
- });
-
it('sets the dropdown alignment to right on mobile', async () => {
jest.spyOn(bp, 'isDesktop').mockReturnValue(false);
createComponent();
@@ -199,54 +210,4 @@ describe('RoleDropdown', () => {
expect(findListbox().props('placement')).toBe('left');
});
-
- describe('guestOverageConfirmAction', () => {
- const mockConfirmAction = ({ confirmed }) => {
- guestOverageConfirmAction.mockResolvedValueOnce(confirmed);
- };
-
- beforeEach(() => {
- createComponent();
-
- findListbox().vm.$emit('click');
- });
-
- afterEach(() => {
- guestOverageConfirmAction.mockReset();
- });
-
- describe('when guestOverageConfirmAction returns true', () => {
- beforeEach(() => {
- mockConfirmAction({ confirmed: true });
-
- findListboxItemByText('Reporter').trigger('click');
- });
-
- it('calls updateMemberRole', () => {
- expect(actions.updateMemberRole).toHaveBeenCalled();
- });
- });
-
- describe('when guestOverageConfirmAction returns false', () => {
- beforeEach(() => {
- mockConfirmAction({ confirmed: false });
-
- findListboxItemByText('Reporter').trigger('click');
- });
-
- it('does not call updateMemberRole', () => {
- expect(actions.updateMemberRole).not.toHaveBeenCalled();
- });
-
- it('re-enables dropdown', async () => {
- await waitForPromises();
-
- expect(findListbox().props('disabled')).toBe(false);
- });
-
- it('resets selected dropdown item', () => {
- expect(findListbox().props('selected')).toMatch(/role-static-\d+/);
- });
- });
- });
});
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 791155fcd1b..c2400fbc142 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -1,4 +1,4 @@
-import { GlBadge, GlPagination, GlTable } from '@gitlab/ui';
+import { GlPagination, GlTable } from '@gitlab/ui';
import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
@@ -11,7 +11,7 @@ import MemberAvatar from '~/members/components/table/member_avatar.vue';
import MemberSource from '~/members/components/table/member_source.vue';
import MemberActivity from '~/members/components/table/member_activity.vue';
import MembersTable from '~/members/components/table/members_table.vue';
-import RoleDropdown from '~/members/components/table/role_dropdown.vue';
+import MaxRole from '~/members/components/table/max_role.vue';
import {
MEMBER_TYPES,
MEMBER_STATE_CREATED,
@@ -74,7 +74,7 @@ describe('MembersTable', () => {
'member-source',
'created-at',
'member-actions',
- 'role-dropdown',
+ 'max-role',
'remove-group-link-modal',
'remove-member-modal',
'expiration-datepicker',
@@ -110,7 +110,7 @@ describe('MembersTable', () => {
${'source'} | ${'Source'} | ${memberMock} | ${MemberSource}
${'invited'} | ${'Invited'} | ${invite} | ${CreatedAt}
${'requested'} | ${'Requested'} | ${accessRequest} | ${CreatedAt}
- ${'maxRole'} | ${'Max role'} | ${memberCanUpdate} | ${RoleDropdown}
+ ${'maxRole'} | ${'Max role'} | ${memberCanUpdate} | ${MaxRole}
${'expiration'} | ${'Expiration'} | ${memberMock} | ${ExpirationDatepicker}
${'activity'} | ${'Activity'} | ${memberMock} | ${MemberActivity}
`('renders the $label field', ({ field, label, member, expectedComponent }) => {
@@ -274,16 +274,6 @@ describe('MembersTable', () => {
});
});
- describe('when member can not be updated', () => {
- it('renders badge in "Max role" field', () => {
- createComponent({ members: [memberMock], tableFields: ['maxRole'] });
-
- expect(
- wrapper.find(`[data-label="Max role"][role="cell"]`).findComponent(GlBadge).text(),
- ).toBe(memberMock.accessLevel.stringValue);
- });
- });
-
it('adds QA selector to table', () => {
createComponent();
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index e0dc765b9e4..f550039bfdc 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -51,6 +51,7 @@ export const member = {
'Minimal access': 5,
},
customRoles: [],
+ customPermissions: [],
};
export const group = {
diff --git a/spec/frontend/members/store/actions_spec.js b/spec/frontend/members/store/actions_spec.js
index 3df3d85c4f1..30ea83abf22 100644
--- a/spec/frontend/members/store/actions_spec.js
+++ b/spec/frontend/members/store/actions_spec.js
@@ -37,28 +37,21 @@ describe('Vuex members actions', () => {
describe('updateMemberRole', () => {
const memberId = members[0].id;
- const accessLevel = { integerValue: 30, memberRoleId: 90 };
+ const accessLevel = 30;
+ const memberRoleId = 90;
- const payload = {
- memberId,
- accessLevel,
- };
+ const payload = { memberId, accessLevel, memberRoleId };
describe('successful request', () => {
- it(`commits ${types.RECEIVE_MEMBER_ROLE_SUCCESS} mutation`, async () => {
+ it(`updates member role`, async () => {
mock.onPut().replyOnce(HTTP_STATUS_OK);
- await testAction(updateMemberRole, payload, state, [
- {
- type: types.RECEIVE_MEMBER_ROLE_SUCCESS,
- payload,
- },
- ]);
+ await testAction(updateMemberRole, payload, state, []);
expect(mock.history.put[0].url).toBe('/groups/foo-bar/-/group_members/238');
expect(mockedRequestFormatter).toHaveBeenCalledWith({
- accessLevel: accessLevel.integerValue,
- memberRoleId: accessLevel.memberRoleId,
+ accessLevel,
+ memberRoleId,
});
});
});
@@ -142,7 +135,7 @@ describe('Vuex members actions', () => {
describe('showRemoveGroupLinkModal', () => {
it(`commits ${types.SHOW_REMOVE_GROUP_LINK_MODAL} mutation`, () => {
- testAction(showRemoveGroupLinkModal, group, state, [
+ return testAction(showRemoveGroupLinkModal, group, state, [
{
type: types.SHOW_REMOVE_GROUP_LINK_MODAL,
payload: group,
@@ -153,7 +146,7 @@ describe('Vuex members actions', () => {
describe('hideRemoveGroupLinkModal', () => {
it(`commits ${types.HIDE_REMOVE_GROUP_LINK_MODAL} mutation`, () => {
- testAction(hideRemoveGroupLinkModal, group, state, [
+ return testAction(hideRemoveGroupLinkModal, group, state, [
{
type: types.HIDE_REMOVE_GROUP_LINK_MODAL,
},
@@ -170,7 +163,7 @@ describe('Vuex members actions', () => {
describe('showRemoveMemberModal', () => {
it(`commits ${types.SHOW_REMOVE_MEMBER_MODAL} mutation`, () => {
- testAction(showRemoveMemberModal, modalData, state, [
+ return testAction(showRemoveMemberModal, modalData, state, [
{
type: types.SHOW_REMOVE_MEMBER_MODAL,
payload: modalData,
@@ -181,7 +174,7 @@ describe('Vuex members actions', () => {
describe('hideRemoveMemberModal', () => {
it(`commits ${types.HIDE_REMOVE_MEMBER_MODAL} mutation`, () => {
- testAction(hideRemoveMemberModal, undefined, state, [
+ return testAction(hideRemoveMemberModal, undefined, state, [
{
type: types.HIDE_REMOVE_MEMBER_MODAL,
},
diff --git a/spec/frontend/members/store/mutations_spec.js b/spec/frontend/members/store/mutations_spec.js
index 8160cc373d8..240a14b2836 100644
--- a/spec/frontend/members/store/mutations_spec.js
+++ b/spec/frontend/members/store/mutations_spec.js
@@ -14,19 +14,6 @@ describe('Vuex members mutations', () => {
};
});
- describe(types.RECEIVE_MEMBER_ROLE_SUCCESS, () => {
- it('updates member', () => {
- const accessLevel = { integerValue: 30, stringValue: 'Developer' };
-
- mutations[types.RECEIVE_MEMBER_ROLE_SUCCESS](state, {
- memberId: members[0].id,
- accessLevel,
- });
-
- expect(state.members[0].accessLevel).toEqual(accessLevel);
- });
- });
-
describe(types.RECEIVE_MEMBER_ROLE_ERROR, () => {
describe('when error does not have a message', () => {
it('shows default error message', () => {
diff --git a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
index edd18c57f43..1a45ada98f9 100644
--- a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
+++ b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
@@ -6,6 +6,7 @@ import { shallowMountExtended, extendedWrapper } from 'helpers/vue_test_utils_he
import InlineConflictLines from '~/merge_conflicts/components/inline_conflict_lines.vue';
import ParallelConflictLines from '~/merge_conflicts/components/parallel_conflict_lines.vue';
import component from '~/merge_conflicts/merge_conflict_resolver_app.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { createStore } from '~/merge_conflicts/store';
import { decorateFiles } from '~/merge_conflicts/utils';
import { conflictsMock } from '../mock_data';
@@ -49,6 +50,7 @@ describe('Merge Conflict Resolver App', () => {
const findInlineConflictLines = (w = wrapper) => w.findComponent(InlineConflictLines);
const findParallelConflictLines = (w = wrapper) => w.findComponent(ParallelConflictLines);
const findCommitMessageTextarea = () => wrapper.findByTestId('commit-message');
+ const findClipboardButton = (w = wrapper) => w.findComponent(ClipboardButton);
it('shows the amount of conflicts', () => {
mountComponent();
@@ -131,6 +133,21 @@ describe('Merge Conflict Resolver App', () => {
expect(parallelConflictLinesComponent.props('file')).toEqual(decoratedMockFiles[0]);
});
});
+
+ describe('clipboard button', () => {
+ it('exists', () => {
+ mountComponent();
+ expect(findClipboardButton().exists()).toBe(true);
+ });
+
+ it('has the correct props', () => {
+ mountComponent();
+ expect(findClipboardButton().attributes()).toMatchObject({
+ text: decoratedMockFiles[0].filePath,
+ title: 'Copy file path',
+ });
+ });
+ });
});
describe('submit form', () => {
diff --git a/spec/frontend/merge_conflicts/store/actions_spec.js b/spec/frontend/merge_conflicts/store/actions_spec.js
index d2c4c8b796c..6fbd17af5af 100644
--- a/spec/frontend/merge_conflicts/store/actions_spec.js
+++ b/spec/frontend/merge_conflicts/store/actions_spec.js
@@ -134,7 +134,7 @@ describe('merge conflicts actions', () => {
describe('setLoadingState', () => {
it('commits the right mutation', () => {
- testAction(
+ return testAction(
actions.setLoadingState,
true,
{},
@@ -151,7 +151,7 @@ describe('merge conflicts actions', () => {
describe('setErrorState', () => {
it('commits the right mutation', () => {
- testAction(
+ return testAction(
actions.setErrorState,
true,
{},
@@ -168,7 +168,7 @@ describe('merge conflicts actions', () => {
describe('setFailedRequest', () => {
it('commits the right mutation', () => {
- testAction(
+ return testAction(
actions.setFailedRequest,
'errors in the request',
{},
@@ -207,7 +207,7 @@ describe('merge conflicts actions', () => {
describe('setSubmitState', () => {
it('commits the right mutation', () => {
- testAction(
+ return testAction(
actions.setSubmitState,
true,
{},
@@ -224,7 +224,7 @@ describe('merge conflicts actions', () => {
describe('updateCommitMessage', () => {
it('commits the right mutation', () => {
- testAction(
+ return testAction(
actions.updateCommitMessage,
'some message',
{},
diff --git a/spec/frontend/milestones/stores/actions_spec.js b/spec/frontend/milestones/stores/actions_spec.js
index 4355ea71fb2..4be12f17f9e 100644
--- a/spec/frontend/milestones/stores/actions_spec.js
+++ b/spec/frontend/milestones/stores/actions_spec.js
@@ -28,7 +28,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('setProjectId', () => {
it(`commits ${types.SET_PROJECT_ID} with the new project ID`, () => {
const projectId = '4';
- testAction(actions.setProjectId, projectId, state, [
+ return testAction(actions.setProjectId, projectId, state, [
{ type: types.SET_PROJECT_ID, payload: projectId },
]);
});
@@ -37,7 +37,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('setGroupId', () => {
it(`commits ${types.SET_GROUP_ID} with the new group ID`, () => {
const groupId = '123';
- testAction(actions.setGroupId, groupId, state, [
+ return testAction(actions.setGroupId, groupId, state, [
{ type: types.SET_GROUP_ID, payload: groupId },
]);
});
@@ -46,16 +46,19 @@ describe('Milestone combobox Vuex store actions', () => {
describe('setGroupMilestonesAvailable', () => {
it(`commits ${types.SET_GROUP_MILESTONES_AVAILABLE} with the boolean indicating if group milestones are available (Premium)`, () => {
state.groupMilestonesAvailable = true;
- testAction(actions.setGroupMilestonesAvailable, state.groupMilestonesAvailable, state, [
- { type: types.SET_GROUP_MILESTONES_AVAILABLE, payload: state.groupMilestonesAvailable },
- ]);
+ return testAction(
+ actions.setGroupMilestonesAvailable,
+ state.groupMilestonesAvailable,
+ state,
+ [{ type: types.SET_GROUP_MILESTONES_AVAILABLE, payload: state.groupMilestonesAvailable }],
+ );
});
});
describe('setSelectedMilestones', () => {
it(`commits ${types.SET_SELECTED_MILESTONES} with the new selected milestones name`, () => {
const selectedMilestones = ['v1.2.3'];
- testAction(actions.setSelectedMilestones, selectedMilestones, state, [
+ return testAction(actions.setSelectedMilestones, selectedMilestones, state, [
{ type: types.SET_SELECTED_MILESTONES, payload: selectedMilestones },
]);
});
@@ -63,7 +66,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('clearSelectedMilestones', () => {
it(`commits ${types.CLEAR_SELECTED_MILESTONES} with the new selected milestones name`, () => {
- testAction(actions.clearSelectedMilestones, null, state, [
+ return testAction(actions.clearSelectedMilestones, null, state, [
{ type: types.CLEAR_SELECTED_MILESTONES },
]);
});
@@ -72,14 +75,14 @@ describe('Milestone combobox Vuex store actions', () => {
describe('toggleMilestones', () => {
const selectedMilestone = 'v1.2.3';
it(`commits ${types.ADD_SELECTED_MILESTONE} with the new selected milestone name`, () => {
- testAction(actions.toggleMilestones, selectedMilestone, state, [
+ return testAction(actions.toggleMilestones, selectedMilestone, state, [
{ type: types.ADD_SELECTED_MILESTONE, payload: selectedMilestone },
]);
});
it(`commits ${types.REMOVE_SELECTED_MILESTONE} with the new selected milestone name`, () => {
state.selectedMilestones = [selectedMilestone];
- testAction(actions.toggleMilestones, selectedMilestone, state, [
+ return testAction(actions.toggleMilestones, selectedMilestone, state, [
{ type: types.REMOVE_SELECTED_MILESTONE, payload: selectedMilestone },
]);
});
@@ -93,7 +96,7 @@ describe('Milestone combobox Vuex store actions', () => {
};
const searchQuery = 'v1.0';
- testAction(
+ return testAction(
actions.search,
searchQuery,
{ ...state, ...getters },
@@ -106,7 +109,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('when project does not have license to add group milestones', () => {
it(`commits ${types.SET_SEARCH_QUERY} with the new search query to search for project milestones`, () => {
const searchQuery = 'v1.0';
- testAction(
+ return testAction(
actions.search,
searchQuery,
state,
@@ -192,7 +195,7 @@ describe('Milestone combobox Vuex store actions', () => {
groupMilestonesEnabled: () => true,
};
- testAction(
+ return testAction(
actions.fetchMilestones,
undefined,
{ ...state, ...getters },
@@ -204,7 +207,7 @@ describe('Milestone combobox Vuex store actions', () => {
describe('when project does not have license to add group milestones', () => {
it(`dispatchs fetchProjectMilestones`, () => {
- testAction(
+ return testAction(
actions.fetchMilestones,
undefined,
state,
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
index 296728af46a..3999e906cec 100644
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
+++ b/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
@@ -1,206 +1,39 @@
-import { GlAvatarLabeled, GlLink, GlTableLite } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMount } from '@vue/test-utils';
import MlCandidatesShow from '~/ml/experiment_tracking/routes/candidates/show';
-import DetailRow from '~/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row.vue';
-import {
- TITLE_LABEL,
- NO_PARAMETERS_MESSAGE,
- NO_METRICS_MESSAGE,
- NO_METADATA_MESSAGE,
- NO_CI_MESSAGE,
-} from '~/ml/experiment_tracking/routes/candidates/show/translations';
import DeleteButton from '~/ml/experiment_tracking/components/delete_button.vue';
+import CandidateDetail from '~/ml/model_registry/components/candidate_detail.vue';
import ModelExperimentsHeader from '~/ml/experiment_tracking/components/model_experiments_header.vue';
-import { stubComponent } from 'helpers/stub_component';
-import { newCandidate } from './mock_data';
+import { newCandidate } from 'jest/ml/model_registry/mock_data';
describe('MlCandidatesShow', () => {
let wrapper;
const CANDIDATE = newCandidate();
- const USER_ROW = 1;
- const INFO_SECTION = 0;
- const CI_SECTION = 1;
- const PARAMETER_SECTION = 2;
- const METADATA_SECTION = 3;
-
- const createWrapper = (createCandidate = () => CANDIDATE) => {
- wrapper = shallowMountExtended(MlCandidatesShow, {
- propsData: { candidate: createCandidate() },
- stubs: {
- GlTableLite: { ...stubComponent(GlTableLite), props: ['items', 'fields'] },
- },
+ const createWrapper = () => {
+ wrapper = shallowMount(MlCandidatesShow, {
+ propsData: { candidate: CANDIDATE },
});
};
const findDeleteButton = () => wrapper.findComponent(DeleteButton);
const findHeader = () => wrapper.findComponent(ModelExperimentsHeader);
- const findSection = (section) => wrapper.findAll('section').at(section);
- const findRowInSection = (section, row) =>
- findSection(section).findAllComponents(DetailRow).at(row);
- const findLinkAtRow = (section, rowIndex) =>
- findRowInSection(section, rowIndex).findComponent(GlLink);
- const findNoDataMessage = (label) => wrapper.findByText(label);
- const findLabel = (label) => wrapper.find(`[label='${label}']`);
- const findCiUserDetailRow = () => findRowInSection(CI_SECTION, USER_ROW);
- const findCiUserAvatar = () => findCiUserDetailRow().findComponent(GlAvatarLabeled);
- const findCiUserAvatarNameLink = () => findCiUserAvatar().findComponent(GlLink);
- const findMetricsTable = () => wrapper.findComponent(GlTableLite);
-
- describe('Header', () => {
- beforeEach(() => createWrapper());
-
- it('shows delete button', () => {
- expect(findDeleteButton().exists()).toBe(true);
- });
+ const findCandidateDetail = () => wrapper.findComponent(CandidateDetail);
- it('passes the delete path to delete button', () => {
- expect(findDeleteButton().props('deletePath')).toBe('path_to_candidate');
- });
+ beforeEach(() => createWrapper());
- it('passes the right title', () => {
- expect(findHeader().props('pageTitle')).toBe(TITLE_LABEL);
- });
+ it('shows delete button', () => {
+ expect(findDeleteButton().exists()).toBe(true);
});
- describe('Detail Table', () => {
- describe('All info available', () => {
- beforeEach(() => createWrapper());
-
- const mrText = `!${CANDIDATE.info.ci_job.merge_request.iid} ${CANDIDATE.info.ci_job.merge_request.title}`;
- const expectedTable = [
- [INFO_SECTION, 0, 'ID', CANDIDATE.info.iid],
- [INFO_SECTION, 1, 'MLflow run ID', CANDIDATE.info.eid],
- [INFO_SECTION, 2, 'Status', CANDIDATE.info.status],
- [INFO_SECTION, 3, 'Experiment', CANDIDATE.info.experiment_name],
- [INFO_SECTION, 4, 'Artifacts', 'Artifacts'],
- [CI_SECTION, 0, 'Job', CANDIDATE.info.ci_job.name],
- [CI_SECTION, 1, 'Triggered by', 'CI User'],
- [CI_SECTION, 2, 'Merge request', mrText],
- [PARAMETER_SECTION, 0, CANDIDATE.params[0].name, CANDIDATE.params[0].value],
- [PARAMETER_SECTION, 1, CANDIDATE.params[1].name, CANDIDATE.params[1].value],
- [METADATA_SECTION, 0, CANDIDATE.metadata[0].name, CANDIDATE.metadata[0].value],
- [METADATA_SECTION, 1, CANDIDATE.metadata[1].name, CANDIDATE.metadata[1].value],
- ];
-
- it.each(expectedTable)('row %s is created correctly', (section, rowIndex, label, text) => {
- const row = findRowInSection(section, rowIndex);
-
- expect(row.props()).toMatchObject({ label });
- expect(row.text()).toBe(text);
- });
-
- describe('Table links', () => {
- const linkRows = [
- [INFO_SECTION, 3, CANDIDATE.info.path_to_experiment],
- [INFO_SECTION, 4, CANDIDATE.info.path_to_artifact],
- [CI_SECTION, 0, CANDIDATE.info.ci_job.path],
- [CI_SECTION, 2, CANDIDATE.info.ci_job.merge_request.path],
- ];
-
- it.each(linkRows)('row %s is created correctly', (section, rowIndex, href) => {
- expect(findLinkAtRow(section, rowIndex).attributes().href).toBe(href);
- });
- });
-
- describe('Metrics table', () => {
- it('computes metrics table items correctly', () => {
- expect(findMetricsTable().props('items')).toEqual([
- { name: 'AUC', 0: '.55' },
- { name: 'Accuracy', 1: '.99', 2: '.98', 3: '.97' },
- { name: 'F1', 3: '.1' },
- ]);
- });
-
- it('computes metrics table fields correctly', () => {
- expect(findMetricsTable().props('fields')).toEqual([
- expect.objectContaining({ key: 'name', label: 'Metric' }),
- expect.objectContaining({ key: '0', label: 'Step 0' }),
- expect.objectContaining({ key: '1', label: 'Step 1' }),
- expect.objectContaining({ key: '2', label: 'Step 2' }),
- expect.objectContaining({ key: '3', label: 'Step 3' }),
- ]);
- });
- });
-
- describe('CI triggerer', () => {
- it('renders user row', () => {
- const avatar = findCiUserAvatar();
- expect(avatar.props()).toMatchObject({
- label: '',
- });
- expect(avatar.attributes().src).toEqual('/img.png');
- });
-
- it('renders user name', () => {
- const nameLink = findCiUserAvatarNameLink();
-
- expect(nameLink.attributes().href).toEqual('path/to/ci/user');
- expect(nameLink.text()).toEqual('CI User');
- });
- });
- });
-
- describe('No artifact path', () => {
- beforeEach(() =>
- createWrapper(() => {
- const candidate = newCandidate();
- delete candidate.info.path_to_artifact;
- return candidate;
- }),
- );
-
- it('does not render artifact row', () => {
- expect(findLabel('Artifacts').exists()).toBe(false);
- });
- });
-
- describe('No params, metrics, ci or metadata available', () => {
- beforeEach(() =>
- createWrapper(() => {
- const candidate = newCandidate();
- delete candidate.params;
- delete candidate.metrics;
- delete candidate.metadata;
- delete candidate.info.ci_job;
- return candidate;
- }),
- );
-
- it('does not render params', () => {
- expect(findNoDataMessage(NO_PARAMETERS_MESSAGE).exists()).toBe(true);
- });
-
- it('does not render metadata', () => {
- expect(findNoDataMessage(NO_METADATA_MESSAGE).exists()).toBe(true);
- });
-
- it('does not render metrics', () => {
- expect(findNoDataMessage(NO_METRICS_MESSAGE).exists()).toBe(true);
- });
-
- it('does not render CI info', () => {
- expect(findNoDataMessage(NO_CI_MESSAGE).exists()).toBe(true);
- });
- });
-
- describe('Has CI, but no user or mr', () => {
- beforeEach(() =>
- createWrapper(() => {
- const candidate = newCandidate();
- delete candidate.info.ci_job.user;
- delete candidate.info.ci_job.merge_request;
- return candidate;
- }),
- );
+ it('passes the delete path to delete button', () => {
+ expect(findDeleteButton().props('deletePath')).toBe('path_to_candidate');
+ });
- it('does not render MR info', () => {
- expect(findLabel('Merge request').exists()).toBe(false);
- });
+ it('passes the right title', () => {
+ expect(findHeader().props('pageTitle')).toBe('Model candidate details');
+ });
- it('does not render CI user info', () => {
- expect(findLabel('Triggered by').exists()).toBe(false);
- });
- });
+ it('creates the candidate detail section', () => {
+ expect(findCandidateDetail().props('candidate')).toBe(CANDIDATE);
});
});
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js
deleted file mode 100644
index 4ea23ed2513..00000000000
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js
+++ /dev/null
@@ -1,41 +0,0 @@
-export const newCandidate = () => ({
- params: [
- { name: 'Algorithm', value: 'Decision Tree' },
- { name: 'MaxDepth', value: '3' },
- ],
- metrics: [
- { name: 'AUC', value: '.55', step: 0 },
- { name: 'Accuracy', value: '.99', step: 1 },
- { name: 'Accuracy', value: '.98', step: 2 },
- { name: 'Accuracy', value: '.97', step: 3 },
- { name: 'F1', value: '.1', step: 3 },
- ],
- metadata: [
- { name: 'FileName', value: 'test.py' },
- { name: 'ExecutionTime', value: '.0856' },
- ],
- info: {
- iid: 'candidate_iid',
- eid: 'abcdefg',
- path_to_artifact: 'path_to_artifact',
- experiment_name: 'The Experiment',
- path_to_experiment: 'path/to/experiment',
- status: 'SUCCESS',
- path: 'path_to_candidate',
- ci_job: {
- name: 'test',
- path: 'path/to/job',
- merge_request: {
- path: 'path/to/mr',
- iid: 1,
- title: 'Some MR',
- },
- user: {
- path: 'path/to/ci/user',
- name: 'CI User',
- username: 'ciuser',
- avatar: '/img.png',
- },
- },
- },
-});
diff --git a/spec/frontend/ml/model_registry/apps/index_ml_models_spec.js b/spec/frontend/ml/model_registry/apps/index_ml_models_spec.js
index 6e0ab2ebe2d..66a447e73d3 100644
--- a/spec/frontend/ml/model_registry/apps/index_ml_models_spec.js
+++ b/spec/frontend/ml/model_registry/apps/index_ml_models_spec.js
@@ -1,12 +1,13 @@
+import { GlBadge } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { IndexMlModels } from '~/ml/model_registry/apps';
import ModelRow from '~/ml/model_registry/components/model_row.vue';
-import { TITLE_LABEL, NO_MODELS_LABEL } from '~/ml/model_registry/translations';
import Pagination from '~/vue_shared/components/incubation/pagination.vue';
import SearchBar from '~/ml/model_registry/components/search_bar.vue';
-import { BASE_SORT_FIELDS } from '~/ml/model_registry/constants';
+import { BASE_SORT_FIELDS, MODEL_ENTITIES } from '~/ml/model_registry/constants';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import MetadataItem from '~/vue_shared/components/registry/metadata_item.vue';
+import EmptyState from '~/ml/model_registry/components/empty_state.vue';
import { mockModels, startCursor, defaultPageInfo } from '../mock_data';
let wrapper;
@@ -18,17 +19,18 @@ const createWrapper = (
const findModelRow = (index) => wrapper.findAllComponents(ModelRow).at(index);
const findPagination = () => wrapper.findComponent(Pagination);
-const findEmptyLabel = () => wrapper.findByText(NO_MODELS_LABEL);
+const findEmptyState = () => wrapper.findComponent(EmptyState);
const findSearchBar = () => wrapper.findComponent(SearchBar);
const findTitleArea = () => wrapper.findComponent(TitleArea);
const findModelCountMetadataItem = () => findTitleArea().findComponent(MetadataItem);
+const findBadge = () => wrapper.findComponent(GlBadge);
describe('MlModelsIndex', () => {
describe('empty state', () => {
beforeEach(() => createWrapper({ models: [], pageInfo: defaultPageInfo }));
- it('displays empty state when no experiment', () => {
- expect(findEmptyLabel().exists()).toBe(true);
+ it('shows empty state', () => {
+ expect(findEmptyState().props('entityType')).toBe(MODEL_ENTITIES.model);
});
it('does not show pagination', () => {
@@ -46,12 +48,16 @@ describe('MlModelsIndex', () => {
});
it('does not show empty state', () => {
- expect(findEmptyLabel().exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(false);
});
describe('header', () => {
it('displays the title', () => {
- expect(findTitleArea().props('title')).toBe(TITLE_LABEL);
+ expect(findTitleArea().text()).toContain('Model registry');
+ });
+
+ it('displays the experiment badge', () => {
+ expect(findBadge().attributes().href).toBe('/help/user/project/ml/model_registry/index.md');
});
it('sets model metadata item to model count', () => {
diff --git a/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js b/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
index bc4770976a9..1fe0f5f88b3 100644
--- a/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
+++ b/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
@@ -1,22 +1,41 @@
import { GlBadge, GlTab } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
import { ShowMlModel } from '~/ml/model_registry/apps';
+import ModelVersionList from '~/ml/model_registry/components/model_version_list.vue';
+import CandidateList from '~/ml/model_registry/components/candidate_list.vue';
+import ModelVersionDetail from '~/ml/model_registry/components/model_version_detail.vue';
+import EmptyState from '~/ml/model_registry/components/empty_state.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import MetadataItem from '~/vue_shared/components/registry/metadata_item.vue';
-import { NO_VERSIONS_LABEL } from '~/ml/model_registry/translations';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { MODEL_ENTITIES } from '~/ml/model_registry/constants';
import { MODEL, makeModel } from '../mock_data';
+const apolloProvider = createMockApollo([]);
let wrapper;
+
+Vue.use(VueApollo);
+
const createWrapper = (model = MODEL) => {
- wrapper = shallowMount(ShowMlModel, { propsData: { model } });
+ wrapper = shallowMount(ShowMlModel, {
+ apolloProvider,
+ propsData: { model },
+ stubs: { GlTab },
+ });
};
const findDetailTab = () => wrapper.findAllComponents(GlTab).at(0);
const findVersionsTab = () => wrapper.findAllComponents(GlTab).at(1);
const findVersionsCountBadge = () => findVersionsTab().findComponent(GlBadge);
+const findModelVersionList = () => findVersionsTab().findComponent(ModelVersionList);
+const findModelVersionDetail = () => findDetailTab().findComponent(ModelVersionDetail);
const findCandidateTab = () => wrapper.findAllComponents(GlTab).at(2);
+const findCandidateList = () => findCandidateTab().findComponent(CandidateList);
const findCandidatesCountBadge = () => findCandidateTab().findComponent(GlBadge);
const findTitleArea = () => wrapper.findComponent(TitleArea);
+const findEmptyState = () => wrapper.findComponent(EmptyState);
const findVersionCountMetadataItem = () => findTitleArea().findComponent(MetadataItem);
describe('ShowMlModel', () => {
@@ -45,7 +64,11 @@ describe('ShowMlModel', () => {
describe('when it has latest version', () => {
it('displays the version', () => {
- expect(findDetailTab().text()).toContain(MODEL.latestVersion.version);
+ expect(findModelVersionDetail().props('modelVersion')).toBe(MODEL.latestVersion);
+ });
+
+ it('displays the title', () => {
+ expect(findDetailTab().text()).toContain('Latest version: 1.2.3');
});
});
@@ -54,8 +77,12 @@ describe('ShowMlModel', () => {
createWrapper(makeModel({ latestVersion: null }));
});
- it('shows no version message', () => {
- expect(findDetailTab().text()).toContain(NO_VERSIONS_LABEL);
+ it('shows empty state', () => {
+ expect(findEmptyState().props('entityType')).toBe(MODEL_ENTITIES.modelVersion);
+ });
+
+ it('does not render model version detail', () => {
+ expect(findModelVersionDetail().exists()).toBe(false);
});
});
});
@@ -66,6 +93,10 @@ describe('ShowMlModel', () => {
it('shows the number of versions in the tab', () => {
expect(findVersionsCountBadge().text()).toBe(MODEL.versionCount.toString());
});
+
+ it('shows a list of model versions', () => {
+ expect(findModelVersionList().props('modelId')).toBe(MODEL.id);
+ });
});
describe('Candidates tab', () => {
@@ -74,5 +105,9 @@ describe('ShowMlModel', () => {
it('shows the number of candidates in the tab', () => {
expect(findCandidatesCountBadge().text()).toBe(MODEL.candidateCount.toString());
});
+
+ it('shows a list of candidates', () => {
+ expect(findCandidateList().props('modelId')).toBe(MODEL.id);
+ });
});
});
diff --git a/spec/frontend/ml/model_registry/apps/show_ml_model_version_spec.js b/spec/frontend/ml/model_registry/apps/show_ml_model_version_spec.js
index 77fca53c00e..2605a75d961 100644
--- a/spec/frontend/ml/model_registry/apps/show_ml_model_version_spec.js
+++ b/spec/frontend/ml/model_registry/apps/show_ml_model_version_spec.js
@@ -1,5 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { ShowMlModelVersion } from '~/ml/model_registry/apps';
+import ModelVersionDetail from '~/ml/model_registry/components/model_version_detail.vue';
+import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import { MODEL_VERSION } from '../mock_data';
let wrapper;
@@ -7,9 +9,17 @@ const createWrapper = () => {
wrapper = shallowMount(ShowMlModelVersion, { propsData: { modelVersion: MODEL_VERSION } });
};
-describe('ShowMlModelVersion', () => {
+const findTitleArea = () => wrapper.findComponent(TitleArea);
+const findModelVersionDetail = () => wrapper.findComponent(ModelVersionDetail);
+
+describe('ml/model_registry/apps/show_model_version.vue', () => {
beforeEach(() => createWrapper());
- it('renders the app', () => {
- expect(wrapper.text()).toContain(`${MODEL_VERSION.model.name} - ${MODEL_VERSION.version}`);
+
+ it('renders the title', () => {
+ expect(findTitleArea().props('title')).toBe('blah / 1.2.3');
+ });
+
+ it('renders the model version detail', () => {
+ expect(findModelVersionDetail().props('modelVersion')).toBe(MODEL_VERSION);
});
});
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js b/spec/frontend/ml/model_registry/components/candidate_detail_row_spec.js
index cd252560590..24b18b6b42d 100644
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js
+++ b/spec/frontend/ml/model_registry/components/candidate_detail_row_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import DetailRow from '~/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row.vue';
+import DetailRow from '~/ml/model_registry/components/candidate_detail_row.vue';
describe('CandidateDetailRow', () => {
const ROW_LABEL_CELL = 0;
diff --git a/spec/frontend/ml/model_registry/components/candidate_detail_spec.js b/spec/frontend/ml/model_registry/components/candidate_detail_spec.js
new file mode 100644
index 00000000000..94aa65a1690
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/candidate_detail_spec.js
@@ -0,0 +1,191 @@
+import { GlAvatarLabeled, GlLink, GlTableLite } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CandidateDetail from '~/ml/model_registry/components/candidate_detail.vue';
+import DetailRow from '~/ml/model_registry/components/candidate_detail_row.vue';
+import {
+ NO_PARAMETERS_MESSAGE,
+ NO_METRICS_MESSAGE,
+ NO_METADATA_MESSAGE,
+ NO_CI_MESSAGE,
+} from '~/ml/model_registry/translations';
+import { stubComponent } from 'helpers/stub_component';
+import { newCandidate } from '../mock_data';
+
+describe('ml/model_registry/components/candidate_detail.vue', () => {
+ let wrapper;
+ const CANDIDATE = newCandidate();
+ const USER_ROW = 1;
+
+ const INFO_SECTION = 0;
+ const CI_SECTION = 1;
+ const PARAMETER_SECTION = 2;
+ const METADATA_SECTION = 3;
+
+ const createWrapper = (createCandidate = () => CANDIDATE, showInfoSection = true) => {
+ wrapper = shallowMountExtended(CandidateDetail, {
+ propsData: { candidate: createCandidate(), showInfoSection },
+ stubs: {
+ GlTableLite: { ...stubComponent(GlTableLite), props: ['items', 'fields'] },
+ },
+ });
+ };
+
+ const findSection = (section) => wrapper.findAll('section').at(section);
+ const findRowInSection = (section, row) =>
+ findSection(section).findAllComponents(DetailRow).at(row);
+ const findLinkAtRow = (section, rowIndex) =>
+ findRowInSection(section, rowIndex).findComponent(GlLink);
+ const findNoDataMessage = (label) => wrapper.findByText(label);
+ const findLabel = (label) => wrapper.find(`[label='${label}']`);
+ const findCiUserDetailRow = () => findRowInSection(CI_SECTION, USER_ROW);
+ const findCiUserAvatar = () => findCiUserDetailRow().findComponent(GlAvatarLabeled);
+ const findCiUserAvatarNameLink = () => findCiUserAvatar().findComponent(GlLink);
+ const findMetricsTable = () => wrapper.findComponent(GlTableLite);
+
+ describe('All info available', () => {
+ beforeEach(() => createWrapper());
+
+ const mrText = `!${CANDIDATE.info.ciJob.mergeRequest.iid} ${CANDIDATE.info.ciJob.mergeRequest.title}`;
+ const expectedTable = [
+ [INFO_SECTION, 0, 'ID', CANDIDATE.info.iid],
+ [INFO_SECTION, 1, 'MLflow run ID', CANDIDATE.info.eid],
+ [INFO_SECTION, 2, 'Status', CANDIDATE.info.status],
+ [INFO_SECTION, 3, 'Experiment', CANDIDATE.info.experimentName],
+ [INFO_SECTION, 4, 'Artifacts', 'Artifacts'],
+ [CI_SECTION, 0, 'Job', CANDIDATE.info.ciJob.name],
+ [CI_SECTION, 1, 'Triggered by', 'CI User'],
+ [CI_SECTION, 2, 'Merge request', mrText],
+ [PARAMETER_SECTION, 0, CANDIDATE.params[0].name, CANDIDATE.params[0].value],
+ [PARAMETER_SECTION, 1, CANDIDATE.params[1].name, CANDIDATE.params[1].value],
+ [METADATA_SECTION, 0, CANDIDATE.metadata[0].name, CANDIDATE.metadata[0].value],
+ [METADATA_SECTION, 1, CANDIDATE.metadata[1].name, CANDIDATE.metadata[1].value],
+ ];
+
+ it.each(expectedTable)('row %s is created correctly', (section, rowIndex, label, text) => {
+ const row = findRowInSection(section, rowIndex);
+
+ expect(row.props()).toMatchObject({ label });
+ expect(row.text()).toBe(text);
+ });
+
+ describe('Table links', () => {
+ const linkRows = [
+ [INFO_SECTION, 3, CANDIDATE.info.pathToExperiment],
+ [INFO_SECTION, 4, CANDIDATE.info.pathToArtifact],
+ [CI_SECTION, 0, CANDIDATE.info.ciJob.path],
+ [CI_SECTION, 2, CANDIDATE.info.ciJob.mergeRequest.path],
+ ];
+
+ it.each(linkRows)('row %s is created correctly', (section, rowIndex, href) => {
+ expect(findLinkAtRow(section, rowIndex).attributes().href).toBe(href);
+ });
+ });
+
+ describe('Metrics table', () => {
+ it('computes metrics table items correctly', () => {
+ expect(findMetricsTable().props('items')).toEqual([
+ { name: 'AUC', 0: '.55' },
+ { name: 'Accuracy', 1: '.99', 2: '.98', 3: '.97' },
+ { name: 'F1', 3: '.1' },
+ ]);
+ });
+
+ it('computes metrics table fields correctly', () => {
+ expect(findMetricsTable().props('fields')).toEqual([
+ expect.objectContaining({ key: 'name', label: 'Metric' }),
+ expect.objectContaining({ key: '0', label: 'Step 0' }),
+ expect.objectContaining({ key: '1', label: 'Step 1' }),
+ expect.objectContaining({ key: '2', label: 'Step 2' }),
+ expect.objectContaining({ key: '3', label: 'Step 3' }),
+ ]);
+ });
+ });
+
+ describe('CI triggerer', () => {
+ it('renders user row', () => {
+ const avatar = findCiUserAvatar();
+ expect(avatar.props()).toMatchObject({
+ label: '',
+ });
+ expect(avatar.attributes().src).toEqual('/img.png');
+ });
+
+ it('renders user name', () => {
+ const nameLink = findCiUserAvatarNameLink();
+
+ expect(nameLink.attributes().href).toEqual('path/to/ci/user');
+ expect(nameLink.text()).toEqual('CI User');
+ });
+ });
+ });
+
+ describe('No artifact path', () => {
+ beforeEach(() =>
+ createWrapper(() => {
+ const candidate = newCandidate();
+ delete candidate.info.pathToArtifact;
+ return candidate;
+ }),
+ );
+
+ it('does not render artifact row', () => {
+ expect(findLabel('Artifacts').exists()).toBe(false);
+ });
+ });
+
+ describe('No params, metrics, ci or metadata available', () => {
+ beforeEach(() =>
+ createWrapper(() => {
+ const candidate = newCandidate();
+ delete candidate.params;
+ delete candidate.metrics;
+ delete candidate.metadata;
+ delete candidate.info.ciJob;
+ return candidate;
+ }),
+ );
+
+ it('does not render params', () => {
+ expect(findNoDataMessage(NO_PARAMETERS_MESSAGE).exists()).toBe(true);
+ });
+
+ it('does not render metadata', () => {
+ expect(findNoDataMessage(NO_METADATA_MESSAGE).exists()).toBe(true);
+ });
+
+ it('does not render metrics', () => {
+ expect(findNoDataMessage(NO_METRICS_MESSAGE).exists()).toBe(true);
+ });
+
+ it('does not render CI info', () => {
+ expect(findNoDataMessage(NO_CI_MESSAGE).exists()).toBe(true);
+ });
+ });
+
+ describe('Has CI, but no user or mr', () => {
+ beforeEach(() =>
+ createWrapper(() => {
+ const candidate = newCandidate();
+ delete candidate.info.ciJob.user;
+ delete candidate.info.ciJob.mergeRequest;
+ return candidate;
+ }),
+ );
+
+ it('does not render MR info', () => {
+ expect(findLabel('Merge request').exists()).toBe(false);
+ });
+
+ it('does not render CI user info', () => {
+ expect(findLabel('Triggered by').exists()).toBe(false);
+ });
+ });
+
+ describe('showInfoSection is set to false', () => {
+ beforeEach(() => createWrapper(() => CANDIDATE, false));
+
+ it('does not render the info section', () => {
+ expect(findLabel('MLflow run ID').exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/ml/model_registry/components/candidate_list_row_spec.js b/spec/frontend/ml/model_registry/components/candidate_list_row_spec.js
new file mode 100644
index 00000000000..5ac8d07ff01
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/candidate_list_row_spec.js
@@ -0,0 +1,39 @@
+import { GlLink, GlSprintf, GlTruncate } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import ListItem from '~/vue_shared/components/registry/list_item.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import CandidateListRow from '~/ml/model_registry/components/candidate_list_row.vue';
+import { graphqlCandidates } from '../graphql_mock_data';
+
+const CANDIDATE = graphqlCandidates[0];
+
+let wrapper;
+const createWrapper = (candidate = CANDIDATE) => {
+ wrapper = shallowMount(CandidateListRow, {
+ propsData: { candidate },
+ stubs: {
+ GlSprintf,
+ GlTruncate,
+ },
+ });
+};
+
+const findListItem = () => wrapper.findComponent(ListItem);
+const findLink = () => findListItem().findComponent(GlLink);
+const findTruncated = () => findLink().findComponent(GlTruncate);
+const findTooltip = () => findListItem().findComponent(TimeAgoTooltip);
+
+describe('ml/model_registry/components/candidate_list_row.vue', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('Has a link to the candidate', () => {
+ expect(findTruncated().props('text')).toBe(CANDIDATE.name);
+ expect(findLink().attributes('href')).toBe(CANDIDATE._links.showPath);
+ });
+
+ it('Shows created at', () => {
+ expect(findTooltip().props('time')).toBe(CANDIDATE.createdAt);
+ });
+});
diff --git a/spec/frontend/ml/model_registry/components/candidate_list_spec.js b/spec/frontend/ml/model_registry/components/candidate_list_spec.js
new file mode 100644
index 00000000000..c10222a99fd
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/candidate_list_spec.js
@@ -0,0 +1,182 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlAlert } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import * as Sentry from '~/sentry/sentry_browser_wrapper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import CandidateList from '~/ml/model_registry/components/candidate_list.vue';
+import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
+import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
+import CandidateListRow from '~/ml/model_registry/components/candidate_list_row.vue';
+import getModelCandidatesQuery from '~/ml/model_registry/graphql/queries/get_model_candidates.query.graphql';
+import { GRAPHQL_PAGE_SIZE } from '~/ml/model_registry/constants';
+import {
+ emptyCandidateQuery,
+ modelCandidatesQuery,
+ graphqlCandidates,
+ graphqlPageInfo,
+} from '../graphql_mock_data';
+
+Vue.use(VueApollo);
+
+describe('ml/model_registry/components/candidate_list.vue', () => {
+ let wrapper;
+ let apolloProvider;
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findLoader = () => wrapper.findComponent(PackagesListLoader);
+ const findRegistryList = () => wrapper.findComponent(RegistryList);
+ const findListRow = () => wrapper.findComponent(CandidateListRow);
+ const findAllRows = () => wrapper.findAllComponents(CandidateListRow);
+
+ const mountComponent = ({
+ props = {},
+ resolver = jest.fn().mockResolvedValue(modelCandidatesQuery()),
+ } = {}) => {
+ const requestHandlers = [[getModelCandidatesQuery, resolver]];
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMount(CandidateList, {
+ apolloProvider,
+ propsData: {
+ modelId: 2,
+ ...props,
+ },
+ stubs: {
+ RegistryList,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(Sentry, 'captureException').mockImplementation();
+ });
+
+ describe('when list is loaded and has no data', () => {
+ const resolver = jest.fn().mockResolvedValue(emptyCandidateQuery);
+ beforeEach(async () => {
+ mountComponent({ resolver });
+ await waitForPromises();
+ });
+
+ it('displays empty slot message', () => {
+ expect(wrapper.text()).toContain('This model has no candidates');
+ });
+
+ it('does not display loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('does not display rows', () => {
+ expect(findListRow().exists()).toBe(false);
+ });
+
+ it('does not display registry list', () => {
+ expect(findRegistryList().exists()).toBe(false);
+ });
+
+ it('does not display alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('if load fails, alert', () => {
+ beforeEach(async () => {
+ const error = new Error('Failure!');
+ mountComponent({ resolver: jest.fn().mockRejectedValue(error) });
+
+ await waitForPromises();
+ });
+
+ it('is displayed', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('shows error message', () => {
+ expect(findAlert().text()).toContain('Failed to load model candidates with error: Failure!');
+ });
+
+ it('is not dismissible', () => {
+ expect(findAlert().props('dismissible')).toBe(false);
+ });
+
+ it('is of variant danger', () => {
+ expect(findAlert().attributes('variant')).toBe('danger');
+ });
+
+ it('error is logged in sentry', () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+ });
+
+ describe('when list is loaded with data', () => {
+ beforeEach(async () => {
+ mountComponent();
+ await waitForPromises();
+ });
+
+ it('displays package registry list', () => {
+ expect(findRegistryList().exists()).toEqual(true);
+ });
+
+ it('binds the right props', () => {
+ expect(findRegistryList().props()).toMatchObject({
+ items: graphqlCandidates,
+ pagination: {},
+ isLoading: false,
+ hiddenDelete: true,
+ });
+ });
+
+ it('displays candidate rows', () => {
+ expect(findAllRows().exists()).toEqual(true);
+ expect(findAllRows()).toHaveLength(graphqlCandidates.length);
+ });
+
+ it('binds the correct props', () => {
+ expect(findAllRows().at(0).props()).toMatchObject({
+ candidate: expect.objectContaining(graphqlCandidates[0]),
+ });
+
+ expect(findAllRows().at(1).props()).toMatchObject({
+ candidate: expect.objectContaining(graphqlCandidates[1]),
+ });
+ });
+
+ it('does not display loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('does not display empty message', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('when user interacts with pagination', () => {
+ const resolver = jest.fn().mockResolvedValue(modelCandidatesQuery());
+
+ beforeEach(async () => {
+ mountComponent({ resolver });
+ await waitForPromises();
+ });
+
+ it('when list emits next-page fetches the next set of records', async () => {
+ findRegistryList().vm.$emit('next-page');
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenLastCalledWith(
+ expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }),
+ );
+ });
+
+ it('when list emits prev-page fetches the prev set of records', async () => {
+ findRegistryList().vm.$emit('prev-page');
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenLastCalledWith(
+ expect.objectContaining({ before: graphqlPageInfo.startCursor, last: GRAPHQL_PAGE_SIZE }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/ml/model_registry/components/empty_state_spec.js b/spec/frontend/ml/model_registry/components/empty_state_spec.js
new file mode 100644
index 00000000000..e9477518f7d
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/empty_state_spec.js
@@ -0,0 +1,47 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { MODEL_ENTITIES } from '~/ml/model_registry/constants';
+import EmptyState from '~/ml/model_registry/components/empty_state.vue';
+
+let wrapper;
+const createWrapper = (entityType) => {
+ wrapper = shallowMount(EmptyState, { propsData: { entityType } });
+};
+
+const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+describe('ml/model_registry/components/empty_state.vue', () => {
+ describe('when entity type is model', () => {
+ beforeEach(() => {
+ createWrapper(MODEL_ENTITIES.model);
+ });
+
+ it('shows the correct empty state', () => {
+ expect(findEmptyState().props()).toMatchObject({
+ title: 'Start tracking your machine learning models',
+ description: 'Store and manage your machine learning models and versions',
+ primaryButtonText: 'Add a model',
+ primaryButtonLink:
+ '/help/user/project/ml/model_registry/index#creating-machine-learning-models-and-model-versions',
+ svgPath: 'file-mock',
+ });
+ });
+ });
+
+ describe('when entity type is model version', () => {
+ beforeEach(() => {
+ createWrapper(MODEL_ENTITIES.modelVersion);
+ });
+
+ it('shows the correct empty state', () => {
+ expect(findEmptyState().props()).toMatchObject({
+ title: 'Manage versions of your machine learning model',
+ description: 'Use versions to track performance, parameters, and metadata',
+ primaryButtonText: 'Create a model version',
+ primaryButtonLink:
+ '/help/user/project/ml/model_registry/index#creating-machine-learning-models-and-model-versions',
+ svgPath: 'file-mock',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ml/model_registry/components/model_version_detail_spec.js b/spec/frontend/ml/model_registry/components/model_version_detail_spec.js
new file mode 100644
index 00000000000..d1874346ad7
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/model_version_detail_spec.js
@@ -0,0 +1,66 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import ModelVersionDetail from '~/ml/model_registry/components/model_version_detail.vue';
+import PackageFiles from '~/packages_and_registries/package_registry/components/details/package_files.vue';
+import CandidateDetail from '~/ml/model_registry/components/candidate_detail.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { makeModelVersion, MODEL_VERSION } from '../mock_data';
+
+Vue.use(VueApollo);
+
+let wrapper;
+const createWrapper = (modelVersion = MODEL_VERSION) => {
+ const apolloProvider = createMockApollo([]);
+ wrapper = shallowMount(ModelVersionDetail, { apolloProvider, propsData: { modelVersion } });
+};
+
+const findPackageFiles = () => wrapper.findComponent(PackageFiles);
+const findCandidateDetail = () => wrapper.findComponent(CandidateDetail);
+
+describe('ml/model_registry/components/model_version_detail.vue', () => {
+ describe('base behaviour', () => {
+ beforeEach(() => createWrapper());
+
+ it('shows the description', () => {
+ expect(wrapper.text()).toContain(MODEL_VERSION.description);
+ });
+
+ it('shows the candidate', () => {
+ expect(findCandidateDetail().props('candidate')).toBe(MODEL_VERSION.candidate);
+ });
+
+ it('shows the mlflow label string', () => {
+ expect(wrapper.text()).toContain('MLflow run ID');
+ });
+
+ it('shows the mlflow id', () => {
+ expect(wrapper.text()).toContain(MODEL_VERSION.candidate.info.eid);
+ });
+
+ it('renders files', () => {
+ expect(findPackageFiles().props()).toEqual({
+ packageId: 'gid://gitlab/Packages::Package/12',
+ projectPath: MODEL_VERSION.projectPath,
+ packageType: 'ml_model',
+ canDelete: false,
+ });
+ });
+ });
+
+ describe('if package does not exist', () => {
+ beforeEach(() => createWrapper(makeModelVersion({ packageId: 0 })));
+
+ it('does not render files', () => {
+ expect(findPackageFiles().exists()).toBe(false);
+ });
+ });
+
+ describe('if model version does not have description', () => {
+ beforeEach(() => createWrapper(makeModelVersion({ description: null })));
+
+ it('renders no description provided label', () => {
+ expect(wrapper.text()).toContain('No description provided');
+ });
+ });
+});
diff --git a/spec/frontend/ml/model_registry/components/model_version_list_spec.js b/spec/frontend/ml/model_registry/components/model_version_list_spec.js
new file mode 100644
index 00000000000..41f7e71c543
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/model_version_list_spec.js
@@ -0,0 +1,184 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlAlert } from '@gitlab/ui';
+import * as Sentry from '~/sentry/sentry_browser_wrapper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import ModelVersionList from '~/ml/model_registry/components/model_version_list.vue';
+import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
+import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
+import ModelVersionRow from '~/ml/model_registry/components/model_version_row.vue';
+import getModelVersionsQuery from '~/ml/model_registry/graphql/queries/get_model_versions.query.graphql';
+import EmptyState from '~/ml/model_registry/components/empty_state.vue';
+import { GRAPHQL_PAGE_SIZE, MODEL_ENTITIES } from '~/ml/model_registry/constants';
+import {
+ emptyModelVersionsQuery,
+ modelVersionsQuery,
+ graphqlModelVersions,
+ graphqlPageInfo,
+} from '../graphql_mock_data';
+
+Vue.use(VueApollo);
+
+describe('ModelVersionList', () => {
+ let wrapper;
+ let apolloProvider;
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findLoader = () => wrapper.findComponent(PackagesListLoader);
+ const findRegistryList = () => wrapper.findComponent(RegistryList);
+ const findEmptyState = () => wrapper.findComponent(EmptyState);
+ const findListRow = () => wrapper.findComponent(ModelVersionRow);
+ const findAllRows = () => wrapper.findAllComponents(ModelVersionRow);
+
+ const mountComponent = ({
+ props = {},
+ resolver = jest.fn().mockResolvedValue(modelVersionsQuery()),
+ } = {}) => {
+ const requestHandlers = [[getModelVersionsQuery, resolver]];
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMountExtended(ModelVersionList, {
+ apolloProvider,
+ propsData: {
+ modelId: 2,
+ ...props,
+ },
+ stubs: {
+ RegistryList,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(Sentry, 'captureException').mockImplementation();
+ });
+
+ describe('when list is loaded and has no data', () => {
+ const resolver = jest.fn().mockResolvedValue(emptyModelVersionsQuery);
+ beforeEach(async () => {
+ mountComponent({ resolver });
+ await waitForPromises();
+ });
+
+ it('shows empty state', () => {
+ expect(findEmptyState().props('entityType')).toBe(MODEL_ENTITIES.modelVersion);
+ });
+
+ it('does not display loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('does not display rows', () => {
+ expect(findListRow().exists()).toBe(false);
+ });
+
+ it('does not display registry list', () => {
+ expect(findRegistryList().exists()).toBe(false);
+ });
+
+ it('does not display alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('if load fails, alert', () => {
+ beforeEach(async () => {
+ const error = new Error('Failure!');
+ mountComponent({ resolver: jest.fn().mockRejectedValue(error) });
+
+ await waitForPromises();
+ });
+
+ it('is displayed', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('shows error message', () => {
+ expect(findAlert().text()).toContain('Failed to load model versions with error: Failure!');
+ });
+
+ it('is not dismissible', () => {
+ expect(findAlert().props('dismissible')).toBe(false);
+ });
+
+ it('is of variant danger', () => {
+ expect(findAlert().attributes('variant')).toBe('danger');
+ });
+
+ it('error is logged in sentry', () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+ });
+
+ describe('when list is loaded with data', () => {
+ beforeEach(async () => {
+ mountComponent();
+ await waitForPromises();
+ });
+
+ it('displays package registry list', () => {
+ expect(findRegistryList().exists()).toEqual(true);
+ });
+
+ it('binds the right props', () => {
+ expect(findRegistryList().props()).toMatchObject({
+ items: graphqlModelVersions,
+ pagination: {},
+ isLoading: false,
+ hiddenDelete: true,
+ });
+ });
+
+ it('displays package version rows', () => {
+ expect(findAllRows().exists()).toEqual(true);
+ expect(findAllRows()).toHaveLength(graphqlModelVersions.length);
+ });
+
+ it('binds the correct props', () => {
+ expect(findAllRows().at(0).props()).toMatchObject({
+ modelVersion: expect.objectContaining(graphqlModelVersions[0]),
+ });
+
+ expect(findAllRows().at(1).props()).toMatchObject({
+ modelVersion: expect.objectContaining(graphqlModelVersions[1]),
+ });
+ });
+
+ it('does not display loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('does not display empty state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ });
+ });
+
+ describe('when user interacts with pagination', () => {
+ const resolver = jest.fn().mockResolvedValue(modelVersionsQuery());
+
+ beforeEach(async () => {
+ mountComponent({ resolver });
+ await waitForPromises();
+ });
+
+ it('when list emits next-page fetches the next set of records', async () => {
+ findRegistryList().vm.$emit('next-page');
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenLastCalledWith(
+ expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }),
+ );
+ });
+
+ it('when list emits prev-page fetches the prev set of records', async () => {
+ findRegistryList().vm.$emit('prev-page');
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenLastCalledWith(
+ expect.objectContaining({ before: graphqlPageInfo.startCursor, last: GRAPHQL_PAGE_SIZE }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/ml/model_registry/components/model_version_row_spec.js b/spec/frontend/ml/model_registry/components/model_version_row_spec.js
new file mode 100644
index 00000000000..9f709f2e072
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/model_version_row_spec.js
@@ -0,0 +1,37 @@
+import { GlLink, GlSprintf, GlTruncate } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import ListItem from '~/vue_shared/components/registry/list_item.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import ModelVersionRow from '~/ml/model_registry/components/model_version_row.vue';
+import { graphqlModelVersions } from '../graphql_mock_data';
+
+let wrapper;
+const createWrapper = (modelVersion = graphqlModelVersions[0]) => {
+ wrapper = shallowMount(ModelVersionRow, {
+ propsData: { modelVersion },
+ stubs: {
+ GlSprintf,
+ GlTruncate,
+ },
+ });
+};
+
+const findListItem = () => wrapper.findComponent(ListItem);
+const findLink = () => findListItem().findComponent(GlLink);
+const findTruncated = () => findLink().findComponent(GlTruncate);
+const findTooltip = () => findListItem().findComponent(TimeAgoTooltip);
+
+describe('ModelVersionRow', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('Has a link to the model version', () => {
+ expect(findTruncated().props('text')).toBe(graphqlModelVersions[0].version);
+ expect(findLink().attributes('href')).toBe(graphqlModelVersions[0]._links.showPath);
+ });
+
+ it('Shows created at', () => {
+ expect(findTooltip().props('time')).toBe(graphqlModelVersions[0].createdAt);
+ });
+});
diff --git a/spec/frontend/ml/model_registry/graphql_mock_data.js b/spec/frontend/ml/model_registry/graphql_mock_data.js
new file mode 100644
index 00000000000..1c31ee4627f
--- /dev/null
+++ b/spec/frontend/ml/model_registry/graphql_mock_data.js
@@ -0,0 +1,116 @@
+import { defaultPageInfo } from './mock_data';
+
+export const graphqlPageInfo = {
+ ...defaultPageInfo,
+ __typename: 'PageInfo',
+};
+
+export const graphqlModelVersions = [
+ {
+ createdAt: '2021-08-10T09:33:54Z',
+ id: 'gid://gitlab/Ml::ModelVersion/243',
+ version: '1.0.1',
+ _links: {
+ showPath: '/path/to/modelversion/243',
+ },
+ __typename: 'MlModelVersion',
+ },
+ {
+ createdAt: '2021-08-10T09:33:54Z',
+ id: 'gid://gitlab/Ml::ModelVersion/244',
+ version: '1.0.2',
+ _links: {
+ showPath: '/path/to/modelversion/244',
+ },
+ __typename: 'MlModelVersion',
+ },
+];
+
+export const modelVersionsQuery = (versions = graphqlModelVersions) => ({
+ data: {
+ mlModel: {
+ id: 'gid://gitlab/Ml::Model/2',
+ versions: {
+ count: versions.length,
+ nodes: versions,
+ pageInfo: graphqlPageInfo,
+ __typename: 'MlModelConnection',
+ },
+ __typename: 'MlModelType',
+ },
+ },
+});
+
+export const graphqlCandidates = [
+ {
+ id: 'gid://gitlab/Ml::Candidate/1',
+ name: 'narwhal-aardvark-heron-6953',
+ createdAt: '2023-12-06T12:41:48Z',
+ _links: {
+ showPath: '/path/to/candidate/1',
+ },
+ },
+ {
+ id: 'gid://gitlab/Ml::Candidate/2',
+ name: 'anteater-chimpanzee-snake-1254',
+ createdAt: '2023-12-06T12:41:48Z',
+ _links: {
+ showPath: '/path/to/candidate/2',
+ },
+ },
+];
+
+export const modelCandidatesQuery = (candidates = graphqlCandidates) => ({
+ data: {
+ mlModel: {
+ id: 'gid://gitlab/Ml::Model/2',
+ candidates: {
+ count: candidates.length,
+ nodes: candidates,
+ pageInfo: graphqlPageInfo,
+ __typename: 'MlCandidateConnection',
+ },
+ __typename: 'MlModelType',
+ },
+ },
+});
+
+export const emptyModelVersionsQuery = {
+ data: {
+ mlModel: {
+ id: 'gid://gitlab/Ml::Model/2',
+ versions: {
+ count: 0,
+ nodes: [],
+ pageInfo: {
+ hasNextPage: false,
+ hasPreviousPage: false,
+ endCursor: 'endCursor',
+ startCursor: 'startCursor',
+ },
+ __typename: 'MlModelConnection',
+ },
+ __typename: 'MlModelType',
+ },
+ },
+};
+
+export const emptyCandidateQuery = {
+ data: {
+ mlModel: {
+ id: 'gid://gitlab/Ml::Model/2',
+ candidates: {
+ count: 0,
+ nodes: [],
+ pageInfo: {
+ hasNextPage: false,
+ hasPreviousPage: false,
+ endCursor: 'endCursor',
+ startCursor: 'startCursor',
+ },
+ __typename: 'MlCandidateConnection',
+ },
+ __typename: 'MlModelType',
+ },
+ },
+};
diff --git a/spec/frontend/ml/model_registry/mock_data.js b/spec/frontend/ml/model_registry/mock_data.js
index a820c323103..4399df38990 100644
--- a/spec/frontend/ml/model_registry/mock_data.js
+++ b/spec/frontend/ml/model_registry/mock_data.js
@@ -1,3 +1,45 @@
+export const newCandidate = () => ({
+ params: [
+ { name: 'Algorithm', value: 'Decision Tree' },
+ { name: 'MaxDepth', value: '3' },
+ ],
+ metrics: [
+ { name: 'AUC', value: '.55', step: 0 },
+ { name: 'Accuracy', value: '.99', step: 1 },
+ { name: 'Accuracy', value: '.98', step: 2 },
+ { name: 'Accuracy', value: '.97', step: 3 },
+ { name: 'F1', value: '.1', step: 3 },
+ ],
+ metadata: [
+ { name: 'FileName', value: 'test.py' },
+ { name: 'ExecutionTime', value: '.0856' },
+ ],
+ info: {
+ iid: 'candidate_iid',
+ eid: 'abcdefg',
+ pathToArtifact: 'path_to_artifact',
+ experimentName: 'The Experiment',
+ pathToExperiment: 'path/to/experiment',
+ status: 'SUCCESS',
+ path: 'path_to_candidate',
+ ciJob: {
+ name: 'test',
+ path: 'path/to/job',
+ mergeRequest: {
+ path: 'path/to/mr',
+ iid: 1,
+ title: 'Some MR',
+ },
+ user: {
+ path: 'path/to/ci/user',
+ name: 'CI User',
+ username: 'ciuser',
+ avatar: '/img.png',
+ },
+ },
+ },
+});
+
const LATEST_VERSION = {
version: '1.2.3',
};
@@ -14,7 +56,21 @@ export const makeModel = ({ latestVersion } = { latestVersion: LATEST_VERSION })
export const MODEL = makeModel();
-export const MODEL_VERSION = { version: '1.2.3', model: MODEL };
+export const makeModelVersion = ({
+ version = '1.2.3',
+ model = MODEL,
+ packageId = 12,
+ description = 'Model version description',
+} = {}) => ({
+ version,
+ model,
+ packageId,
+ description,
+ projectPath: 'path/to/project',
+ candidate: newCandidate(),
+});
+
+export const MODEL_VERSION = makeModelVersion();
export const mockModels = [
{
diff --git a/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js b/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js
deleted file mode 100644
index 841a543606f..00000000000
--- a/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js
+++ /dev/null
@@ -1,29 +0,0 @@
-export const mockModels = [
- {
- name: 'model_1',
- version: '1.0',
- path: 'path/to/model_1',
- versionCount: 3,
- },
- {
- name: 'model_2',
- version: '1.1',
- path: 'path/to/model_2',
- versionCount: 1,
- },
-];
-
-export const modelWithoutVersion = {
- name: 'model_without_version',
- path: 'path/to/model_without_version',
- versionCount: 0,
-};
-
-export const startCursor = 'eyJpZCI6IjE2In0';
-
-export const defaultPageInfo = Object.freeze({
- startCursor,
- endCursor: 'eyJpZCI6IjIifQ',
- hasNextPage: true,
- hasPreviousPage: true,
-});
diff --git a/spec/frontend/nav/components/new_nav_toggle_spec.js b/spec/frontend/nav/components/new_nav_toggle_spec.js
deleted file mode 100644
index cf8e59d6522..00000000000
--- a/spec/frontend/nav/components/new_nav_toggle_spec.js
+++ /dev/null
@@ -1,214 +0,0 @@
-import { mount, createWrapper } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import { getByText as getByTextHelper } from '@testing-library/dom';
-import { GlDisclosureDropdownItem, GlToggle } from '@gitlab/ui';
-import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
-import NewNavToggle from '~/nav/components/new_nav_toggle.vue';
-import waitForPromises from 'helpers/wait_for_promises';
-import { createAlert } from '~/alert';
-import { s__ } from '~/locale';
-import { mockTracking } from 'helpers/tracking_helper';
-
-jest.mock('~/alert');
-
-const TEST_ENDPONT = 'https://example.com/toggle';
-
-describe('NewNavToggle', () => {
- useMockLocationHelper();
-
- let wrapper;
- let trackingSpy;
-
- const findToggle = () => wrapper.findComponent(GlToggle);
- const findDisclosureItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
-
- const createComponent = (propsData = { enabled: false }) => {
- wrapper = mount(NewNavToggle, {
- propsData: {
- endpoint: TEST_ENDPONT,
- ...propsData,
- },
- });
-
- trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- };
-
- const getByText = (text, options) =>
- createWrapper(getByTextHelper(wrapper.element, text, options));
-
- describe('When rendered in scope of the new navigation', () => {
- it('renders the disclosure item', () => {
- createComponent({ newNavigation: true, enabled: true });
- expect(findDisclosureItem().exists()).toBe(true);
- });
-
- describe('when user preference is enabled', () => {
- beforeEach(() => {
- createComponent({ newNavigation: true, enabled: true });
- });
-
- it('renders the toggle as enabled', () => {
- expect(findToggle().props('value')).toBe(true);
- });
- });
-
- describe('when user preference is disabled', () => {
- beforeEach(() => {
- createComponent({ enabled: false });
- });
-
- it('renders the toggle as disabled', () => {
- expect(findToggle().props('value')).toBe(false);
- });
- });
-
- describe.each`
- desc | actFn | toggleValue | trackingLabel | trackingProperty
- ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} | ${false} | ${'enable_new_nav_beta'} | ${'navigation_top'}
- ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} | ${false} | ${'enable_new_nav_beta'} | ${'navigation_top'}
- ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} | ${true} | ${'disable_new_nav_beta'} | ${'nav_user_menu'}
- ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} | ${true} | ${'disable_new_nav_beta'} | ${'nav_user_menu'}
- `('$desc', ({ actFn, toggleValue, trackingLabel, trackingProperty }) => {
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- createComponent({ enabled: toggleValue });
- });
-
- it('reloads the page on success', async () => {
- mock.onPut(TEST_ENDPONT).reply(HTTP_STATUS_OK);
-
- actFn();
- await waitForPromises();
-
- expect(window.location.reload).toHaveBeenCalled();
- });
-
- it('shows an alert on error', async () => {
- mock.onPut(TEST_ENDPONT).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
-
- actFn();
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: s__(
- 'NorthstarNavigation|Could not update the new navigation preference. Please try again later.',
- ),
- }),
- );
- expect(window.location.reload).not.toHaveBeenCalled();
- });
-
- it('changes the toggle', async () => {
- await actFn();
-
- expect(findToggle().props('value')).toBe(!toggleValue);
- });
-
- it('tracks the Snowplow event', async () => {
- mock.onPut(TEST_ENDPONT).reply(HTTP_STATUS_OK);
- await actFn();
- await waitForPromises();
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_toggle', {
- label: trackingLabel,
- property: trackingProperty,
- });
- });
-
- afterEach(() => {
- mock.restore();
- });
- });
- });
-
- describe('When rendered in scope of the current navigation', () => {
- it('renders its title', () => {
- createComponent();
- expect(getByText('Navigation redesign').exists()).toBe(true);
- });
-
- describe('when user preference is enabled', () => {
- beforeEach(() => {
- createComponent({ enabled: true });
- });
-
- it('renders the toggle as enabled', () => {
- expect(findToggle().props('value')).toBe(true);
- });
- });
-
- describe('when user preference is disabled', () => {
- beforeEach(() => {
- createComponent({ enabled: false });
- });
-
- it('renders the toggle as disabled', () => {
- expect(findToggle().props('value')).toBe(false);
- });
- });
-
- describe.each`
- desc | actFn | toggleValue | trackingLabel | trackingProperty
- ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} | ${false} | ${'enable_new_nav_beta'} | ${'navigation_top'}
- ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} | ${false} | ${'enable_new_nav_beta'} | ${'navigation_top'}
- ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} | ${true} | ${'disable_new_nav_beta'} | ${'nav_user_menu'}
- ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} | ${true} | ${'disable_new_nav_beta'} | ${'nav_user_menu'}
- `('$desc', ({ actFn, toggleValue, trackingLabel, trackingProperty }) => {
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- createComponent({ enabled: toggleValue });
- });
-
- it('reloads the page on success', async () => {
- mock.onPut(TEST_ENDPONT).reply(HTTP_STATUS_OK);
-
- actFn();
- await waitForPromises();
-
- expect(window.location.reload).toHaveBeenCalled();
- });
-
- it('shows an alert on error', async () => {
- mock.onPut(TEST_ENDPONT).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
-
- actFn();
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: s__(
- 'NorthstarNavigation|Could not update the new navigation preference. Please try again later.',
- ),
- }),
- );
- expect(window.location.reload).not.toHaveBeenCalled();
- });
-
- it('changes the toggle', async () => {
- await actFn();
-
- expect(findToggle().props('value')).toBe(!toggleValue);
- });
-
- it('tracks the Snowplow event', async () => {
- mock.onPut(TEST_ENDPONT).reply(HTTP_STATUS_OK);
- await actFn();
- await waitForPromises();
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_toggle', {
- label: trackingLabel,
- property: trackingProperty,
- });
- });
-
- afterEach(() => {
- mock.restore();
- });
- });
- });
-});
diff --git a/spec/frontend/nav/components/responsive_app_spec.js b/spec/frontend/nav/components/responsive_app_spec.js
deleted file mode 100644
index 9d3b43520ec..00000000000
--- a/spec/frontend/nav/components/responsive_app_spec.js
+++ /dev/null
@@ -1,122 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import ResponsiveApp from '~/nav/components/responsive_app.vue';
-import ResponsiveHeader from '~/nav/components/responsive_header.vue';
-import ResponsiveHome from '~/nav/components/responsive_home.vue';
-import TopNavContainerView from '~/nav/components/top_nav_container_view.vue';
-import { resetMenuItemsActive } from '~/nav/utils/reset_menu_items_active';
-import KeepAliveSlots from '~/vue_shared/components/keep_alive_slots.vue';
-import { TEST_NAV_DATA } from '../mock_data';
-
-describe('~/nav/components/responsive_app.vue', () => {
- let wrapper;
-
- const createComponent = () => {
- wrapper = shallowMount(ResponsiveApp, {
- propsData: {
- navData: TEST_NAV_DATA,
- },
- stubs: {
- KeepAliveSlots,
- },
- });
- };
- const findHome = () => wrapper.findComponent(ResponsiveHome);
- const findMobileOverlay = () => wrapper.find('[data-testid="mobile-overlay"]');
- const findSubviewHeader = () => wrapper.findComponent(ResponsiveHeader);
- const findSubviewContainer = () => wrapper.findComponent(TopNavContainerView);
- const hasMobileOverlayVisible = () => findMobileOverlay().classes('mobile-nav-open');
-
- beforeEach(() => {
- document.body.innerHTML = '';
- // Add test class to reset state + assert that we're adding classes correctly
- document.body.className = 'test-class';
- });
-
- describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('shows home by default', () => {
- expect(findHome().isVisible()).toBe(true);
- expect(findHome().props()).toEqual({
- navData: resetMenuItemsActive(TEST_NAV_DATA),
- });
- });
-
- it.each`
- events | expectation
- ${[]} | ${false}
- ${['bv::dropdown::show']} | ${true}
- ${['bv::dropdown::show', 'bv::dropdown::hide']} | ${false}
- `(
- 'with root events $events, movile overlay visible = $expectation',
- async ({ events, expectation }) => {
- // `await...reduce(async` is like doing an `forEach(async (...))` excpet it works
- await events.reduce(async (acc, evt) => {
- await acc;
-
- wrapper.vm.$root.$emit(evt);
-
- await nextTick();
- }, Promise.resolve());
-
- expect(hasMobileOverlayVisible()).toBe(expectation);
- },
- );
- });
-
- const projectsContainerProps = {
- containerClass: 'gl-px-3',
- frequentItemsDropdownType: ResponsiveApp.FREQUENT_ITEMS_PROJECTS.namespace,
- frequentItemsVuexModule: ResponsiveApp.FREQUENT_ITEMS_PROJECTS.vuexModule,
- currentItem: {},
- linksPrimary: TEST_NAV_DATA.views.projects.linksPrimary,
- linksSecondary: TEST_NAV_DATA.views.projects.linksSecondary,
- };
- const groupsContainerProps = {
- containerClass: 'gl-px-3',
- frequentItemsDropdownType: ResponsiveApp.FREQUENT_ITEMS_GROUPS.namespace,
- frequentItemsVuexModule: ResponsiveApp.FREQUENT_ITEMS_GROUPS.vuexModule,
- currentItem: {},
- linksPrimary: TEST_NAV_DATA.views.groups.linksPrimary,
- linksSecondary: TEST_NAV_DATA.views.groups.linksSecondary,
- };
-
- describe.each`
- view | header | containerProps
- ${'projects'} | ${'Projects'} | ${projectsContainerProps}
- ${'groups'} | ${'Groups'} | ${groupsContainerProps}
- `('when menu item with $view is clicked', ({ view, header, containerProps }) => {
- beforeEach(async () => {
- createComponent();
-
- findHome().vm.$emit('menu-item-click', { view });
-
- await nextTick();
- });
-
- it('shows header', () => {
- expect(findSubviewHeader().text()).toBe(header);
- });
-
- it('shows container subview', () => {
- expect(findSubviewContainer().props()).toEqual(containerProps);
- });
-
- it('hides home', () => {
- expect(findHome().isVisible()).toBe(false);
- });
-
- describe('when header back button is clicked', () => {
- beforeEach(() => {
- findSubviewHeader().vm.$emit('menu-item-click', { view: 'home' });
- });
-
- it('shows home', () => {
- expect(findHome().isVisible()).toBe(true);
- });
- });
- });
-});
diff --git a/spec/frontend/nav/components/responsive_header_spec.js b/spec/frontend/nav/components/responsive_header_spec.js
deleted file mode 100644
index 2514035270a..00000000000
--- a/spec/frontend/nav/components/responsive_header_spec.js
+++ /dev/null
@@ -1,63 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import ResponsiveHeader from '~/nav/components/responsive_header.vue';
-import TopNavMenuItem from '~/nav/components/top_nav_menu_item.vue';
-
-const TEST_SLOT_CONTENT = 'Test slot content';
-
-describe('~/nav/components/top_nav_menu_sections.vue', () => {
- let wrapper;
-
- const createComponent = () => {
- wrapper = shallowMount(ResponsiveHeader, {
- slots: {
- default: TEST_SLOT_CONTENT,
- },
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
- });
- };
-
- const findMenuItem = () => wrapper.findComponent(TopNavMenuItem);
-
- beforeEach(() => {
- createComponent();
- });
-
- it('renders slot', () => {
- expect(wrapper.text()).toBe(TEST_SLOT_CONTENT);
- });
-
- it('renders back button', () => {
- const button = findMenuItem();
-
- const tooltip = getBinding(button.element, 'gl-tooltip').value.title;
-
- expect(tooltip).toBe('Go back');
- expect(button.props()).toEqual({
- menuItem: {
- id: 'home',
- view: 'home',
- icon: 'chevron-lg-left',
- },
- iconOnly: true,
- });
- });
-
- it('emits nothing', () => {
- expect(wrapper.emitted()).toEqual({});
- });
-
- describe('when back button is clicked', () => {
- beforeEach(() => {
- findMenuItem().vm.$emit('click');
- });
-
- it('emits menu-item-click', () => {
- expect(wrapper.emitted()).toEqual({
- 'menu-item-click': [[{ id: 'home', view: 'home', icon: 'chevron-lg-left' }]],
- });
- });
- });
-});
diff --git a/spec/frontend/nav/components/responsive_home_spec.js b/spec/frontend/nav/components/responsive_home_spec.js
deleted file mode 100644
index 5a5cfc93607..00000000000
--- a/spec/frontend/nav/components/responsive_home_spec.js
+++ /dev/null
@@ -1,133 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import ResponsiveHome from '~/nav/components/responsive_home.vue';
-import TopNavMenuItem from '~/nav/components/top_nav_menu_item.vue';
-import TopNavMenuSections from '~/nav/components/top_nav_menu_sections.vue';
-import TopNavNewDropdown from '~/nav/components/top_nav_new_dropdown.vue';
-import { TEST_NAV_DATA } from '../mock_data';
-
-const TEST_SEARCH_MENU_ITEM = {
- id: 'search',
- title: 'search',
- icon: 'search',
- href: '/search',
-};
-
-const TEST_NEW_DROPDOWN_VIEW_MODEL = {
- title: 'new',
- menu_sections: [],
-};
-
-describe('~/nav/components/responsive_home.vue', () => {
- let wrapper;
- let menuItemClickListener;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(ResponsiveHome, {
- propsData: {
- navData: TEST_NAV_DATA,
- ...props,
- },
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
- listeners: {
- 'menu-item-click': menuItemClickListener,
- },
- });
- };
-
- const findSearchMenuItem = () => wrapper.findComponent(TopNavMenuItem);
- const findNewDropdown = () => wrapper.findComponent(TopNavNewDropdown);
- const findMenuSections = () => wrapper.findComponent(TopNavMenuSections);
-
- beforeEach(() => {
- menuItemClickListener = jest.fn();
- });
-
- describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it.each`
- desc | fn
- ${'does not show search menu item'} | ${findSearchMenuItem}
- ${'does not show new dropdown'} | ${findNewDropdown}
- `('$desc', ({ fn }) => {
- expect(fn().exists()).toBe(false);
- });
-
- it('shows menu sections', () => {
- expect(findMenuSections().props('sections')).toEqual([
- { id: 'primary', menuItems: TEST_NAV_DATA.primary },
- { id: 'secondary', menuItems: TEST_NAV_DATA.secondary },
- ]);
- });
-
- it('emits when menu sections emits', () => {
- expect(menuItemClickListener).not.toHaveBeenCalled();
-
- findMenuSections().vm.$emit('menu-item-click', TEST_NAV_DATA.primary[0]);
-
- expect(menuItemClickListener).toHaveBeenCalledWith(TEST_NAV_DATA.primary[0]);
- });
- });
-
- describe('without secondary', () => {
- beforeEach(() => {
- createComponent({ navData: { ...TEST_NAV_DATA, secondary: null } });
- });
-
- it('shows menu sections', () => {
- expect(findMenuSections().props('sections')).toEqual([
- { id: 'primary', menuItems: TEST_NAV_DATA.primary },
- ]);
- });
- });
-
- describe('with search view', () => {
- beforeEach(() => {
- createComponent({
- navData: {
- ...TEST_NAV_DATA,
- views: { search: TEST_SEARCH_MENU_ITEM },
- },
- });
- });
-
- it('shows search menu item', () => {
- expect(findSearchMenuItem().props()).toEqual({
- menuItem: TEST_SEARCH_MENU_ITEM,
- iconOnly: true,
- });
- });
-
- it('shows tooltip for search', () => {
- const tooltip = getBinding(findSearchMenuItem().element, 'gl-tooltip');
- expect(tooltip.value).toEqual({ title: TEST_SEARCH_MENU_ITEM.title });
- });
- });
-
- describe('with new view', () => {
- beforeEach(() => {
- createComponent({
- navData: {
- ...TEST_NAV_DATA,
- views: { new: TEST_NEW_DROPDOWN_VIEW_MODEL },
- },
- });
- });
-
- it('shows new dropdown', () => {
- expect(findNewDropdown().props()).toEqual({
- viewModel: TEST_NEW_DROPDOWN_VIEW_MODEL,
- });
- });
-
- it('shows tooltip for new dropdown', () => {
- const tooltip = getBinding(findNewDropdown().element, 'gl-tooltip');
- expect(tooltip.value).toEqual({ title: TEST_NEW_DROPDOWN_VIEW_MODEL.title });
- });
- });
-});
diff --git a/spec/frontend/nav/components/top_nav_app_spec.js b/spec/frontend/nav/components/top_nav_app_spec.js
deleted file mode 100644
index 7f39552eb42..00000000000
--- a/spec/frontend/nav/components/top_nav_app_spec.js
+++ /dev/null
@@ -1,68 +0,0 @@
-import { GlNavItemDropdown } from '@gitlab/ui';
-import { mount, shallowMount } from '@vue/test-utils';
-import { mockTracking } from 'helpers/tracking_helper';
-import TopNavApp from '~/nav/components/top_nav_app.vue';
-import TopNavDropdownMenu from '~/nav/components/top_nav_dropdown_menu.vue';
-import { TEST_NAV_DATA } from '../mock_data';
-
-describe('~/nav/components/top_nav_app.vue', () => {
- let wrapper;
-
- const createComponent = () => {
- wrapper = mount(TopNavApp, {
- propsData: {
- navData: TEST_NAV_DATA,
- },
- });
- };
-
- const createComponentShallow = () => {
- wrapper = shallowMount(TopNavApp, {
- propsData: {
- navData: TEST_NAV_DATA,
- },
- });
- };
-
- const findNavItemDropdown = () => wrapper.findComponent(GlNavItemDropdown);
- const findNavItemDropdowToggle = () => findNavItemDropdown().find('.js-top-nav-dropdown-toggle');
- const findMenu = () => wrapper.findComponent(TopNavDropdownMenu);
-
- describe('default', () => {
- beforeEach(() => {
- createComponentShallow();
- });
-
- it('renders nav item dropdown', () => {
- expect(findNavItemDropdown().attributes('href')).toBeUndefined();
- expect(findNavItemDropdown().attributes()).toMatchObject({
- icon: '',
- text: '',
- 'no-flip': '',
- 'no-caret': '',
- });
- });
-
- it('renders top nav dropdown menu', () => {
- expect(findMenu().props()).toStrictEqual({
- primary: TEST_NAV_DATA.primary,
- secondary: TEST_NAV_DATA.secondary,
- views: TEST_NAV_DATA.views,
- });
- });
- });
-
- describe('tracking', () => {
- it('emits a tracking event when the toggle is clicked', () => {
- const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- createComponent();
-
- findNavItemDropdowToggle().trigger('click');
-
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_nav', {
- label: 'hamburger_menu',
- property: 'navigation_top',
- });
- });
- });
-});
diff --git a/spec/frontend/nav/components/top_nav_container_view_spec.js b/spec/frontend/nav/components/top_nav_container_view_spec.js
deleted file mode 100644
index 388ac243648..00000000000
--- a/spec/frontend/nav/components/top_nav_container_view_spec.js
+++ /dev/null
@@ -1,120 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { merge } from 'lodash';
-import { nextTick } from 'vue';
-import FrequentItemsApp from '~/frequent_items/components/app.vue';
-import { FREQUENT_ITEMS_PROJECTS } from '~/frequent_items/constants';
-import eventHub from '~/frequent_items/event_hub';
-import TopNavContainerView from '~/nav/components/top_nav_container_view.vue';
-import TopNavMenuSections from '~/nav/components/top_nav_menu_sections.vue';
-import VuexModuleProvider from '~/vue_shared/components/vuex_module_provider.vue';
-import { TEST_NAV_DATA } from '../mock_data';
-
-const DEFAULT_PROPS = {
- frequentItemsDropdownType: FREQUENT_ITEMS_PROJECTS.namespace,
- frequentItemsVuexModule: FREQUENT_ITEMS_PROJECTS.vuexModule,
- linksPrimary: TEST_NAV_DATA.primary,
- linksSecondary: TEST_NAV_DATA.secondary,
- containerClass: 'test-frequent-items-container-class',
-};
-const TEST_OTHER_PROPS = {
- namespace: 'projects',
- currentUserName: 'test-user',
- currentItem: { id: 'test' },
-};
-
-describe('~/nav/components/top_nav_container_view.vue', () => {
- let wrapper;
-
- const createComponent = (props = {}, options = {}) => {
- wrapper = shallowMount(TopNavContainerView, {
- propsData: {
- ...DEFAULT_PROPS,
- ...TEST_OTHER_PROPS,
- ...props,
- },
- ...options,
- });
- };
-
- const findMenuSections = () => wrapper.findComponent(TopNavMenuSections);
- const findFrequentItemsApp = () => {
- const parent = wrapper.findComponent(VuexModuleProvider);
-
- return {
- vuexModule: parent.props('vuexModule'),
- props: parent.findComponent(FrequentItemsApp).props(),
- attributes: parent.findComponent(FrequentItemsApp).attributes(),
- };
- };
- const findFrequentItemsContainer = () => wrapper.find('[data-testid="frequent-items-container"]');
-
- it.each(['projects', 'groups'])(
- 'emits frequent items event to event hub (%s)',
- async (frequentItemsDropdownType) => {
- const listener = jest.fn();
- eventHub.$on(`${frequentItemsDropdownType}-dropdownOpen`, listener);
- createComponent({ frequentItemsDropdownType });
-
- expect(listener).not.toHaveBeenCalled();
-
- await nextTick();
-
- expect(listener).toHaveBeenCalled();
- },
- );
-
- describe('default', () => {
- const EXTRA_ATTRS = { 'data-test-attribute': 'foo' };
-
- beforeEach(() => {
- createComponent({}, { attrs: EXTRA_ATTRS });
- });
-
- it('does not inherit extra attrs', () => {
- expect(wrapper.attributes()).toEqual({
- class: expect.any(String),
- });
- });
-
- it('renders frequent items app', () => {
- expect(findFrequentItemsApp()).toEqual({
- vuexModule: DEFAULT_PROPS.frequentItemsVuexModule,
- props: expect.objectContaining(
- merge({ currentItem: { lastAccessedOn: Date.now() } }, TEST_OTHER_PROPS),
- ),
- attributes: expect.objectContaining(EXTRA_ATTRS),
- });
- });
-
- it('renders given container class', () => {
- expect(findFrequentItemsContainer().classes(DEFAULT_PROPS.containerClass)).toBe(true);
- });
-
- it('renders menu sections', () => {
- const sections = [
- { id: 'primary', menuItems: TEST_NAV_DATA.primary },
- { id: 'secondary', menuItems: TEST_NAV_DATA.secondary },
- ];
-
- expect(findMenuSections().props()).toEqual({
- sections,
- withTopBorder: true,
- isPrimarySection: false,
- });
- });
- });
-
- describe('without secondary links', () => {
- beforeEach(() => {
- createComponent({
- linksSecondary: [],
- });
- });
-
- it('renders one menu item group', () => {
- expect(findMenuSections().props('sections')).toEqual([
- { id: 'primary', menuItems: TEST_NAV_DATA.primary },
- ]);
- });
- });
-});
diff --git a/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js b/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
deleted file mode 100644
index 1d516240306..00000000000
--- a/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
+++ /dev/null
@@ -1,146 +0,0 @@
-import { shallowMount, mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import TopNavDropdownMenu from '~/nav/components/top_nav_dropdown_menu.vue';
-import TopNavMenuItem from '~/nav/components/top_nav_menu_item.vue';
-import TopNavMenuSections from '~/nav/components/top_nav_menu_sections.vue';
-import KeepAliveSlots from '~/vue_shared/components/keep_alive_slots.vue';
-import { TEST_NAV_DATA } from '../mock_data';
-import { stubComponent } from '../../__helpers__/stub_component';
-
-describe('~/nav/components/top_nav_dropdown_menu.vue', () => {
- let wrapper;
-
- const createComponent = (props = {}, mountFn = shallowMount) => {
- wrapper = mountFn(TopNavDropdownMenu, {
- propsData: {
- primary: TEST_NAV_DATA.primary,
- secondary: TEST_NAV_DATA.secondary,
- views: TEST_NAV_DATA.views,
- ...props,
- },
- stubs: {
- // Stub the keep-alive-slots so we don't render frequent items which uses a store
- KeepAliveSlots: stubComponent(KeepAliveSlots),
- },
- });
- };
-
- const findMenuItems = () => wrapper.findAllComponents(TopNavMenuItem);
- const findMenuSections = () => wrapper.findComponent(TopNavMenuSections);
- const findMenuSidebar = () => wrapper.find('[data-testid="menu-sidebar"]');
- const findMenuSubview = () => wrapper.findComponent(KeepAliveSlots);
- const hasFullWidthMenuSidebar = () => findMenuSidebar().classes('gl-w-full');
-
- const withActiveIndex = (menuItems, activeIndex) =>
- menuItems.map((x, idx) => ({
- ...x,
- active: idx === activeIndex,
- }));
-
- beforeEach(() => {
- jest.spyOn(console, 'error').mockImplementation();
- });
-
- describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders menu sections', () => {
- expect(findMenuSections().props()).toEqual({
- sections: [
- { id: 'primary', menuItems: TEST_NAV_DATA.primary },
- { id: 'secondary', menuItems: TEST_NAV_DATA.secondary },
- ],
- withTopBorder: false,
- isPrimarySection: true,
- });
- });
-
- it('has full width menu sidebar', () => {
- expect(hasFullWidthMenuSidebar()).toBe(true);
- });
-
- it('renders hidden subview with no slot key', () => {
- const subview = findMenuSubview();
-
- expect(subview.isVisible()).toBe(false);
- expect(subview.props()).toEqual({ slotKey: '' });
- });
- });
-
- describe('with pre-initialized active view', () => {
- beforeEach(() => {
- // We opt for a small integration test, to make sure the event is handled correctly
- // as it would in prod.
- createComponent(
- {
- primary: withActiveIndex(TEST_NAV_DATA.primary, 1),
- },
- mount,
- );
- });
-
- it('renders menu sections', () => {
- expect(findMenuSections().props('sections')).toStrictEqual([
- { id: 'primary', menuItems: withActiveIndex(TEST_NAV_DATA.primary, 1) },
- { id: 'secondary', menuItems: TEST_NAV_DATA.secondary },
- ]);
- });
-
- it('does not have full width menu sidebar', () => {
- expect(hasFullWidthMenuSidebar()).toBe(false);
- });
-
- it('renders visible subview with slot key', () => {
- const subview = findMenuSubview();
-
- expect(subview.isVisible()).toBe(true);
- expect(subview.props('slotKey')).toBe(TEST_NAV_DATA.primary[1].view);
- });
-
- it('does not change view if non-view menu item is clicked', async () => {
- const secondaryLink = findMenuItems().at(TEST_NAV_DATA.primary.length);
-
- // Ensure this doesn't have a view
- expect(secondaryLink.props('menuItem').view).toBeUndefined();
-
- secondaryLink.vm.$emit('click');
-
- await nextTick();
-
- expect(findMenuSubview().props('slotKey')).toBe(TEST_NAV_DATA.primary[1].view);
- });
-
- describe('when menu item is clicked', () => {
- let primaryLink;
-
- beforeEach(async () => {
- primaryLink = findMenuItems().at(0);
- primaryLink.vm.$emit('click');
- await nextTick();
- });
-
- it('clicked on link with view', () => {
- expect(primaryLink.props('menuItem').view).toBe(TEST_NAV_DATA.views.projects.namespace);
- });
-
- it('changes active view', () => {
- expect(findMenuSubview().props('slotKey')).toBe(TEST_NAV_DATA.primary[0].view);
- });
-
- it('changes active status on menu item', () => {
- expect(findMenuSections().props('sections')).toStrictEqual([
- {
- id: 'primary',
- menuItems: withActiveIndex(TEST_NAV_DATA.primary, 0),
- },
- {
- id: 'secondary',
- menuItems: withActiveIndex(TEST_NAV_DATA.secondary, -1),
- },
- ]);
- });
- });
- });
-});
diff --git a/spec/frontend/nav/components/top_nav_menu_item_spec.js b/spec/frontend/nav/components/top_nav_menu_item_spec.js
deleted file mode 100644
index b9cf39b8c1d..00000000000
--- a/spec/frontend/nav/components/top_nav_menu_item_spec.js
+++ /dev/null
@@ -1,145 +0,0 @@
-import { GlButton, GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import TopNavMenuItem from '~/nav/components/top_nav_menu_item.vue';
-
-const TEST_MENU_ITEM = {
- title: 'Cheeseburger',
- icon: 'search',
- href: '/pretty/good/burger',
- view: 'burger-view',
- data: { qa_selector: 'not-a-real-selector', method: 'post', testFoo: 'test' },
-};
-
-describe('~/nav/components/top_nav_menu_item.vue', () => {
- let listener;
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(TopNavMenuItem, {
- propsData: {
- menuItem: TEST_MENU_ITEM,
- ...props,
- },
- listeners: {
- click: listener,
- },
- });
- };
-
- const findButton = () => wrapper.findComponent(GlButton);
- const findButtonIcons = () =>
- findButton()
- .findAllComponents(GlIcon)
- .wrappers.map((x) => ({
- name: x.props('name'),
- classes: x.classes(),
- }));
-
- beforeEach(() => {
- listener = jest.fn();
- });
-
- describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders button href and text', () => {
- const button = findButton();
-
- expect(button.attributes('href')).toBe(TEST_MENU_ITEM.href);
- expect(button.text()).toBe(TEST_MENU_ITEM.title);
- });
-
- it('renders button data attributes', () => {
- const button = findButton();
-
- expect(button.attributes()).toMatchObject({
- 'data-qa-selector': TEST_MENU_ITEM.data.qa_selector,
- 'data-method': TEST_MENU_ITEM.data.method,
- 'data-test-foo': TEST_MENU_ITEM.data.testFoo,
- });
- });
-
- it('passes listeners to button', () => {
- expect(listener).not.toHaveBeenCalled();
-
- findButton().vm.$emit('click', 'TEST');
-
- expect(listener).toHaveBeenCalledWith('TEST');
- });
-
- it('renders expected icons', () => {
- expect(findButtonIcons()).toEqual([
- {
- name: TEST_MENU_ITEM.icon,
- classes: ['gl-mr-3!'],
- },
- {
- name: 'chevron-right',
- classes: ['gl-ml-auto'],
- },
- ]);
- });
- });
-
- describe('with icon-only', () => {
- beforeEach(() => {
- createComponent({ iconOnly: true });
- });
-
- it('does not render title or view icon', () => {
- expect(wrapper.text()).toBe('');
- });
-
- it('only renders menuItem icon', () => {
- expect(findButtonIcons()).toEqual([
- {
- name: TEST_MENU_ITEM.icon,
- classes: [],
- },
- ]);
- });
- });
-
- describe.each`
- desc | menuItem | expectedIcons
- ${'with no icon'} | ${{ ...TEST_MENU_ITEM, icon: null }} | ${['chevron-right']}
- ${'with no view'} | ${{ ...TEST_MENU_ITEM, view: null }} | ${[TEST_MENU_ITEM.icon]}
- ${'with no icon or view'} | ${{ ...TEST_MENU_ITEM, view: null, icon: null }} | ${[]}
- `('$desc', ({ menuItem, expectedIcons }) => {
- beforeEach(() => {
- createComponent({ menuItem });
- });
-
- it(`renders expected icons ${JSON.stringify(expectedIcons)}`, () => {
- expect(findButtonIcons().map((x) => x.name)).toEqual(expectedIcons);
- });
- });
-
- describe.each`
- desc | active | cssClass | expectedClasses
- ${'default'} | ${false} | ${''} | ${[]}
- ${'with css class'} | ${false} | ${'test-css-class testing-123'} | ${['test-css-class', 'testing-123']}
- ${'with css class & active'} | ${true} | ${'test-css-class'} | ${['test-css-class', ...TopNavMenuItem.ACTIVE_CLASS.split(' ')]}
- `('$desc', ({ active, cssClass, expectedClasses }) => {
- beforeEach(() => {
- createComponent({
- menuItem: {
- ...TEST_MENU_ITEM,
- active,
- css_class: cssClass,
- },
- });
- });
-
- it('renders expected classes', () => {
- expect(wrapper.classes()).toStrictEqual([
- 'top-nav-menu-item',
- 'gl-display-block',
- 'gl-pr-3!',
- ...expectedClasses,
- ]);
- });
- });
-});
diff --git a/spec/frontend/nav/components/top_nav_menu_sections_spec.js b/spec/frontend/nav/components/top_nav_menu_sections_spec.js
deleted file mode 100644
index 7a3e58fd964..00000000000
--- a/spec/frontend/nav/components/top_nav_menu_sections_spec.js
+++ /dev/null
@@ -1,138 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import TopNavMenuSections from '~/nav/components/top_nav_menu_sections.vue';
-
-const TEST_SECTIONS = [
- {
- id: 'primary',
- menuItems: [
- { type: 'header', title: 'Heading' },
- { type: 'item', id: 'test', href: '/test/href' },
- { type: 'header', title: 'Another Heading' },
- { type: 'item', id: 'foo' },
- { type: 'item', id: 'bar' },
- ],
- },
- {
- id: 'secondary',
- menuItems: [
- { type: 'item', id: 'lorem' },
- { type: 'item', id: 'ipsum' },
- ],
- },
-];
-
-describe('~/nav/components/top_nav_menu_sections.vue', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(TopNavMenuSections, {
- propsData: {
- sections: TEST_SECTIONS,
- ...props,
- },
- });
- };
-
- const findMenuItemModels = (parent) =>
- parent.findAll('[data-testid="menu-header"],[data-testid="menu-item"]').wrappers.map((x) => {
- return {
- menuItem: x.vm
- ? {
- type: 'item',
- ...x.props('menuItem'),
- }
- : {
- type: 'header',
- title: x.text(),
- },
- classes: x.classes(),
- };
- });
- const findSectionModels = () =>
- wrapper.findAll('[data-testid="menu-section"]').wrappers.map((x) => ({
- classes: x.classes(),
- menuItems: findMenuItemModels(x),
- }));
-
- describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders sections with menu items', () => {
- const headerClasses = ['gl-px-4', 'gl-py-2', 'gl-text-gray-900', 'gl-display-block'];
- const itemClasses = ['gl-w-full'];
-
- expect(findSectionModels()).toEqual([
- {
- classes: [],
- menuItems: TEST_SECTIONS[0].menuItems.map((menuItem, index) => {
- const classes = menuItem.type === 'header' ? [...headerClasses] : [...itemClasses];
- if (index > 0) classes.push(menuItem.type === 'header' ? 'gl-pt-3!' : 'gl-mt-1');
- return {
- menuItem,
- classes,
- };
- }),
- },
- {
- classes: [
- ...TopNavMenuSections.BORDER_CLASSES.split(' '),
- 'gl-border-gray-50',
- 'gl-mt-3',
- ],
- menuItems: TEST_SECTIONS[1].menuItems.map((menuItem, index) => {
- const classes = menuItem.type === 'header' ? [...headerClasses] : [...itemClasses];
- if (index > 0) classes.push(menuItem.type === 'header' ? 'gl-pt-3!' : 'gl-mt-1');
- return {
- menuItem,
- classes,
- };
- }),
- },
- ]);
- });
-
- it('when clicked menu item with href, does nothing', () => {
- const menuItem = wrapper.findAll('[data-testid="menu-item"]').at(0);
-
- menuItem.vm.$emit('click');
-
- expect(wrapper.emitted()).toEqual({});
- });
-
- it('when clicked menu item without href, emits "menu-item-click"', () => {
- const menuItem = wrapper.findAll('[data-testid="menu-item"]').at(1);
-
- menuItem.vm.$emit('click');
-
- expect(wrapper.emitted('menu-item-click')).toEqual([[TEST_SECTIONS[0].menuItems[3]]]);
- });
- });
-
- describe('with withTopBorder=true', () => {
- beforeEach(() => {
- createComponent({ withTopBorder: true });
- });
-
- it('renders border classes for top section', () => {
- expect(findSectionModels().map((x) => x.classes)).toEqual([
- [...TopNavMenuSections.BORDER_CLASSES.split(' '), 'gl-border-gray-50'],
- [...TopNavMenuSections.BORDER_CLASSES.split(' '), 'gl-border-gray-50', 'gl-mt-3'],
- ]);
- });
- });
-
- describe('with isPrimarySection=true', () => {
- beforeEach(() => {
- createComponent({ isPrimarySection: true });
- });
-
- it('renders border classes for top section', () => {
- expect(findSectionModels().map((x) => x.classes)).toEqual([
- [],
- [...TopNavMenuSections.BORDER_CLASSES.split(' '), 'gl-border-gray-100', 'gl-mt-3'],
- ]);
- });
- });
-});
diff --git a/spec/frontend/nav/components/top_nav_new_dropdown_spec.js b/spec/frontend/nav/components/top_nav_new_dropdown_spec.js
deleted file mode 100644
index 432ee5e9ecd..00000000000
--- a/spec/frontend/nav/components/top_nav_new_dropdown_spec.js
+++ /dev/null
@@ -1,142 +0,0 @@
-import { GlDropdown } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import TopNavNewDropdown from '~/nav/components/top_nav_new_dropdown.vue';
-import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
-import { TOP_NAV_INVITE_MEMBERS_COMPONENT } from '~/invite_members/constants';
-
-const TEST_VIEW_MODEL = {
- title: 'Dropdown',
- menu_sections: [
- {
- title: 'Section 1',
- menu_items: [
- { id: 'foo-1', title: 'Foo 1', href: '/foo/1' },
- { id: 'foo-2', title: 'Foo 2', href: '/foo/2' },
- { id: 'foo-3', title: 'Foo 3', href: '/foo/3' },
- ],
- },
- {
- title: 'Section 2',
- menu_items: [
- { id: 'bar-1', title: 'Bar 1', href: '/bar/1' },
- { id: 'bar-2', title: 'Bar 2', href: '/bar/2' },
- {
- id: 'invite',
- title: '_invite members title_',
- component: TOP_NAV_INVITE_MEMBERS_COMPONENT,
- icon: '_icon_',
- data: {
- trigger_element: '_trigger_element_',
- trigger_source: '_trigger_source_',
- },
- },
- ],
- },
- ],
-};
-
-describe('~/nav/components/top_nav_menu_sections.vue', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(TopNavNewDropdown, {
- propsData: {
- viewModel: TEST_VIEW_MODEL,
- ...props,
- },
- });
- };
-
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findInviteMembersTrigger = () => wrapper.findComponent(InviteMembersTrigger);
- const findDropdownContents = () =>
- findDropdown()
- .findAll('[data-testid]')
- .wrappers.map((child) => {
- const type = child.attributes('data-testid');
-
- if (type === 'divider') {
- return { type };
- }
- if (type === 'header') {
- return { type, text: child.text() };
- }
-
- return {
- type,
- text: child.text(),
- href: child.attributes('href'),
- };
- });
-
- describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders dropdown parent', () => {
- expect(findDropdown().props()).toMatchObject({
- text: TEST_VIEW_MODEL.title,
- textSrOnly: true,
- icon: 'plus',
- });
- });
-
- it('renders dropdown content', () => {
- const hrefItems = TEST_VIEW_MODEL.menu_sections[1].menu_items.filter((item) =>
- Boolean(item.href),
- );
-
- expect(findDropdownContents()).toEqual([
- {
- type: 'header',
- text: TEST_VIEW_MODEL.menu_sections[0].title,
- },
- ...TEST_VIEW_MODEL.menu_sections[0].menu_items.map(({ title, href }) => ({
- type: 'item',
- href,
- text: title,
- })),
- {
- type: 'divider',
- },
- {
- type: 'header',
- text: TEST_VIEW_MODEL.menu_sections[1].title,
- },
- ...hrefItems.map(({ title, href }) => ({
- type: 'item',
- href,
- text: title,
- })),
- ]);
- expect(findInviteMembersTrigger().props()).toMatchObject({
- displayText: '_invite members title_',
- icon: '_icon_',
- triggerElement: 'dropdown-_trigger_element_',
- triggerSource: '_trigger_source_',
- });
- });
- });
-
- describe('with only 1 section', () => {
- beforeEach(() => {
- createComponent({
- viewModel: {
- ...TEST_VIEW_MODEL,
- menu_sections: TEST_VIEW_MODEL.menu_sections.slice(0, 1),
- },
- });
- });
-
- it('renders dropdown content without headers and dividers', () => {
- expect(findDropdownContents()).toEqual(
- TEST_VIEW_MODEL.menu_sections[0].menu_items.map(({ title, href }) => ({
- type: 'item',
- href,
- text: title,
- })),
- );
- });
- });
-});
diff --git a/spec/frontend/nav/mock_data.js b/spec/frontend/nav/mock_data.js
deleted file mode 100644
index 2052acfe001..00000000000
--- a/spec/frontend/nav/mock_data.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import { range } from 'lodash';
-
-export const TEST_NAV_DATA = {
- menuTitle: 'Test Menu Title',
- primary: [
- ...['projects', 'groups'].map((view) => ({
- id: view,
- href: null,
- title: view,
- view,
- })),
- ...range(0, 2).map((idx) => ({
- id: `primary-link-${idx}`,
- href: `/path/to/primary/${idx}`,
- title: `Title ${idx}`,
- })),
- ],
- secondary: range(0, 2).map((idx) => ({
- id: `secondary-link-${idx}`,
- href: `/path/to/secondary/${idx}`,
- title: `SecTitle ${idx}`,
- })),
- views: {
- projects: {
- namespace: 'projects',
- currentUserName: '',
- currentItem: {},
- linksPrimary: [{ id: 'project-link', href: '/path/to/projects', title: 'Project Link' }],
- linksSecondary: [],
- },
- groups: {
- namespace: 'groups',
- currentUserName: '',
- currentItem: {},
- linksPrimary: [],
- linksSecondary: [{ id: 'group-link', href: '/path/to/groups', title: 'Group Link' }],
- },
- },
-};
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 1309fd79c14..8f761476c7c 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -9,7 +9,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
-import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
+import { fetchUserCounts } from '~/super_sidebar/user_counts_fetch';
import { createAlert } from '~/alert';
import { STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';
import axios from '~/lib/utils/axios_utils';
@@ -26,7 +26,7 @@ import { mockTracking } from 'helpers/tracking_helper';
import { loggedOutnoteableData, notesDataMock, userDataMock, noteableDataMock } from '../mock_data';
jest.mock('autosize');
-jest.mock('~/commons/nav/user_merge_requests');
+jest.mock('~/super_sidebar/user_counts_fetch');
jest.mock('~/alert');
Vue.use(Vuex);
@@ -586,7 +586,7 @@ describe('issue_comment_form component', () => {
await nextTick();
- expect(refreshUserMergeRequestCounts).toHaveBeenCalled();
+ expect(fetchUserCounts).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js
index 87ccb5b7394..dfc901bf1b3 100644
--- a/spec/frontend/notes/components/discussion_filter_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_spec.js
@@ -6,7 +6,7 @@ import AxiosMockAdapter from 'axios-mock-adapter';
import Vuex from 'vuex';
import { TEST_HOST } from 'helpers/test_constants';
import createEventHub from '~/helpers/event_hub_factory';
-
+import * as urlUtility from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import DiscussionFilter from '~/notes/components/discussion_filter.vue';
@@ -40,7 +40,7 @@ describe('DiscussionFilter component', () => {
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
- const mountComponent = () => {
+ const mountComponent = ({ propsData = {} } = {}) => {
const discussions = [
{
...discussionMock,
@@ -63,11 +63,12 @@ describe('DiscussionFilter component', () => {
store.state.discussions = discussions;
- return mount(DiscussionFilter, {
+ wrapper = mount(DiscussionFilter, {
store,
propsData: {
filters: discussionFiltersMock,
selectedValue: DISCUSSION_FILTERS_DEFAULT_VALUE,
+ ...propsData,
},
});
};
@@ -88,7 +89,7 @@ describe('DiscussionFilter component', () => {
describe('default', () => {
beforeEach(() => {
- wrapper = mountComponent();
+ mountComponent();
jest.spyOn(store, 'dispatch').mockImplementation();
});
@@ -105,7 +106,7 @@ describe('DiscussionFilter component', () => {
describe('when asc', () => {
beforeEach(() => {
- wrapper = mountComponent();
+ mountComponent();
jest.spyOn(store, 'dispatch').mockImplementation();
});
@@ -125,7 +126,7 @@ describe('DiscussionFilter component', () => {
describe('when desc', () => {
beforeEach(() => {
- wrapper = mountComponent();
+ mountComponent();
store.state.discussionSortOrder = DESC;
jest.spyOn(store, 'dispatch').mockImplementation();
});
@@ -150,7 +151,7 @@ describe('DiscussionFilter component', () => {
describe('discussion filter functionality', () => {
beforeEach(() => {
- wrapper = mountComponent();
+ mountComponent();
});
it('renders the all filters', () => {
@@ -215,7 +216,7 @@ describe('DiscussionFilter component', () => {
currentTab: 'show',
};
- wrapper = mountComponent();
+ mountComponent();
});
afterEach(() => {
@@ -239,7 +240,7 @@ describe('DiscussionFilter component', () => {
it('does not update the filter when the current filter is "Show all activity"', async () => {
window.location.hash = `note_${discussionMock.notes[0].id}`;
- wrapper = mountComponent();
+ mountComponent();
await nextTick();
const filtered = findGlDisclosureDropdownItems().filter((el) => el.classes('is-active'));
@@ -250,7 +251,7 @@ describe('DiscussionFilter component', () => {
it('only updates filter when the URL links to a note', async () => {
window.location.hash = `testing123`;
- wrapper = mountComponent();
+ mountComponent();
await nextTick();
const filtered = findGlDisclosureDropdownItems().filter((el) => el.classes('is-active'));
@@ -260,12 +261,32 @@ describe('DiscussionFilter component', () => {
});
it('does not fetch discussions when there is no hash', async () => {
- window.location.hash = '';
- const selectFilterSpy = jest.spyOn(wrapper.vm, 'selectFilter').mockImplementation(() => {});
- wrapper = mountComponent();
+ mountComponent();
+ const dispatchSpy = jest.spyOn(store, 'dispatch');
await nextTick();
- expect(selectFilterSpy).not.toHaveBeenCalled();
+ expect(dispatchSpy).not.toHaveBeenCalled();
+ });
+
+ describe('selected value is not default state', () => {
+ beforeEach(() => {
+ mountComponent({
+ propsData: { selectedValue: 2 },
+ });
+ });
+ it('fetch discussions when there is hash', async () => {
+ jest.spyOn(urlUtility, 'getLocationHash').mockReturnValueOnce('note_123');
+ const dispatchSpy = jest.spyOn(store, 'dispatch');
+
+ window.dispatchEvent(new Event('hashchange'));
+
+ await nextTick();
+ expect(dispatchSpy).toHaveBeenCalledWith('filterDiscussion', {
+ filter: 0,
+ path: 'http://test.host/example',
+ persistFilter: false,
+ });
+ });
});
});
});
diff --git a/spec/frontend/notes/components/note_actions_spec.js b/spec/frontend/notes/components/note_actions_spec.js
index fc50afcb01d..47663360ce8 100644
--- a/spec/frontend/notes/components/note_actions_spec.js
+++ b/spec/frontend/notes/components/note_actions_spec.js
@@ -1,5 +1,5 @@
import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { stubComponent } from 'helpers/stub_component';
@@ -43,11 +43,10 @@ describe('noteActions', () => {
store.state.isPromoteCommentToTimelineEventInProgress = isPromotionInProgress;
};
- const mountNoteActions = (propsData, computed) => {
- return mount(noteActions, {
+ const mountNoteActions = (propsData) => {
+ return shallowMount(noteActions, {
store,
propsData,
- computed,
stubs: {
GlDisclosureDropdown: stubComponent(GlDisclosureDropdown, {
methods: {
@@ -190,15 +189,14 @@ describe('noteActions', () => {
};
beforeEach(() => {
- wrapper = mountNoteActions(props, {
- targetType: () => 'issue',
- });
+ wrapper = mountNoteActions(props);
store.state.noteableData = {
current_user: {
can_set_issue_metadata: true,
},
};
store.state.userData = userDataMock;
+ store.state.noteableData.targetType = 'issue';
});
afterEach(() => {
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index f07ba1e032f..938ca1f5939 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -1129,9 +1129,12 @@ describe('Actions Notes Store', () => {
describe('setConfidentiality', () => {
it('calls the correct mutation with the correct args', () => {
- testAction(actions.setConfidentiality, true, { noteableData: { confidential: false } }, [
- { type: mutationTypes.SET_ISSUE_CONFIDENTIAL, payload: true },
- ]);
+ return testAction(
+ actions.setConfidentiality,
+ true,
+ { noteableData: { confidential: false } },
+ [{ type: mutationTypes.SET_ISSUE_CONFIDENTIAL, payload: true }],
+ );
});
});
diff --git a/spec/frontend/observability/client_spec.js b/spec/frontend/observability/client_spec.js
index b41b303f57d..e7b68a2346e 100644
--- a/spec/frontend/observability/client_spec.js
+++ b/spec/frontend/observability/client_spec.js
@@ -18,6 +18,7 @@ describe('buildClient', () => {
const servicesUrl = 'https://example.com/services';
const operationsUrl = 'https://example.com/services/$SERVICE_NAME$/operations';
const metricsUrl = 'https://example.com/metrics';
+ const metricsSearchUrl = 'https://example.com/metrics/search';
const FETCHING_TRACES_ERROR = 'traces are missing/invalid in the response';
const apiConfig = {
@@ -26,6 +27,7 @@ describe('buildClient', () => {
servicesUrl,
operationsUrl,
metricsUrl,
+ metricsSearchUrl,
};
const getQueryParam = () => decodeURIComponent(axios.get.mock.calls[0][1].params.toString());
@@ -311,6 +313,16 @@ describe('buildClient', () => {
expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
});
+ it('ignores non-array filters', async () => {
+ await client.fetchTraces({
+ filters: {
+ traceId: { operator: '=', value: 'foo' },
+ },
+ });
+
+ expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
+ });
+
it('ignores unsupported operators', async () => {
await client.fetchTraces({
filters: {
@@ -429,10 +441,84 @@ describe('buildClient', () => {
expect(axios.get).toHaveBeenCalledTimes(1);
expect(axios.get).toHaveBeenCalledWith(metricsUrl, {
withCredentials: true,
+ params: expect.any(URLSearchParams),
});
expect(result).toEqual(mockResponse);
});
+ describe('query filter', () => {
+ beforeEach(() => {
+ axiosMock.onGet(metricsUrl).reply(200, {
+ metrics: [],
+ });
+ });
+
+ it('does not set any query param without filters', async () => {
+ await client.fetchMetrics();
+
+ expect(getQueryParam()).toBe('');
+ });
+
+ it('sets the starts_with query param based on the search filter', async () => {
+ await client.fetchMetrics({
+ filters: { search: [{ value: 'foo' }, { value: 'bar' }, { value: ' ' }] },
+ });
+ expect(getQueryParam()).toBe('starts_with=foo+bar');
+ });
+
+ it('ignores empty search', async () => {
+ await client.fetchMetrics({
+ filters: {
+ search: [{ value: ' ' }, { value: '' }, { value: null }, { value: undefined }],
+ },
+ });
+ expect(getQueryParam()).toBe('');
+ });
+
+ it('ignores unsupported filters', async () => {
+ await client.fetchMetrics({
+ filters: {
+ unsupportedFilter: [{ operator: '=', value: 'foo' }],
+ },
+ });
+
+ expect(getQueryParam()).toBe('');
+ });
+
+ it('ignores non-array search filters', async () => {
+ await client.fetchMetrics({
+ filters: {
+ search: { value: 'foo' },
+ },
+ });
+
+ expect(getQueryParam()).toBe('');
+ });
+
+ it('adds the search limit param if specified with the search filter', async () => {
+ await client.fetchMetrics({
+ filters: { search: [{ value: 'foo' }] },
+ limit: 50,
+ });
+ expect(getQueryParam()).toBe('starts_with=foo&limit=50');
+ });
+
+ it('does not add the search limit param if the search filter is missing', async () => {
+ await client.fetchMetrics({
+ limit: 50,
+ });
+ expect(getQueryParam()).toBe('');
+ });
+
+ it('does not add the search limit param if the search filter is empty', async () => {
+ await client.fetchMetrics({
+ limit: 50,
+ search: [{ value: ' ' }, { value: '' }, { value: null }, { value: undefined }],
+ });
+ expect(getQueryParam()).toBe('');
+ });
+ });
+
it('rejects if metrics are missing', async () => {
axiosMock.onGet(metricsUrl).reply(200, {});
@@ -447,4 +533,40 @@ describe('buildClient', () => {
expectErrorToBeReported(new Error(FETCHING_METRICS_ERROR));
});
});
+
+ describe('fetchMetric', () => {
+ it('fetches the metric from the API', async () => {
+ const data = { results: [] };
+ axiosMock.onGet(metricsSearchUrl).reply(200, data);
+
+ const result = await client.fetchMetric('name', 'type');
+
+ expect(axios.get).toHaveBeenCalledTimes(1);
+ expect(axios.get).toHaveBeenCalledWith(metricsSearchUrl, {
+ withCredentials: true,
+ params: new URLSearchParams({ mname: 'name', mtype: 'type' }),
+ });
+ expect(result).toEqual(data.results);
+ });
+
+ it('rejects if results is missing from the response', async () => {
+ axiosMock.onGet(metricsSearchUrl).reply(200, {});
+ const e = 'metrics are missing/invalid in the response';
+
+ await expect(client.fetchMetric('name', 'type')).rejects.toThrow(e);
+ expectErrorToBeReported(new Error(e));
+ });
+
+ it('rejects if metric name is missing', async () => {
+ const e = 'fetchMetric() - metric name is required.';
+ await expect(client.fetchMetric()).rejects.toThrow(e);
+ expectErrorToBeReported(new Error(e));
+ });
+
+ it('rejects if metric type is missing', async () => {
+ const e = 'fetchMetric() - metric type is required.';
+ await expect(client.fetchMetric('name')).rejects.toThrow(e);
+ expectErrorToBeReported(new Error(e));
+ });
+ });
});
diff --git a/spec/frontend/organizations/index/components/app_spec.js b/spec/frontend/organizations/index/components/app_spec.js
index 175b1e1c552..670eb34bffd 100644
--- a/spec/frontend/organizations/index/components/app_spec.js
+++ b/spec/frontend/organizations/index/components/app_spec.js
@@ -5,9 +5,9 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import { organizations } from '~/organizations/mock_data';
-import resolvers from '~/organizations/shared/graphql/resolvers';
-import organizationsQuery from '~/organizations/index/graphql/organizations.query.graphql';
+import { DEFAULT_PER_PAGE } from '~/api';
+import { organizations as nodes, pageInfo, pageInfoEmpty } from '~/organizations/mock_data';
+import organizationsQuery from '~/organizations/shared/graphql/queries/organizations.query.graphql';
import OrganizationsIndexApp from '~/organizations/index/components/app.vue';
import OrganizationsView from '~/organizations/index/components/organizations_view.vue';
import { MOCK_NEW_ORG_URL } from '../mock_data';
@@ -20,8 +20,27 @@ describe('OrganizationsIndexApp', () => {
let wrapper;
let mockApollo;
- const createComponent = (mockResolvers = resolvers) => {
- mockApollo = createMockApollo([[organizationsQuery, mockResolvers]]);
+ const organizations = {
+ nodes,
+ pageInfo,
+ };
+
+ const organizationEmpty = {
+ nodes: [],
+ pageInfo: pageInfoEmpty,
+ };
+
+ const successHandler = jest.fn().mockResolvedValue({
+ data: {
+ currentUser: {
+ id: 'gid://gitlab/User/1',
+ organizations,
+ },
+ },
+ });
+
+ const createComponent = (handler = successHandler) => {
+ mockApollo = createMockApollo([[organizationsQuery, handler]]);
wrapper = shallowMountExtended(OrganizationsIndexApp, {
apolloProvider: mockApollo,
@@ -35,53 +54,168 @@ describe('OrganizationsIndexApp', () => {
mockApollo = null;
});
+ // Finders
const findOrganizationHeaderText = () => wrapper.findByText('Organizations');
const findNewOrganizationButton = () => wrapper.findComponent(GlButton);
const findOrganizationsView = () => wrapper.findComponent(OrganizationsView);
- const loadingResolver = jest.fn().mockReturnValue(new Promise(() => {}));
- const successfulResolver = (nodes) =>
- jest.fn().mockResolvedValue({
- data: { currentUser: { id: 1, organizations: { nodes } } },
+ // Assertions
+ const itRendersHeaderText = () => {
+ it('renders the header text', () => {
+ expect(findOrganizationHeaderText().exists()).toBe(true);
+ });
+ };
+
+ const itRendersNewOrganizationButton = () => {
+ it('renders new organization button with correct link', () => {
+ expect(findNewOrganizationButton().attributes('href')).toBe(MOCK_NEW_ORG_URL);
+ });
+ };
+
+ const itDoesNotRenderErrorMessage = () => {
+ it('does not render an error message', () => {
+ expect(createAlert).not.toHaveBeenCalled();
+ });
+ };
+
+ const itDoesNotRenderHeaderText = () => {
+ it('does not render the header text', () => {
+ expect(findOrganizationHeaderText().exists()).toBe(false);
+ });
+ };
+
+ const itDoesNotRenderNewOrganizationButton = () => {
+ it('does not render new organization button', () => {
+ expect(findNewOrganizationButton().exists()).toBe(false);
+ });
+ };
+
+ describe('when API call is loading', () => {
+ beforeEach(() => {
+ createComponent(jest.fn().mockReturnValue(new Promise(() => {})));
+ });
+
+ itRendersHeaderText();
+ itRendersNewOrganizationButton();
+ itDoesNotRenderErrorMessage();
+
+ it('renders the organizations view with loading prop set to true', () => {
+ expect(findOrganizationsView().props('loading')).toBe(true);
+ });
+ });
+
+ describe('when API call is successful', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ itRendersHeaderText();
+ itRendersNewOrganizationButton();
+ itDoesNotRenderErrorMessage();
+
+ it('passes organizations to view component', () => {
+ expect(findOrganizationsView().props()).toMatchObject({
+ loading: false,
+ organizations,
+ });
});
- const errorResolver = jest.fn().mockRejectedValue('error');
+ });
- describe.each`
- description | mockResolver | headerText | newOrgLink | loading | orgsData | error
- ${'when API call is loading'} | ${loadingResolver} | ${true} | ${MOCK_NEW_ORG_URL} | ${true} | ${[]} | ${false}
- ${'when API returns successful with results'} | ${successfulResolver(organizations)} | ${true} | ${MOCK_NEW_ORG_URL} | ${false} | ${organizations} | ${false}
- ${'when API returns successful without results'} | ${successfulResolver([])} | ${false} | ${false} | ${false} | ${[]} | ${false}
- ${'when API returns error'} | ${errorResolver} | ${false} | ${false} | ${false} | ${[]} | ${true}
- `('$description', ({ mockResolver, headerText, newOrgLink, loading, orgsData, error }) => {
+ describe('when API call is successful and returns no organizations', () => {
beforeEach(async () => {
- createComponent(mockResolver);
+ createComponent(
+ jest.fn().mockResolvedValue({
+ data: {
+ currentUser: {
+ id: 'gid://gitlab/User/1',
+ organizations: organizationEmpty,
+ },
+ },
+ }),
+ );
await waitForPromises();
});
- it(`does ${headerText ? '' : 'not '}render the header text`, () => {
- expect(findOrganizationHeaderText().exists()).toBe(headerText);
+ itDoesNotRenderHeaderText();
+ itDoesNotRenderNewOrganizationButton();
+ itDoesNotRenderErrorMessage();
+
+ it('renders view component with correct organizations and loading props', () => {
+ expect(findOrganizationsView().props()).toMatchObject({
+ loading: false,
+ organizations: organizationEmpty,
+ });
});
+ });
+
+ describe('when API call is not successful', () => {
+ const error = new Error();
- it(`does ${newOrgLink ? '' : 'not '}render new organization button with correct link`, () => {
- expect(
- findNewOrganizationButton().exists() && findNewOrganizationButton().attributes('href'),
- ).toBe(newOrgLink);
+ beforeEach(async () => {
+ createComponent(jest.fn().mockRejectedValue(error));
+ await waitForPromises();
});
- it(`renders the organizations view with ${loading} loading prop`, () => {
- expect(findOrganizationsView().props('loading')).toBe(loading);
+ itDoesNotRenderHeaderText();
+ itDoesNotRenderNewOrganizationButton();
+
+ it('renders view component with correct organizations and loading props', () => {
+ expect(findOrganizationsView().props()).toMatchObject({
+ loading: false,
+ organizations: {},
+ });
});
- it(`renders the organizations view with ${
- orgsData ? 'correct' : 'empty'
- } organizations array prop`, () => {
- expect(findOrganizationsView().props('organizations')).toStrictEqual(orgsData);
+ it('renders error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message:
+ 'An error occurred loading user organizations. Please refresh the page to try again.',
+ error,
+ captureError: true,
+ });
});
+ });
+
+ describe('when view component emits `next` event', () => {
+ const endCursor = 'mockEndCursor';
+
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('calls GraphQL query with correct pageInfo variables', async () => {
+ findOrganizationsView().vm.$emit('next', endCursor);
+ await waitForPromises();
+
+ expect(successHandler).toHaveBeenCalledWith({
+ first: DEFAULT_PER_PAGE,
+ after: endCursor,
+ last: null,
+ before: null,
+ });
+ });
+ });
+
+ describe('when view component emits `prev` event', () => {
+ const startCursor = 'mockStartCursor';
+
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('calls GraphQL query with correct pageInfo variables', async () => {
+ findOrganizationsView().vm.$emit('prev', startCursor);
+ await waitForPromises();
- it(`does ${error ? '' : 'not '}render an error message`, () => {
- return error
- ? expect(createAlert).toHaveBeenCalled()
- : expect(createAlert).not.toHaveBeenCalled();
+ expect(successHandler).toHaveBeenCalledWith({
+ first: null,
+ after: null,
+ last: DEFAULT_PER_PAGE,
+ before: startCursor,
+ });
});
});
});
diff --git a/spec/frontend/organizations/index/components/organizations_list_spec.js b/spec/frontend/organizations/index/components/organizations_list_spec.js
index 0b59c212314..7d904ee802f 100644
--- a/spec/frontend/organizations/index/components/organizations_list_spec.js
+++ b/spec/frontend/organizations/index/components/organizations_list_spec.js
@@ -1,28 +1,84 @@
+import { GlKeysetPagination } from '@gitlab/ui';
+import { omit } from 'lodash';
import { shallowMount } from '@vue/test-utils';
import OrganizationsList from '~/organizations/index/components/organizations_list.vue';
import OrganizationsListItem from '~/organizations/index/components/organizations_list_item.vue';
-import { organizations } from '~/organizations/mock_data';
+import { organizations as nodes, pageInfo, pageInfoOnePage } from '~/organizations/mock_data';
describe('OrganizationsList', () => {
let wrapper;
- const createComponent = () => {
+ const createComponent = ({ propsData = {} } = {}) => {
wrapper = shallowMount(OrganizationsList, {
propsData: {
- organizations,
+ organizations: {
+ nodes,
+ pageInfo,
+ },
+ ...propsData,
},
});
};
const findAllOrganizationsListItem = () => wrapper.findAllComponents(OrganizationsListItem);
+ const findPagination = () => wrapper.findComponent(GlKeysetPagination);
describe('template', () => {
- beforeEach(() => {
+ it('renders a list item for each organization', () => {
createComponent();
+
+ expect(findAllOrganizationsListItem()).toHaveLength(nodes.length);
});
- it('renders a list item for each organization', () => {
- expect(findAllOrganizationsListItem()).toHaveLength(organizations.length);
+ describe('when there is one page of organizations', () => {
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ organizations: {
+ nodes,
+ pageInfo: pageInfoOnePage,
+ },
+ },
+ });
+ });
+
+ it('does not render pagination', () => {
+ expect(findPagination().exists()).toBe(false);
+ });
+ });
+
+ describe('when there are multiple pages of organizations', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders pagination', () => {
+ expect(findPagination().props()).toMatchObject(omit(pageInfo, '__typename'));
+ });
+
+ describe('when `GlKeysetPagination` emits `next` event', () => {
+ const endCursor = 'mockEndCursor';
+
+ beforeEach(() => {
+ findPagination().vm.$emit('next', endCursor);
+ });
+
+ it('emits `next` event', () => {
+ expect(wrapper.emitted('next')).toEqual([[endCursor]]);
+ });
+ });
+
+ describe('when `GlKeysetPagination` emits `prev` event', () => {
+ const startCursor = 'mockStartCursor';
+
+ beforeEach(() => {
+ findPagination().vm.$emit('prev', startCursor);
+ });
+
+ it('emits `prev` event', () => {
+ expect(wrapper.emitted('prev')).toEqual([[startCursor]]);
+ });
+ });
});
});
});
diff --git a/spec/frontend/organizations/index/components/organizations_view_spec.js b/spec/frontend/organizations/index/components/organizations_view_spec.js
index 85a1c11a2b1..fe167a1418f 100644
--- a/spec/frontend/organizations/index/components/organizations_view_spec.js
+++ b/spec/frontend/organizations/index/components/organizations_view_spec.js
@@ -31,7 +31,7 @@ describe('OrganizationsView', () => {
${'when not loading and has no organizations'} | ${false} | ${[]} | ${MOCK_ORG_EMPTY_STATE_SVG} | ${MOCK_NEW_ORG_URL}
`('$description', ({ loading, orgsData, emptyStateSvg, emptyStateUrl }) => {
beforeEach(() => {
- createComponent({ loading, organizations: orgsData });
+ createComponent({ loading, organizations: { nodes: orgsData, pageInfo: {} } });
});
it(`does ${loading ? '' : 'not '}render loading icon`, () => {
@@ -54,4 +54,30 @@ describe('OrganizationsView', () => {
).toBe(emptyStateUrl);
});
});
+
+ describe('when `OrganizationsList` emits `next` event', () => {
+ const endCursor = 'mockEndCursor';
+
+ beforeEach(() => {
+ createComponent({ loading: false, organizations: { nodes: organizations, pageInfo: {} } });
+ findOrganizationsList().vm.$emit('next', endCursor);
+ });
+
+ it('emits `next` event', () => {
+ expect(wrapper.emitted('next')).toEqual([[endCursor]]);
+ });
+ });
+
+ describe('when `OrganizationsList` emits `prev` event', () => {
+ const startCursor = 'mockStartCursor';
+
+ beforeEach(() => {
+ createComponent({ loading: false, organizations: { nodes: organizations, pageInfo: {} } });
+ findOrganizationsList().vm.$emit('prev', startCursor);
+ });
+
+ it('emits `prev` event', () => {
+ expect(wrapper.emitted('prev')).toEqual([[startCursor]]);
+ });
+ });
});
diff --git a/spec/frontend/organizations/settings/general/components/advanced_settings_spec.js b/spec/frontend/organizations/settings/general/components/advanced_settings_spec.js
new file mode 100644
index 00000000000..34793200b0d
--- /dev/null
+++ b/spec/frontend/organizations/settings/general/components/advanced_settings_spec.js
@@ -0,0 +1,25 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import AdvancedSettings from '~/organizations/settings/general/components/advanced_settings.vue';
+import ChangeUrl from '~/organizations/settings/general/components/change_url.vue';
+import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
+
+describe('AdvancedSettings', () => {
+ let wrapper;
+ const createComponent = () => {
+ wrapper = shallowMountExtended(AdvancedSettings);
+ };
+
+ const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders settings block', () => {
+ expect(findSettingsBlock().exists()).toBe(true);
+ });
+
+ it('renders `ChangeUrl` component', () => {
+ expect(findSettingsBlock().findComponent(ChangeUrl).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/organizations/settings/general/components/app_spec.js b/spec/frontend/organizations/settings/general/components/app_spec.js
index 6d75f8a9949..e954b927715 100644
--- a/spec/frontend/organizations/settings/general/components/app_spec.js
+++ b/spec/frontend/organizations/settings/general/components/app_spec.js
@@ -1,8 +1,9 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import OrganizationSettings from '~/organizations/settings/general/components/organization_settings.vue';
+import AdvancedSettings from '~/organizations/settings/general/components/advanced_settings.vue';
import App from '~/organizations/settings/general/components/app.vue';
-describe('OrganizationSettings', () => {
+describe('OrganizationSettingsGeneralApp', () => {
let wrapper;
const createComponent = () => {
@@ -16,4 +17,8 @@ describe('OrganizationSettings', () => {
it('renders `Organization settings` section', () => {
expect(wrapper.findComponent(OrganizationSettings).exists()).toBe(true);
});
+
+ it('renders `Advanced` section', () => {
+ expect(wrapper.findComponent(AdvancedSettings).exists()).toBe(true);
+ });
});
diff --git a/spec/frontend/organizations/settings/general/components/change_url_spec.js b/spec/frontend/organizations/settings/general/components/change_url_spec.js
new file mode 100644
index 00000000000..a4e3db0557c
--- /dev/null
+++ b/spec/frontend/organizations/settings/general/components/change_url_spec.js
@@ -0,0 +1,191 @@
+import { GlButton, GlForm } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import Vue, { nextTick } from 'vue';
+
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ChangeUrl from '~/organizations/settings/general/components/change_url.vue';
+import organizationUpdateMutation from '~/organizations/settings/general/graphql/mutations/organization_update.mutation.graphql';
+import {
+ organizationUpdateResponse,
+ organizationUpdateResponseWithErrors,
+} from '~/organizations/mock_data';
+import { createAlert } from '~/alert';
+import { visitUrlWithAlerts } from '~/lib/utils/url_utility';
+import FormErrorsAlert from '~/vue_shared/components/form/errors_alert.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+jest.mock('~/alert');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrlWithAlerts: jest.fn(),
+}));
+
+Vue.use(VueApollo);
+
+describe('ChangeUrl', () => {
+ let wrapper;
+ let mockApollo;
+
+ const defaultProvide = {
+ organization: {
+ id: 1,
+ name: 'GitLab',
+ path: 'foo-bar',
+ },
+ organizationsPath: '/-/organizations',
+ rootUrl: 'http://127.0.0.1:3000/',
+ };
+
+ const successfulResponseHandler = jest.fn().mockResolvedValue(organizationUpdateResponse);
+
+ const createComponent = ({
+ handlers = [[organizationUpdateMutation, successfulResponseHandler]],
+ } = {}) => {
+ mockApollo = createMockApollo(handlers);
+
+ wrapper = mountExtended(ChangeUrl, {
+ attachTo: document.body,
+ provide: defaultProvide,
+ apolloProvider: mockApollo,
+ });
+ };
+
+ const findSubmitButton = () => wrapper.findComponent(GlButton);
+ const findOrganizationUrlField = () => wrapper.findByLabelText('Organization URL');
+ const submitForm = async () => {
+ await wrapper.findComponent(GlForm).trigger('submit');
+ await nextTick();
+ };
+
+ afterEach(() => {
+ mockApollo = null;
+ });
+
+ it('renders `Organization URL` field', () => {
+ createComponent();
+
+ expect(findOrganizationUrlField().exists()).toBe(true);
+ });
+
+ it('disables submit button until `Organization URL` field is changed', async () => {
+ createComponent();
+
+ expect(findSubmitButton().props('disabled')).toBe(true);
+
+ await findOrganizationUrlField().setValue('foo-bar-baz');
+
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ });
+
+ describe('when form is submitted', () => {
+ it('requires `Organization URL` field', async () => {
+ createComponent();
+
+ await findOrganizationUrlField().setValue('');
+ await submitForm();
+
+ expect(wrapper.findByText('Organization URL is required.').exists()).toBe(true);
+ });
+
+ it('requires `Organization URL` field to be a minimum of two characters', async () => {
+ createComponent();
+
+ await findOrganizationUrlField().setValue('f');
+ await submitForm();
+
+ expect(
+ wrapper.findByText('Organization URL is too short (minimum is 2 characters).').exists(),
+ ).toBe(true);
+ });
+
+ describe('when API is loading', () => {
+ beforeEach(async () => {
+ createComponent({
+ handlers: [
+ [organizationUpdateMutation, jest.fn().mockReturnValueOnce(new Promise(() => {}))],
+ ],
+ });
+
+ await findOrganizationUrlField().setValue('foo-bar-baz');
+ await submitForm();
+ });
+
+ it('shows submit button as loading', () => {
+ expect(findSubmitButton().props('loading')).toBe(true);
+ });
+ });
+
+ describe('when API request is successful', () => {
+ beforeEach(async () => {
+ createComponent();
+ await findOrganizationUrlField().setValue('foo-bar-baz');
+ await submitForm();
+ await waitForPromises();
+ });
+
+ it('calls mutation with correct variables and redirects user to new organization settings page with success alert', () => {
+ expect(successfulResponseHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/Organizations::Organization/1',
+ path: 'foo-bar-baz',
+ },
+ });
+ expect(visitUrlWithAlerts).toHaveBeenCalledWith(
+ `${organizationUpdateResponse.data.organizationUpdate.organization.webUrl}/settings/general`,
+ [
+ {
+ id: 'organization-url-successfully-changed',
+ message: 'Organization URL successfully changed.',
+ variant: 'info',
+ },
+ ],
+ );
+ });
+ });
+
+ describe('when API request is not successful', () => {
+ describe('when there is a network error', () => {
+ const error = new Error();
+
+ beforeEach(async () => {
+ createComponent({
+ handlers: [[organizationUpdateMutation, jest.fn().mockRejectedValue(error)]],
+ });
+ await findOrganizationUrlField().setValue('foo-bar-baz');
+ await submitForm();
+ await waitForPromises();
+ });
+
+ it('displays error alert', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'An error occurred changing your organization URL. Please try again.',
+ error,
+ captureError: true,
+ });
+ });
+ });
+
+ describe('when there are GraphQL errors', () => {
+ beforeEach(async () => {
+ createComponent({
+ handlers: [
+ [
+ organizationUpdateMutation,
+ jest.fn().mockResolvedValue(organizationUpdateResponseWithErrors),
+ ],
+ ],
+ });
+ await submitForm();
+ await waitForPromises();
+ });
+
+ it('displays form errors alert', () => {
+ expect(wrapper.findComponent(FormErrorsAlert).props('errors')).toEqual(
+ organizationUpdateResponseWithErrors.data.organizationUpdate.errors,
+ );
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/organizations/settings/general/components/organization_settings_spec.js b/spec/frontend/organizations/settings/general/components/organization_settings_spec.js
index 7645b41e3bd..d1c637331a8 100644
--- a/spec/frontend/organizations/settings/general/components/organization_settings_spec.js
+++ b/spec/frontend/organizations/settings/general/components/organization_settings_spec.js
@@ -6,14 +6,26 @@ import OrganizationSettings from '~/organizations/settings/general/components/or
import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
import NewEditForm from '~/organizations/shared/components/new_edit_form.vue';
import { FORM_FIELD_NAME, FORM_FIELD_ID } from '~/organizations/shared/constants';
-import resolvers from '~/organizations/shared/graphql/resolvers';
-import { createAlert, VARIANT_INFO } from '~/alert';
+import organizationUpdateMutation from '~/organizations/settings/general/graphql/mutations/organization_update.mutation.graphql';
+import {
+ organizationUpdateResponse,
+ organizationUpdateResponseWithErrors,
+} from '~/organizations/mock_data';
+import { createAlert } from '~/alert';
+import { visitUrlWithAlerts } from '~/lib/utils/url_utility';
+import FormErrorsAlert from '~/vue_shared/components/form/errors_alert.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
Vue.use(VueApollo);
-jest.useFakeTimers();
jest.mock('~/alert');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrlWithAlerts: jest.fn(),
+}));
+
+useMockLocationHelper();
describe('OrganizationSettings', () => {
let wrapper;
@@ -26,8 +38,12 @@ describe('OrganizationSettings', () => {
},
};
- const createComponent = ({ mockResolvers = resolvers } = {}) => {
- mockApollo = createMockApollo([], mockResolvers);
+ const successfulResponseHandler = jest.fn().mockResolvedValue(organizationUpdateResponse);
+
+ const createComponent = ({
+ handlers = [[organizationUpdateMutation, successfulResponseHandler]],
+ } = {}) => {
+ mockApollo = createMockApollo(handlers);
wrapper = shallowMountExtended(OrganizationSettings, {
provide: defaultProvide,
@@ -66,13 +82,11 @@ describe('OrganizationSettings', () => {
describe('when form is submitted', () => {
describe('when API is loading', () => {
beforeEach(async () => {
- const mockResolvers = {
- Mutation: {
- updateOrganization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
- },
- };
-
- createComponent({ mockResolvers });
+ createComponent({
+ handlers: [
+ [organizationUpdateMutation, jest.fn().mockReturnValueOnce(new Promise(() => {}))],
+ ],
+ });
await submitForm();
});
@@ -86,39 +100,65 @@ describe('OrganizationSettings', () => {
beforeEach(async () => {
createComponent();
await submitForm();
- jest.runAllTimers();
await waitForPromises();
});
- it('displays info alert', () => {
- expect(createAlert).toHaveBeenCalledWith({
- message: 'Organization was successfully updated.',
- variant: VARIANT_INFO,
+ it('calls mutation with correct variables and displays info alert', () => {
+ expect(successfulResponseHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/Organizations::Organization/1',
+ name: 'Foo bar',
+ },
});
+ expect(visitUrlWithAlerts).toHaveBeenCalledWith(window.location.href, [
+ {
+ id: 'organization-successfully-updated',
+ message: 'Organization was successfully updated.',
+ variant: 'info',
+ },
+ ]);
});
});
describe('when API request is not successful', () => {
- const error = new Error();
-
- beforeEach(async () => {
- const mockResolvers = {
- Mutation: {
- updateOrganization: jest.fn().mockRejectedValueOnce(error),
- },
- };
+ describe('when there is a network error', () => {
+ const error = new Error();
+
+ beforeEach(async () => {
+ createComponent({
+ handlers: [[organizationUpdateMutation, jest.fn().mockRejectedValue(error)]],
+ });
+ await submitForm();
+ await waitForPromises();
+ });
- createComponent({ mockResolvers });
- await submitForm();
- jest.runAllTimers();
- await waitForPromises();
+ it('displays error alert', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'An error occurred updating your organization. Please try again.',
+ error,
+ captureError: true,
+ });
+ });
});
- it('displays error alert', () => {
- expect(createAlert).toHaveBeenCalledWith({
- message: 'An error occurred updating your organization. Please try again.',
- error,
- captureError: true,
+ describe('when there are GraphQL errors', () => {
+ beforeEach(async () => {
+ createComponent({
+ handlers: [
+ [
+ organizationUpdateMutation,
+ jest.fn().mockResolvedValue(organizationUpdateResponseWithErrors),
+ ],
+ ],
+ });
+ await submitForm();
+ await waitForPromises();
+ });
+
+ it('displays form errors alert', () => {
+ expect(wrapper.findComponent(FormErrorsAlert).props('errors')).toEqual(
+ organizationUpdateResponseWithErrors.data.organizationUpdate.errors,
+ );
});
});
});
diff --git a/spec/frontend/organizations/shared/components/new_edit_form_spec.js b/spec/frontend/organizations/shared/components/new_edit_form_spec.js
index 93f022a3259..1fcfc20bf1a 100644
--- a/spec/frontend/organizations/shared/components/new_edit_form_spec.js
+++ b/spec/frontend/organizations/shared/components/new_edit_form_spec.js
@@ -1,6 +1,8 @@
-import { GlButton, GlInputGroupText, GlTruncate } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
+import { nextTick } from 'vue';
import NewEditForm from '~/organizations/shared/components/new_edit_form.vue';
+import OrganizationUrlField from '~/organizations/shared/components/organization_url_field.vue';
import { FORM_FIELD_NAME, FORM_FIELD_ID, FORM_FIELD_PATH } from '~/organizations/shared/constants';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@@ -29,7 +31,12 @@ describe('NewEditForm', () => {
const findNameField = () => wrapper.findByLabelText('Organization name');
const findIdField = () => wrapper.findByLabelText('Organization ID');
- const findUrlField = () => wrapper.findByLabelText('Organization URL');
+ const findUrlField = () => wrapper.findComponent(OrganizationUrlField);
+
+ const setUrlFieldValue = async (value) => {
+ findUrlField().vm.$emit('input', value);
+ await nextTick();
+ };
const submitForm = async () => {
await wrapper.findByRole('button', { name: 'Create organization' }).trigger('click');
};
@@ -43,20 +50,17 @@ describe('NewEditForm', () => {
it('renders `Organization URL` field', () => {
createComponent();
- expect(wrapper.findComponent(GlInputGroupText).findComponent(GlTruncate).props('text')).toBe(
- 'http://127.0.0.1:3000/-/organizations/',
- );
expect(findUrlField().exists()).toBe(true);
});
it('requires `Organization URL` field to be a minimum of two characters', async () => {
createComponent();
- await findUrlField().setValue('f');
+ await setUrlFieldValue('f');
await submitForm();
expect(
- wrapper.findByText('Organization URL must be a minimum of two characters.').exists(),
+ wrapper.findByText('Organization URL is too short (minimum is 2 characters).').exists(),
).toBe(true);
});
@@ -89,7 +93,7 @@ describe('NewEditForm', () => {
it('sets initial values for fields', () => {
expect(findNameField().element.value).toBe('Foo bar');
expect(findIdField().element.value).toBe('1');
- expect(findUrlField().element.value).toBe('foo-bar');
+ expect(findUrlField().props('value')).toBe('foo-bar');
});
});
@@ -116,7 +120,7 @@ describe('NewEditForm', () => {
createComponent();
await findNameField().setValue('Foo bar');
- await findUrlField().setValue('foo-bar');
+ await setUrlFieldValue('foo-bar');
await submitForm();
});
@@ -134,7 +138,7 @@ describe('NewEditForm', () => {
});
it('sets `Organization URL` when typing in `Organization name`', () => {
- expect(findUrlField().element.value).toBe('foo-bar');
+ expect(findUrlField().props('value')).toBe('foo-bar');
});
});
@@ -142,13 +146,13 @@ describe('NewEditForm', () => {
beforeEach(async () => {
createComponent();
- await findUrlField().setValue('foo-bar-baz');
+ await setUrlFieldValue('foo-bar-baz');
await findNameField().setValue('Foo bar');
await submitForm();
});
it('does not modify `Organization URL` when typing in `Organization name`', () => {
- expect(findUrlField().element.value).toBe('foo-bar-baz');
+ expect(findUrlField().props('value')).toBe('foo-bar-baz');
});
});
diff --git a/spec/frontend/organizations/shared/components/organization_url_field_spec.js b/spec/frontend/organizations/shared/components/organization_url_field_spec.js
new file mode 100644
index 00000000000..d854134e596
--- /dev/null
+++ b/spec/frontend/organizations/shared/components/organization_url_field_spec.js
@@ -0,0 +1,66 @@
+import { GlFormInputGroup, GlInputGroupText, GlTruncate, GlFormInput } from '@gitlab/ui';
+
+import OrganizedUrlField from '~/organizations/shared/components/organization_url_field.vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+
+describe('OrganizationUrlField', () => {
+ let wrapper;
+
+ const defaultProvide = {
+ organizationsPath: '/-/organizations',
+ rootUrl: 'http://127.0.0.1:3000/',
+ };
+
+ const defaultPropsData = {
+ id: 'organization-url',
+ value: 'foo-bar',
+ validation: {
+ invalidFeedback: 'Invalid',
+ state: false,
+ },
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = mountExtended(OrganizedUrlField, {
+ attachTo: document.body,
+ provide: defaultProvide,
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
+ });
+ };
+
+ const findInputGroup = () => wrapper.findComponent(GlFormInputGroup);
+ const findInput = () => findInputGroup().findComponent(GlFormInput);
+
+ it('renders organization url field with correct props', () => {
+ createComponent();
+
+ expect(
+ findInputGroup().findComponent(GlInputGroupText).findComponent(GlTruncate).props('text'),
+ ).toBe('http://127.0.0.1:3000/-/organizations/');
+ expect(findInput().attributes('id')).toBe(defaultPropsData.id);
+ expect(findInput().vm.$attrs).toMatchObject({
+ value: defaultPropsData.value,
+ invalidFeedback: defaultPropsData.validation.invalidFeedback,
+ state: defaultPropsData.validation.state,
+ });
+ });
+
+ it('emits `input` event', () => {
+ createComponent();
+
+ findInput().vm.$emit('input', 'foo');
+
+ expect(wrapper.emitted('input')).toEqual([['foo']]);
+ });
+
+ it('emits `blur` event', () => {
+ createComponent();
+
+ findInput().vm.$emit('blur', true);
+
+ expect(wrapper.emitted('blur')).toEqual([[true]]);
+ });
+});
diff --git a/spec/frontend/organizations/users/components/app_spec.js b/spec/frontend/organizations/users/components/app_spec.js
index b30fd984099..30380bcf6a5 100644
--- a/spec/frontend/organizations/users/components/app_spec.js
+++ b/spec/frontend/organizations/users/components/app_spec.js
@@ -4,9 +4,16 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
+import { ORGANIZATION_USERS_PER_PAGE } from '~/organizations/constants';
import organizationUsersQuery from '~/organizations/users/graphql/organization_users.query.graphql';
import OrganizationsUsersApp from '~/organizations/users/components/app.vue';
-import { MOCK_ORGANIZATION_GID, MOCK_USERS } from '../mock_data';
+import OrganizationsUsersView from '~/organizations/users/components/users_view.vue';
+import {
+ MOCK_ORGANIZATION_GID,
+ MOCK_USERS,
+ MOCK_USERS_FORMATTED,
+ MOCK_PAGE_INFO,
+} from '../mock_data';
jest.mock('~/alert');
@@ -15,10 +22,11 @@ Vue.use(VueApollo);
const mockError = new Error();
const loadingResolver = jest.fn().mockReturnValue(new Promise(() => {}));
-const successfulResolver = (nodes) =>
- jest.fn().mockResolvedValue({
- data: { organization: { id: 1, organizationUsers: { nodes } } },
+const successfulResolver = (nodes, pageInfo = {}) => {
+ return jest.fn().mockResolvedValue({
+ data: { organization: { id: 1, organizationUsers: { nodes, pageInfo } } },
});
+};
const errorResolver = jest.fn().mockRejectedValueOnce(mockError);
describe('OrganizationsUsersApp', () => {
@@ -40,31 +48,31 @@ describe('OrganizationsUsersApp', () => {
mockApollo = null;
});
- const findOrganizationUsersLoading = () => wrapper.findByText('Loading');
- const findOrganizationUsers = () => wrapper.findByTestId('organization-users');
+ const findOrganizationUsersView = () => wrapper.findComponent(OrganizationsUsersView);
describe.each`
- description | mockResolver | loading | userData | error
- ${'when API call is loading'} | ${loadingResolver} | ${true} | ${[]} | ${false}
- ${'when API returns successful with results'} | ${successfulResolver(MOCK_USERS)} | ${false} | ${MOCK_USERS} | ${false}
- ${'when API returns successful without results'} | ${successfulResolver([])} | ${false} | ${[]} | ${false}
- ${'when API returns error'} | ${errorResolver} | ${false} | ${[]} | ${true}
- `('$description', ({ mockResolver, loading, userData, error }) => {
+ description | mockResolver | loading | userData | pageInfo | error
+ ${'when API call is loading'} | ${loadingResolver} | ${true} | ${[]} | ${{}} | ${false}
+ ${'when API returns successful with one page of results'} | ${successfulResolver(MOCK_USERS)} | ${false} | ${MOCK_USERS_FORMATTED} | ${{}} | ${false}
+ ${'when API returns successful with multiple pages of results'} | ${successfulResolver(MOCK_USERS, MOCK_PAGE_INFO)} | ${false} | ${MOCK_USERS_FORMATTED} | ${MOCK_PAGE_INFO} | ${false}
+ ${'when API returns successful without results'} | ${successfulResolver([])} | ${false} | ${[]} | ${{}} | ${false}
+ ${'when API returns error'} | ${errorResolver} | ${false} | ${[]} | ${{}} | ${true}
+ `('$description', ({ mockResolver, loading, userData, pageInfo, error }) => {
beforeEach(async () => {
createComponent(mockResolver);
await waitForPromises();
});
- it(`does ${
- loading ? '' : 'not '
- }render the organization users view with loading placeholder`, () => {
- expect(findOrganizationUsersLoading().exists()).toBe(loading);
+ it(`renders OrganizationUsersView with loading prop set to ${loading}`, () => {
+ expect(findOrganizationUsersView().props('loading')).toBe(loading);
});
- it(`renders the organization users view with ${
- userData.length ? 'correct' : 'empty'
- } users array raw data`, () => {
- expect(JSON.parse(findOrganizationUsers().text())).toStrictEqual(userData);
+ it('renders OrganizationUsersView with correct users prop', () => {
+ expect(findOrganizationUsersView().props('users')).toStrictEqual(userData);
+ });
+
+ it('renders OrganizationUsersView with correct pageInfo prop', () => {
+ expect(findOrganizationUsersView().props('pageInfo')).toStrictEqual(pageInfo);
});
it(`does ${error ? '' : 'not '}render an error message`, () => {
@@ -78,4 +86,40 @@ describe('OrganizationsUsersApp', () => {
: expect(createAlert).not.toHaveBeenCalled();
});
});
+
+ describe('Pagination', () => {
+ const mockResolver = successfulResolver(MOCK_USERS, MOCK_PAGE_INFO);
+
+ beforeEach(async () => {
+ createComponent(mockResolver);
+ await waitForPromises();
+ mockResolver.mockClear();
+ });
+
+ it('handleNextPage calls organizationUsersQuery with correct pagination data', async () => {
+ findOrganizationUsersView().vm.$emit('next');
+ await waitForPromises();
+
+ expect(mockResolver).toHaveBeenCalledWith({
+ id: MOCK_ORGANIZATION_GID,
+ before: '',
+ after: MOCK_PAGE_INFO.endCursor,
+ first: ORGANIZATION_USERS_PER_PAGE,
+ last: null,
+ });
+ });
+
+ it('handlePrevPage calls organizationUsersQuery with correct pagination data', async () => {
+ findOrganizationUsersView().vm.$emit('prev');
+ await waitForPromises();
+
+ expect(mockResolver).toHaveBeenCalledWith({
+ id: MOCK_ORGANIZATION_GID,
+ before: MOCK_PAGE_INFO.startCursor,
+ after: '',
+ first: ORGANIZATION_USERS_PER_PAGE,
+ last: null,
+ });
+ });
+ });
});
diff --git a/spec/frontend/organizations/users/components/users_view_spec.js b/spec/frontend/organizations/users/components/users_view_spec.js
new file mode 100644
index 00000000000..d665c60d425
--- /dev/null
+++ b/spec/frontend/organizations/users/components/users_view_spec.js
@@ -0,0 +1,68 @@
+import { GlLoadingIcon, GlKeysetPagination } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import UsersView from '~/organizations/users/components/users_view.vue';
+import UsersTable from '~/vue_shared/components/users_table/users_table.vue';
+import { MOCK_PATHS, MOCK_USERS_FORMATTED, MOCK_PAGE_INFO } from '../mock_data';
+
+describe('UsersView', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(UsersView, {
+ propsData: {
+ loading: false,
+ users: MOCK_USERS_FORMATTED,
+ pageInfo: MOCK_PAGE_INFO,
+ ...props,
+ },
+ provide: {
+ paths: MOCK_PATHS,
+ },
+ });
+ };
+
+ const findGlLoading = () => wrapper.findComponent(GlLoadingIcon);
+ const findUsersTable = () => wrapper.findComponent(UsersTable);
+ const findGlKeysetPagination = () => wrapper.findComponent(GlKeysetPagination);
+
+ describe.each`
+ description | loading | usersData
+ ${'when loading'} | ${true} | ${[]}
+ ${'when not loading and has users'} | ${false} | ${MOCK_USERS_FORMATTED}
+ ${'when not loading and has no users'} | ${false} | ${[]}
+ `('$description', ({ loading, usersData }) => {
+ beforeEach(() => {
+ createComponent({ loading, users: usersData });
+ });
+
+ it(`does ${loading ? '' : 'not '}render loading icon`, () => {
+ expect(findGlLoading().exists()).toBe(loading);
+ });
+
+ it(`does ${!loading ? '' : 'not '}render users table`, () => {
+ expect(findUsersTable().exists()).toBe(!loading);
+ });
+
+ it(`does ${!loading ? '' : 'not '}render pagination`, () => {
+ expect(findGlKeysetPagination().exists()).toBe(Boolean(!loading));
+ });
+ });
+
+ describe('Pagination', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('@next event forwards up to the parent component', () => {
+ findGlKeysetPagination().vm.$emit('next');
+
+ expect(wrapper.emitted('next')).toHaveLength(1);
+ });
+
+ it('@prev event forwards up to the parent component', () => {
+ findGlKeysetPagination().vm.$emit('prev');
+
+ expect(wrapper.emitted('prev')).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/organizations/users/mock_data.js b/spec/frontend/organizations/users/mock_data.js
index 4f159c70c2c..16b3ec3bbcb 100644
--- a/spec/frontend/organizations/users/mock_data.js
+++ b/spec/frontend/organizations/users/mock_data.js
@@ -1,15 +1,31 @@
+const createUser = (id) => {
+ return {
+ id: `gid://gitlab/User/${id}`,
+ username: `test_user_${id}`,
+ avatarUrl: `/path/test_user_${id}`,
+ name: `Test User ${id}`,
+ publicEmail: `test_user_${id}@gitlab.com`,
+ createdAt: Date.now(),
+ lastActivityOn: Date.now(),
+ };
+};
+
export const MOCK_ORGANIZATION_GID = 'gid://gitlab/Organizations::Organization/1';
+export const MOCK_PATHS = {
+ adminUser: '/admin/users/:id',
+};
+
export const MOCK_USERS = [
{
badges: [],
id: 'gid://gitlab/Organizations::OrganizationUser/3',
- user: { id: 'gid://gitlab/User/3' },
+ user: createUser(3),
},
{
badges: [],
id: 'gid://gitlab/Organizations::OrganizationUser/2',
- user: { id: 'gid://gitlab/User/2' },
+ user: createUser(2),
},
{
badges: [
@@ -17,6 +33,18 @@ export const MOCK_USERS = [
{ text: "It's you!", variant: 'muted' },
],
id: 'gid://gitlab/Organizations::OrganizationUser/1',
- user: { id: 'gid://gitlab/User/1' },
+ user: createUser(1),
},
];
+
+export const MOCK_USERS_FORMATTED = MOCK_USERS.map(({ badges, user }) => {
+ return { ...user, badges, email: user.publicEmail };
+});
+
+export const MOCK_PAGE_INFO = {
+ startCursor: 'aaaa',
+ endCursor: 'bbbb',
+ hasNextPage: true,
+ hasPreviousPage: true,
+ __typename: 'PageInfo',
+};
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
index 09e2c35d449..9f3431ef5a5 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
@@ -342,6 +342,13 @@ describe('tags list row', () => {
expect(findDetailsRows().length).toBe(3);
});
+ it('has 2 details rows when revision is empty', async () => {
+ mountComponent({ tag: { ...tag, revision: '' } });
+ await nextTick();
+
+ expect(findDetailsRows().length).toBe(2);
+ });
+
describe.each`
name | finderFunction | text | icon | clipboard
${'published date detail'} | ${findPublishedDateDetail} | ${'Published to the gitlab-org/gitlab-test/rails-12009 image repository at 13:29:38 UTC on 2020-11-03'} | ${'clock'} | ${false}
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap
index 8e757c136ec..a544a679ff4 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap
@@ -2,9 +2,9 @@
exports[`FileSha renders 1`] = `
<div
- class="gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-gray-100 gl-display-flex gl-font-monospace gl-font-sm gl-py-2 gl-word-break-all"
+ class="gl-align-items-top gl-border-b-1 gl-border-b-solid gl-border-gray-100 gl-display-flex gl-font-monospace gl-font-sm gl-py-2 gl-word-break-all"
>
- <span>
+ <div>
<div
class="gl-px-4"
>
@@ -23,6 +23,6 @@ exports[`FileSha renders 1`] = `
variant="default"
/>
</div>
- </span>
+ </div>
</div>
`;
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
index edba81da1f5..75cc7e5b78d 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
@@ -9,10 +9,10 @@ exports[`packages_list_row renders 1`] = `
class="gl-align-items-center gl-display-flex gl-py-3"
>
<div
- class="gl-align-items-stretch gl-display-flex gl-flex-grow-1 gl-justify-content-space-between gl-xs-flex-direction-column"
+ class="gl-align-items-stretch gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-justify-content-space-between gl-sm-flex-direction-row"
>
<div
- class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-min-w-0 gl-xs-mb-3"
+ class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-mb-3 gl-min-w-0 gl-sm-mb-0"
>
<div
class="gl-align-items-center gl-display-flex gl-font-weight-bold gl-min-h-6 gl-min-w-0 gl-text-body"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap
index 8e757c136ec..a544a679ff4 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap
@@ -2,9 +2,9 @@
exports[`FileSha renders 1`] = `
<div
- class="gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-gray-100 gl-display-flex gl-font-monospace gl-font-sm gl-py-2 gl-word-break-all"
+ class="gl-align-items-top gl-border-b-1 gl-border-b-solid gl-border-gray-100 gl-display-flex gl-font-monospace gl-font-sm gl-py-2 gl-word-break-all"
>
- <span>
+ <div>
<div
class="gl-px-4"
>
@@ -23,6 +23,6 @@ exports[`FileSha renders 1`] = `
variant="default"
/>
</div>
- </span>
+ </div>
</div>
`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js
index 133941bbb2e..283c394a135 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js
@@ -13,7 +13,7 @@ import {
pypiMetadata,
packageMetadataQuery,
} from 'jest/packages_and_registries/package_registry/mock_data';
-import component from '~/packages_and_registries/package_registry/components/details/additional_metadata.vue';
+import AdditionalMetadata from '~/packages_and_registries/package_registry/components/details/additional_metadata.vue';
import {
FETCH_PACKAGE_METADATA_ERROR_MESSAGE,
PACKAGE_TYPE_NUGET,
@@ -52,12 +52,9 @@ describe('Package Additional metadata', () => {
const requestHandlers = [[getPackageMetadata, resolver]];
apolloProvider = createMockApollo(requestHandlers);
- wrapper = shallowMountExtended(component, {
+ wrapper = shallowMountExtended(AdditionalMetadata, {
apolloProvider,
propsData: { ...defaultProps, ...props },
- stubs: {
- component: { template: '<div data-testid="component-is"></div>' },
- },
});
};
@@ -91,7 +88,7 @@ describe('Package Additional metadata', () => {
const title = findTitle();
expect(title.exists()).toBe(true);
- expect(title.text()).toMatchInterpolatedText(component.i18n.componentTitle);
+ expect(title.text()).toMatchInterpolatedText(AdditionalMetadata.i18n.componentTitle);
});
it('does not render gl-alert', () => {
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js
index 67f5fbc9e80..39b525efdbc 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js
@@ -21,14 +21,20 @@ describe('Package Additional Metadata', () => {
};
const findPypiRequiredPython = () => wrapper.findByTestId('pypi-required-python');
+ const findPypiAuthorEmail = () => wrapper.findByTestId('pypi-author-email');
+ const findPypiSummary = () => wrapper.findByTestId('pypi-summary');
+ const findPypiKeywords = () => wrapper.findByTestId('pypi-keywords');
beforeEach(() => {
mountComponent();
});
it.each`
- name | finderFunction | text | icon
- ${'pypi-required-python'} | ${findPypiRequiredPython} | ${'Required Python: 1.0.0'} | ${'information-o'}
+ name | finderFunction | text | icon
+ ${'pypi-required-python'} | ${findPypiRequiredPython} | ${'Required Python: 1.0.0'} | ${'information-o'}
+ ${'pypi-author-email'} | ${findPypiAuthorEmail} | ${'Author email: "C. Schultz" <cschultz@example.com>'} | ${'mail'}
+ ${'pypi-summary'} | ${findPypiSummary} | ${'Summary: A module for collecting votes from beagles.'} | ${'doc-text'}
+ ${'pypi-keywords'} | ${findPypiKeywords} | ${'Keywords: dog,puppy,voting,election'} | ${'doc-text'}
`('$name element', ({ finderFunction, text, icon }) => {
const element = finderFunction();
expect(element.exists()).toBe(true);
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
index 40fcd290b33..cbf2184d879 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
@@ -17,10 +17,10 @@ exports[`packages_list_row renders 1`] = `
/>
</div>
<div
- class="gl-align-items-stretch gl-display-flex gl-flex-grow-1 gl-justify-content-space-between gl-xs-flex-direction-column"
+ class="gl-align-items-stretch gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-justify-content-space-between gl-sm-flex-direction-row"
>
<div
- class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-min-w-0 gl-xs-mb-3"
+ class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-mb-3 gl-min-w-0 gl-sm-mb-0"
>
<div
class="gl-align-items-center gl-display-flex gl-font-weight-bold gl-min-h-6 gl-min-w-0 gl-text-body"
@@ -82,7 +82,7 @@ exports[`packages_list_row renders 1`] = `
Published
<time
datetime="2020-05-17T14:23:32Z"
- title="May 17, 2020 2:23pm UTC"
+ title="May 17, 2020 at 2:23:32 PM GMT"
>
1 month ago
</time>
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
index f4e36f51c27..6a1c34df596 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
@@ -1,4 +1,5 @@
import { nextTick } from 'vue';
+import { GlFilteredSearchToken } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { sortableFields } from '~/packages_and_registries/package_registry/utils';
import component from '~/packages_and_registries/package_registry/components/list/package_search.vue';
@@ -7,7 +8,11 @@ import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import PersistedSearch from '~/packages_and_registries/shared/components/persisted_search.vue';
import { LIST_KEY_CREATED_AT } from '~/packages_and_registries/package_registry/constants';
-import { TOKEN_TYPE_TYPE } from '~/vue_shared/components/filtered_search_bar/constants';
+import {
+ OPERATORS_IS,
+ TOKEN_TYPE_TYPE,
+ TOKEN_TYPE_VERSION,
+} from '~/vue_shared/components/filtered_search_bar/constants';
describe('Package Search', () => {
let wrapper;
@@ -74,6 +79,13 @@ describe('Package Search', () => {
token: PackageTypeToken,
type: TOKEN_TYPE_TYPE,
icon: 'package',
+ operators: OPERATORS_IS,
+ }),
+ expect.objectContaining({
+ token: GlFilteredSearchToken,
+ type: TOKEN_TYPE_VERSION,
+ icon: 'doc-versions',
+ operators: OPERATORS_IS,
}),
]),
sortableFields: sortableFields(isGroupPage),
@@ -102,6 +114,7 @@ describe('Package Search', () => {
filters: {
packageName: '',
packageType: undefined,
+ packageVersion: '',
},
sort: payload.sort,
sorting: payload.sorting,
@@ -114,6 +127,7 @@ describe('Package Search', () => {
sort: 'CREATED_FOO',
filters: [
{ type: 'type', value: { data: 'Generic', operator: '=' }, id: 'token-3' },
+ { type: 'version', value: { data: '1.0.1', operator: '=' }, id: 'token-6' },
{ id: 'token-4', type: 'filtered-search-term', value: { data: 'gl' } },
{ id: 'token-5', type: 'filtered-search-term', value: { data: '' } },
],
@@ -133,6 +147,7 @@ describe('Package Search', () => {
filters: {
packageName: 'gl',
packageType: 'GENERIC',
+ packageVersion: '1.0.1',
},
sort: payload.sort,
sorting: payload.sorting,
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
index 6c03f91b73d..fdd64cbe6a5 100644
--- a/spec/frontend/packages_and_registries/package_registry/mock_data.js
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -183,7 +183,10 @@ export const composerMetadata = () => ({
export const pypiMetadata = () => ({
__typename: 'PypiMetadata',
id: 'pypi-1',
+ authorEmail: '"C. Schultz" <cschultz@example.com>',
+ keywords: 'dog,puppy,voting,election',
requiredPython: '1.0.0',
+ summary: 'A module for collecting votes from beagles.',
});
export const mavenMetadata = () => ({
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
index 0ce2b86b9a4..db86be3b8ee 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
@@ -44,7 +44,7 @@ describe('PackagesListApp', () => {
const searchPayload = {
sort: 'VERSION_DESC',
- filters: { packageName: 'foo', packageType: 'CONAN' },
+ filters: { packageName: 'foo', packageType: 'CONAN', packageVersion: '1.0.1' },
};
const findPackageTitle = () => wrapper.findComponent(PackageTitle);
@@ -304,7 +304,12 @@ describe('PackagesListApp', () => {
await waitForFirstRequest();
- findSearch().vm.$emit('update', searchPayload);
+ findSearch().vm.$emit('update', {
+ sort: 'VERSION_DESC',
+ filters: {
+ packageName: 'test',
+ },
+ });
return nextTick();
});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
index 12425909454..dfcabd14489 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
@@ -6,6 +6,7 @@ import * as commonUtils from '~/lib/utils/common_utils';
import component from '~/packages_and_registries/settings/project/components/registry_settings_app.vue';
import ContainerExpirationPolicy from '~/packages_and_registries/settings/project/components/container_expiration_policy.vue';
import PackagesCleanupPolicy from '~/packages_and_registries/settings/project/components/packages_cleanup_policy.vue';
+import DependencyProxyPackagesSettings from 'ee_component/packages_and_registries/settings/project/components/dependency_proxy_packages_settings.vue';
import {
SHOW_SETUP_SUCCESS_ALERT,
UPDATE_SETTINGS_SUCCESS_MESSAGE,
@@ -18,11 +19,16 @@ describe('Registry Settings app', () => {
const findContainerExpirationPolicy = () => wrapper.findComponent(ContainerExpirationPolicy);
const findPackagesCleanupPolicy = () => wrapper.findComponent(PackagesCleanupPolicy);
+ const findDependencyProxyPackagesSettings = () =>
+ wrapper.findComponent(DependencyProxyPackagesSettings);
const findAlert = () => wrapper.findComponent(GlAlert);
const defaultProvide = {
+ projectPath: 'path',
showContainerRegistrySettings: true,
showPackageRegistrySettings: true,
+ showDependencyProxySettings: false,
+ ...(IS_EE && { showDependencyProxySettings: true }),
};
const mountComponent = (provide = defaultProvide) => {
@@ -82,6 +88,7 @@ describe('Registry Settings app', () => {
'container cleanup policy $showContainerRegistrySettings and package cleanup policy is $showPackageRegistrySettings',
({ showContainerRegistrySettings, showPackageRegistrySettings }) => {
mountComponent({
+ ...defaultProvide,
showContainerRegistrySettings,
showPackageRegistrySettings,
});
@@ -90,5 +97,16 @@ describe('Registry Settings app', () => {
expect(findPackagesCleanupPolicy().exists()).toBe(showPackageRegistrySettings);
},
);
+
+ if (IS_EE) {
+ it.each([true, false])('when showDependencyProxySettings is %s', (value) => {
+ mountComponent({
+ ...defaultProvide,
+ showDependencyProxySettings: value,
+ });
+
+ expect(findDependencyProxyPackagesSettings().exists()).toBe(value);
+ });
+ }
});
});
diff --git a/spec/frontend/packages_and_registries/shared/utils_spec.js b/spec/frontend/packages_and_registries/shared/utils_spec.js
index 1dc6bb261de..4676544c324 100644
--- a/spec/frontend/packages_and_registries/shared/utils_spec.js
+++ b/spec/frontend/packages_and_registries/shared/utils_spec.js
@@ -41,19 +41,20 @@ describe('Packages And Registries shared utils', () => {
});
describe('extractFilterAndSorting', () => {
it.each`
- search | type | sort | orderBy | result
- ${['one']} | ${'myType'} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [{ type: 'type', value: { data: 'myType' } }, { type: FILTERED_SEARCH_TERM, value: { data: 'one' } }] }}
- ${['one']} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [{ type: FILTERED_SEARCH_TERM, value: { data: 'one' } }] }}
- ${[]} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [] }}
- ${null} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [] }}
- ${null} | ${null} | ${null} | ${'foo'} | ${{ sorting: { orderBy: 'foo' }, filters: [] }}
- ${null} | ${null} | ${null} | ${null} | ${{ sorting: {}, filters: [] }}
+ search | type | version | sort | orderBy | result
+ ${['one']} | ${'myType'} | ${'1.0.1'} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [{ type: 'type', value: { data: 'myType' } }, { type: 'version', value: { data: '1.0.1' } }, { type: FILTERED_SEARCH_TERM, value: { data: 'one' } }] }}
+ ${['one']} | ${null} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [{ type: FILTERED_SEARCH_TERM, value: { data: 'one' } }] }}
+ ${[]} | ${null} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [] }}
+ ${null} | ${null} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [] }}
+ ${null} | ${null} | ${null} | ${null} | ${'foo'} | ${{ sorting: { orderBy: 'foo' }, filters: [] }}
+ ${null} | ${null} | ${null} | ${null} | ${null} | ${{ sorting: {}, filters: [] }}
`(
'returns sorting and filters objects in the correct form',
- ({ search, type, sort, orderBy, result }) => {
+ ({ search, type, version, sort, orderBy, result }) => {
const queryObject = {
search,
type,
+ version,
sort,
orderBy,
};
diff --git a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
index be50858bc88..3db77469d6b 100644
--- a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
+++ b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
@@ -1,16 +1,23 @@
import { GlEmptyState, GlLoadingIcon, GlTableLite } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
import waitForPromises from 'helpers/wait_for_promises';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import { getParameterValues } from '~/lib/utils/url_utility';
+
+import BulkImportsHistoryApp from '~/pages/import/bulk_imports/history/components/bulk_imports_history_app.vue';
import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
-import BulkImportsHistoryApp from '~/pages/import/bulk_imports/history/components/bulk_imports_history_app.vue';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ getParameterValues: jest.fn().mockReturnValue([]),
+}));
describe('BulkImportsHistoryApp', () => {
- const API_URL = '/api/v4/bulk_imports/entities';
+ const BULK_IMPORTS_API_URL = '/api/v4/bulk_imports/entities';
const DEFAULT_HEADERS = {
'x-page': 1,
@@ -73,14 +80,14 @@ describe('BulkImportsHistoryApp', () => {
}
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
+ const findPaginationBar = () => wrapper.findComponent(PaginationBar);
beforeEach(() => {
gon.api_version = 'v4';
- });
- beforeEach(() => {
+ getParameterValues.mockReturnValue([]);
mock = new MockAdapter(axios);
- mock.onGet(API_URL).reply(HTTP_STATUS_OK, DUMMY_RESPONSE, DEFAULT_HEADERS);
+ mock.onGet(BULK_IMPORTS_API_URL).reply(HTTP_STATUS_OK, DUMMY_RESPONSE, DEFAULT_HEADERS);
});
afterEach(() => {
@@ -94,9 +101,9 @@ describe('BulkImportsHistoryApp', () => {
});
it('renders empty state when no data is available', async () => {
- mock.onGet(API_URL).reply(HTTP_STATUS_OK, [], DEFAULT_HEADERS);
+ mock.onGet(BULK_IMPORTS_API_URL).reply(HTTP_STATUS_OK, [], DEFAULT_HEADERS);
createComponent();
- await axios.waitForAll();
+ await waitForPromises();
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.findComponent(GlEmptyState).exists()).toBe(true);
@@ -104,7 +111,7 @@ describe('BulkImportsHistoryApp', () => {
it('renders table with data when history is available', async () => {
createComponent();
- await axios.waitForAll();
+ await waitForPromises();
const table = wrapper.findComponent(GlTableLite);
expect(table.exists()).toBe(true);
@@ -116,26 +123,46 @@ describe('BulkImportsHistoryApp', () => {
const NEW_PAGE = 4;
createComponent();
- await axios.waitForAll();
+ await waitForPromises();
mock.resetHistory();
- wrapper.findComponent(PaginationBar).vm.$emit('set-page', NEW_PAGE);
- await axios.waitForAll();
+ findPaginationBar().vm.$emit('set-page', NEW_PAGE);
+ await waitForPromises();
expect(mock.history.get.length).toBe(1);
expect(mock.history.get[0].params).toStrictEqual(expect.objectContaining({ page: NEW_PAGE }));
});
});
+ describe('when filtering by bulk_import_id param', () => {
+ const mockId = 2;
+
+ beforeEach(() => {
+ getParameterValues.mockReturnValue([mockId]);
+ });
+
+ it('makes a request to bulk_import_history endpoint', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(mock.history.get.length).toBe(1);
+ expect(mock.history.get[0].url).toBe(`/api/v4/bulk_imports/${mockId}/entities`);
+ expect(mock.history.get[0].params).toStrictEqual({
+ page: 1,
+ per_page: 20,
+ });
+ });
+ });
+
it('changes page size when requested by pagination bar', async () => {
const NEW_PAGE_SIZE = 4;
createComponent();
- await axios.waitForAll();
+ await waitForPromises();
mock.resetHistory();
- wrapper.findComponent(PaginationBar).vm.$emit('set-page-size', NEW_PAGE_SIZE);
- await axios.waitForAll();
+ findPaginationBar().vm.$emit('set-page-size', NEW_PAGE_SIZE);
+ await waitForPromises();
expect(mock.history.get.length).toBe(1);
expect(mock.history.get[0].params).toStrictEqual(
@@ -146,15 +173,14 @@ describe('BulkImportsHistoryApp', () => {
it('resets page to 1 when page size is changed', async () => {
const NEW_PAGE_SIZE = 4;
- mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
createComponent();
- await axios.waitForAll();
- wrapper.findComponent(PaginationBar).vm.$emit('set-page', 2);
- await axios.waitForAll();
+ await waitForPromises();
+ findPaginationBar().vm.$emit('set-page', 2);
+ await waitForPromises();
mock.resetHistory();
- wrapper.findComponent(PaginationBar).vm.$emit('set-page-size', NEW_PAGE_SIZE);
- await axios.waitForAll();
+ findPaginationBar().vm.$emit('set-page-size', NEW_PAGE_SIZE);
+ await waitForPromises();
expect(mock.history.get.length).toBe(1);
expect(mock.history.get[0].params).toStrictEqual(
@@ -166,18 +192,18 @@ describe('BulkImportsHistoryApp', () => {
const NEW_PAGE_SIZE = 4;
createComponent();
- await axios.waitForAll();
+ await waitForPromises();
mock.resetHistory();
- wrapper.findComponent(PaginationBar).vm.$emit('set-page-size', NEW_PAGE_SIZE);
- await axios.waitForAll();
+ findPaginationBar().vm.$emit('set-page-size', NEW_PAGE_SIZE);
+ await waitForPromises();
expect(findLocalStorageSync().props('value')).toBe(NEW_PAGE_SIZE);
});
it('renders link to destination_full_path for destination group', async () => {
createComponent({ shallow: false });
- await axios.waitForAll();
+ await waitForPromises();
expect(wrapper.find('tbody tr a').attributes().href).toBe(
`/${DUMMY_RESPONSE[0].destination_full_path}`,
@@ -187,9 +213,9 @@ describe('BulkImportsHistoryApp', () => {
it('renders destination as text when destination_full_path is not defined', async () => {
const RESPONSE = [{ ...DUMMY_RESPONSE[0], destination_full_path: null }];
- mock.onGet(API_URL).reply(HTTP_STATUS_OK, RESPONSE, DEFAULT_HEADERS);
+ mock.onGet(BULK_IMPORTS_API_URL).reply(HTTP_STATUS_OK, RESPONSE, DEFAULT_HEADERS);
createComponent({ shallow: false });
- await axios.waitForAll();
+ await waitForPromises();
expect(wrapper.find('tbody tr a').exists()).toBe(false);
expect(wrapper.find('tbody tr span').text()).toBe(
@@ -199,14 +225,14 @@ describe('BulkImportsHistoryApp', () => {
it('adds slash to group urls', async () => {
createComponent({ shallow: false });
- await axios.waitForAll();
+ await waitForPromises();
expect(wrapper.find('tbody tr a').text()).toBe(`${DUMMY_RESPONSE[0].destination_full_path}/`);
});
it('does not prefixes project urls with slash', async () => {
createComponent({ shallow: false });
- await axios.waitForAll();
+ await waitForPromises();
expect(wrapper.findAll('tbody tr a').at(1).text()).toBe(
DUMMY_RESPONSE[1].destination_full_path,
@@ -215,9 +241,9 @@ describe('BulkImportsHistoryApp', () => {
describe('details button', () => {
beforeEach(() => {
- mock.onGet(API_URL).reply(HTTP_STATUS_OK, DUMMY_RESPONSE, DEFAULT_HEADERS);
+ mock.onGet(BULK_IMPORTS_API_URL).reply(HTTP_STATUS_OK, DUMMY_RESPONSE, DEFAULT_HEADERS);
createComponent({ shallow: false });
- return axios.waitForAll();
+ return waitForPromises();
});
it('renders details button if relevant item has failures', () => {
@@ -255,7 +281,7 @@ describe('BulkImportsHistoryApp', () => {
createComponent({ shallow: false });
await waitForPromises();
- expect(mock.history.get.map((x) => x.url)).toEqual([API_URL]);
+ expect(mock.history.get.map((x) => x.url)).toEqual([BULK_IMPORTS_API_URL]);
});
});
@@ -279,7 +305,7 @@ describe('BulkImportsHistoryApp', () => {
const RESPONSE = [mockCreatedImport, ...DUMMY_RESPONSE];
const POLL_HEADERS = { 'poll-interval': pollInterval };
- mock.onGet(API_URL).reply(HTTP_STATUS_OK, RESPONSE, DEFAULT_HEADERS);
+ mock.onGet(BULK_IMPORTS_API_URL).reply(HTTP_STATUS_OK, RESPONSE, DEFAULT_HEADERS);
mock.onGet(mockRealtimeChangesPath).replyOnce(HTTP_STATUS_OK, [], POLL_HEADERS);
mock
.onGet(mockRealtimeChangesPath)
@@ -293,7 +319,10 @@ describe('BulkImportsHistoryApp', () => {
it('starts polling for realtime changes', () => {
jest.advanceTimersByTime(pollInterval);
- expect(mock.history.get.map((x) => x.url)).toEqual([API_URL, mockRealtimeChangesPath]);
+ expect(mock.history.get.map((x) => x.url)).toEqual([
+ BULK_IMPORTS_API_URL,
+ mockRealtimeChangesPath,
+ ]);
expect(wrapper.findAll('tbody tr').at(0).text()).toContain('Pending');
});
@@ -305,7 +334,7 @@ describe('BulkImportsHistoryApp', () => {
await waitForPromises();
expect(mock.history.get.map((x) => x.url)).toEqual([
- API_URL,
+ BULK_IMPORTS_API_URL,
mockRealtimeChangesPath,
mockRealtimeChangesPath,
]);
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index f6ecee4cd53..7cb0e3ee38b 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -7,14 +7,15 @@ describe('Interval Pattern Input Component', () => {
let oldWindowGl;
let wrapper;
+ const mockMinute = 3;
const mockHour = 4;
const mockWeekDayIndex = 1;
const mockDay = 1;
const cronIntervalPresets = {
- everyDay: `0 ${mockHour} * * *`,
- everyWeek: `0 ${mockHour} * * ${mockWeekDayIndex}`,
- everyMonth: `0 ${mockHour} ${mockDay} * *`,
+ everyDay: `${mockMinute} ${mockHour} * * *`,
+ everyWeek: `${mockMinute} ${mockHour} * * ${mockWeekDayIndex}`,
+ everyMonth: `${mockMinute} ${mockHour} ${mockDay} * *`,
};
const customKey = 'custom';
const everyDayKey = 'everyDay';
@@ -40,6 +41,7 @@ describe('Interval Pattern Input Component', () => {
propsData: { ...props },
data() {
return {
+ randomMinute: data?.minute || mockMinute,
randomHour: data?.hour || mockHour,
randomWeekDayIndex: mockWeekDayIndex,
randomDay: mockDay,
@@ -108,12 +110,12 @@ describe('Interval Pattern Input Component', () => {
describe('formattedTime computed property', () => {
it.each`
- desc | hour | expectedValue
- ${'returns a time in the afternoon if the value of `random time` is higher than 12'} | ${13} | ${'1:00pm'}
- ${'returns a time in the morning if the value of `random time` is lower than 12'} | ${11} | ${'11:00am'}
- ${'returns "12:00pm" if the value of `random time` is exactly 12'} | ${12} | ${'12:00pm'}
- `('$desc', ({ hour, expectedValue }) => {
- createWrapper({}, { hour });
+ desc | hour | minute | expectedValue
+ ${'returns a time in the afternoon if the value of `random time` is higher than 12'} | ${13} | ${7} | ${'1:07pm'}
+ ${'returns a time in the morning if the value of `random time` is lower than 12'} | ${11} | ${30} | ${'11:30am'}
+ ${'returns "12:05pm" if the value of `random time` is exactly 12 and the value of random minutes is 5'} | ${12} | ${5} | ${'12:05pm'}
+ `('$desc', ({ hour, minute, expectedValue }) => {
+ createWrapper({}, { hour, minute });
expect(wrapper.vm.formattedTime).toBe(expectedValue);
});
@@ -128,9 +130,9 @@ describe('Interval Pattern Input Component', () => {
const labels = findAllLabels().wrappers.map((el) => trimText(el.text()));
expect(labels).toEqual([
- 'Every day (at 4:00am)',
- 'Every week (Monday at 4:00am)',
- 'Every month (Day 1 at 4:00am)',
+ 'Every day (at 4:03am)',
+ 'Every week (Monday at 4:03am)',
+ 'Every month (Day 1 at 4:03am)',
'Custom',
]);
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js b/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js
index 4ac3a511fa2..8145eb6fbd4 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js
@@ -1,27 +1,30 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { GlBadge, GlLoadingIcon, GlModal, GlSprintf, GlToggle } from '@gitlab/ui';
+import { GlLoadingIcon, GlModal, GlSprintf, GlToggle } from '@gitlab/ui';
import { createAlert } from '~/alert';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
+import BetaBadge from '~/vue_shared/components/badges/beta_badge.vue';
import catalogResourcesCreate from '~/pages/projects/shared/permissions/graphql/mutations/catalog_resources_create.mutation.graphql';
+import catalogResourcesDestroy from '~/pages/projects/shared/permissions/graphql/mutations/catalog_resources_destroy.mutation.graphql';
import getCiCatalogSettingsQuery from '~/pages/projects/shared/permissions/graphql/queries/get_ci_catalog_settings.query.graphql';
-import CiCatalogSettings, {
- i18n,
-} from '~/pages/projects/shared/permissions/components/ci_catalog_settings.vue';
+import CiCatalogSettings from '~/pages/projects/shared/permissions/components/ci_catalog_settings.vue';
-import { mockCiCatalogSettingsResponse } from './mock_data';
+import { generateCatalogSettingsResponse } from './mock_data';
Vue.use(VueApollo);
jest.mock('~/alert');
+const showToast = jest.fn();
+
describe('CiCatalogSettings', () => {
let wrapper;
let ciCatalogSettingsResponse;
let catalogResourcesCreateResponse;
+ let catalogResourcesDestroyResponse;
const fullPath = 'gitlab-org/gitlab';
@@ -29,6 +32,7 @@ describe('CiCatalogSettings', () => {
const handlers = [
[getCiCatalogSettingsQuery, ciCatalogSettingsHandler],
[catalogResourcesCreate, catalogResourcesCreateResponse],
+ [catalogResourcesDestroy, catalogResourcesDestroyResponse],
];
const mockApollo = createMockApollo(handlers);
@@ -39,6 +43,11 @@ describe('CiCatalogSettings', () => {
stubs: {
GlSprintf,
},
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
+ },
apolloProvider: mockApollo,
});
@@ -46,15 +55,34 @@ describe('CiCatalogSettings', () => {
};
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findBadge = () => wrapper.findComponent(GlBadge);
+ const findBadge = () => wrapper.findComponent(BetaBadge);
const findModal = () => wrapper.findComponent(GlModal);
const findToggle = () => wrapper.findComponent(GlToggle);
-
const findCiCatalogSettings = () => wrapper.findByTestId('ci-catalog-settings');
+ const removeCatalogResource = () => {
+ findToggle().vm.$emit('change');
+ findModal().vm.$emit('primary');
+ return waitForPromises();
+ };
+
+ const setCatalogResource = () => {
+ findToggle().vm.$emit('change');
+ return waitForPromises();
+ };
+
beforeEach(() => {
- ciCatalogSettingsResponse = jest.fn().mockResolvedValue(mockCiCatalogSettingsResponse);
+ ciCatalogSettingsResponse = jest.fn();
+ catalogResourcesDestroyResponse = jest.fn();
catalogResourcesCreateResponse = jest.fn();
+
+ ciCatalogSettingsResponse.mockResolvedValue(generateCatalogSettingsResponse());
+ catalogResourcesCreateResponse.mockResolvedValue({
+ data: { catalogResourcesCreate: { errors: [] } },
+ });
+ catalogResourcesDestroyResponse.mockResolvedValue({
+ data: { catalogResourcesDestroy: { errors: [] } },
+ });
});
describe('when initial queries are loading', () => {
@@ -81,31 +109,68 @@ describe('CiCatalogSettings', () => {
expect(findCiCatalogSettings().exists()).toBe(true);
});
- it('renders the experiment badge', () => {
+ it('renders the beta badge', () => {
expect(findBadge().exists()).toBe(true);
});
it('renders the toggle', () => {
expect(findToggle().exists()).toBe(true);
});
+ });
- it('renders the modal', () => {
- expect(findModal().exists()).toBe(true);
- expect(findModal().attributes('title')).toBe(i18n.modal.title);
+ describe('when the project is not a CI/CD resource', () => {
+ beforeEach(async () => {
+ await createComponent();
});
- describe('when queries have loaded', () => {
- beforeEach(() => {
- catalogResourcesCreateResponse.mockResolvedValue(mockCiCatalogSettingsResponse);
+ describe('and the toggle is clicked', () => {
+ it('does not show a confirmation modal', async () => {
+ expect(findModal().props('visible')).toBe(false);
+
+ await findToggle().vm.$emit('change', true);
+
+ expect(findModal().props('visible')).toBe(false);
+ });
+
+ it('calls the mutation with the correct input', async () => {
+ expect(catalogResourcesCreateResponse).toHaveBeenCalledTimes(0);
+
+ await setCatalogResource();
+
+ expect(catalogResourcesCreateResponse).toHaveBeenCalledTimes(1);
+ expect(catalogResourcesCreateResponse).toHaveBeenCalledWith({
+ input: {
+ projectPath: fullPath,
+ },
+ });
});
- it('shows the modal when the toggle is clicked', async () => {
+ describe('when the mutation is successful', () => {
+ it('shows a toast message with a success message', async () => {
+ expect(showToast).not.toHaveBeenCalled();
+
+ await setCatalogResource();
+
+ expect(showToast).toHaveBeenCalledWith('This project is now a CI/CD Catalog resource.');
+ });
+ });
+ });
+ });
+
+ describe('when the project is a CI/CD resource', () => {
+ beforeEach(async () => {
+ ciCatalogSettingsResponse.mockResolvedValue(generateCatalogSettingsResponse(true));
+ await createComponent();
+ });
+
+ describe('and the toggle is clicked', () => {
+ it('shows a confirmation modal', async () => {
expect(findModal().props('visible')).toBe(false);
- await findToggle().vm.$emit('change', true);
+ await findToggle().vm.$emit('change', false);
expect(findModal().props('visible')).toBe(true);
- expect(findModal().props('actionPrimary').text).toBe(i18n.modal.actionPrimary.text);
+ expect(findModal().props('actionPrimary').text).toBe('Remove from the CI/CD catalog');
});
it('hides the modal when cancel is clicked', () => {
@@ -117,31 +182,85 @@ describe('CiCatalogSettings', () => {
});
it('calls the mutation with the correct input from the modal click', async () => {
- expect(catalogResourcesCreateResponse).toHaveBeenCalledTimes(0);
+ expect(catalogResourcesDestroyResponse).toHaveBeenCalledTimes(0);
- findToggle().vm.$emit('change', true);
- findModal().vm.$emit('primary');
- await waitForPromises();
+ await removeCatalogResource();
- expect(catalogResourcesCreateResponse).toHaveBeenCalledTimes(1);
- expect(catalogResourcesCreateResponse).toHaveBeenCalledWith({
+ expect(catalogResourcesDestroyResponse).toHaveBeenCalledTimes(1);
+ expect(catalogResourcesDestroyResponse).toHaveBeenCalledWith({
input: {
projectPath: fullPath,
},
});
});
+
+ it('shows a toast message when the mutation has worked', async () => {
+ expect(showToast).not.toHaveBeenCalled();
+
+ await removeCatalogResource();
+
+ expect(showToast).toHaveBeenCalledWith(
+ 'This project is no longer a CI/CD Catalog resource.',
+ );
+ });
});
});
- describe('when the query is unsuccessful', () => {
- const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+ describe('mutation errors', () => {
+ const createGraphqlError = { data: { catalogResourcesCreate: { errors: ['graphql error'] } } };
+ const destroyGraphqlError = {
+ data: { catalogResourcesDestroy: { errors: ['graphql error'] } },
+ };
- it('throws an error', async () => {
- await createComponent({ ciCatalogSettingsHandler: failedHandler });
+ beforeEach(() => {
+ createAlert.mockClear();
+ });
+ it.each`
+ name | errorType | jestResolver | mockResponse | expectedMessage
+ ${'create'} | ${'unhandled server error with a message'} | ${'mockRejectedValue'} | ${new Error('server error')} | ${'server error'}
+ ${'create'} | ${'unhandled server error without a message'} | ${'mockRejectedValue'} | ${new Error()} | ${'Unable to set project as a CI/CD Catalog resource.'}
+ ${'create'} | ${'handled Graphql error'} | ${'mockResolvedValue'} | ${createGraphqlError} | ${'graphql error'}
+ ${'destroy'} | ${'unhandled server'} | ${'mockRejectedValue'} | ${new Error('server error')} | ${'server error'}
+ ${'destroy'} | ${'unhandled server'} | ${'mockRejectedValue'} | ${new Error()} | ${'Unable to remove project as a CI/CD Catalog resource.'}
+ ${'destroy'} | ${'handled Graphql error'} | ${'mockResolvedValue'} | ${destroyGraphqlError} | ${'graphql error'}
+ `(
+ 'when $name mutation returns an $errorType',
+ async ({ name, jestResolver, mockResponse, expectedMessage }) => {
+ let mutationMock = catalogResourcesCreateResponse;
+ let toggleAction = setCatalogResource;
+
+ if (name === 'destroy') {
+ mutationMock = catalogResourcesDestroyResponse;
+ toggleAction = removeCatalogResource;
+ ciCatalogSettingsResponse.mockResolvedValue(generateCatalogSettingsResponse(true));
+ }
+
+ await createComponent();
+ mutationMock[jestResolver](mockResponse);
+
+ expect(showToast).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
+
+ await toggleAction();
+
+ expect(showToast).not.toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalledWith({ message: expectedMessage });
+ },
+ );
+ });
+
+ describe('when the query is unsuccessful', () => {
+ beforeEach(async () => {
+ const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+ await createComponent({ ciCatalogSettingsHandler: failedHandler });
await waitForPromises();
+ });
- expect(createAlert).toHaveBeenCalledWith({ message: i18n.catalogResourceQueryError });
+ it('throws an error', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'There was a problem fetching the CI/CD Catalog setting.',
+ });
});
});
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/mock_data.js b/spec/frontend/pages/projects/shared/permissions/components/mock_data.js
index 44bbf2a5eb2..cf51604e1b0 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/mock_data.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/mock_data.js
@@ -1,7 +1,10 @@
-export const mockCiCatalogSettingsResponse = {
- data: {
- catalogResourcesCreate: {
- errors: [],
+export const generateCatalogSettingsResponse = (isCatalogResource = false) => {
+ return {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/149',
+ isCatalogResource,
+ },
},
- },
+ };
};
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 8b672ff3f32..207ce8c1ffa 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -137,6 +137,7 @@ describe('Settings Panel', () => {
const findMonitorSettings = () => wrapper.findComponent({ ref: 'monitor-settings' });
const findModelExperimentsSettings = () =>
wrapper.findComponent({ ref: 'model-experiments-settings' });
+ const findModelRegistrySettings = () => wrapper.findComponent({ ref: 'model-registry-settings' });
describe('Project Visibility', () => {
it('should set the project visibility help path', () => {
@@ -758,4 +759,11 @@ describe('Settings Panel', () => {
expect(findModelExperimentsSettings().exists()).toBe(true);
});
});
+ describe('Model registry', () => {
+ it('shows model registry toggle', () => {
+ wrapper = mountComponent({});
+
+ expect(findModelRegistrySettings().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/pages/shared/nav/sidebar_tracking_spec.js b/spec/frontend/pages/shared/nav/sidebar_tracking_spec.js
deleted file mode 100644
index 04f53e048ed..00000000000
--- a/spec/frontend/pages/shared/nav/sidebar_tracking_spec.js
+++ /dev/null
@@ -1,160 +0,0 @@
-import { setHTMLFixture } from 'helpers/fixtures';
-import { initSidebarTracking } from '~/pages/shared/nav/sidebar_tracking';
-
-describe('~/pages/shared/nav/sidebar_tracking.js', () => {
- beforeEach(() => {
- setHTMLFixture(`
- <aside class="nav-sidebar">
- <div class="nav-sidebar-inner-scroll">
- <ul class="sidebar-top-level-items">
- <li data-track-label="project_information_menu" class="home">
- <a aria-label="Project information" class="shortcuts-project-information has-sub-items" href="">
- <span class="nav-icon-container">
- <svg class="s16" data-testid="project-icon">
- <use xlink:href="/assets/icons-1b2dadc4c3d49797908ba67b8f10da5d63dd15d859bde28d66fb60bbb97a4dd5.svg#project"></use>
- </svg>
- </span>
- <span class="nav-item-name">Project information</span>
- </a>
- <ul class="sidebar-sub-level-items">
- <li class="fly-out-top-item">
- <a aria-label="Project information" href="#">
- <strong class="fly-out-top-item-name">Project information</strong>
- </a>
- </li>
- <li class="divider fly-out-top-item"></li>
- <li data-track-label="activity" class="">
- <a aria-label="Activity" class="shortcuts-project-activity" href=#">
- <span>Activity</span>
- </a>
- </li>
- <li data-track-label="labels" class="">
- <a aria-label="Labels" href="#">
- <span>Labels</span>
- </a>
- </li>
- <li data-track-label="members" class="">
- <a aria-label="Members" href="#">
- <span>Members</span>
- </a>
- </li>
- </ul>
- </li>
- </ul>
- </div>
- </aside>
- `);
-
- initSidebarTracking();
- });
-
- describe('sidebar is not collapsed', () => {
- describe('menu is not expanded', () => {
- it('sets the proper data tracking attributes when clicking on menu', () => {
- const menu = document.querySelector('li[data-track-label="project_information_menu"]');
- const menuLink = menu.querySelector('a');
-
- menu.classList.add('is-over', 'is-showing-fly-out');
- menuLink.click();
-
- expect(menu).toHaveTrackingAttributes({
- action: 'click_menu',
- extra: JSON.stringify({
- sidebar_display: 'Expanded',
- menu_display: 'Fly out',
- }),
- });
- });
-
- it('sets the proper data tracking attributes when clicking on submenu', () => {
- const menu = document.querySelector('li[data-track-label="activity"]');
- const menuLink = menu.querySelector('a');
- const submenuList = document.querySelector('ul.sidebar-sub-level-items');
-
- submenuList.classList.add('fly-out-list');
- menuLink.click();
-
- expect(menu).toHaveTrackingAttributes({
- action: 'click_menu_item',
- extra: JSON.stringify({
- sidebar_display: 'Expanded',
- menu_display: 'Fly out',
- }),
- });
- });
- });
-
- describe('menu is expanded', () => {
- it('sets the proper data tracking attributes when clicking on menu', () => {
- const menu = document.querySelector('li[data-track-label="project_information_menu"]');
- const menuLink = menu.querySelector('a');
-
- menu.classList.add('active');
- menuLink.click();
-
- expect(menu).toHaveTrackingAttributes({
- action: 'click_menu',
- extra: JSON.stringify({
- sidebar_display: 'Expanded',
- menu_display: 'Expanded',
- }),
- });
- });
-
- it('sets the proper data tracking attributes when clicking on submenu', () => {
- const menu = document.querySelector('li[data-track-label="activity"]');
- const menuLink = menu.querySelector('a');
-
- menu.classList.add('active');
- menuLink.click();
-
- expect(menu).toHaveTrackingAttributes({
- action: 'click_menu_item',
- extra: JSON.stringify({
- sidebar_display: 'Expanded',
- menu_display: 'Expanded',
- }),
- });
- });
- });
- });
-
- describe('sidebar is collapsed', () => {
- beforeEach(() => {
- document.querySelector('aside.nav-sidebar').classList.add('js-sidebar-collapsed');
- });
-
- it('sets the proper data tracking attributes when clicking on menu', () => {
- const menu = document.querySelector('li[data-track-label="project_information_menu"]');
- const menuLink = menu.querySelector('a');
-
- menu.classList.add('is-over', 'is-showing-fly-out');
- menuLink.click();
-
- expect(menu).toHaveTrackingAttributes({
- action: 'click_menu',
- extra: JSON.stringify({
- sidebar_display: 'Collapsed',
- menu_display: 'Fly out',
- }),
- });
- });
-
- it('sets the proper data tracking attributes when clicking on submenu', () => {
- const menu = document.querySelector('li[data-track-label="activity"]');
- const menuLink = menu.querySelector('a');
- const submenuList = document.querySelector('ul.sidebar-sub-level-items');
-
- submenuList.classList.add('fly-out-list');
- menuLink.click();
-
- expect(menu).toHaveTrackingAttributes({
- action: 'click_menu_item',
- extra: JSON.stringify({
- sidebar_display: 'Collapsed',
- menu_display: 'Fly out',
- }),
- });
- });
- });
-});
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_export_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_export_spec.js
deleted file mode 100644
index b7002412561..00000000000
--- a/spec/frontend/pages/shared/wikis/components/wiki_export_spec.js
+++ /dev/null
@@ -1,48 +0,0 @@
-import { GlDisclosureDropdown } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import WikiExport from '~/pages/shared/wikis/components/wiki_export.vue';
-import printMarkdownDom from '~/lib/print_markdown_dom';
-
-jest.mock('~/lib/print_markdown_dom');
-
-describe('pages/shared/wikis/components/wiki_export', () => {
- let wrapper;
-
- const createComponent = (provide) => {
- wrapper = shallowMount(WikiExport, {
- provide,
- });
- };
-
- const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
- const findPrintItem = () =>
- findDropdown()
- .props('items')
- .find((x) => x.text === 'Print as PDF');
-
- describe('print', () => {
- beforeEach(() => {
- document.body.innerHTML = '<div id="content-body">Content</div>';
- });
-
- afterEach(() => {
- document.body.innerHTML = '';
- });
-
- it('should print the content', () => {
- createComponent({
- target: '#content-body',
- title: 'test title',
- stylesheet: [],
- });
-
- findPrintItem().action();
-
- expect(printMarkdownDom).toHaveBeenCalledWith({
- target: document.querySelector('#content-body'),
- title: 'test title',
- stylesheet: [],
- });
- });
- });
-});
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_more_dropdown_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_more_dropdown_spec.js
new file mode 100644
index 00000000000..830377ff39f
--- /dev/null
+++ b/spec/frontend/pages/shared/wikis/components/wiki_more_dropdown_spec.js
@@ -0,0 +1,83 @@
+import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import WikiMoreDropdown from '~/pages/shared/wikis/components/wiki_more_dropdown.vue';
+import printMarkdownDom from '~/lib/print_markdown_dom';
+
+jest.mock('~/lib/print_markdown_dom');
+
+describe('pages/shared/wikis/components/wiki_more_dropdown', () => {
+ let wrapper;
+
+ const createComponent = (provide) => {
+ wrapper = shallowMountExtended(WikiMoreDropdown, {
+ provide: {
+ history: 'https://history.url/path',
+ print: {
+ target: '#content-body',
+ title: 'test title',
+ stylesheet: [],
+ },
+ ...provide,
+ },
+ stubs: {
+ GlDisclosureDropdown,
+ GlDisclosureDropdownItem,
+ },
+ });
+ };
+
+ const findHistoryItem = () => wrapper.findByTestId('page-history-button');
+ const findPrintItem = () => wrapper.findByTestId('page-print-button');
+
+ describe('history', () => {
+ it('renders if `history` is set', () => {
+ createComponent({ history: false });
+
+ expect(findHistoryItem().exists()).toBe(false);
+
+ createComponent();
+
+ expect(findHistoryItem().exists()).toBe(true);
+ });
+
+ it('should have history page url', () => {
+ createComponent();
+
+ expect(findHistoryItem().attributes('href')).toBe('https://history.url/path');
+ });
+ });
+
+ describe('print', () => {
+ beforeEach(() => {
+ document.body.innerHTML = '<div id="content-body">Content</div>';
+ });
+
+ afterEach(() => {
+ document.body.innerHTML = '';
+ });
+
+ it('renders if `print` is set', () => {
+ createComponent({ print: false });
+
+ expect(findPrintItem().exists()).toBe(false);
+
+ createComponent();
+
+ expect(findPrintItem().exists()).toBe(true);
+ });
+
+ it('should print the content', () => {
+ createComponent();
+
+ expect(findPrintItem().exists()).toBe(true);
+
+ findPrintItem().trigger('click');
+
+ expect(printMarkdownDom).toHaveBeenCalledWith({
+ target: document.querySelector('#content-body'),
+ title: 'test title',
+ stylesheet: [],
+ });
+ });
+ });
+});
diff --git a/spec/frontend/persistent_user_callout_spec.js b/spec/frontend/persistent_user_callout_spec.js
index 376575a8acb..a9bfc0003bf 100644
--- a/spec/frontend/persistent_user_callout_spec.js
+++ b/spec/frontend/persistent_user_callout_spec.js
@@ -24,6 +24,7 @@ describe('PersistentUserCallout', () => {
>
<button type="button" class="js-close js-close-primary"></button>
<button type="button" class="js-close js-close-secondary"></button>
+ <a class="js-close-and-follow-link" href="/somewhere-pleasant">A Link</a>
</div>
`;
@@ -65,6 +66,8 @@ describe('PersistentUserCallout', () => {
return fixture;
}
+ useMockLocationHelper();
+
describe('dismiss', () => {
const buttons = {};
let mockAxios;
@@ -178,8 +181,6 @@ describe('PersistentUserCallout', () => {
let mockAxios;
let persistentUserCallout;
- useMockLocationHelper();
-
beforeEach(() => {
const fixture = createFollowLinkFixture();
const container = fixture.querySelector('.container');
@@ -222,6 +223,53 @@ describe('PersistentUserCallout', () => {
});
});
+ describe('dismiss and follow links', () => {
+ let link;
+ let mockAxios;
+ let persistentUserCallout;
+
+ beforeEach(() => {
+ const fixture = createFixture();
+ const container = fixture.querySelector('.container');
+ link = fixture.querySelector('.js-close-and-follow-link');
+ mockAxios = new MockAdapter(axios);
+
+ persistentUserCallout = new PersistentUserCallout(container);
+ jest.spyOn(persistentUserCallout.container, 'remove').mockImplementation(() => {});
+ });
+
+ afterEach(() => {
+ mockAxios.restore();
+ });
+
+ it('uses a link to trigger callout and defers following until callout is finished', async () => {
+ const { href } = link;
+ mockAxios.onPost(dismissEndpoint).replyOnce(HTTP_STATUS_OK);
+
+ link.click();
+
+ await waitForPromises();
+
+ expect(window.location.assign).toHaveBeenCalledWith(href);
+ expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
+ expect(mockAxios.history.post[0].data).toBe(JSON.stringify({ feature_name: featureName }));
+ });
+
+ it('invokes Flash when the dismiss request fails', async () => {
+ mockAxios.onPost(dismissEndpoint).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+
+ link.click();
+
+ await waitForPromises();
+
+ expect(window.location.assign).not.toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalledWith({
+ message:
+ 'An error occurred while acknowledging the notification. Refresh the page and try again.',
+ });
+ });
+ });
+
describe('factory', () => {
it('returns an instance of PersistentUserCallout with the provided container property', () => {
const fixture = createFixture();
diff --git a/spec/frontend/profile/edit/components/profile_edit_app_spec.js b/spec/frontend/profile/edit/components/profile_edit_app_spec.js
index 31a368aefa9..39bf597352b 100644
--- a/spec/frontend/profile/edit/components/profile_edit_app_spec.js
+++ b/spec/frontend/profile/edit/components/profile_edit_app_spec.js
@@ -3,7 +3,6 @@ import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { readFileAsDataURL } from '~/lib/utils/file_utility';
import axios from '~/lib/utils/axios_utils';
import ProfileEditApp from '~/profile/edit/components/profile_edit_app.vue';
import UserAvatar from '~/profile/edit/components/user_avatar.vue';
@@ -103,6 +102,8 @@ describe('Profile Edit App', () => {
});
it('syncs header avatars', async () => {
+ jest.spyOn(document, 'dispatchEvent');
+ jest.spyOn(URL, 'createObjectURL');
mockAxios.onPut(stubbedProfilePath).reply(200, {
message: successMessage,
});
@@ -112,7 +113,8 @@ describe('Profile Edit App', () => {
await waitForPromises();
- expect(readFileAsDataURL).toHaveBeenCalledWith(mockAvatarFile);
+ expect(URL.createObjectURL).toHaveBeenCalledWith(mockAvatarFile);
+ expect(document.dispatchEvent).toHaveBeenCalledWith(new CustomEvent('userAvatar:update'));
});
it('contains changes from the status form', async () => {
diff --git a/spec/frontend/profile/edit/components/user_avatar_spec.js b/spec/frontend/profile/edit/components/user_avatar_spec.js
index caa3356b49f..7c4f74d6bfb 100644
--- a/spec/frontend/profile/edit/components/user_avatar_spec.js
+++ b/spec/frontend/profile/edit/components/user_avatar_spec.js
@@ -46,6 +46,7 @@ describe('Edit User Avatar', () => {
...defaultProvides,
...provides,
},
+ attachTo: document.body,
});
};
@@ -65,7 +66,7 @@ describe('Edit User Avatar', () => {
modalCrop: '.modal-profile-crop',
pickImageEl: '.js-choose-user-avatar-button',
uploadImageBtn: '.js-upload-user-avatar',
- modalCropImg: '.modal-profile-crop-image',
+ modalCropImg: expect.any(HTMLImageElement),
onBlobChange: expect.any(Function),
});
expect(glCropDataMock).toHaveBeenCalledWith('glcrop');
diff --git a/spec/frontend/projects/commit/store/actions_spec.js b/spec/frontend/projects/commit/store/actions_spec.js
index adb87142fee..7ff1af86f35 100644
--- a/spec/frontend/projects/commit/store/actions_spec.js
+++ b/spec/frontend/projects/commit/store/actions_spec.js
@@ -25,7 +25,7 @@ describe('Commit form modal store actions', () => {
describe('clearModal', () => {
it('commits CLEAR_MODAL mutation', () => {
- testAction(actions.clearModal, {}, {}, [
+ return testAction(actions.clearModal, {}, {}, [
{
type: types.CLEAR_MODAL,
},
@@ -35,7 +35,7 @@ describe('Commit form modal store actions', () => {
describe('requestBranches', () => {
it('commits REQUEST_BRANCHES mutation', () => {
- testAction(actions.requestBranches, {}, {}, [
+ return testAction(actions.requestBranches, {}, {}, [
{
type: types.REQUEST_BRANCHES,
},
@@ -74,7 +74,7 @@ describe('Commit form modal store actions', () => {
describe('setBranch', () => {
it('commits SET_BRANCH mutation', () => {
- testAction(
+ return testAction(
actions.setBranch,
{},
{},
@@ -96,7 +96,7 @@ describe('Commit form modal store actions', () => {
describe('setSelectedBranch', () => {
it('commits SET_SELECTED_BRANCH mutation', () => {
- testAction(actions.setSelectedBranch, {}, {}, [
+ return testAction(actions.setSelectedBranch, {}, {}, [
{
type: types.SET_SELECTED_BRANCH,
payload: {},
@@ -109,7 +109,7 @@ describe('Commit form modal store actions', () => {
it('commits SET_BRANCHES_ENDPOINT mutation', () => {
const endpoint = 'some/endpoint';
- testAction(actions.setBranchesEndpoint, endpoint, {}, [
+ return testAction(actions.setBranchesEndpoint, endpoint, {}, [
{
type: types.SET_BRANCHES_ENDPOINT,
payload: endpoint,
@@ -122,7 +122,7 @@ describe('Commit form modal store actions', () => {
const id = 1;
it('commits SET_SELECTED_PROJECT mutation', () => {
- testAction(
+ return testAction(
actions.setSelectedProject,
id,
{},
diff --git a/spec/frontend/projects/commits/store/actions_spec.js b/spec/frontend/projects/commits/store/actions_spec.js
index 8afa2a6fb8f..e42587d5aad 100644
--- a/spec/frontend/projects/commits/store/actions_spec.js
+++ b/spec/frontend/projects/commits/store/actions_spec.js
@@ -53,7 +53,7 @@ describe('Project commits actions', () => {
const data = [{ id: 1 }];
mock.onGet(path).replyOnce(HTTP_STATUS_OK, data);
- testAction(
+ return testAction(
actions.fetchAuthors,
null,
state,
@@ -66,7 +66,7 @@ describe('Project commits actions', () => {
const path = '/-/autocomplete/users.json';
mock.onGet(path).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
- testAction(actions.fetchAuthors, null, state, [], [{ type: 'receiveAuthorsError' }]);
+ return testAction(actions.fetchAuthors, null, state, [], [{ type: 'receiveAuthorsError' }]);
});
});
});
diff --git a/spec/frontend/projects/components/shared/delete_modal_spec.js b/spec/frontend/projects/components/shared/delete_modal_spec.js
index c6213fd4b6d..7e040db4beb 100644
--- a/spec/frontend/projects/components/shared/delete_modal_spec.js
+++ b/spec/frontend/projects/components/shared/delete_modal_spec.js
@@ -49,7 +49,7 @@ describe('DeleteModal', () => {
attributes: {
variant: 'danger',
disabled: true,
- 'data-qa-selector': 'confirm_delete_button',
+ 'data-testid': 'confirm-delete-button',
},
},
actionCancel: {
diff --git a/spec/frontend/projects/settings/components/default_branch_selector_spec.js b/spec/frontend/projects/settings/components/default_branch_selector_spec.js
index 9baea5c5517..aa50683b185 100644
--- a/spec/frontend/projects/settings/components/default_branch_selector_spec.js
+++ b/spec/frontend/projects/settings/components/default_branch_selector_spec.js
@@ -4,6 +4,7 @@ import RefSelector from '~/ref/components/ref_selector.vue';
import { REF_TYPE_BRANCHES } from '~/ref/constants';
describe('projects/settings/components/default_branch_selector', () => {
+ const disabled = true;
const persistedDefaultBranch = 'main';
const projectId = '123';
let wrapper;
@@ -13,6 +14,7 @@ describe('projects/settings/components/default_branch_selector', () => {
const buildWrapper = () => {
wrapper = shallowMount(DefaultBranchSelector, {
propsData: {
+ disabled,
persistedDefaultBranch,
projectId,
},
@@ -25,6 +27,7 @@ describe('projects/settings/components/default_branch_selector', () => {
it('displays a RefSelector component', () => {
expect(findRefSelector().props()).toEqual({
+ disabled,
value: persistedDefaultBranch,
enabledRefTypes: [REF_TYPE_BRANCHES],
projectId,
diff --git a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
index 7c8cc1bb38d..4e3554131c6 100644
--- a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
@@ -8,6 +8,7 @@ import {
import { last } from 'lodash';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { getUsers, getGroups, getDeployKeys } from '~/projects/settings/api/access_dropdown_api';
import AccessDropdown, { i18n } from '~/projects/settings/components/access_dropdown.vue';
@@ -77,6 +78,7 @@ describe('Access Level Dropdown', () => {
label,
disabled,
preselectedItems,
+ stubs = {},
} = {}) => {
wrapper = shallowMountExtended(AccessDropdown, {
propsData: {
@@ -90,6 +92,7 @@ describe('Access Level Dropdown', () => {
stubs: {
GlSprintf,
GlDropdown,
+ ...stubs,
},
});
};
@@ -373,15 +376,22 @@ describe('Access Level Dropdown', () => {
});
describe('on dropdown open', () => {
+ const focusInput = jest.fn();
+
beforeEach(() => {
- createComponent();
+ createComponent({
+ stubs: {
+ GlSearchBoxByType: stubComponent(GlSearchBoxByType, {
+ methods: { focusInput },
+ }),
+ },
+ });
});
it('should set the search input focus', () => {
- wrapper.vm.$refs.search.focusInput = jest.fn();
findDropdown().vm.$emit('shown');
- expect(wrapper.vm.$refs.search.focusInput).toHaveBeenCalled();
+ expect(focusInput).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js
index 2808a25296d..0a593f3812a 100644
--- a/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js
@@ -3,7 +3,6 @@ import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import CustomEmail from '~/projects/settings_service_desk/components/custom_email.vue';
import {
- I18N_VERIFICATION_ERRORS,
I18N_STATE_VERIFICATION_STARTED,
I18N_STATE_VERIFICATION_FAILED,
I18N_STATE_VERIFICATION_FAILED_RESET_PARAGRAPH,
@@ -15,6 +14,7 @@ describe('CustomEmail', () => {
let wrapper;
const defaultProps = {
+ incomingEmail: 'incoming+test-1-issue-@example.com',
customEmail: 'user@example.com',
smtpAddress: 'smtp.example.com',
verificationState: 'started',
@@ -70,18 +70,21 @@ describe('CustomEmail', () => {
});
describe('verification error', () => {
- it.each([
- 'smtp_host_issue',
- 'invalid_credentials',
- 'mail_not_received_within_timeframe',
- 'incorrect_from',
- 'incorrect_token',
- ])('displays %s label and description', (error) => {
+ it.each`
+ error | label | description
+ ${'smtp_host_issue'} | ${'SMTP host issue'} | ${'A connection to the specified host could not be made or an SSL issue occurred.'}
+ ${'invalid_credentials'} | ${'Invalid credentials'} | ${'The given credentials (username and password) were rejected by the SMTP server, or you need to explicitly set an authentication method.'}
+ ${'mail_not_received_within_timeframe'} | ${'Verification email not received within timeframe'} | ${"The verification email wasn't received in time. There is a 30 minutes timeframe for verification emails to appear in your instance's Service Desk. Make sure that you have set up email forwarding correctly."}
+ ${'incorrect_from'} | ${'Incorrect From header'} | ${'Check your forwarding settings and make sure the original email sender remains in the From header.'}
+ ${'incorrect_token'} | ${'Incorrect verification token'} | ${"The received email didn't contain the verification token that was sent to your email address."}
+ ${'read_timeout'} | ${'Read timeout'} | ${'The SMTP server did not respond in time.'}
+ ${'incorrect_forwarding_target'} | ${'Incorrect forwarding target'} | ${`Forward all emails to the custom email address to ${defaultProps.incomingEmail}`}
+ `('displays $error label and description', ({ error, label, description }) => {
createWrapper({ verificationError: error });
const text = wrapper.text();
- expect(text).toContain(I18N_VERIFICATION_ERRORS[error].label);
- expect(text).toContain(I18N_VERIFICATION_ERRORS[error].description);
+ expect(text).toContain(label);
+ expect(text).toContain(description);
});
});
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js
index 174e05ceeee..8d3a7a5fde5 100644
--- a/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js
@@ -38,6 +38,12 @@ describe('CustomEmailWrapper', () => {
customEmailEndpoint: '/flightjs/Flight/-/service_desk/custom_email',
};
+ const defaultCustomEmailProps = {
+ incomingEmail: defaultProps.incomingEmail,
+ customEmail: 'user@example.com',
+ smtpAddress: 'smtp.example.com',
+ };
+
const showToast = jest.fn();
const createWrapper = (props = {}) => {
@@ -117,8 +123,7 @@ describe('CustomEmailWrapper', () => {
expect(showToast).toHaveBeenCalledWith(I18N_TOAST_SAVED);
expect(findCustomEmail().props()).toEqual({
- customEmail: 'user@example.com',
- smtpAddress: 'smtp.example.com',
+ ...defaultCustomEmailProps,
verificationState: 'started',
verificationError: null,
isEnabled: false,
@@ -140,8 +145,7 @@ describe('CustomEmailWrapper', () => {
it('displays CustomEmail component', () => {
expect(findCustomEmail().props()).toEqual({
- customEmail: 'user@example.com',
- smtpAddress: 'smtp.example.com',
+ ...defaultCustomEmailProps,
verificationState: 'started',
verificationError: null,
isEnabled: false,
@@ -193,8 +197,7 @@ describe('CustomEmailWrapper', () => {
it('fetches data from endpoint and displays CustomEmail component', () => {
expect(findCustomEmail().props()).toEqual({
- customEmail: 'user@example.com',
- smtpAddress: 'smtp.example.com',
+ ...defaultCustomEmailProps,
verificationState: 'failed',
verificationError: 'smtp_host_issue',
isEnabled: false,
@@ -225,8 +228,7 @@ describe('CustomEmailWrapper', () => {
it('fetches data from endpoint and displays CustomEmail component', () => {
expect(findCustomEmail().props()).toEqual({
- customEmail: 'user@example.com',
- smtpAddress: 'smtp.example.com',
+ ...defaultCustomEmailProps,
verificationState: 'finished',
verificationError: null,
isEnabled: false,
@@ -257,8 +259,7 @@ describe('CustomEmailWrapper', () => {
expect(showToast).toHaveBeenCalledWith(I18N_TOAST_ENABLED);
expect(findCustomEmail().props()).toEqual({
- customEmail: 'user@example.com',
- smtpAddress: 'smtp.example.com',
+ ...defaultCustomEmailProps,
verificationState: 'finished',
verificationError: null,
isEnabled: true,
@@ -279,8 +280,7 @@ describe('CustomEmailWrapper', () => {
it('fetches data from endpoint and displays CustomEmail component', () => {
expect(findCustomEmail().props()).toEqual({
- customEmail: 'user@example.com',
- smtpAddress: 'smtp.example.com',
+ ...defaultCustomEmailProps,
verificationState: 'finished',
verificationError: null,
isEnabled: true,
@@ -301,8 +301,7 @@ describe('CustomEmailWrapper', () => {
expect(showToast).toHaveBeenCalledWith(I18N_TOAST_DISABLED);
expect(findCustomEmail().props()).toEqual({
- customEmail: 'user@example.com',
- smtpAddress: 'smtp.example.com',
+ ...defaultCustomEmailProps,
verificationState: 'finished',
verificationError: null,
isEnabled: false,
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index 0eec981b67d..185a85cdb80 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -22,15 +22,13 @@ describe('ServiceDeskRoot', () => {
isIssueTrackerEnabled: true,
outgoingName: 'GitLab Support Bot',
projectKey: 'key',
+ reopenIssueOnExternalParticipantNote: true,
addExternalParticipantsFromCc: true,
selectedTemplate: 'Bug',
selectedFileTemplateProjectId: 42,
templates: ['Bug', 'Documentation'],
publicProject: false,
customEmailEndpoint: '/gitlab-org/gitlab-test/-/service_desk/custom_email',
- glFeatures: {
- serviceDeskCustomEmail: true,
- },
};
const getAlertText = () => wrapper.findComponent(GlAlert).text();
@@ -63,6 +61,8 @@ describe('ServiceDeskRoot', () => {
incomingEmail: provideData.initialIncomingEmail,
initialOutgoingName: provideData.outgoingName,
initialProjectKey: provideData.projectKey,
+ initialReopenIssueOnExternalParticipantNote:
+ provideData.reopenIssueOnExternalParticipantNote,
initialAddExternalParticipantsFromCc: provideData.addExternalParticipantsFromCc,
initialSelectedTemplate: provideData.selectedTemplate,
initialSelectedFileTemplateProjectId: provideData.selectedFileTemplateProjectId,
@@ -87,7 +87,7 @@ describe('ServiceDeskRoot', () => {
const alertBodyLink = alertEl.findComponent(GlLink);
expect(alertBodyLink.exists()).toBe(true);
expect(alertBodyLink.attributes('href')).toBe(
- '/help/user/project/service_desk.html#use-an-additional-service-desk-alias-email',
+ '/help/user/project/service_desk/configure.html#use-an-additional-service-desk-alias-email',
);
expect(alertBodyLink.text()).toBe('How do I create a custom email address?');
});
@@ -149,6 +149,7 @@ describe('ServiceDeskRoot', () => {
selectedTemplate: 'Bug',
outgoingName: 'GitLab Support Bot',
projectKey: 'key',
+ reopenIssueOnExternalParticipantNote: true,
addExternalParticipantsFromCc: true,
};
@@ -163,6 +164,7 @@ describe('ServiceDeskRoot', () => {
outgoing_name: 'GitLab Support Bot',
project_key: 'key',
service_desk_enabled: true,
+ reopen_issue_on_external_participant_note: true,
add_external_participants_from_cc: true,
});
});
@@ -182,6 +184,7 @@ describe('ServiceDeskRoot', () => {
selectedTemplate: 'Bug',
outgoingName: 'GitLab Support Bot',
projectKey: 'key',
+ reopen_issue_on_external_participant_note: true,
addExternalParticipantsFromCc: true,
};
@@ -227,15 +230,5 @@ describe('ServiceDeskRoot', () => {
expect(wrapper.findComponent(CustomEmailWrapper).exists()).toBe(false);
});
});
-
- describe('when feature flag service_desk_custom_email is disabled', () => {
- beforeEach(() => {
- wrapper = createComponent({ glFeatures: { serviceDeskCustomEmail: false } });
- });
-
- it('is not rendered', () => {
- expect(wrapper.findComponent(CustomEmailWrapper).exists()).toBe(false);
- });
- });
});
});
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
index 6449f9bb68e..f7bdb2455e9 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlDropdown, GlFormCheckbox, GlLoadingIcon, GlToggle, GlAlert } from '@gitlab/ui';
+import { GlButton, GlDropdown, GlLoadingIcon, GlToggle, GlAlert } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -19,7 +19,10 @@ describe('ServiceDeskSetting', () => {
const findSuffixFormGroup = () => wrapper.findByTestId('suffix-form-group');
const findIssueTrackerInfo = () => wrapper.findComponent(GlAlert);
const findIssueHelpLink = () => wrapper.findByTestId('issue-help-page');
- const findAddExternalParticipantsFromCcCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findReopenIssueOnExternalParticipantNoteCheckbox = () =>
+ wrapper.findByTestId('reopen-issue-on-external-participant-note');
+ const findAddExternalParticipantsFromCcCheckbox = () =>
+ wrapper.findByTestId('add-external-participants-from-cc');
const createComponent = ({ props = {}, provide = {} } = {}) =>
extendedWrapper(
@@ -212,6 +215,27 @@ describe('ServiceDeskSetting', () => {
});
});
+ describe('reopen issue on external participant note checkbox', () => {
+ it('is rendered', () => {
+ wrapper = createComponent();
+ expect(findReopenIssueOnExternalParticipantNoteCheckbox().exists()).toBe(true);
+ });
+
+ it('forwards false as initial value to the checkbox', () => {
+ wrapper = createComponent({ props: { initialReopenIssueOnExternalParticipantNote: false } });
+ expect(findReopenIssueOnExternalParticipantNoteCheckbox().find('input').element.checked).toBe(
+ false,
+ );
+ });
+
+ it('forwards true as initial value to the checkbox', () => {
+ wrapper = createComponent({ props: { initialReopenIssueOnExternalParticipantNote: true } });
+ expect(findReopenIssueOnExternalParticipantNoteCheckbox().find('input').element.checked).toBe(
+ true,
+ );
+ });
+ });
+
describe('add external participants from cc checkbox', () => {
it('is rendered', () => {
wrapper = createComponent();
@@ -249,7 +273,8 @@ describe('ServiceDeskSetting', () => {
initialSelectedFileTemplateProjectId: 42,
initialOutgoingName: 'GitLab Support Bot',
initialProjectKey: 'key',
- initialAddExternalParticipantsFromCc: false,
+ initialReopenIssueOnExternalParticipantNote: true,
+ initialAddExternalParticipantsFromCc: true,
},
});
@@ -262,7 +287,8 @@ describe('ServiceDeskSetting', () => {
fileTemplateProjectId: 42,
outgoingName: 'GitLab Support Bot',
projectKey: 'key',
- addExternalParticipantsFromCc: false,
+ reopenIssueOnExternalParticipantNote: true,
+ addExternalParticipantsFromCc: true,
};
expect(wrapper.emitted('save')[0]).toEqual([payload]);
@@ -288,6 +314,10 @@ describe('ServiceDeskSetting', () => {
expect(findButton().exists()).toBe(false);
});
+ it('does not render reopen issue on external participant note checkbox', () => {
+ expect(findReopenIssueOnExternalParticipantNoteCheckbox().exists()).toBe(false);
+ });
+
it('does not render add external participants from cc checkbox', () => {
expect(findAddExternalParticipantsFromCcCheckbox().exists()).toBe(false);
});
diff --git a/spec/frontend/read_more_spec.js b/spec/frontend/read_more_spec.js
index 5f7bd32e231..9b25c56f193 100644
--- a/spec/frontend/read_more_spec.js
+++ b/spec/frontend/read_more_spec.js
@@ -1,4 +1,3 @@
-import htmlProjectsOverview from 'test_fixtures/projects/overview.html';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import initReadMore from '~/read_more';
@@ -11,7 +10,12 @@ describe('Read more click-to-expand functionality', () => {
describe('expands target element', () => {
beforeEach(() => {
- setHTMLFixture(htmlProjectsOverview);
+ setHTMLFixture(`
+ <p class="read-more-container">Target</p>
+ <button type="button" class="js-read-more-trigger">
+ <span>Button text</span>
+ </button>
+ `);
});
it('adds "is-expanded" class to target element', () => {
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index 26010a1cfa6..39924a3a77a 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -46,7 +46,7 @@ describe('Ref selector component', () => {
let commitApiCallSpy;
let requestSpies;
- const createComponent = (mountOverrides = {}, propsData = {}) => {
+ const createComponent = ({ overrides = {}, propsData = {} } = {}) => {
wrapper = mountExtended(
RefSelector,
merge(
@@ -64,7 +64,7 @@ describe('Ref selector component', () => {
},
store: createStore(),
},
- mountOverrides,
+ overrides,
),
);
};
@@ -211,7 +211,7 @@ describe('Ref selector component', () => {
const id = 'git-ref';
beforeEach(() => {
- createComponent({ attrs: { id } });
+ createComponent({ overrides: { attrs: { id } } });
return waitForRequests();
});
@@ -326,7 +326,7 @@ describe('Ref selector component', () => {
describe('branches', () => {
describe('when the branches search returns results', () => {
beforeEach(() => {
- createComponent({}, { useSymbolicRefNames: true });
+ createComponent({ propsData: { useSymbolicRefNames: true } });
return waitForRequests();
});
@@ -389,7 +389,7 @@ describe('Ref selector component', () => {
describe('tags', () => {
describe('when the tags search returns results', () => {
beforeEach(() => {
- createComponent({}, { useSymbolicRefNames: true });
+ createComponent({ propsData: { useSymbolicRefNames: true } });
return waitForRequests();
});
@@ -569,6 +569,20 @@ describe('Ref selector component', () => {
});
});
});
+
+ describe('disabled', () => {
+ it('does not disable the dropdown', () => {
+ createComponent();
+ expect(findListbox().props('disabled')).toBe(false);
+ });
+
+ it('disables the dropdown', async () => {
+ createComponent({ propsData: { disabled: true } });
+ expect(findListbox().props('disabled')).toBe(true);
+ await selectFirstBranch();
+ expect(wrapper.emitted('input')).toBeUndefined();
+ });
+ });
});
describe('with non-default ref types', () => {
@@ -691,9 +705,7 @@ describe('Ref selector component', () => {
});
beforeEach(() => {
- createComponent({
- scopedSlots: { footer: createFooter },
- });
+ createComponent({ overrides: { scopedSlots: { footer: createFooter } } });
updateQuery('abcd1234');
diff --git a/spec/frontend/ref/stores/actions_spec.js b/spec/frontend/ref/stores/actions_spec.js
index c6aac8c9c98..49e0b36259c 100644
--- a/spec/frontend/ref/stores/actions_spec.js
+++ b/spec/frontend/ref/stores/actions_spec.js
@@ -28,7 +28,7 @@ describe('Ref selector Vuex store actions', () => {
describe('setEnabledRefTypes', () => {
it(`commits ${types.SET_ENABLED_REF_TYPES} with the enabled ref types`, () => {
- testAction(actions.setProjectId, ALL_REF_TYPES, state, [
+ return testAction(actions.setProjectId, ALL_REF_TYPES, state, [
{ type: types.SET_PROJECT_ID, payload: ALL_REF_TYPES },
]);
});
@@ -37,7 +37,7 @@ describe('Ref selector Vuex store actions', () => {
describe('setProjectId', () => {
it(`commits ${types.SET_PROJECT_ID} with the new project ID`, () => {
const projectId = '4';
- testAction(actions.setProjectId, projectId, state, [
+ return testAction(actions.setProjectId, projectId, state, [
{ type: types.SET_PROJECT_ID, payload: projectId },
]);
});
@@ -46,7 +46,7 @@ describe('Ref selector Vuex store actions', () => {
describe('setSelectedRef', () => {
it(`commits ${types.SET_SELECTED_REF} with the new selected ref name`, () => {
const selectedRef = 'v1.2.3';
- testAction(actions.setSelectedRef, selectedRef, state, [
+ return testAction(actions.setSelectedRef, selectedRef, state, [
{ type: types.SET_SELECTED_REF, payload: selectedRef },
]);
});
@@ -55,14 +55,16 @@ describe('Ref selector Vuex store actions', () => {
describe('setParams', () => {
it(`commits ${types.SET_PARAMS} with the provided params`, () => {
const params = { sort: 'updated_asc' };
- testAction(actions.setParams, params, state, [{ type: types.SET_PARAMS, payload: params }]);
+ return testAction(actions.setParams, params, state, [
+ { type: types.SET_PARAMS, payload: params },
+ ]);
});
});
describe('search', () => {
it(`commits ${types.SET_QUERY} with the new search query`, () => {
const query = 'hello';
- testAction(actions.search, query, state, [{ type: types.SET_QUERY, payload: query }]);
+ return testAction(actions.search, query, state, [{ type: types.SET_QUERY, payload: query }]);
});
it.each`
@@ -73,7 +75,7 @@ describe('Ref selector Vuex store actions', () => {
`(`dispatches fetch actions for enabled ref types`, ({ enabledRefTypes, expectedActions }) => {
const query = 'hello';
state.enabledRefTypes = enabledRefTypes;
- testAction(
+ return testAction(
actions.search,
query,
state,
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index d18437ccec3..a55b6cdef92 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -93,7 +93,7 @@ describe('Release edit/new actions', () => {
describe('loadDraftRelease', () => {
it(`with no saved release, it commits ${types.INITIALIZE_EMPTY_RELEASE}`, () => {
- testAction({
+ return testAction({
action: actions.loadDraftRelease,
state,
expectedMutations: [{ type: types.INITIALIZE_EMPTY_RELEASE }],
@@ -203,7 +203,7 @@ describe('Release edit/new actions', () => {
describe('saveRelease', () => {
it(`commits ${types.REQUEST_SAVE_RELEASE} and then dispatched "createRelease"`, () => {
- testAction({
+ return testAction({
action: actions.saveRelease,
state,
expectedMutations: [{ type: types.REQUEST_SAVE_RELEASE }],
@@ -218,7 +218,7 @@ describe('Release edit/new actions', () => {
describe('initializeRelease', () => {
it('dispatches "fetchRelease"', () => {
- testAction({
+ return testAction({
action: actions.initializeRelease,
state,
expectedActions: [{ type: 'fetchRelease' }],
@@ -228,7 +228,7 @@ describe('Release edit/new actions', () => {
describe('saveRelease', () => {
it(`commits ${types.REQUEST_SAVE_RELEASE} and then dispatched "updateRelease"`, () => {
- testAction({
+ return testAction({
action: actions.saveRelease,
state,
expectedMutations: [{ type: types.REQUEST_SAVE_RELEASE }],
diff --git a/spec/frontend/repository/commits_service_spec.js b/spec/frontend/repository/commits_service_spec.js
index 5fb683bd370..d779abcbfd6 100644
--- a/spec/frontend/repository/commits_service_spec.js
+++ b/spec/frontend/repository/commits_service_spec.js
@@ -14,7 +14,7 @@ describe('commits service', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
-
+ window.gon.features = { encodingLogsTree: true };
mock.onGet(url).reply(HTTP_STATUS_OK, [], {});
jest.spyOn(axios, 'get');
@@ -48,14 +48,27 @@ describe('commits service', () => {
});
it('encodes the path and ref', async () => {
- const encodedRef = encodeURIComponent(refWithSpecialCharMock);
- const encodedUrl = `/some-project/-/refs/${encodedRef}/logs_tree/with%20%24peci%40l%20ch%40rs%2F`;
+ const encodedRef = encodeURI(refWithSpecialCharMock);
+ const encodedUrl = `/some-project/-/refs/${encodedRef}/logs_tree/with%20$peci@l%20ch@rs/`;
await requestCommits(1, 'some-project', 'with $peci@l ch@rs/', refWithSpecialCharMock);
expect(axios.get).toHaveBeenCalledWith(encodedUrl, expect.anything());
});
+ describe('when encodingLogsTree FF is off', () => {
+ beforeEach(() => {
+ window.gon.features = {};
+ });
+
+ it('encodes the path and ref with encodeURIComponent', async () => {
+ const encodedRef = encodeURIComponent(refWithSpecialCharMock);
+ const encodedUrl = `/some-project/-/refs/${encodedRef}/logs_tree/with%20%24peci%40l%20ch%40rs%2F`;
+ await requestCommits(1, 'some-project', 'with $peci@l ch@rs/', refWithSpecialCharMock);
+ expect(axios.get).toHaveBeenCalledWith(encodedUrl, expect.anything());
+ });
+ });
+
it('calls axios get once per batch', async () => {
await Promise.all([requestCommits(0), requestCommits(1), requestCommits(23)]);
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index e0d2984893b..cd5bc08faf0 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -75,6 +75,7 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
createMergeRequestIn = userPermissionsMock.createMergeRequestIn,
isBinary,
inject = {},
+ blobBlameInfo = true,
} = mockData;
const blobInfo = {
@@ -138,7 +139,7 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
...inject,
glFeatures: {
highlightJsWorker: false,
- blobBlameInfo: true,
+ blobBlameInfo,
},
},
}),
@@ -185,7 +186,7 @@ describe('Blob content viewer component', () => {
expect(findBlobHeader().props('hideViewerSwitcher')).toEqual(false);
expect(findBlobHeader().props('blob')).toEqual(simpleViewerMock);
expect(findBlobHeader().props('showForkSuggestion')).toEqual(false);
- expect(findBlobHeader().props('showBlameToggle')).toEqual(false);
+ expect(findBlobHeader().props('showBlameToggle')).toEqual(true);
expect(findBlobHeader().props('projectPath')).toEqual(propsMock.projectPath);
expect(findBlobHeader().props('projectId')).toEqual(projectMock.id);
expect(mockRouterPush).not.toHaveBeenCalled();
@@ -197,15 +198,15 @@ describe('Blob content viewer component', () => {
await nextTick();
};
- it('renders a blame toggle for JSON files', async () => {
- await createComponent({ blob: { ...simpleViewerMock, language: 'json' } });
+ it('renders a blame toggle', async () => {
+ await createComponent({ blob: simpleViewerMock });
expect(findBlobHeader().props('showBlameToggle')).toEqual(true);
});
it('adds blame param to the URL and passes `showBlame` to the SourceViewer', async () => {
loadViewer.mockReturnValueOnce(SourceViewerNew);
- await createComponent({ blob: { ...simpleViewerMock, language: 'json' } });
+ await createComponent({ blob: simpleViewerMock });
await triggerBlame();
@@ -217,6 +218,25 @@ describe('Blob content viewer component', () => {
expect(mockRouterPush).toHaveBeenCalledWith({ query: { blame: '0' } });
expect(findSourceViewerNew().props('showBlame')).toBe(false);
});
+
+ describe('blobBlameInfo feature flag disabled', () => {
+ it('does not render a blame toggle', async () => {
+ await createComponent({ blob: simpleViewerMock, blobBlameInfo: false });
+
+ expect(findBlobHeader().props('showBlameToggle')).toEqual(false);
+ });
+ });
+
+ describe('when viewing rich content', () => {
+ it('always shows the blame when clicking on the blame button', async () => {
+ loadViewer.mockReturnValueOnce(SourceViewerNew);
+ const query = { plain: '0', blame: '1' };
+ await createComponent({ blob: simpleViewerMock }, shallowMount, { query });
+ await triggerBlame();
+
+ expect(findSourceViewerNew().props('showBlame')).toBe(true);
+ });
+ });
});
it('creates an alert when the BlobHeader component emits an error', async () => {
@@ -260,6 +280,7 @@ describe('Blob content viewer component', () => {
expect(mockAxios.history.get).toHaveLength(1);
expect(mockAxios.history.get[0].url).toBe(legacyViewerUrl);
+ expect(findBlobHeader().props('showBlameToggle')).toEqual(false);
});
it('loads a legacy viewer when a viewer component is not available', async () => {
diff --git a/spec/frontend/repository/components/blob_controls_spec.js b/spec/frontend/repository/components/blob_controls_spec.js
index 3ced5f6c4d2..53ebabebf1d 100644
--- a/spec/frontend/repository/components/blob_controls_spec.js
+++ b/spec/frontend/repository/components/blob_controls_spec.js
@@ -8,6 +8,7 @@ import blobControlsQuery from '~/repository/queries/blob_controls.query.graphql'
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createRouter from '~/repository/router';
import { updateElementsVisibility } from '~/repository/utils/dom';
+import { resetShortcutsForTests } from '~/behaviors/shortcuts';
import ShortcutsBlob from '~/behaviors/shortcuts/shortcuts_blob';
import BlobLinePermalinkUpdater from '~/blob/blob_line_permalink_updater';
import { blobControlsDataMock, refMock } from '../mock_data';
@@ -32,6 +33,8 @@ const createComponent = async () => {
mockResolver = jest.fn().mockResolvedValue({ data: { project } });
+ await resetShortcutsForTests();
+
wrapper = shallowMountExtended(BlobControls, {
router,
apolloProvider: createMockApollo([[blobControlsQuery, mockResolver]]),
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index e14f41e2ed2..378aacd47fa 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -47,7 +47,7 @@ exports[`Repository table row component renders a symlink table row 1`] = `
<gl-intersection-observer-stub>
<timeago-tooltip-stub
cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
+ datetimeformat="asDateTime"
time="2019-01-01"
tooltipplacement="top"
/>
@@ -103,7 +103,7 @@ exports[`Repository table row component renders table row 1`] = `
<gl-intersection-observer-stub>
<timeago-tooltip-stub
cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
+ datetimeformat="asDateTime"
time="2019-01-01"
tooltipplacement="top"
/>
@@ -159,7 +159,7 @@ exports[`Repository table row component renders table row for path with special
<gl-intersection-observer-stub>
<timeago-tooltip-stub
cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
+ datetimeformat="asDateTime"
time="2019-01-01"
tooltipplacement="top"
/>
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
index c0eb65b28fe..311e5ca86f8 100644
--- a/spec/frontend/repository/components/tree_content_spec.js
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -162,26 +162,19 @@ describe('Repository table component', () => {
describe('commit data', () => {
const path = '';
- it('loads commit data for both top and bottom batches when row-appear event is emitted', () => {
- const rowNumber = 50;
-
+ it('loads commit data for the nearest page', () => {
createComponent({ path });
- findFileTable().vm.$emit('row-appear', rowNumber);
+ findFileTable().vm.$emit('row-appear', 49);
+ findFileTable().vm.$emit('row-appear', 15);
- expect(isRequested).toHaveBeenCalledWith(rowNumber);
+ expect(isRequested).toHaveBeenCalledWith(49);
+ expect(isRequested).toHaveBeenCalledWith(15);
expect(loadCommits.mock.calls).toEqual([
- ['', path, '', rowNumber, 'heads'],
- ['', path, '', rowNumber - 25, 'heads'],
+ ['', path, '', 25, 'heads'],
+ ['', path, '', 0, 'heads'],
]);
});
-
- it('loads commit data once if rowNumber is zero', () => {
- createComponent({ path });
- findFileTable().vm.$emit('row-appear', 0);
-
- expect(loadCommits.mock.calls).toEqual([['', path, '', 0, 'heads']]);
- });
});
describe('error handling', () => {
diff --git a/spec/frontend/repository/mixins/highlight_mixin_spec.js b/spec/frontend/repository/mixins/highlight_mixin_spec.js
index 50cfd71d686..c635c09d1aa 100644
--- a/spec/frontend/repository/mixins/highlight_mixin_spec.js
+++ b/spec/frontend/repository/mixins/highlight_mixin_spec.js
@@ -1,9 +1,18 @@
import { shallowMount } from '@vue/test-utils';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
import { splitIntoChunks } from '~/vue_shared/components/source_viewer/workers/highlight_utils';
import highlightMixin from '~/repository/mixins/highlight_mixin';
import LineHighlighter from '~/blob/line_highlighter';
+import waitForPromises from 'helpers/wait_for_promises';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { TEXT_FILE_TYPE } from '~/repository/constants';
-import { LINES_PER_CHUNK } from '~/vue_shared/components/source_viewer/constants';
+import {
+ LINES_PER_CHUNK,
+ EVENT_ACTION,
+ EVENT_LABEL_FALLBACK,
+} from '~/vue_shared/components/source_viewer/constants';
+import Tracking from '~/tracking';
const lineHighlighter = new LineHighlighter();
jest.mock('~/blob/line_highlighter', () => jest.fn().mockReturnValue({ highlightHash: jest.fn() }));
@@ -11,6 +20,7 @@ jest.mock('~/vue_shared/components/source_viewer/workers/highlight_utils', () =>
splitIntoChunks: jest.fn().mockResolvedValue([]),
}));
+const mockAxios = new MockAdapter(axios);
const workerMock = { postMessage: jest.fn() };
const onErrorMock = jest.fn();
@@ -21,7 +31,10 @@ describe('HighlightMixin', () => {
const rawTextBlob = contentArray.join('\n');
const languageMock = 'json';
- const createComponent = ({ fileType = TEXT_FILE_TYPE, language = languageMock } = {}) => {
+ const createComponent = (
+ { fileType = TEXT_FILE_TYPE, language = languageMock, externalStorageUrl, rawPath } = {},
+ isUsingLfs = false,
+ ) => {
const simpleViewer = { fileType };
const dummyComponent = {
@@ -32,7 +45,10 @@ describe('HighlightMixin', () => {
},
template: '<div>{{chunks[0]?.highlightedContent}}</div>',
created() {
- this.initHighlightWorker({ rawTextBlob, simpleViewer, language, fileType });
+ this.initHighlightWorker(
+ { rawTextBlob, simpleViewer, language, fileType, externalStorageUrl, rawPath },
+ isUsingLfs,
+ );
},
methods: { onError: onErrorMock },
};
@@ -45,13 +61,6 @@ describe('HighlightMixin', () => {
describe('initHighlightWorker', () => {
const firstSeventyLines = contentArray.slice(0, LINES_PER_CHUNK).join('\n');
- it('does not instruct worker if file is not a JSON file', () => {
- workerMock.postMessage.mockClear();
- createComponent({ language: 'javascript' });
-
- expect(workerMock.postMessage).not.toHaveBeenCalled();
- });
-
it('generates a chunk for the first 70 lines of raw text', () => {
expect(splitIntoChunks).toHaveBeenCalledWith(languageMock, firstSeventyLines);
});
@@ -74,6 +83,23 @@ describe('HighlightMixin', () => {
});
});
+ describe('auto-detects if a language cannot be loaded', () => {
+ const unknownLanguage = 'some_unknown_language';
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ createComponent({ language: unknownLanguage });
+ });
+
+ it('emits a tracking event for the fallback', () => {
+ const eventData = { label: EVENT_LABEL_FALLBACK, property: unknownLanguage };
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
+ });
+
+ it('calls the onError method', () => {
+ expect(onErrorMock).toHaveBeenCalled();
+ });
+ });
+
describe('worker message handling', () => {
const CHUNK_MOCK = { startingFrom: 0, totalLines: 70, highlightedContent: 'some content' };
@@ -87,4 +113,32 @@ describe('HighlightMixin', () => {
expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
});
});
+
+ describe('LFS blobs', () => {
+ const rawPath = '/org/project/-/raw/file.xml';
+ const externalStorageUrl = 'http://127.0.0.1:9000/lfs-objects/91/12/1341234';
+ const mockParams = { content: rawTextBlob, language: languageMock, fileType: TEXT_FILE_TYPE };
+
+ afterEach(() => mockAxios.reset());
+
+ it('Uses externalStorageUrl to fetch content if present', async () => {
+ mockAxios.onGet(externalStorageUrl).replyOnce(HTTP_STATUS_OK, rawTextBlob);
+ createComponent({ rawPath, externalStorageUrl }, true);
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toBe(externalStorageUrl);
+ expect(workerMock.postMessage).toHaveBeenCalledWith(mockParams);
+ });
+
+ it('Falls back to rawPath to fetch content', async () => {
+ mockAxios.onGet(rawPath).replyOnce(HTTP_STATUS_OK, rawTextBlob);
+ createComponent({ rawPath }, true);
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toBe(rawPath);
+ expect(workerMock.postMessage).toHaveBeenCalledWith(mockParams);
+ });
+ });
});
diff --git a/spec/frontend/search/sidebar/components/all_scopes_start_filters_spec.js b/spec/frontend/search/sidebar/components/all_scopes_start_filters_spec.js
new file mode 100644
index 00000000000..cd43214ed38
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/all_scopes_start_filters_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import GroupFilter from '~/search/sidebar/components/group_filter.vue';
+import ProjectFilter from '~/search/sidebar/components/project_filter.vue';
+import AllScopesStartFilters from '~/search/sidebar/components/all_scopes_start_filters.vue';
+
+describe('GlobalSearch AllScopesStartFilters', () => {
+ let wrapper;
+
+ const findGroupFilter = () => wrapper.findComponent(GroupFilter);
+ const findProjectFilter = () => wrapper.findComponent(ProjectFilter);
+
+ const createComponent = () => {
+ wrapper = shallowMount(AllScopesStartFilters);
+ };
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders ArchivedFilter', () => {
+ expect(findGroupFilter().exists()).toBe(true);
+ });
+
+ it('renders FiltersTemplate', () => {
+ expect(findProjectFilter().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/app_spec.js b/spec/frontend/search/sidebar/components/app_spec.js
index c2d88493d71..3ff6bbf7666 100644
--- a/spec/frontend/search/sidebar/components/app_spec.js
+++ b/spec/frontend/search/sidebar/components/app_spec.js
@@ -18,10 +18,9 @@ import NotesFilters from '~/search/sidebar/components/notes_filters.vue';
import CommitsFilters from '~/search/sidebar/components/commits_filters.vue';
import MilestonesFilters from '~/search/sidebar/components/milestones_filters.vue';
import WikiBlobsFilters from '~/search/sidebar/components/wiki_blobs_filters.vue';
-import ScopeLegacyNavigation from '~/search/sidebar/components/scope_legacy_navigation.vue';
-import SmallScreenDrawerNavigation from '~/search/sidebar/components/small_screen_drawer_navigation.vue';
import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
import DomElementListener from '~/vue_shared/components/dom_element_listener.vue';
+import AllScopesStartFilters from '~/search/sidebar/components/all_scopes_start_filters.vue';
jest.mock('~/super_sidebar/super_sidebar_collapsed_state_manager');
@@ -45,11 +44,6 @@ describe('GlobalSearchSidebar', () => {
wrapper = shallowMount(GlobalSearchSidebar, {
store,
- provide: {
- glFeatures: {
- searchProjectWikisHideArchivedProjects: true,
- },
- },
});
};
@@ -62,10 +56,9 @@ describe('GlobalSearchSidebar', () => {
const findCommitsFilters = () => wrapper.findComponent(CommitsFilters);
const findMilestonesFilters = () => wrapper.findComponent(MilestonesFilters);
const findWikiBlobsFilters = () => wrapper.findComponent(WikiBlobsFilters);
- const findScopeLegacyNavigation = () => wrapper.findComponent(ScopeLegacyNavigation);
- const findSmallScreenDrawerNavigation = () => wrapper.findComponent(SmallScreenDrawerNavigation);
const findScopeSidebarNavigation = () => wrapper.findComponent(ScopeSidebarNavigation);
const findDomElementListener = () => wrapper.findComponent(DomElementListener);
+ const findAllScopesStartFilters = () => wrapper.findComponent(AllScopesStartFilters);
describe('renders properly', () => {
describe('always', () => {
@@ -79,31 +72,50 @@ describe('GlobalSearchSidebar', () => {
});
describe.each`
- scope | filter | searchType | isShown
- ${'issues'} | ${findIssuesFilters} | ${SEARCH_TYPE_BASIC} | ${true}
- ${'merge_requests'} | ${findMergeRequestsFilters} | ${SEARCH_TYPE_BASIC} | ${true}
- ${'projects'} | ${findProjectsFilters} | ${SEARCH_TYPE_BASIC} | ${true}
- ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_BASIC} | ${false}
- ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
- ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_ZOEKT} | ${false}
- ${'notes'} | ${findNotesFilters} | ${SEARCH_TYPE_BASIC} | ${true}
- ${'notes'} | ${findNotesFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
- ${'commits'} | ${findCommitsFilters} | ${SEARCH_TYPE_BASIC} | ${true}
- ${'commits'} | ${findCommitsFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
- ${'milestones'} | ${findMilestonesFilters} | ${SEARCH_TYPE_BASIC} | ${true}
- ${'milestones'} | ${findMilestonesFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
- ${'wiki_blobs'} | ${findWikiBlobsFilters} | ${SEARCH_TYPE_BASIC} | ${true}
- ${'wiki_blobs'} | ${findWikiBlobsFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
- `('with sidebar $scope scope:', ({ scope, filter, searchType, isShown }) => {
+ scope | filter
+ ${'issues'} | ${findIssuesFilters}
+ ${'issues'} | ${findAllScopesStartFilters}
+ ${'merge_requests'} | ${findMergeRequestsFilters}
+ ${'merge_requests'} | ${findAllScopesStartFilters}
+ ${'projects'} | ${findProjectsFilters}
+ ${'projects'} | ${findAllScopesStartFilters}
+ ${'blobs'} | ${findAllScopesStartFilters}
+ ${'notes'} | ${findNotesFilters}
+ ${'notes'} | ${findAllScopesStartFilters}
+ ${'commits'} | ${findCommitsFilters}
+ ${'commits'} | ${findAllScopesStartFilters}
+ ${'milestones'} | ${findMilestonesFilters}
+ ${'milestones'} | ${findAllScopesStartFilters}
+ ${'wiki_blobs'} | ${findWikiBlobsFilters}
+ ${'wiki_blobs'} | ${findAllScopesStartFilters}
+ `('with sidebar scope: $scope', ({ scope, filter }) => {
+ describe.each([SEARCH_TYPE_BASIC, SEARCH_TYPE_ADVANCED])(
+ 'with search_type %s',
+ (searchType) => {
+ beforeEach(() => {
+ getterSpies.currentScope = jest.fn(() => scope);
+ createComponent({ urlQuery: { scope }, searchType });
+ });
+
+ it(`renders correctly ${filter.name.replace('find', '')}`, () => {
+ expect(filter().exists()).toBe(true);
+ });
+ },
+ );
+ });
+
+ describe.each`
+ scope | filter | searchType | isShown
+ ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_BASIC} | ${false}
+ ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
+ ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_ZOEKT} | ${false}
+ `('sidebar blobs scope:', ({ scope, filter, searchType, isShown }) => {
beforeEach(() => {
getterSpies.currentScope = jest.fn(() => scope);
createComponent({ urlQuery: { scope }, searchType });
});
- it(`renders correctly filter ${filter.name.replace(
- 'find',
- '',
- )} when search_type ${searchType}`, () => {
+ it(`renders correctly filter BlobsFilters when search_type ${searchType}`, () => {
expect(filter().exists()).toBe(isShown);
});
});
@@ -129,46 +141,27 @@ describe('GlobalSearchSidebar', () => {
});
});
- describe.each`
- currentScope | sidebarNavShown | legacyNavShown
- ${'issues'} | ${false} | ${true}
- ${'test'} | ${false} | ${true}
- ${'issues'} | ${true} | ${false}
- ${'test'} | ${true} | ${false}
- `(
- 'renders navigation for scope $currentScope',
- ({ currentScope, sidebarNavShown, legacyNavShown }) => {
- beforeEach(() => {
- getterSpies.currentScope = jest.fn(() => currentScope);
- createComponent({ useSidebarNavigation: sidebarNavShown });
- });
-
- it(`renders navigation correctly with legacyNavShown ${legacyNavShown}`, () => {
- expect(findScopeLegacyNavigation().exists()).toBe(legacyNavShown);
- expect(findSmallScreenDrawerNavigation().exists()).toBe(legacyNavShown);
- });
-
- it(`renders navigation correctly with sidebarNavShown ${sidebarNavShown}`, () => {
- expect(findScopeSidebarNavigation().exists()).toBe(sidebarNavShown);
- });
- },
- );
- });
+ describe.each(['issues', 'test'])('for scope %p', (currentScope) => {
+ beforeEach(() => {
+ getterSpies.currentScope = jest.fn(() => currentScope);
+ createComponent();
+ });
- describe('when useSidebarNavigation=true', () => {
- beforeEach(() => {
- createComponent({ useSidebarNavigation: true });
+ it(`renders navigation correctly`, () => {
+ expect(findScopeSidebarNavigation().exists()).toBe(true);
+ });
});
+ });
- it('toggles super sidebar when button is clicked', () => {
- const elListener = findDomElementListener();
+ it('toggles super sidebar when button is clicked', () => {
+ createComponent();
+ const elListener = findDomElementListener();
- expect(toggleSuperSidebarCollapsed).not.toHaveBeenCalled();
+ expect(toggleSuperSidebarCollapsed).not.toHaveBeenCalled();
- elListener.vm.$emit('click');
+ elListener.vm.$emit('click');
- expect(toggleSuperSidebarCollapsed).toHaveBeenCalledTimes(1);
- expect(elListener.props('selector')).toBe('#js-open-mobile-filters');
- });
+ expect(toggleSuperSidebarCollapsed).toHaveBeenCalledTimes(1);
+ expect(elListener.props('selector')).toBe('#js-open-mobile-filters');
});
});
diff --git a/spec/frontend/search/sidebar/components/archived_filter_spec.js b/spec/frontend/search/sidebar/components/archived_filter_spec.js
index 9ed677ca297..9e8ababa5da 100644
--- a/spec/frontend/search/sidebar/components/archived_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/archived_filter_spec.js
@@ -33,7 +33,7 @@ describe('ArchivedFilter', () => {
const findCheckboxFilter = () => wrapper.findComponent(GlFormCheckboxGroup);
const findCheckboxFilterLabel = () => wrapper.findByTestId('label');
- const findH5 = () => wrapper.findComponent('h5');
+ const findTitle = () => wrapper.findByTestId('archived-filter-title');
describe('old sidebar', () => {
beforeEach(() => {
@@ -45,8 +45,8 @@ describe('ArchivedFilter', () => {
});
it('renders the divider', () => {
- expect(findH5().exists()).toBe(true);
- expect(findH5().text()).toBe(archivedFilterData.headerLabel);
+ expect(findTitle().exists()).toBe(true);
+ expect(findTitle().text()).toBe(archivedFilterData.headerLabel);
});
it('wraps the label element with a tooltip', () => {
@@ -66,8 +66,8 @@ describe('ArchivedFilter', () => {
});
it("doesn't render the divider", () => {
- expect(findH5().exists()).toBe(true);
- expect(findH5().text()).toBe(archivedFilterData.headerLabel);
+ expect(findTitle().exists()).toBe(true);
+ expect(findTitle().text()).toBe(archivedFilterData.headerLabel);
});
it('wraps the label element with a tooltip', () => {
diff --git a/spec/frontend/search/sidebar/components/blobs_filters_spec.js b/spec/frontend/search/sidebar/components/blobs_filters_spec.js
index 245ddb8f8bb..3f1feae8527 100644
--- a/spec/frontend/search/sidebar/components/blobs_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/blobs_filters_spec.js
@@ -17,13 +17,11 @@ describe('GlobalSearch BlobsFilters', () => {
currentScope: () => 'blobs',
};
- const createComponent = ({ initialState = {} } = {}) => {
+ const createComponent = () => {
const store = new Vuex.Store({
state: {
urlQuery: MOCK_QUERY,
- useSidebarNavigation: false,
searchType: SEARCH_TYPE_ADVANCED,
- ...initialState,
},
getters: defaultGetters,
});
@@ -35,10 +33,9 @@ describe('GlobalSearch BlobsFilters', () => {
const findLanguageFilter = () => wrapper.findComponent(LanguageFilter);
const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
- const findDividers = () => wrapper.findAll('hr');
beforeEach(() => {
- createComponent({});
+ createComponent();
});
it('renders LanguageFilter', () => {
@@ -48,31 +45,4 @@ describe('GlobalSearch BlobsFilters', () => {
it('renders ArchivedFilter', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
-
- it('renders divider correctly', () => {
- expect(findDividers()).toHaveLength(1);
- });
-
- describe('Renders correctly in new nav', () => {
- beforeEach(() => {
- createComponent({
- initialState: {
- searchType: SEARCH_TYPE_ADVANCED,
- useSidebarNavigation: true,
- },
- });
- });
-
- it('renders correctly LanguageFilter', () => {
- expect(findLanguageFilter().exists()).toBe(true);
- });
-
- it('renders correctly ArchivedFilter', () => {
- expect(findArchivedFilter().exists()).toBe(true);
- });
-
- it("doesn't render dividers", () => {
- expect(findDividers()).toHaveLength(0);
- });
- });
});
diff --git a/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js b/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
index 6444ec10466..fedbd407b0b 100644
--- a/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
@@ -22,23 +22,11 @@ describe('ConfidentialityFilter', () => {
const findRadioFilter = () => wrapper.findComponent(RadioFilter);
- describe('old sidebar', () => {
- beforeEach(() => {
- createComponent({ useSidebarNavigation: false });
- });
-
- it('renders the component', () => {
- expect(findRadioFilter().exists()).toBe(true);
- });
+ beforeEach(() => {
+ createComponent();
});
- describe('new sidebar', () => {
- beforeEach(() => {
- createComponent({ useSidebarNavigation: true });
- });
-
- it('renders the component', () => {
- expect(findRadioFilter().exists()).toBe(true);
- });
+ it('renders the component', () => {
+ expect(findRadioFilter().exists()).toBe(true);
});
});
diff --git a/spec/frontend/search/sidebar/components/filters_template_spec.js b/spec/frontend/search/sidebar/components/filters_template_spec.js
index f1a807c5ceb..18144e25ac3 100644
--- a/spec/frontend/search/sidebar/components/filters_template_spec.js
+++ b/spec/frontend/search/sidebar/components/filters_template_spec.js
@@ -52,7 +52,6 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
};
const findForm = () => wrapper.findComponent(GlForm);
- const findDividers = () => wrapper.findAll('hr');
const findApplyButton = () => wrapper.findComponent(GlButton);
const findResetButton = () => wrapper.findComponent(GlLink);
const findSlotContent = () => wrapper.findByText('Filters Content');
@@ -66,10 +65,6 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
expect(findForm().exists()).toBe(true);
});
- it('renders dividers', () => {
- expect(findDividers()).toHaveLength(2);
- });
-
it('renders slot content', () => {
expect(findSlotContent().exists()).toBe(true);
});
diff --git a/spec/frontend/search/topbar/components/group_filter_spec.js b/spec/frontend/search/sidebar/components/group_filter_spec.js
index fa8036a7f97..a90a8a38267 100644
--- a/spec/frontend/search/topbar/components/group_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/group_filter_spec.js
@@ -1,13 +1,14 @@
import { shallowMount } from '@vue/test-utils';
+import { cloneDeep } from 'lodash';
import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_GROUP, MOCK_QUERY, CURRENT_SCOPE } from 'jest/search/mock_data';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import { GROUPS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
-import GroupFilter from '~/search/topbar/components/group_filter.vue';
-import SearchableDropdown from '~/search/topbar/components/searchable_dropdown.vue';
-import { ANY_OPTION, GROUP_DATA, PROJECT_DATA } from '~/search/topbar/constants';
+import GroupFilter from '~/search/sidebar/components/group_filter.vue';
+import SearchableDropdown from '~/search/sidebar/components/searchable_dropdown.vue';
+import { ANY_OPTION, GROUP_DATA, PROJECT_DATA } from '~/search/sidebar/constants';
Vue.use(Vuex);
@@ -27,6 +28,7 @@ describe('GroupFilter', () => {
const defaultProps = {
initialData: null,
+ searchHandler: jest.fn(),
};
const createComponent = (initialState, props) => {
@@ -68,19 +70,6 @@ describe('GroupFilter', () => {
createComponent();
});
- describe('when @search is emitted', () => {
- const search = 'test';
-
- beforeEach(() => {
- findSearchableDropdown().vm.$emit('search', search);
- });
-
- it('calls fetchGroups with the search paramter', () => {
- expect(actionSpies.fetchGroups).toHaveBeenCalledTimes(1);
- expect(actionSpies.fetchGroups).toHaveBeenCalledWith(expect.any(Object), search);
- });
- });
-
describe('when @change is emitted with Any', () => {
beforeEach(() => {
findSearchableDropdown().vm.$emit('change', ANY_OPTION);
@@ -148,11 +137,12 @@ describe('GroupFilter', () => {
describe('when initialData is set', () => {
beforeEach(() => {
- createComponent({}, { initialData: MOCK_GROUP });
+ createComponent({ groupInitialJson: { ...MOCK_GROUP } }, {});
});
it('sets selectedGroup to ANY_OPTION', () => {
- expect(wrapper.vm.selectedGroup).toBe(MOCK_GROUP);
+ // cloneDeep to fix Property or method `nodeType` is not defined bug
+ expect(cloneDeep(wrapper.vm.selectedGroup)).toStrictEqual(MOCK_GROUP);
});
});
});
@@ -169,7 +159,13 @@ describe('GroupFilter', () => {
initialData ? 'has' : 'does not have'
} an initial group`, () => {
beforeEach(() => {
- createComponent({ query: { ...MOCK_QUERY, nav_source: navSource } }, { initialData });
+ createComponent(
+ {
+ query: { ...MOCK_QUERY, nav_source: navSource },
+ groupInitialJson: { ...initialData },
+ },
+ {},
+ );
});
it(`${callMethod ? 'does' : 'does not'} call setFrequentGroup`, () => {
diff --git a/spec/frontend/search/sidebar/components/issues_filters_spec.js b/spec/frontend/search/sidebar/components/issues_filters_spec.js
index 860c5c147a6..ce9c6c2bb0c 100644
--- a/spec/frontend/search/sidebar/components/issues_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/issues_filters_spec.js
@@ -19,11 +19,10 @@ describe('GlobalSearch IssuesFilters', () => {
currentScope: () => 'issues',
};
- const createComponent = ({ initialState = {}, searchIssueLabelAggregation = true } = {}) => {
+ const createComponent = ({ initialState = {} } = {}) => {
const store = new Vuex.Store({
state: {
urlQuery: MOCK_QUERY,
- useSidebarNavigation: false,
searchType: SEARCH_TYPE_ADVANCED,
...initialState,
},
@@ -32,11 +31,6 @@ describe('GlobalSearch IssuesFilters', () => {
wrapper = shallowMount(IssuesFilters, {
store,
- provide: {
- glFeatures: {
- searchIssueLabelAggregation,
- },
- },
});
};
@@ -44,17 +38,10 @@ describe('GlobalSearch IssuesFilters', () => {
const findConfidentialityFilter = () => wrapper.findComponent(ConfidentialityFilter);
const findLabelFilter = () => wrapper.findComponent(LabelFilter);
const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
- const findDividers = () => wrapper.findAll('hr');
- describe.each`
- description | searchIssueLabelAggregation
- ${'Renders correctly with Label Filter disabled'} | ${false}
- ${'Renders correctly with Label Filter enabled'} | ${true}
- `('$description', ({ searchIssueLabelAggregation }) => {
+ describe('Renders filters correctly with advanced search', () => {
beforeEach(() => {
- createComponent({
- searchIssueLabelAggregation,
- });
+ createComponent();
});
it('renders StatusFilter', () => {
@@ -69,17 +56,8 @@ describe('GlobalSearch IssuesFilters', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
- it(`renders correctly LabelFilter when searchIssueLabelAggregation is ${searchIssueLabelAggregation}`, () => {
- expect(findLabelFilter().exists()).toBe(searchIssueLabelAggregation);
- });
-
- it('renders divider correctly', () => {
- // two dividers can't be disabled
- let dividersCount = 2;
- if (searchIssueLabelAggregation) {
- dividersCount += 1;
- }
- expect(findDividers()).toHaveLength(dividersCount);
+ it('renders correctly LabelFilter', () => {
+ expect(findLabelFilter().exists()).toBe(true);
});
});
@@ -102,41 +80,6 @@ describe('GlobalSearch IssuesFilters', () => {
it("doesn't render ArchivedFilter", () => {
expect(findArchivedFilter().exists()).toBe(true);
});
-
- it('renders 1 divider', () => {
- expect(findDividers()).toHaveLength(2);
- });
- });
-
- describe('Renders correctly in new nav', () => {
- beforeEach(() => {
- createComponent({
- initialState: {
- searchType: SEARCH_TYPE_ADVANCED,
- useSidebarNavigation: true,
- },
- searchIssueLabelAggregation: true,
- });
- });
- it('renders StatusFilter', () => {
- expect(findStatusFilter().exists()).toBe(true);
- });
-
- it('renders ConfidentialityFilter', () => {
- expect(findConfidentialityFilter().exists()).toBe(true);
- });
-
- it('renders LabelFilter', () => {
- expect(findLabelFilter().exists()).toBe(true);
- });
-
- it('renders ArchivedFilter', () => {
- expect(findArchivedFilter().exists()).toBe(true);
- });
-
- it("doesn't render dividers", () => {
- expect(findDividers()).toHaveLength(0);
- });
});
describe('Renders correctly with wrong scope', () => {
@@ -159,9 +102,5 @@ describe('GlobalSearch IssuesFilters', () => {
it("doesn't render ArchivedFilter", () => {
expect(findArchivedFilter().exists()).toBe(false);
});
-
- it("doesn't render dividers", () => {
- expect(findDividers()).toHaveLength(0);
- });
});
});
diff --git a/spec/frontend/search/sidebar/components/label_filter_spec.js b/spec/frontend/search/sidebar/components/label_filter_spec.js
index 9d2a0c5e739..7641036b9f6 100644
--- a/spec/frontend/search/sidebar/components/label_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/label_filter_spec.js
@@ -85,11 +85,6 @@ describe('GlobalSearchSidebarLabelFilter', () => {
wrapper = mountExtended(LabelFilter, {
store,
- provide: {
- glFeatures: {
- searchIssueLabelAggregation: true,
- },
- },
});
};
diff --git a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
index b02228a418f..8cd3cb45a20 100644
--- a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
@@ -21,7 +21,6 @@ describe('GlobalSearch MergeRequestsFilters', () => {
const store = new Vuex.Store({
state: {
urlQuery: MOCK_QUERY,
- useSidebarNavigation: false,
searchType: SEARCH_TYPE_ADVANCED,
...initialState,
},
@@ -35,7 +34,6 @@ describe('GlobalSearch MergeRequestsFilters', () => {
const findStatusFilter = () => wrapper.findComponent(StatusFilter);
const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
- const findDividers = () => wrapper.findAll('hr');
describe('Renders correctly with Archived Filter', () => {
beforeEach(() => {
@@ -46,8 +44,8 @@ describe('GlobalSearch MergeRequestsFilters', () => {
expect(findStatusFilter().exists()).toBe(true);
});
- it('renders divider correctly', () => {
- expect(findDividers()).toHaveLength(1);
+ it('renders ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
});
});
@@ -60,33 +58,9 @@ describe('GlobalSearch MergeRequestsFilters', () => {
expect(findStatusFilter().exists()).toBe(true);
});
- it('renders render ArchivedFilter', () => {
- expect(findArchivedFilter().exists()).toBe(true);
- });
-
- it('renders 1 divider', () => {
- expect(findDividers()).toHaveLength(1);
- });
- });
-
- describe('Renders correctly in new nav', () => {
- beforeEach(() => {
- createComponent({
- searchType: SEARCH_TYPE_ADVANCED,
- useSidebarNavigation: true,
- });
- });
- it('renders StatusFilter', () => {
- expect(findStatusFilter().exists()).toBe(true);
- });
-
it('renders ArchivedFilter', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
-
- it("doesn't render divider", () => {
- expect(findDividers()).toHaveLength(0);
- });
});
describe('Renders correctly with wrong scope', () => {
@@ -101,9 +75,5 @@ describe('GlobalSearch MergeRequestsFilters', () => {
it("doesn't render ArchivedFilter", () => {
expect(findArchivedFilter().exists()).toBe(false);
});
-
- it("doesn't render dividers", () => {
- expect(findDividers()).toHaveLength(0);
- });
});
});
diff --git a/spec/frontend/search/topbar/components/project_filter_spec.js b/spec/frontend/search/sidebar/components/project_filter_spec.js
index e7808370098..817ec77380f 100644
--- a/spec/frontend/search/topbar/components/project_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/project_filter_spec.js
@@ -1,13 +1,14 @@
import { shallowMount } from '@vue/test-utils';
+import { cloneDeep } from 'lodash';
import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_PROJECT, MOCK_QUERY, CURRENT_SCOPE } from 'jest/search/mock_data';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import { PROJECTS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
-import ProjectFilter from '~/search/topbar/components/project_filter.vue';
-import SearchableDropdown from '~/search/topbar/components/searchable_dropdown.vue';
-import { ANY_OPTION, GROUP_DATA, PROJECT_DATA } from '~/search/topbar/constants';
+import ProjectFilter from '~/search/sidebar/components/project_filter.vue';
+import SearchableDropdown from '~/search/sidebar/components/searchable_dropdown.vue';
+import { ANY_OPTION, GROUP_DATA, PROJECT_DATA } from '~/search/sidebar/constants';
Vue.use(Vuex);
@@ -27,12 +28,15 @@ describe('ProjectFilter', () => {
const defaultProps = {
initialData: null,
+ projectInitialJson: MOCK_PROJECT,
+ searchHandler: jest.fn(),
};
const createComponent = (initialState, props) => {
const store = new Vuex.Store({
state: {
query: MOCK_QUERY,
+ projectInitialJson: MOCK_PROJECT,
...initialState,
},
actions: actionSpies,
@@ -68,18 +72,6 @@ describe('ProjectFilter', () => {
createComponent();
});
- describe('when @search is emitted', () => {
- const search = 'test';
-
- beforeEach(() => {
- findSearchableDropdown().vm.$emit('search', search);
- });
-
- it('calls fetchProjects with the search paramter', () => {
- expect(actionSpies.fetchProjects).toHaveBeenCalledWith(expect.any(Object), search);
- });
- });
-
describe('when @change is emitted', () => {
describe('with Any', () => {
beforeEach(() => {
@@ -139,17 +131,17 @@ describe('ProjectFilter', () => {
describe('selectedProject', () => {
describe('when initialData is null', () => {
beforeEach(() => {
- createComponent();
+ createComponent({ projectInitialJson: ANY_OPTION }, {});
});
it('sets selectedProject to ANY_OPTION', () => {
- expect(wrapper.vm.selectedProject).toBe(ANY_OPTION);
+ expect(cloneDeep(wrapper.vm.selectedProject)).toStrictEqual(ANY_OPTION);
});
});
describe('when initialData is set', () => {
beforeEach(() => {
- createComponent({}, { initialData: MOCK_PROJECT });
+ createComponent({ projectInitialJson: MOCK_PROJECT }, {});
});
it('sets selectedProject to the initialData', () => {
@@ -170,7 +162,13 @@ describe('ProjectFilter', () => {
initialData ? 'has' : 'does not have'
} an initial project`, () => {
beforeEach(() => {
- createComponent({ query: { ...MOCK_QUERY, nav_source: navSource } }, { initialData });
+ createComponent(
+ {
+ query: { ...MOCK_QUERY, nav_source: navSource },
+ projectInitialJson: { ...initialData },
+ },
+ {},
+ );
});
it(`${callMethod ? 'does' : 'does not'} call setFrequentProject`, () => {
diff --git a/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js b/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js
deleted file mode 100644
index 63d8b34fcf0..00000000000
--- a/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js
+++ /dev/null
@@ -1,145 +0,0 @@
-import { GlNav, GlNavItem, GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import { MOCK_QUERY, MOCK_NAVIGATION } from 'jest/search/mock_data';
-import ScopeLegacyNavigation from '~/search/sidebar/components/scope_legacy_navigation.vue';
-
-Vue.use(Vuex);
-
-const MOCK_NAVIGATION_ENTRIES = Object.entries(MOCK_NAVIGATION);
-
-describe('ScopeLegacyNavigation', () => {
- let wrapper;
-
- const actionSpies = {
- fetchSidebarCount: jest.fn(),
- };
-
- const getterSpies = {
- currentScope: jest.fn(() => 'issues'),
- };
-
- const createComponent = (initialState) => {
- const store = new Vuex.Store({
- state: {
- urlQuery: MOCK_QUERY,
- navigation: MOCK_NAVIGATION,
- ...initialState,
- },
- actions: actionSpies,
- getters: getterSpies,
- });
-
- wrapper = shallowMount(ScopeLegacyNavigation, {
- store,
- });
- };
-
- const findNavElement = () => wrapper.find('nav');
- const findGlNav = () => wrapper.findComponent(GlNav);
- const findGlNavItems = () => wrapper.findAllComponents(GlNavItem);
- const findGlNavItemActive = () => wrapper.find('[active=true]');
- const findGlNavItemActiveLabel = () => findGlNavItemActive().find('[data-testid="label"]');
- const findGlNavItemActiveCount = () => findGlNavItemActive().find('[data-testid="count"]');
-
- describe('scope navigation', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders section', () => {
- expect(findNavElement().exists()).toBe(true);
- });
-
- it('renders nav component', () => {
- expect(findGlNav().exists()).toBe(true);
- });
-
- it('renders all nav item components', () => {
- expect(findGlNavItems()).toHaveLength(MOCK_NAVIGATION_ENTRIES.length);
- });
-
- it('has all proper links', () => {
- const linkAtPosition = 3;
- const { link } = MOCK_NAVIGATION_ENTRIES[linkAtPosition][1];
-
- expect(findGlNavItems().at(linkAtPosition).attributes('href')).toBe(link);
- });
- });
-
- describe('scope navigation sets proper state with url scope set', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('has correct active item', () => {
- expect(findGlNavItemActive().exists()).toBe(true);
- expect(findGlNavItemActiveLabel().text()).toBe('Issues');
- });
-
- it('has correct active item count', () => {
- expect(findGlNavItemActiveCount().text()).toBe('2.4K');
- });
-
- it('does not have plus sign after count text', () => {
- expect(findGlNavItemActive().findComponent(GlIcon).exists()).toBe(false);
- });
-
- it('has count is highlighted correctly', () => {
- expect(findGlNavItemActiveCount().classes('gl-text-gray-900')).toBe(true);
- });
- });
-
- describe('scope navigation sets proper state with NO url scope set', () => {
- beforeEach(() => {
- getterSpies.currentScope = jest.fn(() => 'projects');
- createComponent({
- urlQuery: {},
- navigation: {
- ...MOCK_NAVIGATION,
- projects: {
- ...MOCK_NAVIGATION.projects,
- active: true,
- },
- issues: {
- ...MOCK_NAVIGATION.issues,
- active: false,
- },
- },
- });
- });
-
- it('has correct active item', () => {
- expect(findGlNavItemActive().exists()).toBe(true);
- expect(findGlNavItemActiveLabel().text()).toBe('Projects');
- });
-
- it('has correct active item count', () => {
- expect(findGlNavItemActiveCount().text()).toBe('10K');
- });
-
- it('has correct active item count and over limit sign', () => {
- expect(findGlNavItemActive().findComponent(GlIcon).exists()).toBe(true);
- });
- });
-
- describe.each`
- searchTherm | hasBeenCalled
- ${null} | ${0}
- ${'test'} | ${1}
- `('fetchSidebarCount', ({ searchTherm, hasBeenCalled }) => {
- beforeEach(() => {
- createComponent({
- urlQuery: {
- search: searchTherm,
- },
- });
- });
-
- it('is only called when search term is set', () => {
- expect(actionSpies.fetchSidebarCount).toHaveBeenCalledTimes(hasBeenCalled);
- });
- });
-});
diff --git a/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js b/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js
index d85942b9634..44c243d15f7 100644
--- a/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js
+++ b/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js
@@ -2,6 +2,7 @@ import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
+import sidebarEventHub from '~/super_sidebar/event_hub';
import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
import NavItem from '~/super_sidebar/components/nav_item.vue';
import { MOCK_QUERY, MOCK_NAVIGATION, MOCK_NAVIGATION_ITEMS } from '../../mock_data';
@@ -49,6 +50,7 @@ describe('ScopeSidebarNavigation', () => {
describe('scope navigation', () => {
beforeEach(() => {
+ jest.spyOn(sidebarEventHub, '$emit');
createComponent({ urlQuery: { ...MOCK_QUERY, search: 'test' } });
});
@@ -71,6 +73,11 @@ describe('ScopeSidebarNavigation', () => {
expect(findNavItems().at(linkAtPosition).findComponent('a').attributes('href')).toBe(link);
});
+
+ it('always emits toggle-menu-header event', () => {
+ expect(sidebarEventHub.$emit).toHaveBeenCalledWith('toggle-menu-header', false);
+ expect(sidebarEventHub.$emit).toHaveBeenCalledTimes(1);
+ });
});
describe('scope navigation sets proper state with url scope set', () => {
diff --git a/spec/frontend/search/sidebar/components/searchable_dropdown_spec.js b/spec/frontend/search/sidebar/components/searchable_dropdown_spec.js
new file mode 100644
index 00000000000..c8f157e4fe4
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/searchable_dropdown_spec.js
@@ -0,0 +1,117 @@
+import { GlCollapsibleListbox } from '@gitlab/ui';
+import { cloneDeep } from 'lodash';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+import waitForPromises from 'helpers/wait_for_promises';
+import { MOCK_GROUPS, MOCK_QUERY } from 'jest/search/mock_data';
+import SearchableDropdown from '~/search/sidebar/components/searchable_dropdown.vue';
+import { ANY_OPTION, GROUP_DATA } from '~/search/sidebar/constants';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+
+Vue.use(Vuex);
+
+describe('Global Search Searchable Dropdown', () => {
+ let wrapper;
+
+ const defaultProps = {
+ headerText: GROUP_DATA.headerText,
+ name: GROUP_DATA.name,
+ fullName: GROUP_DATA.fullName,
+ loading: false,
+ selectedItem: ANY_OPTION,
+ items: [],
+ frequentItems: [{ ...MOCK_GROUPS[0] }],
+ searchHandler: jest.fn(),
+ };
+
+ const createComponent = (initialState, props) => {
+ const store = new Vuex.Store({
+ state: {
+ query: MOCK_QUERY,
+ ...initialState,
+ },
+ });
+
+ wrapper = shallowMount(SearchableDropdown, {
+ store,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findGlDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders GlDropdown', () => {
+ expect(findGlDropdown().exists()).toBe(true);
+ });
+
+ const propItems = [
+ { text: '', options: [{ value: ANY_OPTION.name, text: ANY_OPTION.name, ...ANY_OPTION }] },
+ {
+ text: 'Frequently searched',
+ options: [{ value: MOCK_GROUPS[0].id, text: MOCK_GROUPS[0].full_name, ...MOCK_GROUPS[0] }],
+ },
+ {
+ text: 'All available groups',
+ options: [{ value: MOCK_GROUPS[1].id, text: MOCK_GROUPS[1].full_name, ...MOCK_GROUPS[1] }],
+ },
+ ];
+
+ beforeEach(() => {
+ createComponent({}, { items: MOCK_GROUPS });
+ });
+
+ it('contains correct set of items', () => {
+ expect(findGlDropdown().props('items')).toStrictEqual(propItems);
+ });
+
+ it('renders searchable prop', () => {
+ expect(findGlDropdown().props('searchable')).toBe(true);
+ });
+
+ describe('events', () => {
+ it('emits select', () => {
+ findGlDropdown().vm.$emit('select', 1);
+ expect(cloneDeep(wrapper.emitted('change')[0][0])).toStrictEqual(MOCK_GROUPS[0]);
+ });
+
+ it('emits reset', () => {
+ findGlDropdown().vm.$emit('reset');
+ expect(cloneDeep(wrapper.emitted('change')[0][0])).toStrictEqual(ANY_OPTION);
+ });
+
+ it('emits first-open', () => {
+ findGlDropdown().vm.$emit('shown');
+ expect(wrapper.emitted('first-open')).toHaveLength(1);
+ findGlDropdown().vm.$emit('shown');
+ expect(wrapper.emitted('first-open')).toHaveLength(1);
+ });
+ });
+ });
+
+ describe('when @search is emitted', () => {
+ const search = 'test';
+
+ beforeEach(async () => {
+ createComponent();
+ findGlDropdown().vm.$emit('search', search);
+
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ await waitForPromises();
+ });
+
+ it('calls fetchGroups with the search paramter', () => {
+ expect(defaultProps.searchHandler).toHaveBeenCalledTimes(1);
+ expect(defaultProps.searchHandler).toHaveBeenCalledWith(search);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/small_screen_drawer_navigation_spec.js b/spec/frontend/search/sidebar/components/small_screen_drawer_navigation_spec.js
deleted file mode 100644
index 5ab4afba7f0..00000000000
--- a/spec/frontend/search/sidebar/components/small_screen_drawer_navigation_spec.js
+++ /dev/null
@@ -1,68 +0,0 @@
-import { nextTick } from 'vue';
-import { GlDrawer } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import DomElementListener from '~/vue_shared/components/dom_element_listener.vue';
-import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
-import SmallScreenDrawerNavigation from '~/search/sidebar/components/small_screen_drawer_navigation.vue';
-
-describe('ScopeLegacyNavigation', () => {
- let wrapper;
- let closeSpy;
- let toggleSpy;
-
- const createComponent = () => {
- wrapper = shallowMountExtended(SmallScreenDrawerNavigation, {
- slots: {
- default: '<div data-testid="default-slot-content">test</div>',
- },
- });
- };
-
- const findGlDrawer = () => wrapper.findComponent(GlDrawer);
- const findTitle = () => wrapper.findComponent('h2');
- const findSlot = () => wrapper.findByTestId('default-slot-content');
- const findDomElementListener = () => wrapper.findComponent(DomElementListener);
-
- describe('small screen navigation', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders drawer', () => {
- expect(findGlDrawer().exists()).toBe(true);
- expect(findGlDrawer().attributes('zindex')).toBe(DRAWER_Z_INDEX.toString());
- expect(findGlDrawer().attributes('headerheight')).toBe('0');
- });
-
- it('renders title', () => {
- expect(findTitle().exists()).toBe(true);
- });
-
- it('renders slots', () => {
- expect(findSlot().exists()).toBe(true);
- });
- });
-
- describe('actions', () => {
- beforeEach(() => {
- closeSpy = jest.spyOn(SmallScreenDrawerNavigation.methods, 'closeSmallScreenFilters');
- toggleSpy = jest.spyOn(SmallScreenDrawerNavigation.methods, 'toggleSmallScreenFilters');
- createComponent();
- });
-
- it('calls onClose', () => {
- findGlDrawer().vm.$emit('close');
- expect(closeSpy).toHaveBeenCalled();
- });
-
- it('calls toggleSmallScreenFilters', async () => {
- expect(findGlDrawer().props('open')).toBe(false);
-
- findDomElementListener().vm.$emit('click');
- await nextTick();
-
- expect(toggleSpy).toHaveBeenCalled();
- expect(findGlDrawer().props('open')).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/search/sidebar/components/status_filter_spec.js b/spec/frontend/search/sidebar/components/status_filter_spec.js
index c230341c172..719932a79ef 100644
--- a/spec/frontend/search/sidebar/components/status_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/status_filter_spec.js
@@ -22,23 +22,9 @@ describe('StatusFilter', () => {
const findRadioFilter = () => wrapper.findComponent(RadioFilter);
- describe('old sidebar', () => {
- beforeEach(() => {
- createComponent({ useSidebarNavigation: false });
- });
-
- it('renders the component', () => {
- expect(findRadioFilter().exists()).toBe(true);
- });
- });
+ it('renders the component', () => {
+ createComponent();
- describe('new sidebar', () => {
- beforeEach(() => {
- createComponent({ useSidebarNavigation: true });
- });
-
- it('renders the component', () => {
- expect(findRadioFilter().exists()).toBe(true);
- });
+ expect(findRadioFilter().exists()).toBe(true);
});
});
diff --git a/spec/frontend/search/store/mutations_spec.js b/spec/frontend/search/store/mutations_spec.js
index a517932b0eb..3462d4a326b 100644
--- a/spec/frontend/search/store/mutations_spec.js
+++ b/spec/frontend/search/store/mutations_spec.js
@@ -31,7 +31,7 @@ describe('Global Search Store Mutations', () => {
mutations[types.RECEIVE_GROUPS_SUCCESS](state, MOCK_GROUPS);
expect(state.fetchingGroups).toBe(false);
- expect(state.groups).toBe(MOCK_GROUPS);
+ expect(state.groups).toStrictEqual(MOCK_GROUPS);
});
});
@@ -57,7 +57,7 @@ describe('Global Search Store Mutations', () => {
mutations[types.RECEIVE_PROJECTS_SUCCESS](state, MOCK_PROJECTS);
expect(state.fetchingProjects).toBe(false);
- expect(state.projects).toBe(MOCK_PROJECTS);
+ expect(state.projects).toStrictEqual(MOCK_PROJECTS);
});
});
diff --git a/spec/frontend/search/topbar/components/app_spec.js b/spec/frontend/search/topbar/components/app_spec.js
index 9704277c86b..d17bdc2a6e1 100644
--- a/spec/frontend/search/topbar/components/app_spec.js
+++ b/spec/frontend/search/topbar/components/app_spec.js
@@ -1,14 +1,14 @@
-import { GlSearchBoxByClick, GlButton } from '@gitlab/ui';
+import { GlSearchBoxByType, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { MOCK_QUERY } from 'jest/search/mock_data';
import { stubComponent } from 'helpers/stub_component';
import GlobalSearchTopbar from '~/search/topbar/components/app.vue';
-import GroupFilter from '~/search/topbar/components/group_filter.vue';
-import ProjectFilter from '~/search/topbar/components/project_filter.vue';
import MarkdownDrawer from '~/vue_shared/components/markdown_drawer/markdown_drawer.vue';
+import SearchTypeIndicator from '~/search/topbar/components/search_type_indicator.vue';
+import { ENTER_KEY } from '~/lib/utils/keys';
import {
SYNTAX_OPTIONS_ADVANCED_DOCUMENT,
SYNTAX_OPTIONS_ZOEKT_DOCUMENT,
@@ -41,42 +41,22 @@ describe('GlobalSearchTopbar', () => {
});
};
- const findGlSearchBox = () => wrapper.findComponent(GlSearchBoxByClick);
- const findGroupFilter = () => wrapper.findComponent(GroupFilter);
- const findProjectFilter = () => wrapper.findComponent(ProjectFilter);
+ const findGlSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findSyntaxOptionButton = () => wrapper.findComponent(GlButton);
const findSyntaxOptionDrawer = () => wrapper.findComponent(MarkdownDrawer);
+ const findSearchTypeIndicator = () => wrapper.findComponent(SearchTypeIndicator);
describe('template', () => {
beforeEach(() => {
createComponent();
});
- describe('Search box', () => {
- it('renders always', () => {
- expect(findGlSearchBox().exists()).toBe(true);
- });
+ it('always renders Search box', () => {
+ expect(findGlSearchBox().exists()).toBe(true);
});
- describe.each`
- snippets | showFilters
- ${null} | ${true}
- ${{ query: { snippets: '' } }} | ${true}
- ${{ query: { snippets: false } }} | ${true}
- ${{ query: { snippets: true } }} | ${false}
- ${{ query: { snippets: 'false' } }} | ${true}
- ${{ query: { snippets: 'true' } }} | ${false}
- `('topbar filters', ({ snippets, showFilters }) => {
- beforeEach(() => {
- createComponent(snippets);
- });
-
- it(`does${showFilters ? '' : ' not'} render when snippets is ${JSON.stringify(
- snippets,
- )}`, () => {
- expect(findGroupFilter().exists()).toBe(showFilters);
- expect(findProjectFilter().exists()).toBe(showFilters);
- });
+ it('always renders Search indicator', () => {
+ expect(findSearchTypeIndicator().exists()).toBe(true);
});
describe.each`
@@ -128,15 +108,15 @@ describe('GlobalSearchTopbar', () => {
});
describe.each`
- state | defaultBranchName | hasSyntaxOptions
- ${{ query: { repository_ref: '' }, searchType: 'basic' }} | ${'master'} | ${false}
- ${{ query: { repository_ref: 'v0.1' }, searchType: 'basic' }} | ${''} | ${false}
- ${{ query: { repository_ref: 'master' }, searchType: 'basic' }} | ${'master'} | ${false}
- ${{ query: { repository_ref: 'master' }, searchType: 'advanced' }} | ${''} | ${false}
- ${{ query: { repository_ref: '' }, searchType: 'advanced' }} | ${'master'} | ${true}
- ${{ query: { repository_ref: 'v0.1' }, searchType: 'advanced' }} | ${''} | ${false}
- ${{ query: { repository_ref: 'master' }, searchType: 'advanced' }} | ${'master'} | ${true}
- ${{ query: { repository_ref: 'master' }, searchType: 'zoekt' }} | ${'master'} | ${true}
+ state | hasSyntaxOptions
+ ${{ query: { repository_ref: '' }, searchType: 'basic', searchLevel: 'project', defaultBranchName: 'master' }} | ${false}
+ ${{ query: { repository_ref: 'v0.1' }, searchType: 'basic', searchLevel: 'project', defaultBranchName: '' }} | ${false}
+ ${{ query: { repository_ref: 'master' }, searchType: 'basic', searchLevel: 'project', defaultBranchName: 'master' }} | ${false}
+ ${{ query: { repository_ref: 'master' }, searchType: 'advanced', searchLevel: 'project', defaultBranchName: '' }} | ${false}
+ ${{ query: { repository_ref: '' }, searchType: 'advanced', searchLevel: 'project', defaultBranchName: 'master' }} | ${true}
+ ${{ query: { repository_ref: 'v0.1' }, searchType: 'advanced', searchLevel: 'project', defaultBranchName: '' }} | ${false}
+ ${{ query: { repository_ref: 'master' }, searchType: 'advanced', searchLevel: 'project', defaultBranchName: 'master' }} | ${true}
+ ${{ query: { repository_ref: 'master' }, searchType: 'zoekt', searchLevel: 'project', defaultBranchName: 'master' }} | ${true}
`(
`the syntax option based on component state`,
({ state, defaultBranchName, hasSyntaxOptions }) => {
@@ -162,9 +142,10 @@ describe('GlobalSearchTopbar', () => {
createComponent();
});
- it('clicking search button inside search box calls applyQuery', () => {
- findGlSearchBox().vm.$emit('submit', { preventDefault: () => {} });
+ it('clicking search button inside search box calls applyQuery', async () => {
+ await nextTick();
+ findGlSearchBox().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
expect(actionSpies.applyQuery).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/search/topbar/components/search_type_indicator_spec.js b/spec/frontend/search/topbar/components/search_type_indicator_spec.js
new file mode 100644
index 00000000000..d69ca6dfb16
--- /dev/null
+++ b/spec/frontend/search/topbar/components/search_type_indicator_spec.js
@@ -0,0 +1,128 @@
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+import { GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { MOCK_QUERY } from 'jest/search/mock_data';
+import SearchTypeIndicator from '~/search/topbar/components/search_type_indicator.vue';
+
+Vue.use(Vuex);
+
+describe('SearchTypeIndicator', () => {
+ let wrapper;
+
+ const actionSpies = {
+ applyQuery: jest.fn(),
+ setQuery: jest.fn(),
+ preloadStoredFrequentItems: jest.fn(),
+ };
+
+ const createComponent = (initialState = {}) => {
+ const store = new Vuex.Store({
+ state: {
+ query: MOCK_QUERY,
+ ...initialState,
+ },
+ actions: actionSpies,
+ });
+
+ wrapper = shallowMountExtended(SearchTypeIndicator, {
+ store,
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ const findIndicator = (id) => wrapper.findAllByTestId(id);
+ const findDocsLink = () => wrapper.findComponentByTestId('docs-link');
+ const findSyntaxDocsLink = () => wrapper.findComponentByTestId('syntax-docs-link');
+
+ // searchType and search level params cobination in this test reflects
+ // all possible combinations
+
+ describe.each`
+ searchType | searchLevel | repository | showSearchTypeIndicator
+ ${'advanced'} | ${'project'} | ${'master'} | ${'advanced-enabled'}
+ ${'advanced'} | ${'project'} | ${'v0.1'} | ${'advanced-disabled'}
+ ${'advanced'} | ${'group'} | ${'master'} | ${'advanced-enabled'}
+ ${'advanced'} | ${'global'} | ${'master'} | ${'advanced-enabled'}
+ ${'zoekt'} | ${'project'} | ${'master'} | ${'zoekt-enabled'}
+ ${'zoekt'} | ${'project'} | ${'v0.1'} | ${'zoekt-disabled'}
+ ${'zoekt'} | ${'group'} | ${'master'} | ${'zoekt-enabled'}
+ `(
+ 'search type indicator for $searchType $searchLevel',
+ ({ searchType, repository, showSearchTypeIndicator, searchLevel }) => {
+ beforeEach(() => {
+ createComponent({
+ query: { repository_ref: repository },
+ searchType,
+ searchLevel,
+ defaultBranchName: 'master',
+ });
+ });
+ it('renders correctly', () => {
+ expect(findIndicator(showSearchTypeIndicator).exists()).toBe(true);
+ });
+ },
+ );
+
+ describe.each`
+ searchType | repository | showSearchTypeIndicator
+ ${'basic'} | ${'master'} | ${true}
+ ${'basic'} | ${'v0.1'} | ${true}
+ `(
+ 'search type indicator for $searchType and $repository',
+ ({ searchType, repository, showSearchTypeIndicator }) => {
+ beforeEach(() => {
+ createComponent({
+ query: { repository_ref: repository },
+ searchType,
+ defaultBranchName: 'master',
+ });
+ });
+ it.each(['zoekt-enabled', 'zoekt-disabled', 'advanced-enabled', 'advanced-disabled'])(
+ 'renders correct indicator %s',
+ () => {
+ expect(findIndicator(searchType).exists()).toBe(showSearchTypeIndicator);
+ },
+ );
+ },
+ );
+
+ describe.each`
+ searchType | docsLink
+ ${'advanced'} | ${'/help/user/search/advanced_search'}
+ ${'zoekt'} | ${'/help/user/search/exact_code_search'}
+ `('documentation link for $searchType', ({ searchType, docsLink }) => {
+ beforeEach(() => {
+ createComponent({
+ query: { repository_ref: 'master' },
+ searchType,
+ searchLevel: 'project',
+ defaultBranchName: 'master',
+ });
+ });
+ it('has correct link', () => {
+ expect(findDocsLink().attributes('href')).toBe(docsLink);
+ });
+ });
+
+ describe.each`
+ searchType | syntaxdocsLink
+ ${'advanced'} | ${'/help/user/search/advanced_search#use-the-advanced-search-syntax'}
+ ${'zoekt'} | ${'/help/user/search/exact_code_search#syntax'}
+ `('Syntax documentation $searchType', ({ searchType, syntaxdocsLink }) => {
+ beforeEach(() => {
+ createComponent({
+ query: { repository_ref: '000' },
+ searchType,
+ searchLevel: 'project',
+ defaultBranchName: 'master',
+ });
+ });
+ it('has correct link', () => {
+ expect(findSyntaxDocsLink().attributes('href')).toBe(syntaxdocsLink);
+ });
+ });
+});
diff --git a/spec/frontend/search/topbar/components/searchable_dropdown_item_spec.js b/spec/frontend/search/topbar/components/searchable_dropdown_item_spec.js
deleted file mode 100644
index c911fe53d40..00000000000
--- a/spec/frontend/search/topbar/components/searchable_dropdown_item_spec.js
+++ /dev/null
@@ -1,93 +0,0 @@
-import { GlDropdownItem, GlAvatar } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { MOCK_GROUPS } from 'jest/search/mock_data';
-import { truncateNamespace } from '~/lib/utils/text_utility';
-import SearchableDropdownItem from '~/search/topbar/components/searchable_dropdown_item.vue';
-import { GROUP_DATA } from '~/search/topbar/constants';
-
-describe('Global Search Searchable Dropdown Item', () => {
- let wrapper;
-
- const defaultProps = {
- item: MOCK_GROUPS[0],
- selectedItem: MOCK_GROUPS[0],
- name: GROUP_DATA.name,
- fullName: GROUP_DATA.fullName,
- };
-
- const createComponent = (props) => {
- wrapper = shallowMountExtended(SearchableDropdownItem, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- });
- };
-
- const findGlDropdownItem = () => wrapper.findComponent(GlDropdownItem);
- const findGlAvatar = () => wrapper.findComponent(GlAvatar);
- const findDropdownTitle = () => wrapper.findByTestId('item-title');
- const findDropdownSubtitle = () => wrapper.findByTestId('item-namespace');
-
- describe('template', () => {
- describe('always', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders GlDropdownItem', () => {
- expect(findGlDropdownItem().exists()).toBe(true);
- });
-
- it('renders GlAvatar', () => {
- expect(findGlAvatar().exists()).toBe(true);
- });
-
- it('renders Dropdown Title correctly', () => {
- const titleEl = findDropdownTitle();
-
- expect(titleEl.exists()).toBe(true);
- expect(titleEl.text()).toBe(MOCK_GROUPS[0][GROUP_DATA.name]);
- });
-
- it('renders Dropdown Subtitle correctly', () => {
- const subtitleEl = findDropdownSubtitle();
-
- expect(subtitleEl.exists()).toBe(true);
- expect(subtitleEl.text()).toBe(truncateNamespace(MOCK_GROUPS[0][GROUP_DATA.fullName]));
- });
- });
-
- describe('when item === selectedItem', () => {
- beforeEach(() => {
- createComponent({ item: MOCK_GROUPS[0], selectedItem: MOCK_GROUPS[0] });
- });
-
- it('marks the dropdown as checked', () => {
- expect(findGlDropdownItem().attributes('ischecked')).toBe('true');
- });
- });
-
- describe('when item !== selectedItem', () => {
- beforeEach(() => {
- createComponent({ item: MOCK_GROUPS[0], selectedItem: MOCK_GROUPS[1] });
- });
-
- it('marks the dropdown as not checked', () => {
- expect(findGlDropdownItem().attributes('ischecked')).toBeUndefined();
- });
- });
- });
-
- describe('actions', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('clicking the dropdown item $emits change with the item', () => {
- findGlDropdownItem().vm.$emit('click');
-
- expect(wrapper.emitted('change')[0]).toEqual([MOCK_GROUPS[0]]);
- });
- });
-});
diff --git a/spec/frontend/search/topbar/components/searchable_dropdown_spec.js b/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
deleted file mode 100644
index 5acaa1c1900..00000000000
--- a/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
+++ /dev/null
@@ -1,220 +0,0 @@
-import { GlDropdown, GlDropdownItem, GlSearchBoxByType, GlSkeletonLoader } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { MOCK_GROUPS, MOCK_GROUP, MOCK_QUERY } from 'jest/search/mock_data';
-import SearchableDropdown from '~/search/topbar/components/searchable_dropdown.vue';
-import { ANY_OPTION, GROUP_DATA } from '~/search/topbar/constants';
-
-Vue.use(Vuex);
-
-describe('Global Search Searchable Dropdown', () => {
- let wrapper;
-
- const defaultProps = {
- headerText: GROUP_DATA.headerText,
- name: GROUP_DATA.name,
- fullName: GROUP_DATA.fullName,
- loading: false,
- selectedItem: ANY_OPTION,
- items: [],
- };
-
- const createComponent = (initialState, props, mountFn = shallowMount) => {
- const store = new Vuex.Store({
- state: {
- query: MOCK_QUERY,
- ...initialState,
- },
- });
-
- wrapper = extendedWrapper(
- mountFn(SearchableDropdown, {
- store,
- propsData: {
- ...defaultProps,
- ...props,
- },
- }),
- );
- };
-
- const findGlDropdown = () => wrapper.findComponent(GlDropdown);
- const findGlDropdownSearch = () => findGlDropdown().findComponent(GlSearchBoxByType);
- const findDropdownText = () => findGlDropdown().find('.dropdown-toggle-text');
- const findSearchableDropdownItems = () => wrapper.findAllByTestId('searchable-items');
- const findFrequentDropdownItems = () => wrapper.findAllByTestId('frequent-items');
- const findAnyDropdownItem = () => findGlDropdown().findComponent(GlDropdownItem);
- const findFirstSearchableDropdownItem = () => findSearchableDropdownItems().at(0);
- const findFirstFrequentDropdownItem = () => findFrequentDropdownItems().at(0);
- const findLoader = () => wrapper.findComponent(GlSkeletonLoader);
-
- describe('template', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders GlDropdown', () => {
- expect(findGlDropdown().exists()).toBe(true);
- });
-
- describe('findGlDropdownSearch', () => {
- it('renders always', () => {
- expect(findGlDropdownSearch().exists()).toBe(true);
- });
-
- it('has debounce prop', () => {
- expect(findGlDropdownSearch().attributes('debounce')).toBe('500');
- });
-
- describe('onSearch', () => {
- const search = 'test search';
-
- beforeEach(() => {
- findGlDropdownSearch().vm.$emit('input', search);
- });
-
- it('$emits @search when input event is fired from GlSearchBoxByType', () => {
- expect(wrapper.emitted('search')[0]).toEqual([search]);
- });
- });
- });
-
- describe('Searchable Dropdown Items', () => {
- describe('when loading is false', () => {
- beforeEach(() => {
- createComponent({}, { items: MOCK_GROUPS });
- });
-
- it('does not render loader', () => {
- expect(findLoader().exists()).toBe(false);
- });
-
- it('renders the Any Dropdown', () => {
- expect(findAnyDropdownItem().exists()).toBe(true);
- });
-
- it('renders searchable dropdown item for each item', () => {
- expect(findSearchableDropdownItems()).toHaveLength(MOCK_GROUPS.length);
- });
- });
-
- describe('when loading is true', () => {
- beforeEach(() => {
- createComponent({}, { loading: true, items: MOCK_GROUPS });
- });
-
- it('does render loader', () => {
- expect(findLoader().exists()).toBe(true);
- });
-
- it('renders the Any Dropdown', () => {
- expect(findAnyDropdownItem().exists()).toBe(true);
- });
-
- it('does not render searchable dropdown items', () => {
- expect(findSearchableDropdownItems()).toHaveLength(0);
- });
- });
- });
-
- describe.each`
- searchText | frequentItems | length
- ${''} | ${[]} | ${0}
- ${''} | ${MOCK_GROUPS} | ${MOCK_GROUPS.length}
- ${'test'} | ${[]} | ${0}
- ${'test'} | ${MOCK_GROUPS} | ${0}
- `('Frequent Dropdown Items', ({ searchText, frequentItems, length }) => {
- describe(`when search is ${searchText} and frequentItems length is ${frequentItems.length}`, () => {
- beforeEach(() => {
- createComponent({}, { frequentItems });
- findGlDropdownSearch().vm.$emit('input', searchText);
- });
-
- it(`should${length ? '' : ' not'} render frequent dropdown items`, () => {
- expect(findFrequentDropdownItems()).toHaveLength(length);
- });
- });
- });
-
- describe('Dropdown Text', () => {
- describe('when selectedItem is any', () => {
- beforeEach(() => {
- createComponent({}, {}, mount);
- });
-
- it('sets dropdown text to Any', () => {
- expect(findDropdownText().text()).toBe(ANY_OPTION.name);
- });
- });
-
- describe('selectedItem is set', () => {
- beforeEach(() => {
- createComponent({}, { selectedItem: MOCK_GROUP }, mount);
- });
-
- it('sets dropdown text to the selectedItem name', () => {
- expect(findDropdownText().text()).toBe(MOCK_GROUP[GROUP_DATA.name]);
- });
- });
- });
- });
-
- describe('actions', () => {
- beforeEach(() => {
- createComponent({}, { items: MOCK_GROUPS, frequentItems: MOCK_GROUPS });
- });
-
- it('clicking "Any" dropdown item $emits @change with ANY_OPTION', () => {
- findAnyDropdownItem().vm.$emit('click');
-
- expect(wrapper.emitted('change')[0]).toEqual([ANY_OPTION]);
- });
-
- it('on searchable item @change, the wrapper $emits change with the item', () => {
- findFirstSearchableDropdownItem().vm.$emit('change', MOCK_GROUPS[0]);
-
- expect(wrapper.emitted('change')[0]).toEqual([MOCK_GROUPS[0]]);
- });
-
- it('on frequent item @change, the wrapper $emits change with the item', () => {
- findFirstFrequentDropdownItem().vm.$emit('change', MOCK_GROUPS[0]);
-
- expect(wrapper.emitted('change')[0]).toEqual([MOCK_GROUPS[0]]);
- });
-
- describe('opening the dropdown', () => {
- beforeEach(() => {
- findGlDropdown().vm.$emit('show');
- });
-
- it('$emits @search and @first-open on the first open', () => {
- expect(wrapper.emitted('search')[0]).toStrictEqual(['']);
- expect(wrapper.emitted('first-open')[0]).toStrictEqual([]);
- });
-
- describe('when the dropdown has been opened', () => {
- it('$emits @search with the searchText', async () => {
- const searchText = 'foo';
-
- findGlDropdownSearch().vm.$emit('input', searchText);
- await nextTick();
-
- expect(wrapper.emitted('search')[1]).toStrictEqual([searchText]);
- expect(wrapper.emitted('first-open')).toHaveLength(1);
- });
-
- it('does not emit @first-open again', async () => {
- expect(wrapper.emitted('first-open')).toHaveLength(1);
-
- findGlDropdownSearch().vm.$emit('input');
- await nextTick();
-
- expect(wrapper.emitted('first-open')).toHaveLength(1);
- });
- });
- });
- });
-});
diff --git a/spec/frontend/security_configuration/components/app_spec.js b/spec/frontend/security_configuration/components/app_spec.js
index 364fe733a41..94d888bb067 100644
--- a/spec/frontend/security_configuration/components/app_spec.js
+++ b/spec/frontend/security_configuration/components/app_spec.js
@@ -5,10 +5,10 @@ import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
import stubChildren from 'helpers/stub_children';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import SecurityConfigurationApp, { i18n } from '~/security_configuration/components/app.vue';
+import SecurityConfigurationApp from '~/security_configuration/components/app.vue';
import AutoDevopsAlert from '~/security_configuration/components/auto_dev_ops_alert.vue';
import AutoDevopsEnabledAlert from '~/security_configuration/components/auto_dev_ops_enabled_alert.vue';
-import { AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY } from '~/security_configuration/components/constants';
+import { AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY } from '~/security_configuration/constants';
import FeatureCard from '~/security_configuration/components/feature_card.vue';
import TrainingProviderList from '~/security_configuration/components/training_provider_list.vue';
import { securityFeaturesMock, provideMock } from '../mock_data';
@@ -19,6 +19,8 @@ const { vulnerabilityTrainingDocsPath, projectFullPath } = provideMock;
useLocalStorageSpy();
Vue.use(VueApollo);
+const { i18n } = SecurityConfigurationApp;
+
describe('~/security_configuration/components/app', () => {
let wrapper;
let userCalloutDismissSpy;
diff --git a/spec/frontend/security_configuration/components/continuous_vulnerability_scan_spec.js b/spec/frontend/security_configuration/components/continuous_vulnerability_scan_spec.js
deleted file mode 100644
index 84a468e4dd8..00000000000
--- a/spec/frontend/security_configuration/components/continuous_vulnerability_scan_spec.js
+++ /dev/null
@@ -1,124 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlBadge, GlToggle } from '@gitlab/ui';
-import VueApollo from 'vue-apollo';
-import Vue from 'vue';
-import ProjectSetContinuousVulnerabilityScanning from '~/security_configuration/graphql/project_set_continuous_vulnerability_scanning.graphql';
-import ContinuousVulnerabilityScan from '~/security_configuration/components/continuous_vulnerability_scan.vue';
-import createMockApollo from 'helpers/mock_apollo_helper';
-
-Vue.use(VueApollo);
-
-const setCVSMockResponse = {
- data: {
- projectSetContinuousVulnerabilityScanning: {
- continuousVulnerabilityScanningEnabled: true,
- errors: [],
- },
- },
-};
-
-const defaultProvide = {
- continuousVulnerabilityScansEnabled: true,
- projectFullPath: 'project/full/path',
-};
-
-describe('ContinuousVulnerabilityScan', () => {
- let wrapper;
- let apolloProvider;
- let requestHandlers;
-
- const createComponent = (options) => {
- requestHandlers = {
- setCVSMutationHandler: jest.fn().mockResolvedValue(setCVSMockResponse),
- };
-
- apolloProvider = createMockApollo([
- [ProjectSetContinuousVulnerabilityScanning, requestHandlers.setCVSMutationHandler],
- ]);
-
- wrapper = shallowMount(ContinuousVulnerabilityScan, {
- propsData: {
- feature: {
- available: true,
- configured: true,
- },
- },
- provide: {
- glFeatures: {
- dependencyScanningOnAdvisoryIngestion: true,
- },
- ...defaultProvide,
- },
- apolloProvider,
- ...options,
- });
- };
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- apolloProvider = null;
- });
-
- const findBadge = () => wrapper.findComponent(GlBadge);
- const findToggle = () => wrapper.findComponent(GlToggle);
-
- it('renders the component', () => {
- expect(wrapper.exists()).toBe(true);
- });
-
- it('renders the correct title', () => {
- expect(wrapper.text()).toContain('Continuous Vulnerability Scan');
- });
-
- it('renders the badge and toggle component with correct values', () => {
- expect(findBadge().exists()).toBe(true);
- expect(findBadge().text()).toBe('Experiment');
-
- expect(findToggle().exists()).toBe(true);
- expect(findToggle().props('value')).toBe(defaultProvide.continuousVulnerabilityScansEnabled);
- });
-
- it('should disable toggle when feature is not configured', () => {
- createComponent({
- propsData: {
- feature: {
- available: true,
- configured: false,
- },
- },
- });
- expect(findToggle().props('disabled')).toBe(true);
- });
-
- it('calls mutation on toggle change with correct payload', () => {
- findToggle().vm.$emit('change', true);
-
- expect(requestHandlers.setCVSMutationHandler).toHaveBeenCalledWith({
- input: {
- projectPath: 'project/full/path',
- enable: true,
- },
- });
- });
-
- describe('when feature flag is disabled', () => {
- beforeEach(() => {
- createComponent({
- provide: {
- glFeatures: {
- dependencyScanningOnAdvisoryIngestion: false,
- },
- ...defaultProvide,
- },
- });
- });
-
- it('should not render toggle and badge', () => {
- expect(findToggle().exists()).toBe(false);
- expect(findBadge().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/security_configuration/components/feature_card_spec.js b/spec/frontend/security_configuration/components/feature_card_spec.js
index c715d01dd58..9efee2a409a 100644
--- a/spec/frontend/security_configuration/components/feature_card_spec.js
+++ b/spec/frontend/security_configuration/components/feature_card_spec.js
@@ -1,8 +1,7 @@
import { GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import Vue from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { securityFeatures } from '~/security_configuration/components/constants';
+import { securityFeatures } from '~/security_configuration/constants';
import FeatureCard from '~/security_configuration/components/feature_card.vue';
import FeatureCardBadge from '~/security_configuration/components/feature_card_badge.vue';
import ManageViaMr from '~/vue_shared/security_configuration/components/manage_via_mr.vue';
@@ -14,10 +13,6 @@ import {
import { manageViaMRErrorMessage } from '../constants';
import { makeFeature } from './utils';
-const MockComponent = Vue.component('MockComponent', {
- render: (createElement) => createElement('span'),
-});
-
describe('FeatureCard component', () => {
let feature;
let wrapper;
@@ -394,17 +389,4 @@ describe('FeatureCard component', () => {
});
});
});
-
- describe('when a slot component is passed', () => {
- beforeEach(() => {
- feature = makeFeature({
- slotComponent: MockComponent,
- });
- createComponent({ feature });
- });
-
- it('renders the component properly', () => {
- expect(wrapper.findComponent(MockComponent).exists()).toBe(true);
- });
- });
});
diff --git a/spec/frontend/security_configuration/components/training_provider_list_spec.js b/spec/frontend/security_configuration/components/training_provider_list_spec.js
index 5b2b3f46df6..ef20d8f56a4 100644
--- a/spec/frontend/security_configuration/components/training_provider_list_spec.js
+++ b/spec/frontend/security_configuration/components/training_provider_list_spec.js
@@ -19,8 +19,8 @@ import {
TRACK_TOGGLE_TRAINING_PROVIDER_LABEL,
TRACK_PROVIDER_LEARN_MORE_CLICK_ACTION,
TRACK_PROVIDER_LEARN_MORE_CLICK_LABEL,
+ TEMP_PROVIDER_URLS,
} from '~/security_configuration/constants';
-import { TEMP_PROVIDER_URLS } from '~/security_configuration/components/constants';
import TrainingProviderList from '~/security_configuration/components/training_provider_list.vue';
import { updateSecurityTrainingOptimisticResponse } from '~/security_configuration/graphql/cache_utils';
import securityTrainingProvidersQuery from '~/security_configuration/graphql/security_training_providers.query.graphql';
@@ -61,10 +61,9 @@ const TEMP_PROVIDER_LOGOS = {
svg: '<svg>Secure Code Warrior</svg>',
},
};
-jest.mock('~/security_configuration/components/constants', () => {
+jest.mock('~/security_configuration/constants', () => {
return {
- TEMP_PROVIDER_URLS: jest.requireActual('~/security_configuration/components/constants')
- .TEMP_PROVIDER_URLS,
+ TEMP_PROVIDER_URLS: jest.requireActual('~/security_configuration/constants').TEMP_PROVIDER_URLS,
// NOTE: Jest hoists all mocks to the top so we can't use TEMP_PROVIDER_LOGOS
// here directly.
TEMP_PROVIDER_LOGOS: {
diff --git a/spec/frontend/security_configuration/mock_data.js b/spec/frontend/security_configuration/mock_data.js
index df10d33e2f0..208256afdbd 100644
--- a/spec/frontend/security_configuration/mock_data.js
+++ b/spec/frontend/security_configuration/mock_data.js
@@ -4,7 +4,7 @@ import {
SAST_DESCRIPTION,
SAST_HELP_PATH,
SAST_CONFIG_HELP_PATH,
-} from '~/security_configuration/components/constants';
+} from '~/security_configuration/constants';
import { REPORT_TYPE_SAST } from '~/vue_shared/security_reports/constants';
export const testProjectPath = 'foo/bar';
diff --git a/spec/frontend/security_configuration/utils_spec.js b/spec/frontend/security_configuration/utils_spec.js
index ea04e9e7993..3c6d4baa30f 100644
--- a/spec/frontend/security_configuration/utils_spec.js
+++ b/spec/frontend/security_configuration/utils_spec.js
@@ -1,5 +1,5 @@
import { augmentFeatures, translateScannerNames } from '~/security_configuration/utils';
-import { SCANNER_NAMES_MAP } from '~/security_configuration/components/constants';
+import { SCANNER_NAMES_MAP } from '~/security_configuration/constants';
describe('augmentFeatures', () => {
const mockSecurityFeatures = [
diff --git a/spec/frontend/set_status_modal/set_status_form_spec.js b/spec/frontend/set_status_modal/set_status_form_spec.js
index e24561a9862..5fcbecfa1dc 100644
--- a/spec/frontend/set_status_modal/set_status_form_spec.js
+++ b/spec/frontend/set_status_modal/set_status_form_spec.js
@@ -84,11 +84,11 @@ describe('SetStatusForm', () => {
it('displays time that status will clear', async () => {
await createComponent({
propsData: {
- currentClearStatusAfter: '2022-12-05 11:00:00 UTC',
+ currentClearStatusAfter: '2022-12-05T11:00:00Z',
},
});
- expect(wrapper.findByRole('button', { name: '11:00am' }).exists()).toBe(true);
+ expect(wrapper.findByRole('button', { name: '11:00 AM' }).exists()).toBe(true);
});
});
@@ -96,11 +96,13 @@ describe('SetStatusForm', () => {
it('displays date and time that status will clear', async () => {
await createComponent({
propsData: {
- currentClearStatusAfter: '2022-12-06 11:00:00 UTC',
+ currentClearStatusAfter: '2022-12-06T11:00:00Z',
},
});
- expect(wrapper.findByRole('button', { name: 'Dec 6, 2022 11:00am' }).exists()).toBe(true);
+ expect(wrapper.findByRole('button', { name: 'Dec 6, 2022, 11:00 AM' }).exists()).toBe(
+ true,
+ );
});
});
@@ -110,11 +112,11 @@ describe('SetStatusForm', () => {
await createComponent({
propsData: {
clearStatusAfter: thirtyMinutes,
- currentClearStatusAfter: '2022-12-05 11:00:00 UTC',
+ currentClearStatusAfter: '2022-12-05T11:00:00Z',
},
});
- expect(wrapper.findByRole('button', { name: '12:30am' }).exists()).toBe(true);
+ expect(wrapper.findByRole('button', { name: '12:30 AM' }).exists()).toBe(true);
});
});
@@ -123,11 +125,11 @@ describe('SetStatusForm', () => {
await createComponent({
propsData: {
clearStatusAfter: oneDay,
- currentClearStatusAfter: '2022-12-06 11:00:00 UTC',
+ currentClearStatusAfter: '2022-12-06T11:00:00Z',
},
});
- expect(wrapper.findByRole('button', { name: 'Dec 6, 2022 12:00am' }).exists()).toBe(
+ expect(wrapper.findByRole('button', { name: 'Dec 6, 2022, 12:00 AM' }).exists()).toBe(
true,
);
});
diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
index 9c79d564625..7ae2884170e 100644
--- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
@@ -131,12 +131,12 @@ describe('SetStatusModalWrapper', () => {
beforeEach(async () => {
await initEmojiMock();
- wrapper = createComponent({ currentClearStatusAfter: '2022-12-06 11:00:00 UTC' });
+ wrapper = createComponent({ currentClearStatusAfter: '2022-12-06T11:00:00Z' });
return initModal();
});
it('displays date and time that status will expire in dropdown toggle button', () => {
- expect(wrapper.findByRole('button', { name: 'Dec 6, 2022 11:00am' }).exists()).toBe(true);
+ expect(wrapper.findByRole('button', { name: 'Dec 6, 2022, 11:00 AM' }).exists()).toBe(true);
});
});
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js
index cd391765dde..bcef99afc46 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_create_view_spec.js
@@ -14,13 +14,14 @@ Vue.use(Vuex);
describe('DropdownContentsCreateView', () => {
let wrapper;
+ let store;
+
const colors = Object.keys(mockSuggestedColors).map((color) => ({
[color]: mockSuggestedColors[color],
}));
const createComponent = (initialState = mockConfig) => {
- const store = new Vuex.Store(labelSelectModule());
-
+ store = new Vuex.Store(labelSelectModule());
store.dispatch('setInitialState', initialState);
wrapper = shallowMountExtended(DropdownContentsCreateView, {
@@ -47,7 +48,7 @@ describe('DropdownContentsCreateView', () => {
it('returns `true` when `labelCreateInProgress` is true', async () => {
await findColorSelectorInput().vm.$emit('input', '#ff0000');
await findLabelTitleInput().vm.$emit('input', 'Foo');
- wrapper.vm.$store.dispatch('requestCreateLabel');
+ store.dispatch('requestCreateLabel');
await nextTick();
@@ -81,7 +82,6 @@ describe('DropdownContentsCreateView', () => {
describe('getColorName', () => {
it('returns color name from color object', () => {
expect(findAllLinks().at(0).attributes('title')).toBe(Object.values(colors[0]).pop());
- expect(wrapper.vm.getColorName(colors[0])).toBe(Object.values(colors[0]).pop());
});
});
@@ -97,20 +97,17 @@ describe('DropdownContentsCreateView', () => {
describe('handleCreateClick', () => {
it('calls action `createLabel` with object containing `labelTitle` & `selectedColor`', async () => {
- jest.spyOn(wrapper.vm, 'createLabel').mockImplementation();
-
+ jest.spyOn(store, 'dispatch').mockImplementation();
await findColorSelectorInput().vm.$emit('input', '#ff0000');
await findLabelTitleInput().vm.$emit('input', 'Foo');
findCreateClickButton().vm.$emit('click');
await nextTick();
- expect(wrapper.vm.createLabel).toHaveBeenCalledWith(
- expect.objectContaining({
- title: 'Foo',
- color: '#ff0000',
- }),
- );
+ expect(store.dispatch).toHaveBeenCalledWith('createLabel', {
+ title: 'Foo',
+ color: '#ff0000',
+ });
});
});
});
@@ -186,7 +183,7 @@ describe('DropdownContentsCreateView', () => {
});
it('shows gl-loading-icon within create button element when `labelCreateInProgress` is `true`', async () => {
- wrapper.vm.$store.dispatch('requestCreateLabel');
+ store.dispatch('requestCreateLabel');
await nextTick();
const loadingIconEl = wrapper.find('.dropdown-actions').findComponent(GlLoadingIcon);
diff --git a/spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js b/spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js
index c27afb75375..663bfbb48cc 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_vue/store/actions_spec.js
@@ -254,7 +254,7 @@ describe('LabelsSelect Actions', () => {
describe('updateLabelsSetState', () => {
it('updates labels `set` state to match `selectedLabels`', () => {
- testAction(
+ return testAction(
actions.updateLabelsSetState,
{},
state,
diff --git a/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_value_spec.js b/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_value_spec.js
index d70b989b493..21068c2858d 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_value_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_value_spec.js
@@ -3,14 +3,15 @@ import { shallowMount } from '@vue/test-utils';
import DropdownValue from '~/sidebar/components/labels/labels_select_widget/dropdown_value.vue';
-import { mockRegularLabel, mockScopedLabel } from './mock_data';
+import { mockRegularLabel, mockScopedLabel, mockLockedLabel } from './mock_data';
describe('DropdownValue', () => {
let wrapper;
const findAllLabels = () => wrapper.findAllComponents(GlLabel);
- const findRegularLabel = () => findAllLabels().at(1);
+ const findRegularLabel = () => findAllLabels().at(2);
const findScopedLabel = () => findAllLabels().at(0);
+ const findLockedLabel = () => findAllLabels().at(1);
const findWrapper = () => wrapper.find('[data-testid="value-wrapper"]');
const findEmptyPlaceholder = () => wrapper.find('[data-testid="empty-placeholder"]');
@@ -18,7 +19,7 @@ describe('DropdownValue', () => {
wrapper = shallowMount(DropdownValue, {
slots,
propsData: {
- selectedLabels: [mockRegularLabel, mockScopedLabel],
+ selectedLabels: [mockLockedLabel, mockRegularLabel, mockScopedLabel],
allowLabelRemove: true,
labelsFilterBasePath: '/gitlab-org/my-project/issues',
labelsFilterParam: 'label_name',
@@ -69,8 +70,8 @@ describe('DropdownValue', () => {
expect(findEmptyPlaceholder().exists()).toBe(false);
});
- it('renders a list of two labels', () => {
- expect(findAllLabels().length).toBe(2);
+ it('renders a list of three labels', () => {
+ expect(findAllLabels().length).toBe(3);
});
it('passes correct props to the regular label', () => {
@@ -96,5 +97,19 @@ describe('DropdownValue', () => {
wrapper.find('.sidebar-collapsed-icon').trigger('click');
expect(wrapper.emitted('onCollapsedValueClick')).toEqual([[]]);
});
+
+ it('does not show close button if label is locked', () => {
+ createComponent({
+ supportsLockOnMerge: true,
+ });
+ expect(findLockedLabel().props('showCloseButton')).toBe(false);
+ });
+
+ it('shows close button if label is not locked', () => {
+ createComponent({
+ supportsLockOnMerge: true,
+ });
+ expect(findRegularLabel().props('showCloseButton')).toBe(true);
+ });
});
});
diff --git a/spec/frontend/sidebar/components/labels/labels_select_widget/embedded_labels_list_spec.js b/spec/frontend/sidebar/components/labels/labels_select_widget/embedded_labels_list_spec.js
index 715dd4e034e..c516dddf0ce 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_widget/embedded_labels_list_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_widget/embedded_labels_list_spec.js
@@ -1,7 +1,7 @@
import { GlLabel } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import EmbeddedLabelsList from '~/sidebar/components/labels/labels_select_widget/embedded_labels_list.vue';
-import { mockRegularLabel, mockScopedLabel } from './mock_data';
+import { mockRegularLabel, mockScopedLabel, mockLockedLabel } from './mock_data';
describe('EmbeddedLabelsList', () => {
let wrapper;
@@ -13,12 +13,13 @@ describe('EmbeddedLabelsList', () => {
.at(0);
const findRegularLabel = () => findLabelByTitle(mockRegularLabel.title);
const findScopedLabel = () => findLabelByTitle(mockScopedLabel.title);
+ const findLockedLabel = () => findLabelByTitle(mockLockedLabel.title);
const createComponent = (props = {}, slots = {}) => {
wrapper = shallowMountExtended(EmbeddedLabelsList, {
slots,
propsData: {
- selectedLabels: [mockRegularLabel, mockScopedLabel],
+ selectedLabels: [mockRegularLabel, mockScopedLabel, mockLockedLabel],
allowLabelRemove: true,
labelsFilterBasePath: '/gitlab-org/my-project/issues',
labelsFilterParam: 'label_name',
@@ -47,8 +48,8 @@ describe('EmbeddedLabelsList', () => {
createComponent();
});
- it('renders a list of two labels', () => {
- expect(findAllLabels()).toHaveLength(2);
+ it('renders a list of three labels', () => {
+ expect(findAllLabels()).toHaveLength(3);
});
it('passes correct props to the regular label', () => {
@@ -69,5 +70,12 @@ describe('EmbeddedLabelsList', () => {
findRegularLabel().vm.$emit('close');
expect(wrapper.emitted('onLabelRemove')).toStrictEqual([[mockRegularLabel.id]]);
});
+
+ it('does not show close button if label is locked', () => {
+ createComponent({
+ supportsLockOnMerge: true,
+ });
+ expect(findLockedLabel().props('showCloseButton')).toBe(false);
+ });
});
});
diff --git a/spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js b/spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js
index b0b473625bb..5039f00fe4b 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js
@@ -14,6 +14,16 @@ export const mockScopedLabel = {
textColor: '#FFFFFF',
};
+export const mockLockedLabel = {
+ id: 30,
+ title: 'Bar Label',
+ description: 'Bar',
+ color: '#DADA55',
+ textColor: '#FFFFFF',
+ lockOnMerge: true,
+ lock_on_merge: true,
+};
+
export const mockLabels = [
mockRegularLabel,
mockScopedLabel,
diff --git a/spec/frontend/sidebar/components/reviewers/sidebar_reviewers_spec.js b/spec/frontend/sidebar/components/reviewers/sidebar_reviewers_spec.js
index a221d28704b..ae31e60254f 100644
--- a/spec/frontend/sidebar/components/reviewers/sidebar_reviewers_spec.js
+++ b/spec/frontend/sidebar/components/reviewers/sidebar_reviewers_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import axios from 'axios';
import AxiosMockAdapter from 'axios-mock-adapter';
import VueApollo from 'vue-apollo';
@@ -8,8 +8,11 @@ import SidebarReviewers from '~/sidebar/components/reviewers/sidebar_reviewers.v
import SidebarService from '~/sidebar/services/sidebar_service';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarStore from '~/sidebar/stores/sidebar_store';
+import { fetchUserCounts } from '~/super_sidebar/user_counts_fetch';
import Mock from '../../mock_data';
+jest.mock('~/super_sidebar/user_counts_fetch');
+
Vue.use(VueApollo);
describe('sidebar reviewers', () => {
@@ -39,7 +42,7 @@ describe('sidebar reviewers', () => {
axiosMock = new AxiosMockAdapter(axios);
mediator = new SidebarMediator(Mock.mediator);
- jest.spyOn(mediator, 'saveReviewers');
+ jest.spyOn(mediator, 'saveReviewers').mockResolvedValue({});
jest.spyOn(mediator, 'addSelfReview');
});
@@ -60,6 +63,17 @@ describe('sidebar reviewers', () => {
expect(mediator.saveReviewers).toHaveBeenCalled();
});
+ it('re-fetches user counts after saving reviewers', async () => {
+ createComponent();
+
+ expect(fetchUserCounts).not.toHaveBeenCalled();
+
+ wrapper.vm.saveReviewers();
+ await nextTick();
+
+ expect(fetchUserCounts).toHaveBeenCalled();
+ });
+
it('calls the mediator when "reviewBySelf" method is called', () => {
createComponent();
diff --git a/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js b/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
index 66bc1f393ae..d7a5e4ba3ba 100644
--- a/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
+++ b/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
@@ -4,7 +4,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import ReviewerAvatarLink from '~/sidebar/components/reviewers/reviewer_avatar_link.vue';
import UncollapsedReviewerList from '~/sidebar/components/reviewers/uncollapsed_reviewer_list.vue';
-const userDataMock = ({ approved = false } = {}) => ({
+const userDataMock = ({ approved = false, reviewState = 'UNREVIEWED' } = {}) => ({
id: 1,
name: 'Root',
state: 'active',
@@ -16,6 +16,7 @@ const userDataMock = ({ approved = false } = {}) => ({
canUpdate: true,
reviewed: true,
approved,
+ reviewState,
},
});
@@ -204,4 +205,28 @@ describe('UncollapsedReviewerList component', () => {
);
});
});
+
+ describe('reviewer state icons', () => {
+ it.each`
+ reviewState | approved | icon
+ ${'UNREVIEWED'} | ${false} | ${'dotted-circle'}
+ ${'REVIEWED'} | ${true} | ${'status-success'}
+ ${'REVIEWED'} | ${false} | ${'comment'}
+ ${'REQUESTED_CHANGES'} | ${false} | ${'status-alert'}
+ `(
+ 'renders $icon for reviewState:$reviewState and approved:$approved',
+ ({ reviewState, approved, icon }) => {
+ const user = userDataMock({ approved, reviewState });
+
+ createComponent(
+ {
+ users: [user],
+ },
+ { mrRequestChanges: true },
+ );
+
+ expect(wrapper.find('[data-testid="reviewer-state-icon"]').props('name')).toBe(icon);
+ },
+ );
+ });
});
diff --git a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
index c1c3c1fea91..f3709e67037 100644
--- a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
+++ b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
@@ -1,11 +1,11 @@
import { GlLink, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
@@ -97,59 +97,56 @@ describe('SidebarDropdownWidget', () => {
...requestHandlers,
]);
- wrapper = extendedWrapper(
- mount(SidebarDropdownWidget, {
- provide: { canUpdate: true },
- apolloProvider: mockApollo,
- propsData: {
- workspacePath: mockIssue.projectPath,
- attrWorkspacePath: mockIssue.projectPath,
- iid: mockIssue.iid,
- issuableType: TYPE_ISSUE,
- issuableAttribute: IssuableAttributeType.Milestone,
- },
- attachTo: document.body,
- }),
- );
+ wrapper = mountExtended(SidebarDropdownWidget, {
+ provide: { canUpdate: true },
+ apolloProvider: mockApollo,
+ propsData: {
+ workspacePath: mockIssue.projectPath,
+ attrWorkspacePath: mockIssue.projectPath,
+ iid: mockIssue.iid,
+ issuableType: TYPE_ISSUE,
+ issuableAttribute: IssuableAttributeType.Milestone,
+ },
+ attachTo: document.body,
+ });
await waitForApollo();
};
const createComponent = ({ data = {}, mutationPromise = mutationSuccess, queries = {} } = {}) => {
- wrapper = extendedWrapper(
- shallowMount(SidebarDropdownWidget, {
- provide: { canUpdate: true },
- data() {
- return data;
- },
- propsData: {
- workspacePath: '',
- attrWorkspacePath: '',
- iid: '',
- issuableType: TYPE_ISSUE,
- issuableAttribute: IssuableAttributeType.Milestone,
- },
- mocks: {
- $apollo: {
- mutate: mutationPromise(),
- queries: {
- issuable: { loading: false },
- attributesList: { loading: false },
- ...queries,
- },
+ wrapper = shallowMountExtended(SidebarDropdownWidget, {
+ provide: { canUpdate: true },
+ data() {
+ return data;
+ },
+ propsData: {
+ workspacePath: '',
+ attrWorkspacePath: '',
+ iid: '',
+ issuableType: TYPE_ISSUE,
+ issuableAttribute: IssuableAttributeType.Milestone,
+ },
+ mocks: {
+ $apollo: {
+ mutate: mutationPromise(),
+ queries: {
+ issuable: { loading: false },
+ attributesList: { loading: false },
+ ...queries,
},
},
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
- stubs: {
- SidebarEditableItem,
- GlSearchBoxByType,
- },
- }),
- );
-
- wrapper.vm.$refs.dropdown.show = jest.fn();
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ stubs: {
+ SidebarEditableItem,
+ GlSearchBoxByType,
+ SidebarDropdown: stubComponent(SidebarDropdown, {
+ methods: { show: jest.fn() },
+ }),
+ },
+ });
};
describe('when not editing', () => {
diff --git a/spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js b/spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js
index 657fb52d62c..37d7b3b6781 100644
--- a/spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/set_time_estimate_form_spec.js
@@ -6,6 +6,7 @@ import setIssueTimeEstimateWithoutErrors from 'test_fixtures/graphql/issue_set_t
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import { stubComponent } from 'helpers/stub_component';
import SetTimeEstimateForm from '~/sidebar/components/time_tracking/set_time_estimate_form.vue';
import issueSetTimeEstimateMutation from '~/sidebar/queries/issue_set_time_estimate.mutation.graphql';
@@ -75,10 +76,13 @@ describe('Set Time Estimate Form', () => {
timeTracking,
},
apolloProvider: createMockApollo([[issueSetTimeEstimateMutation, mutationResolverMock]]),
+ stubs: {
+ GlModal: stubComponent(GlModal, {
+ methods: { close: modalCloseMock },
+ }),
+ },
});
- wrapper.vm.$refs.modal.close = modalCloseMock;
-
findModal().vm.$emit('show');
await nextTick();
};
diff --git a/spec/frontend/sidebar/sidebar_mediator_spec.js b/spec/frontend/sidebar/sidebar_mediator_spec.js
index 9c12088216b..4dc285fc3c8 100644
--- a/spec/frontend/sidebar/sidebar_mediator_spec.js
+++ b/spec/frontend/sidebar/sidebar_mediator_spec.js
@@ -9,7 +9,6 @@ import Mock from './mock_data';
jest.mock('~/alert');
jest.mock('~/vue_shared/plugins/global_toast');
-jest.mock('~/commons/nav/user_merge_requests');
describe('Sidebar mediator', () => {
const { mediator: mediatorMockData } = Mock;
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index 92511acc4f8..4a42b7168a3 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -22,7 +22,7 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
/>
</div>
<div
- class="gfm-form gl-overflow-hidden js-expanded js-vue-markdown-field md-area position-relative"
+ class="gfm-form js-expanded js-vue-markdown-field md-area position-relative"
data-uploads-path=""
>
<markdown-header-stub
diff --git a/spec/frontend/snippets/components/snippet_blob_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
index b699e056576..53993921621 100644
--- a/spec/frontend/snippets/components/snippet_blob_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
@@ -102,10 +102,20 @@ describe('Snippet Blob Edit component', () => {
describe('with unloaded blob and JSON content', () => {
beforeEach(() => {
+ jest.spyOn(axios, 'get');
axiosMock.onGet(TEST_FULL_PATH).reply(HTTP_STATUS_OK, TEST_JSON_CONTENT);
createComponent();
});
+ it('makes an API request for the blob content', () => {
+ const expectedConfig = {
+ transformResponse: [expect.any(Function)],
+ headers: { 'Cache-Control': 'no-cache' },
+ };
+
+ expect(axios.get).toHaveBeenCalledWith(TEST_FULL_PATH, expectedConfig);
+ });
+
// This checks against this issue https://gitlab.com/gitlab-org/gitlab/-/issues/241199
it('emits raw content', async () => {
await waitForPromises();
diff --git a/spec/frontend/snippets/components/snippet_title_spec.js b/spec/frontend/snippets/components/snippet_title_spec.js
index 0a3b57c9244..9e6a30885d4 100644
--- a/spec/frontend/snippets/components/snippet_title_spec.js
+++ b/spec/frontend/snippets/components/snippet_title_spec.js
@@ -1,71 +1,104 @@
-import { GlSprintf } from '@gitlab/ui';
+import { GlSprintf, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import SnippetDescription from '~/snippets/components/snippet_description_view.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import SnippetTitle from '~/snippets/components/snippet_title.vue';
-describe('Snippet header component', () => {
+describe('Snippet title component', () => {
let wrapper;
const title = 'The property of Thor';
const description = 'Do not touch this hammer';
const descriptionHtml = `<h2>${description}</h2>`;
- const snippet = {
- snippet: {
- title,
- description,
- descriptionHtml,
- },
- };
-
- function createComponent({ props = snippet } = {}) {
- const defaultProps = { ...props };
+ function createComponent({ propsData = {} } = {}) {
wrapper = shallowMount(SnippetTitle, {
propsData: {
- ...defaultProps,
+ snippet: {
+ title,
+ description,
+ descriptionHtml,
+ },
+ ...propsData,
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
},
});
}
- it('renders itself', () => {
- createComponent();
- expect(wrapper.find('.snippet-header').exists()).toBe(true);
- });
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findTooltip = () => getBinding(findIcon().element, 'gl-tooltip');
- it('renders snippets title and description', () => {
- createComponent();
+ describe('default state', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- expect(wrapper.text().trim()).toContain(title);
- expect(wrapper.findComponent(SnippetDescription).props('description')).toBe(descriptionHtml);
- });
+ it('renders itself', () => {
+ expect(wrapper.find('.snippet-header').exists()).toBe(true);
+ });
- it('does not render recent changes time stamp if there were no updates', () => {
- createComponent();
- expect(wrapper.findComponent(GlSprintf).exists()).toBe(false);
- });
+ it('does not render spam icon when author is not banned', () => {
+ expect(findIcon().exists()).toBe(false);
+ });
- it('does not render recent changes time stamp if the time for creation and updates match', () => {
- const props = Object.assign(snippet, {
- snippet: {
- ...snippet.snippet,
- createdAt: '2019-12-16T21:45:36Z',
- updatedAt: '2019-12-16T21:45:36Z',
- },
+ it('renders snippets title and description', () => {
+ expect(wrapper.text().trim()).toContain(title);
+ expect(wrapper.findComponent(SnippetDescription).props('description')).toBe(descriptionHtml);
});
- createComponent({ props });
- expect(wrapper.findComponent(GlSprintf).exists()).toBe(false);
- });
+ it('does not render recent changes time stamp if there were no updates', () => {
+ expect(wrapper.findComponent(GlSprintf).exists()).toBe(false);
+ });
- it('renders translated string with most recent changes timestamp if changes were made', () => {
- const props = Object.assign(snippet, {
- snippet: {
- ...snippet.snippet,
- createdAt: '2019-12-16T21:45:36Z',
- updatedAt: '2019-15-16T21:45:36Z',
- },
+ it('does not render recent changes time stamp if the time for creation and updates match', () => {
+ createComponent({
+ propsData: {
+ snippet: {
+ createdAt: '2019-12-16T21:45:36Z',
+ updatedAt: '2019-12-16T21:45:36Z',
+ },
+ },
+ });
+
+ expect(wrapper.findComponent(GlSprintf).exists()).toBe(false);
+ });
+
+ it('renders translated string with most recent changes timestamp if changes were made', () => {
+ createComponent({
+ propsData: {
+ snippet: {
+ createdAt: '2019-12-16T21:45:36Z',
+ updatedAt: '2019-15-16T21:45:36Z',
+ },
+ },
+ });
+
+ expect(wrapper.findComponent(GlSprintf).exists()).toBe(true);
});
- createComponent({ props });
+ });
+
+ describe('when author is snippet is banned', () => {
+ it('renders spam icon and tooltip when author is banned', () => {
+ createComponent({
+ propsData: {
+ snippet: {
+ hidden: true,
+ },
+ },
+ });
+
+ expect(findIcon().props()).toMatchObject({
+ ariaLabel: 'Hidden',
+ name: 'spam',
+ size: 16,
+ });
- expect(wrapper.findComponent(GlSprintf).exists()).toBe(true);
+ expect(findIcon().attributes('title')).toBe(
+ 'This snippet is hidden because its author has been banned',
+ );
+
+ expect(findTooltip()).toBeDefined();
+ });
});
});
diff --git a/spec/frontend/snippets/test_utils.js b/spec/frontend/snippets/test_utils.js
index 76b03c0aa0d..9d42e9fa26c 100644
--- a/spec/frontend/snippets/test_utils.js
+++ b/spec/frontend/snippets/test_utils.js
@@ -45,6 +45,7 @@ export const createGQLSnippet = () => ({
message: '',
},
},
+ hidden: false,
});
export const createGQLSnippetsQueryResponse = (snippets) => ({
diff --git a/spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js b/spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js
index e63768a03c0..38e1baabf41 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/frequent_groups_spec.js
@@ -1,14 +1,32 @@
import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
import FrequentItems from '~/super_sidebar/components/global_search/components/frequent_items.vue';
import FrequentGroups from '~/super_sidebar/components/global_search/components/frequent_groups.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import currentUserFrecentGroupsQuery from '~/super_sidebar/graphql/queries/current_user_frecent_groups.query.graphql';
+import waitForPromises from 'helpers/wait_for_promises';
+import { frecentGroupsMock } from '../../../mock_data';
+
+Vue.use(VueApollo);
describe('FrequentlyVisitedGroups', () => {
let wrapper;
const groupsPath = '/mock/group/path';
+ const currentUserFrecentGroupsQueryHandler = jest.fn().mockResolvedValue({
+ data: {
+ frecentGroups: frecentGroupsMock,
+ },
+ });
const createComponent = (options) => {
+ const mockApollo = createMockApollo([
+ [currentUserFrecentGroupsQuery, currentUserFrecentGroupsQueryHandler],
+ ]);
+
wrapper = shallowMount(FrequentGroups, {
+ apolloProvider: mockApollo,
provide: {
groupsPath,
},
@@ -28,19 +46,25 @@ describe('FrequentlyVisitedGroups', () => {
expect(findFrequentItems().props()).toMatchObject({
emptyStateText: 'Groups you visit often will appear here.',
groupName: 'Frequently visited groups',
- maxItems: 3,
- storageKey: null,
viewAllItemsIcon: 'group',
viewAllItemsText: 'View all my groups',
viewAllItemsPath: groupsPath,
});
});
- it('with a user, passes a storage key string to FrequentItems', () => {
- gon.current_username = 'test_user';
+ it('loads frecent groups', () => {
+ createComponent();
+
+ expect(currentUserFrecentGroupsQueryHandler).toHaveBeenCalled();
+ expect(findFrequentItems().props('loading')).toBe(true);
+ });
+
+ it('passes fetched groups to FrequentItems', async () => {
createComponent();
+ await waitForPromises();
- expect(findFrequentItems().props('storageKey')).toBe('test_user/frequent-groups');
+ expect(findFrequentItems().props('items')).toEqual(frecentGroupsMock);
+ expect(findFrequentItems().props('loading')).toBe(false);
});
it('passes attrs to FrequentItems', () => {
diff --git a/spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js b/spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js
index aae1fc543f9..b48a9ca6457 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/frequent_item_spec.js
@@ -28,7 +28,6 @@ describe('FrequentlyVisitedItem', () => {
};
const findProjectAvatar = () => wrapper.findComponent(ProjectAvatar);
- const findRemoveButton = () => wrapper.findByRole('button');
const findSubtitle = () => wrapper.findByTestId('subtitle');
beforeEach(() => {
@@ -53,46 +52,4 @@ describe('FrequentlyVisitedItem', () => {
await wrapper.setProps({ item: { ...mockItem, subtitle: null } });
expect(findSubtitle().exists()).toBe(false);
});
-
- describe('clicking the remove button', () => {
- const bubbledClickSpy = jest.fn();
- const clickSpy = jest.fn();
-
- beforeEach(() => {
- wrapper.element.addEventListener('click', bubbledClickSpy);
- const button = findRemoveButton();
- button.element.addEventListener('click', clickSpy);
- button.trigger('click');
- });
-
- it('emits a remove event on clicking the remove button', () => {
- expect(wrapper.emitted('remove')).toEqual([[mockItem]]);
- });
-
- it('stops the native event from bubbling and prevents its default behavior', () => {
- expect(bubbledClickSpy).not.toHaveBeenCalled();
- expect(clickSpy.mock.calls[0][0].defaultPrevented).toBe(true);
- });
- });
-
- describe('pressing enter on the remove button', () => {
- const bubbledKeydownSpy = jest.fn();
- const keydownSpy = jest.fn();
-
- beforeEach(() => {
- wrapper.element.addEventListener('keydown', bubbledKeydownSpy);
- const button = findRemoveButton();
- button.element.addEventListener('keydown', keydownSpy);
- button.trigger('keydown.enter');
- });
-
- it('emits a remove event on clicking the remove button', () => {
- expect(wrapper.emitted('remove')).toEqual([[mockItem]]);
- });
-
- it('stops the native event from bubbling and prevents its default behavior', () => {
- expect(bubbledKeydownSpy).not.toHaveBeenCalled();
- expect(keydownSpy.mock.calls[0][0].defaultPrevented).toBe(true);
- });
- });
});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js b/spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js
index 4700e9c7e10..7876dd92701 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/frequent_items_spec.js
@@ -2,28 +2,14 @@ import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem, GlIcon } from '@gi
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import GlobalSearchFrequentItems from '~/super_sidebar/components/global_search/components/frequent_items.vue';
import FrequentItem from '~/super_sidebar/components/global_search/components/frequent_item.vue';
-import { getItemsFromLocalStorage, removeItemFromLocalStorage } from '~/super_sidebar/utils';
-import { cachedFrequentProjects } from 'jest/super_sidebar/mock_data';
-
-jest.mock('~/super_sidebar/utils', () => {
- const original = jest.requireActual('~/super_sidebar/utils');
-
- return {
- ...original,
- getItemsFromLocalStorage: jest.fn(),
- removeItemFromLocalStorage: jest.fn(),
- };
-});
+import FrequentItemSkeleton from '~/super_sidebar/components/global_search/components/frequent_item_skeleton.vue';
+import { frecentGroupsMock } from 'jest/super_sidebar/mock_data';
describe('FrequentlyVisitedItems', () => {
let wrapper;
- const storageKey = 'mockStorageKey';
- const mockStoredItems = JSON.parse(cachedFrequentProjects);
const mockProps = {
emptyStateText: 'mock empty state text',
groupName: 'mock group name',
- maxItems: 42,
- storageKey,
viewAllItemsText: 'View all items',
viewAllItemsIcon: 'question-o',
viewAllItemsPath: '/mock/all_items',
@@ -42,118 +28,97 @@ describe('FrequentlyVisitedItems', () => {
};
const findItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
+ const findSkeleton = () => wrapper.findComponent(FrequentItemSkeleton);
const findItemRenderer = (root) => root.findComponent(FrequentItem);
- const setStoredItems = (items) => {
- getItemsFromLocalStorage.mockReturnValue(items);
- };
+ describe('common behavior', () => {
+ beforeEach(() => {
+ createComponent({
+ items: frecentGroupsMock,
+ });
+ });
- beforeEach(() => {
- setStoredItems(mockStoredItems);
+ it('renders the group name', () => {
+ expect(wrapper.text()).toContain(mockProps.groupName);
+ });
+
+ it('renders the view all items link', () => {
+ const lastItem = findItems().at(-1);
+ expect(lastItem.props('item')).toMatchObject({
+ text: mockProps.viewAllItemsText,
+ href: mockProps.viewAllItemsPath,
+ });
+
+ const icon = lastItem.findComponent(GlIcon);
+ expect(icon.props('name')).toBe(mockProps.viewAllItemsIcon);
+ });
});
- describe('without a storage key', () => {
+ describe('while items are being fetched', () => {
beforeEach(() => {
- createComponent({ storageKey: null });
+ createComponent({
+ loading: true,
+ });
});
- it('does not render anything', () => {
- expect(wrapper.html()).toBe('');
+ it('shows the loading state', () => {
+ expect(findSkeleton().exists()).toBe(true);
});
- it('emits a nothing-to-render event', () => {
- expect(wrapper.emitted('nothing-to-render')).toEqual([[]]);
+ it('does not show the empty state', () => {
+ expect(wrapper.text()).not.toContain(mockProps.emptyStateText);
});
});
- describe('with a storageKey', () => {
+ describe('when there are no items', () => {
beforeEach(() => {
createComponent();
});
- describe('common behavior', () => {
- it('calls getItemsFromLocalStorage', () => {
- expect(getItemsFromLocalStorage).toHaveBeenCalledWith({
- storageKey,
- maxItems: mockProps.maxItems,
- });
- });
-
- it('renders the group name', () => {
- expect(wrapper.text()).toContain(mockProps.groupName);
- });
-
- it('renders the view all items link', () => {
- const lastItem = findItems().at(-1);
- expect(lastItem.props('item')).toMatchObject({
- text: mockProps.viewAllItemsText,
- href: mockProps.viewAllItemsPath,
- });
-
- const icon = lastItem.findComponent(GlIcon);
- expect(icon.props('name')).toBe(mockProps.viewAllItemsIcon);
- });
+ it('does not show the loading state', () => {
+ expect(findSkeleton().exists()).toBe(false);
});
- describe('with stored items', () => {
- it('renders the items', () => {
- const items = findItems();
-
- mockStoredItems.forEach((storedItem, index) => {
- const dropdownItem = items.at(index);
-
- // Check GlDisclosureDropdownItem's item has the right structure
- expect(dropdownItem.props('item')).toMatchObject({
- text: storedItem.name,
- href: storedItem.webUrl,
- });
-
- // Check FrequentItem's item has the right structure
- expect(findItemRenderer(dropdownItem).props('item')).toMatchObject({
- id: storedItem.id,
- title: storedItem.name,
- subtitle: expect.any(String),
- avatar: storedItem.avatarUrl,
- });
- });
- });
+ it('shows the empty state', () => {
+ expect(wrapper.text()).toContain(mockProps.emptyStateText);
+ });
+ });
- it('does not render the empty state text', () => {
- expect(wrapper.text()).not.toContain('mock empty state text');
+ describe('when there are items', () => {
+ beforeEach(() => {
+ createComponent({
+ items: frecentGroupsMock,
});
+ });
- describe('removing an item', () => {
- let itemToRemove;
+ it('renders the items', () => {
+ const items = findItems();
- beforeEach(() => {
- const itemRenderer = findItemRenderer(findItems().at(0));
- itemToRemove = itemRenderer.props('item');
- itemRenderer.vm.$emit('remove', itemToRemove);
- });
+ frecentGroupsMock.forEach((item, index) => {
+ const dropdownItem = items.at(index);
- it('calls removeItemFromLocalStorage when an item emits a remove event', () => {
- expect(removeItemFromLocalStorage).toHaveBeenCalledWith({
- storageKey,
- item: itemToRemove,
- });
+ // Check GlDisclosureDropdownItem's item has the right structure
+ expect(dropdownItem.props('item')).toMatchObject({
+ text: item.name,
+ href: item.webUrl,
});
- it('no longer renders that item', () => {
- const renderedItemTexts = findItems().wrappers.map((item) => item.props('item').text);
- expect(renderedItemTexts).not.toContain(itemToRemove.text);
+ // Check FrequentItem's item has the right structure
+ expect(findItemRenderer(dropdownItem).props('item')).toMatchObject({
+ id: item.id,
+ title: item.name,
+ subtitle: expect.any(String),
+ avatar: item.avatarUrl,
});
});
});
- });
- describe('with no stored items', () => {
- beforeEach(() => {
- setStoredItems([]);
- createComponent();
+ it('does not show the loading state', () => {
+ expect(findSkeleton().exists()).toBe(false);
});
- it('renders the empty state text', () => {
- expect(wrapper.text()).toContain(mockProps.emptyStateText);
+ it('does not show the empty state', () => {
+ expect(wrapper.text()).not.toContain(mockProps.emptyStateText);
});
});
});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js b/spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js
index 7554c123574..b7123f295f7 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/frequent_projects_spec.js
@@ -1,14 +1,32 @@
import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
import FrequentItems from '~/super_sidebar/components/global_search/components/frequent_items.vue';
import FrequentProjects from '~/super_sidebar/components/global_search/components/frequent_projects.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import currentUserFrecentProjectsQuery from '~/super_sidebar/graphql/queries/current_user_frecent_projects.query.graphql';
+import waitForPromises from 'helpers/wait_for_promises';
+import { frecentProjectsMock } from '../../../mock_data';
+
+Vue.use(VueApollo);
describe('FrequentlyVisitedProjects', () => {
let wrapper;
const projectsPath = '/mock/project/path';
+ const currentUserFrecentProjectsQueryHandler = jest.fn().mockResolvedValue({
+ data: {
+ frecentProjects: frecentProjectsMock,
+ },
+ });
const createComponent = (options) => {
+ const mockApollo = createMockApollo([
+ [currentUserFrecentProjectsQuery, currentUserFrecentProjectsQueryHandler],
+ ]);
+
wrapper = shallowMount(FrequentProjects, {
+ apolloProvider: mockApollo,
provide: {
projectsPath,
},
@@ -28,19 +46,25 @@ describe('FrequentlyVisitedProjects', () => {
expect(findFrequentItems().props()).toMatchObject({
emptyStateText: 'Projects you visit often will appear here.',
groupName: 'Frequently visited projects',
- maxItems: 5,
- storageKey: null,
viewAllItemsIcon: 'project',
viewAllItemsText: 'View all my projects',
viewAllItemsPath: projectsPath,
});
});
- it('with a user, passes a storage key string to FrequentItems', () => {
- gon.current_username = 'test_user';
+ it('loads frecent projects', () => {
+ createComponent();
+
+ expect(currentUserFrecentProjectsQueryHandler).toHaveBeenCalled();
+ expect(findFrequentItems().props('loading')).toBe(true);
+ });
+
+ it('passes fetched projects to FrequentItems', async () => {
createComponent();
+ await waitForPromises();
- expect(findFrequentItems().props('storageKey')).toBe('test_user/frequent-projects');
+ expect(findFrequentItems().props('items')).toEqual(frecentProjectsMock);
+ expect(findFrequentItems().props('loading')).toBe(false);
});
it('passes attrs to FrequentItems', () => {
diff --git a/spec/frontend/super_sidebar/components/help_center_spec.js b/spec/frontend/super_sidebar/components/help_center_spec.js
index 39537b65fa5..8e9e3e8ba20 100644
--- a/spec/frontend/super_sidebar/components/help_center_spec.js
+++ b/spec/frontend/super_sidebar/components/help_center_spec.js
@@ -94,7 +94,6 @@ describe('HelpCenter component', () => {
it('passes custom offset to the dropdown', () => {
expect(findDropdown().props('dropdownOffset')).toEqual({
- crossAxis: -4,
mainAxis: 4,
});
});
@@ -169,14 +168,13 @@ describe('HelpCenter component', () => {
describe('showWhatsNew', () => {
beforeEach(() => {
- beforeEach(() => {
- createWrapper({ ...sidebarData, show_version_check: true });
- });
+ createWrapper({ ...sidebarData, show_version_check: true });
+
findButton("What's new 5").click();
});
it('shows the "What\'s new" slideout', () => {
- expect(toggleWhatsNewDrawer).toHaveBeenCalledWith(expect.any(Object));
+ expect(toggleWhatsNewDrawer).toHaveBeenCalledWith(sidebarData.whats_new_version_digest);
});
it('shows the existing "What\'s new" slideout instance on subsequent clicks', () => {
diff --git a/spec/frontend/super_sidebar/components/nav_item_link_spec.js b/spec/frontend/super_sidebar/components/nav_item_link_spec.js
index 5cc1bd01d0f..59fa6d022ae 100644
--- a/spec/frontend/super_sidebar/components/nav_item_link_spec.js
+++ b/spec/frontend/super_sidebar/components/nav_item_link_spec.js
@@ -29,7 +29,7 @@ describe('NavItemLink component', () => {
expect(wrapper.attributes()).toEqual({
href: '/foo',
- class: 'gl-bg-t-gray-a-08',
+ class: 'super-sidebar-nav-item-current',
'aria-current': 'page',
});
});
diff --git a/spec/frontend/super_sidebar/components/nav_item_router_link_spec.js b/spec/frontend/super_sidebar/components/nav_item_router_link_spec.js
index a7ca56325fe..dfae5e96cd8 100644
--- a/spec/frontend/super_sidebar/components/nav_item_router_link_spec.js
+++ b/spec/frontend/super_sidebar/components/nav_item_router_link_spec.js
@@ -45,7 +45,9 @@ describe('NavItemRouterLink component', () => {
routerLinkSlotProps: { isActive: true },
});
- expect(wrapper.findComponent(RouterLinkStub).props('activeClass')).toBe('gl-bg-t-gray-a-08');
+ expect(wrapper.findComponent(RouterLinkStub).props('activeClass')).toBe(
+ 'super-sidebar-nav-item-current',
+ );
expect(wrapper.attributes()).toEqual({
href: '/foo',
'aria-current': 'page',
diff --git a/spec/frontend/super_sidebar/components/scroll_scrim_spec.js b/spec/frontend/super_sidebar/components/scroll_scrim_spec.js
new file mode 100644
index 00000000000..ff1e9968f9b
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/scroll_scrim_spec.js
@@ -0,0 +1,60 @@
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ScrollScrim from '~/super_sidebar/components/scroll_scrim.vue';
+import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
+
+describe('ScrollScrim', () => {
+ let wrapper;
+ const { trigger: triggerIntersection } = useMockIntersectionObserver();
+
+ const createWrapper = () => {
+ wrapper = shallowMountExtended(ScrollScrim, {});
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ const findTopBoundary = () => wrapper.vm.$refs['top-boundary'];
+ const findBottomBoundary = () => wrapper.vm.$refs['bottom-boundary'];
+
+ describe('top scrim', () => {
+ describe('when top boundary is visible', () => {
+ it('does not show', async () => {
+ triggerIntersection(findTopBoundary(), { entry: { isIntersecting: true } });
+ await nextTick();
+
+ expect(wrapper.classes()).not.toContain('top-scrim-visible');
+ });
+ });
+
+ describe('when top boundary is not visible', () => {
+ it('does show', async () => {
+ triggerIntersection(findTopBoundary(), { entry: { isIntersecting: false } });
+ await nextTick();
+
+ expect(wrapper.classes()).toContain('top-scrim-visible');
+ });
+ });
+ });
+
+ describe('bottom scrim', () => {
+ describe('when bottom boundary is visible', () => {
+ it('does not show', async () => {
+ triggerIntersection(findBottomBoundary(), { entry: { isIntersecting: true } });
+ await nextTick();
+
+ expect(wrapper.classes()).not.toContain('bottom-scrim-visible');
+ });
+ });
+
+ describe('when bottom boundary is not visible', () => {
+ it('does show', async () => {
+ triggerIntersection(findBottomBoundary(), { entry: { isIntersecting: false } });
+ await nextTick();
+
+ expect(wrapper.classes()).toContain('bottom-scrim-visible');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/super_sidebar_spec.js b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
index 92736b99e14..9718cb7ad15 100644
--- a/spec/frontend/super_sidebar/components/super_sidebar_spec.js
+++ b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
@@ -1,4 +1,7 @@
import { nextTick } from 'vue';
+import { GlBreakpointInstance as bp, breakpoints } from '@gitlab/ui/dist/utils';
+import sidebarEventHub from '~/super_sidebar/event_hub';
+import ExtraInfo from 'jh_else_ce/super_sidebar/components/extra_info.vue';
import { Mousetrap } from '~/lib/mousetrap';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SuperSidebar from '~/super_sidebar/components/super_sidebar.vue';
@@ -23,6 +26,7 @@ import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { trackContextAccess } from '~/super_sidebar/utils';
import { sidebarData as mockSidebarData, loggedOutSidebarData } from '../mock_data';
+const { lg, xl } = breakpoints;
const initialSidebarState = { ...sidebarState };
jest.mock('~/super_sidebar/super_sidebar_collapsed_state_manager');
@@ -56,6 +60,8 @@ describe('SuperSidebar component', () => {
const findTrialStatusWidget = () => wrapper.findByTestId(trialStatusWidgetStubTestId);
const findTrialStatusPopover = () => wrapper.findByTestId(trialStatusPopoverStubTestId);
const findSidebarMenu = () => wrapper.findComponent(SidebarMenu);
+ const findAdminLink = () => wrapper.findByTestId('sidebar-admin-link');
+ const findContextHeader = () => wrapper.findComponent('#super-sidebar-context-header');
let trackingSpy = null;
const createWrapper = ({
@@ -128,6 +134,11 @@ describe('SuperSidebar component', () => {
expect(findHelpCenter().props('sidebarData')).toBe(mockSidebarData);
});
+ it('renders extra info section', () => {
+ createWrapper();
+ expect(wrapper.findComponent(ExtraInfo).exists()).toBe(true);
+ });
+
it('does not render SidebarMenu when items are empty', () => {
createWrapper();
expect(findSidebarMenu().exists()).toBe(false);
@@ -207,6 +218,15 @@ describe('SuperSidebar component', () => {
expect(wrapper.text()).toContain('Your work');
});
+ it('handles event toggle-menu-header correctly', async () => {
+ createWrapper();
+
+ sidebarEventHub.$emit('toggle-menu-header', false);
+
+ await nextTick();
+ expect(findContextHeader().exists()).toBe(false);
+ });
+
describe('item access tracking', () => {
it('does not track anything if logged out', () => {
createWrapper({ sidebarData: loggedOutSidebarData });
@@ -299,8 +319,8 @@ describe('SuperSidebar component', () => {
createWrapper();
});
- it('allows overflow', () => {
- expect(findNavContainer().classes()).toContain('gl-overflow-auto');
+ it('allows overflow with scroll scrim', () => {
+ expect(findNavContainer().element.tagName).toContain('SCROLL-SCRIM');
});
});
@@ -314,4 +334,46 @@ describe('SuperSidebar component', () => {
expect(findTrialStatusPopover().exists()).toBe(true);
});
});
+
+ describe('keyboard interactivity', () => {
+ it('does not bind keydown events on screens xl and above', async () => {
+ jest.spyOn(document, 'addEventListener');
+ jest.spyOn(bp, 'windowWidth').mockReturnValue(xl);
+ createWrapper();
+
+ isCollapsed.mockReturnValue(false);
+ await nextTick();
+
+ expect(document.addEventListener).not.toHaveBeenCalled();
+ });
+
+ it('binds keydown events on screens below xl', () => {
+ jest.spyOn(document, 'addEventListener');
+ jest.spyOn(bp, 'windowWidth').mockReturnValue(lg);
+ createWrapper();
+
+ expect(document.addEventListener).toHaveBeenCalledWith('keydown', wrapper.vm.focusTrap);
+ });
+ });
+
+ describe('link to Admin area', () => {
+ describe('when user is admin', () => {
+ it('renders', () => {
+ createWrapper({
+ sidebarData: {
+ ...mockSidebarData,
+ is_admin: true,
+ },
+ });
+ expect(findAdminLink().attributes('href')).toBe(mockSidebarData.admin_url);
+ });
+ });
+
+ describe('when user is not admin', () => {
+ it('renders', () => {
+ createWrapper();
+ expect(findAdminLink().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/user_menu_spec.js b/spec/frontend/super_sidebar/components/user_menu_spec.js
index 45a60fce00a..4af3247693b 100644
--- a/spec/frontend/super_sidebar/components/user_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/user_menu_spec.js
@@ -1,8 +1,10 @@
import { GlAvatar, GlDisclosureDropdown } from '@gitlab/ui';
+import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import UserMenu from '~/super_sidebar/components/user_menu.vue';
import UserMenuProfileItem from '~/super_sidebar/components/user_menu_profile_item.vue';
+import SetStatusModal from '~/set_status_modal/set_status_modal_wrapper.vue';
import { mockTracking } from 'helpers/tracking_helper';
import PersistentUserCallout from '~/persistent_user_callout';
import { userMenuMockData, userMenuMockStatus, userMenuMockPipelineMinutes } from '../mock_data';
@@ -13,6 +15,7 @@ describe('UserMenu component', () => {
const GlEmoji = { template: '<img/>' };
const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findSetStatusModal = () => wrapper.findComponent(SetStatusModal);
const showDropdown = () => findDropdown().vm.$emit('shown');
const closeDropdownSpy = jest.fn();
@@ -28,6 +31,7 @@ describe('UserMenu component', () => {
stubs: {
GlEmoji,
GlAvatar: true,
+ SetStatusModal: stubComponent(SetStatusModal),
...stubs,
},
provide: {
@@ -74,6 +78,20 @@ describe('UserMenu component', () => {
});
});
+ it('updates avatar url on custom avatar update event', async () => {
+ const url = `${userMenuMockData.avatar_url}-new-avatar`;
+
+ document.dispatchEvent(new CustomEvent('userAvatar:update', { detail: { url } }));
+ await nextTick();
+
+ const avatar = toggle.findComponent(GlAvatar);
+ expect(avatar.exists()).toBe(true);
+ expect(avatar.props()).toMatchObject({
+ entityName: userMenuMockData.name,
+ src: url,
+ });
+ });
+
it('renders screen reader text', () => {
expect(toggle.find('.gl-sr-only').text()).toBe(`${userMenuMockData.name} user’s menu`);
});
@@ -91,31 +109,46 @@ describe('UserMenu component', () => {
describe('User status item', () => {
let item;
- const setItem = ({ can_update, busy, customized, stubs } = {}) => {
- createWrapper({ status: { ...userMenuMockStatus, can_update, busy, customized } }, stubs);
+ const setItem = async ({
+ can_update: canUpdate = false,
+ busy = false,
+ customized = false,
+ stubs,
+ } = {}) => {
+ createWrapper(
+ { status: { ...userMenuMockStatus, can_update: canUpdate, busy, customized } },
+ stubs,
+ );
+ // Mock mounting the modal if we can update
+ if (canUpdate) {
+ expect(wrapper.vm.setStatusModalReady).toEqual(false);
+ findSetStatusModal().vm.$emit('mounted');
+ await nextTick();
+ expect(wrapper.vm.setStatusModalReady).toEqual(true);
+ }
item = wrapper.findByTestId('status-item');
};
describe('When user cannot update the status', () => {
- it('does not render the status menu item', () => {
- setItem();
+ it('does not render the status menu item', async () => {
+ await setItem();
expect(item.exists()).toBe(false);
});
});
describe('When user can update the status', () => {
- it('renders the status menu item', () => {
- setItem({ can_update: true });
+ it('renders the status menu item', async () => {
+ await setItem({ can_update: true });
expect(item.exists()).toBe(true);
+ expect(item.find('button').attributes()).toMatchObject({
+ 'data-track-property': 'nav_user_menu',
+ 'data-track-action': 'click_link',
+ 'data-track-label': 'user_edit_status',
+ });
});
- it('should set the CSS class for triggering status update modal', () => {
- setItem({ can_update: true });
- expect(item.find('.js-set-status-modal-trigger').exists()).toBe(true);
- });
-
- it('should close the dropdown when status modal opened', () => {
- setItem({
+ it('should close the dropdown when status modal opened', async () => {
+ await setItem({
can_update: true,
stubs: {
GlDisclosureDropdown: stubComponent(GlDisclosureDropdown, {
@@ -139,57 +172,75 @@ describe('UserMenu component', () => {
${true} | ${true} | ${'Edit status'}
`(
'when busy is "$busy" and customized is "$customized" the label is "$label"',
- ({ busy, customized, label }) => {
- setItem({ can_update: true, busy, customized });
+ async ({ busy, customized, label }) => {
+ await setItem({ can_update: true, busy, customized });
expect(item.text()).toBe(label);
},
);
});
+ });
+ });
+
+ describe('set status modal', () => {
+ describe('when the user cannot update the status', () => {
+ it('should not render the modal', () => {
+ createWrapper({
+ status: { ...userMenuMockStatus, can_update: false },
+ });
- describe('Status update modal wrapper', () => {
- const findModalWrapper = () => wrapper.find('.js-set-status-modal-wrapper');
+ expect(findSetStatusModal().exists()).toBe(false);
+ });
+ });
- it('renders the modal wrapper', () => {
- setItem({ can_update: true });
- expect(findModalWrapper().exists()).toBe(true);
+ describe('when the user can update the status', () => {
+ describe.each`
+ busy | customized
+ ${true} | ${true}
+ ${true} | ${false}
+ ${false} | ${true}
+ `('and the status is busy or customized', ({ busy, customized }) => {
+ it('should pass the current status to the modal', () => {
+ createWrapper({
+ status: { ...userMenuMockStatus, can_update: true, busy, customized },
+ });
+
+ expect(findSetStatusModal().exists()).toBe(true);
+ expect(findSetStatusModal().props()).toMatchObject({
+ defaultEmoji: 'speech_balloon',
+ currentEmoji: userMenuMockStatus.emoji,
+ currentMessage: userMenuMockStatus.message,
+ currentAvailability: userMenuMockStatus.availability,
+ currentClearStatusAfter: userMenuMockStatus.clear_after,
+ });
});
- describe('when user cannot update status', () => {
- it('sets default data attributes', () => {
- setItem({ can_update: true });
- expect(findModalWrapper().attributes()).toMatchObject({
- 'data-current-emoji': '',
- 'data-current-message': '',
- 'data-default-emoji': 'speech_balloon',
- });
+ it('casts falsey values to empty strings', () => {
+ createWrapper({
+ status: { can_update: true, busy, customized },
+ });
+
+ expect(findSetStatusModal().exists()).toBe(true);
+ expect(findSetStatusModal().props()).toMatchObject({
+ defaultEmoji: 'speech_balloon',
+ currentEmoji: '',
+ currentMessage: '',
+ currentAvailability: '',
+ currentClearStatusAfter: '',
});
});
+ });
+
+ describe('and the status is neither busy nor customized', () => {
+ it('should pass an empty status to the modal', () => {
+ createWrapper({
+ status: { ...userMenuMockStatus, can_update: true, busy: false, customized: false },
+ });
- describe.each`
- busy | customized
- ${true} | ${true}
- ${true} | ${false}
- ${false} | ${true}
- ${false} | ${false}
- `(`when user can update status`, ({ busy, customized }) => {
- it(`and ${busy ? 'is busy' : 'is not busy'} and status ${
- customized ? 'is' : 'is not'
- } customized sets user status data attributes`, () => {
- setItem({ can_update: true, busy, customized });
- if (busy || customized) {
- expect(findModalWrapper().attributes()).toMatchObject({
- 'data-current-emoji': userMenuMockStatus.emoji,
- 'data-current-message': userMenuMockStatus.message,
- 'data-current-availability': userMenuMockStatus.availability,
- 'data-current-clear-status-after': userMenuMockStatus.clear_after,
- });
- } else {
- expect(findModalWrapper().attributes()).toMatchObject({
- 'data-current-emoji': '',
- 'data-current-message': '',
- 'data-default-emoji': 'speech_balloon',
- });
- }
+ expect(findSetStatusModal().exists()).toBe(true);
+ expect(findSetStatusModal().props()).toMatchObject({
+ defaultEmoji: 'speech_balloon',
+ currentEmoji: '',
+ currentMessage: '',
});
});
});
diff --git a/spec/frontend/super_sidebar/mock_data.js b/spec/frontend/super_sidebar/mock_data.js
index d2d2faedbf8..fc264ad5e0a 100644
--- a/spec/frontend/super_sidebar/mock_data.js
+++ b/spec/frontend/super_sidebar/mock_data.js
@@ -79,6 +79,8 @@ export const contextSwitcherLinks = [
export const sidebarData = {
is_logged_in: true,
+ is_admin: false,
+ admin_url: '/admin',
current_menu_items: [],
current_context: {},
current_context_header: 'Your work',
@@ -188,6 +190,26 @@ export const userMenuMockData = {
canary_toggle_com_url: 'https://next.gitlab.com',
};
+export const frecentGroupsMock = [
+ {
+ id: 'gid://gitlab/Group/1',
+ name: 'Frecent group 1',
+ namespace: 'Frecent Namespace 1',
+ webUrl: '/frecent-namespace-1/frecent-group-1',
+ avatarUrl: '/uploads/-/avatar1.png',
+ },
+];
+
+export const frecentProjectsMock = [
+ {
+ id: 'gid://gitlab/Project/1',
+ name: 'Frecent project 1',
+ namespace: 'Frecent Namespace 1 / Frecent project 1',
+ webUrl: '/frecent-namespace-1/frecent-project-1',
+ avatarUrl: '/uploads/-/avatar1.png',
+ },
+];
+
export const cachedFrequentProjects = JSON.stringify([
{
id: 1,
@@ -283,3 +305,32 @@ export const cachedFrequentGroups = JSON.stringify([
frequency: 3,
},
]);
+
+export const unsortedFrequentItems = [
+ { id: 1, frequency: 12, lastAccessedOn: 1491400843391 },
+ { id: 2, frequency: 14, lastAccessedOn: 1488240890738 },
+ { id: 3, frequency: 44, lastAccessedOn: 1497675908472 },
+ { id: 4, frequency: 8, lastAccessedOn: 1497979281815 },
+ { id: 5, frequency: 34, lastAccessedOn: 1488089211943 },
+ { id: 6, frequency: 14, lastAccessedOn: 1493517292488 },
+ { id: 7, frequency: 42, lastAccessedOn: 1486815299875 },
+ { id: 8, frequency: 33, lastAccessedOn: 1500762279114 },
+ { id: 10, frequency: 46, lastAccessedOn: 1483251641543 },
+];
+
+/**
+ * This const has a specific order which tests authenticity
+ * of `getTopFrequentItems` method so
+ * DO NOT change order of items in this const.
+ */
+export const sortedFrequentItems = [
+ { id: 10, frequency: 46, lastAccessedOn: 1483251641543 },
+ { id: 3, frequency: 44, lastAccessedOn: 1497675908472 },
+ { id: 7, frequency: 42, lastAccessedOn: 1486815299875 },
+ { id: 5, frequency: 34, lastAccessedOn: 1488089211943 },
+ { id: 8, frequency: 33, lastAccessedOn: 1500762279114 },
+ { id: 6, frequency: 14, lastAccessedOn: 1493517292488 },
+ { id: 2, frequency: 14, lastAccessedOn: 1488240890738 },
+ { id: 1, frequency: 12, lastAccessedOn: 1491400843391 },
+ { id: 4, frequency: 8, lastAccessedOn: 1497979281815 },
+];
diff --git a/spec/frontend/super_sidebar/user_counts_manager_spec.js b/spec/frontend/super_sidebar/user_counts_manager_spec.js
index b5074620195..3b2ee5b0991 100644
--- a/spec/frontend/super_sidebar/user_counts_manager_spec.js
+++ b/spec/frontend/super_sidebar/user_counts_manager_spec.js
@@ -6,6 +6,7 @@ import {
userCounts,
destroyUserCountsManager,
} from '~/super_sidebar/user_counts_manager';
+import { fetchUserCounts } from '~/super_sidebar/user_counts_fetch';
jest.mock('~/api');
@@ -118,15 +119,30 @@ describe('User Merge Requests', () => {
createUserCountsManager();
});
- it('fetches counts from API, stores and rebroadcasts them', async () => {
- expect(userCounts).toMatchObject(userCountDefaults);
+ describe('manually created event', () => {
+ it('fetches counts from API, stores and rebroadcasts them', async () => {
+ expect(userCounts).toMatchObject(userCountDefaults);
+
+ document.dispatchEvent(new CustomEvent('userCounts:fetch'));
+ await waitForPromises();
+
+ expect(UserApi.getUserCounts).toHaveBeenCalled();
+ expect(userCounts).toMatchObject(userCountUpdate);
+ expect(channelMock.postMessage).toHaveBeenLastCalledWith(userCounts);
+ });
+ });
+
+ describe('fetchUserCounts helper', () => {
+ it('fetches counts from API, stores and rebroadcasts them', async () => {
+ expect(userCounts).toMatchObject(userCountDefaults);
- document.dispatchEvent(new CustomEvent('userCounts:fetch'));
- await waitForPromises();
+ fetchUserCounts();
+ await waitForPromises();
- expect(UserApi.getUserCounts).toHaveBeenCalled();
- expect(userCounts).toMatchObject(userCountUpdate);
- expect(channelMock.postMessage).toHaveBeenLastCalledWith(userCounts);
+ expect(UserApi.getUserCounts).toHaveBeenCalled();
+ expect(userCounts).toMatchObject(userCountUpdate);
+ expect(channelMock.postMessage).toHaveBeenLastCalledWith(userCounts);
+ });
});
});
diff --git a/spec/frontend/super_sidebar/utils_spec.js b/spec/frontend/super_sidebar/utils_spec.js
index 43eb82f5928..a9e4345f9cc 100644
--- a/spec/frontend/super_sidebar/utils_spec.js
+++ b/spec/frontend/super_sidebar/utils_spec.js
@@ -1,20 +1,13 @@
import MockAdapter from 'axios-mock-adapter';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
-import {
- getTopFrequentItems,
- trackContextAccess,
- getItemsFromLocalStorage,
- removeItemFromLocalStorage,
- ariaCurrent,
-} from '~/super_sidebar/utils';
+import { getTopFrequentItems, trackContextAccess, ariaCurrent } from '~/super_sidebar/utils';
import axios from '~/lib/utils/axios_utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import AccessorUtilities from '~/lib/utils/accessor';
-import { FREQUENT_ITEMS, FIFTEEN_MINUTES_IN_MS } from '~/frequent_items/constants';
+import { FREQUENT_ITEMS, FIFTEEN_MINUTES_IN_MS } from '~/super_sidebar/constants';
import { HTTP_STATUS_OK, HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import waitForPromises from 'helpers/wait_for_promises';
-import { unsortedFrequentItems, sortedFrequentItems } from '../frequent_items/mock_data';
-import { cachedFrequentProjects } from './mock_data';
+import { unsortedFrequentItems, sortedFrequentItems } from './mock_data';
jest.mock('~/sentry/sentry_browser_wrapper');
@@ -24,7 +17,7 @@ describe('Super sidebar utils spec', () => {
describe('getTopFrequentItems', () => {
const maxItems = 3;
- it.each([undefined, null])('returns empty array if `items` is %s', (items) => {
+ it.each([undefined, null, []])('returns empty array if `items` is %s', (items) => {
const result = getTopFrequentItems(items);
expect(result.length).toBe(0);
@@ -224,125 +217,6 @@ describe('Super sidebar utils spec', () => {
});
});
- describe('getItemsFromLocalStorage', () => {
- const storageKey = 'mockStorageKey';
- const maxItems = 5;
- const storedItems = JSON.parse(cachedFrequentProjects);
-
- beforeEach(() => {
- window.localStorage.setItem(storageKey, cachedFrequentProjects);
- });
-
- describe('when localStorage cannot be accessed', () => {
- beforeEach(() => {
- jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
- });
-
- it('returns an empty array', () => {
- const items = getItemsFromLocalStorage({ storageKey, maxItems });
- expect(items).toEqual([]);
- });
- });
-
- describe('when localStorage contains parseable data', () => {
- it('returns an array of items limited by max items', () => {
- const items = getItemsFromLocalStorage({ storageKey, maxItems });
- expect(items.length).toEqual(maxItems);
-
- items.forEach((item) => {
- expect(storedItems).toContainEqual(item);
- });
- });
-
- it('returns all items if max items is large', () => {
- const items = getItemsFromLocalStorage({ storageKey, maxItems: 1 });
- expect(items.length).toEqual(1);
-
- expect(storedItems).toContainEqual(items[0]);
- });
- });
-
- describe('when localStorage contains unparseable data', () => {
- let items;
-
- beforeEach(() => {
- window.localStorage.setItem(storageKey, 'unparseable');
- items = getItemsFromLocalStorage({ storageKey, maxItems });
- });
-
- it('logs an error to Sentry', () => {
- expect(Sentry.captureException).toHaveBeenCalled();
- });
-
- it('returns an empty array', () => {
- expect(items).toEqual([]);
- });
- });
- });
-
- describe('removeItemFromLocalStorage', () => {
- const storageKey = 'mockStorageKey';
- const originalStoredItems = JSON.parse(cachedFrequentProjects);
-
- beforeEach(() => {
- window.localStorage.setItem(storageKey, cachedFrequentProjects);
- });
-
- describe('when given an item to delete', () => {
- let items;
- let modifiedStoredItems;
-
- beforeEach(() => {
- items = removeItemFromLocalStorage({ storageKey, item: { id: 3 } });
- modifiedStoredItems = JSON.parse(window.localStorage.getItem(storageKey));
- });
-
- it('removes the item from localStorage', () => {
- expect(modifiedStoredItems.length).toBe(originalStoredItems.length - 1);
- expect(modifiedStoredItems).not.toContainEqual(originalStoredItems[2]);
- });
-
- it('returns the resulting stored structure', () => {
- expect(items).toEqual(modifiedStoredItems);
- });
- });
-
- describe('when given an unknown item to delete', () => {
- let items;
- let modifiedStoredItems;
-
- beforeEach(() => {
- items = removeItemFromLocalStorage({ storageKey, item: { id: 'does-not-exist' } });
- modifiedStoredItems = JSON.parse(window.localStorage.getItem(storageKey));
- });
-
- it('does not change the stored value', () => {
- expect(modifiedStoredItems).toEqual(originalStoredItems);
- });
-
- it('returns the stored structure', () => {
- expect(items).toEqual(originalStoredItems);
- });
- });
-
- describe('when localStorage has unparseable data', () => {
- let items;
-
- beforeEach(() => {
- window.localStorage.setItem(storageKey, 'unparseable');
- items = removeItemFromLocalStorage({ storageKey, item: { id: 3 } });
- });
-
- it('logs an error to Sentry', () => {
- expect(Sentry.captureException).toHaveBeenCalled();
- });
-
- it('returns an empty array', () => {
- expect(items).toEqual([]);
- });
- });
- });
-
describe('ariaCurrent', () => {
it.each`
isActive | expected
diff --git a/spec/frontend/task_list_spec.js b/spec/frontend/task_list_spec.js
index e79c516a694..605ae028049 100644
--- a/spec/frontend/task_list_spec.js
+++ b/spec/frontend/task_list_spec.js
@@ -126,14 +126,19 @@ describe('TaskList', () => {
});
describe('update', () => {
- it('should disable task list items and make a patch request then enable them again', () => {
- const response = { data: { lock_version: 3 } };
+ const setupTaskListAndMocks = (options) => {
+ taskList = new TaskList(options);
+
jest.spyOn(taskList, 'enableTaskListItems').mockImplementation(() => {});
jest.spyOn(taskList, 'disableTaskListItems').mockImplementation(() => {});
jest.spyOn(taskList, 'onUpdate').mockImplementation(() => {});
jest.spyOn(taskList, 'onSuccess').mockImplementation(() => {});
- jest.spyOn(axios, 'patch').mockReturnValue(Promise.resolve(response));
+ jest.spyOn(axios, 'patch').mockResolvedValue({ data: { lock_version: 3 } });
+
+ return taskList;
+ };
+ const performTest = (options) => {
const value = 'hello world';
const endpoint = '/foo';
const target = $(`<input data-update-url="${endpoint}" value="${value}" />`);
@@ -144,10 +149,11 @@ describe('TaskList', () => {
lineSource: '- [ ] check item',
};
const event = { target, detail };
+ const dataType = options.dataType === 'incident' ? 'issue' : options.dataType;
const patchData = {
- [taskListOptions.dataType]: {
- [taskListOptions.fieldName]: value,
- lock_version: taskListOptions.lockVersion,
+ [dataType]: {
+ [options.fieldName]: value,
+ lock_version: options.lockVersion,
update_task: {
index: detail.index,
checked: detail.checked,
@@ -165,8 +171,42 @@ describe('TaskList', () => {
expect(taskList.disableTaskListItems).toHaveBeenCalledWith(event);
expect(axios.patch).toHaveBeenCalledWith(endpoint, patchData);
expect(taskList.enableTaskListItems).toHaveBeenCalledWith(event);
- expect(taskList.onSuccess).toHaveBeenCalledWith(response.data);
- expect(taskList.lockVersion).toEqual(response.data.lock_version);
+ expect(taskList.onSuccess).toHaveBeenCalledWith({ lock_version: 3 });
+ expect(taskList.lockVersion).toEqual(3);
+ });
+ };
+
+ it('should disable task list items and make a patch request then enable them again', () => {
+ taskList = setupTaskListAndMocks(taskListOptions);
+
+ return performTest(taskListOptions);
+ });
+
+ describe('for merge requests', () => {
+ it('should wrap the patch request payload in merge_request', () => {
+ const options = {
+ selector: '.task-list',
+ dataType: 'merge_request',
+ fieldName: 'description',
+ lockVersion: 2,
+ };
+ taskList = setupTaskListAndMocks(options);
+
+ return performTest(options);
+ });
+ });
+
+ describe('for incidents', () => {
+ it('should wrap the patch request payload in issue', () => {
+ const options = {
+ selector: '.task-list',
+ dataType: 'incident',
+ fieldName: 'description',
+ lockVersion: 2,
+ };
+ taskList = setupTaskListAndMocks(options);
+
+ return performTest(options);
});
});
});
diff --git a/spec/frontend/tracking/internal_events_spec.js b/spec/frontend/tracking/internal_events_spec.js
index 44a048a4b5f..295b08f4b1c 100644
--- a/spec/frontend/tracking/internal_events_spec.js
+++ b/spec/frontend/tracking/internal_events_spec.js
@@ -1,15 +1,9 @@
import API from '~/api';
-import { mockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import InternalEvents from '~/tracking/internal_events';
-import {
- GITLAB_INTERNAL_EVENT_CATEGORY,
- SERVICE_PING_SCHEMA,
- LOAD_INTERNAL_EVENTS_SELECTOR,
-} from '~/tracking/constants';
+import { LOAD_INTERNAL_EVENTS_SELECTOR } from '~/tracking/constants';
import * as utils from '~/tracking/utils';
import { Tracker } from '~/tracking/tracker';
-import { extraContext } from './mock_data';
jest.mock('~/api', () => ({
trackInternalEvent: jest.fn(),
@@ -41,26 +35,6 @@ describe('InternalEvents', () => {
expect(InternalEvents.trackBrowserSDK).toHaveBeenCalledTimes(1);
expect(InternalEvents.trackBrowserSDK).toHaveBeenCalledWith(event);
});
-
- it('trackEvent calls tracking.event functions with correct arguments', () => {
- const trackingSpy = mockTracking(GITLAB_INTERNAL_EVENT_CATEGORY, undefined, jest.spyOn);
-
- InternalEvents.trackEvent(event, { context: extraContext });
-
- expect(trackingSpy).toHaveBeenCalledTimes(1);
- expect(trackingSpy).toHaveBeenCalledWith(GITLAB_INTERNAL_EVENT_CATEGORY, event, {
- context: [
- {
- schema: SERVICE_PING_SCHEMA,
- data: {
- event_name: event,
- data_source: 'redis_hll',
- },
- },
- extraContext,
- ],
- });
- });
});
describe('mixin', () => {
@@ -68,17 +42,13 @@ describe('InternalEvents', () => {
const Component = {
template: `
<div>
- <button data-testid="button1" @click="handleButton1Click">Button 1</button>
- <button data-testid="button2" @click="handleButton2Click">Button 2</button>
+ <button data-testid="button" @click="handleButton1Click">Button</button>
</div>
`,
methods: {
handleButton1Click() {
this.trackEvent(event);
},
- handleButton2Click() {
- this.trackEvent(event, extraContext);
- },
},
mixins: [InternalEvents.mixin()],
};
@@ -90,20 +60,10 @@ describe('InternalEvents', () => {
it('this.trackEvent function calls InternalEvent`s track function with an event', async () => {
const trackEventSpy = jest.spyOn(InternalEvents, 'trackEvent');
- await wrapper.findByTestId('button1').trigger('click');
-
- expect(trackEventSpy).toHaveBeenCalledTimes(1);
- expect(trackEventSpy).toHaveBeenCalledWith(event, {});
- });
-
- it("this.trackEvent function calls InternalEvent's track function with an event and data", async () => {
- const data = extraContext;
- const trackEventSpy = jest.spyOn(InternalEvents, 'trackEvent');
-
- await wrapper.findByTestId('button2').trigger('click');
+ await wrapper.findByTestId('button').trigger('click');
expect(trackEventSpy).toHaveBeenCalledTimes(1);
- expect(trackEventSpy).toHaveBeenCalledWith(event, data);
+ expect(trackEventSpy).toHaveBeenCalledWith(event);
});
});
diff --git a/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js b/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
index 0ae01083a09..babefe1dd19 100644
--- a/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
+++ b/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
@@ -16,7 +16,8 @@ import {
NAMESPACE_STORAGE_TYPES,
TOTAL_USAGE_DEFAULT_TEXT,
} from '~/usage_quotas/storage/constants';
-import getProjectStorageStatistics from '~/usage_quotas/storage/queries/project_storage.query.graphql';
+import getCostFactoredProjectStorageStatistics from 'ee_else_ce/usage_quotas/storage/queries/cost_factored_project_storage.query.graphql';
+import getProjectStorageStatistics from 'ee_else_ce/usage_quotas/storage/queries/project_storage.query.graphql';
import { numberToHumanSize } from '~/lib/utils/number_utils';
import {
mockGetProjectStorageStatisticsGraphQLResponse,
@@ -38,7 +39,10 @@ describe('ProjectStorageApp', () => {
response = jest.fn().mockResolvedValue(mockedValue);
}
- const requestHandlers = [[getProjectStorageStatistics, response]];
+ const requestHandlers = [
+ [getProjectStorageStatistics, response],
+ [getCostFactoredProjectStorageStatistics, response],
+ ];
return createMockApollo(requestHandlers);
};
@@ -187,4 +191,30 @@ describe('ProjectStorageApp', () => {
]);
});
});
+
+ describe('when displayCostFactoredStorageSizeOnProjectPages feature flag is enabled', () => {
+ let mockApollo;
+ beforeEach(async () => {
+ mockApollo = createMockApolloProvider({
+ mockedValue: mockGetProjectStorageStatisticsGraphQLResponse,
+ });
+ createComponent({
+ mockApollo,
+ provide: {
+ glFeatures: {
+ displayCostFactoredStorageSizeOnProjectPages: true,
+ },
+ },
+ });
+ await waitForPromises();
+ });
+
+ it('renders correct total usage', () => {
+ const expectedValue = numberToHumanSize(
+ mockGetProjectStorageStatisticsGraphQLResponse.data.project.statistics.storageSize,
+ 1,
+ );
+ expect(findUsagePercentage().text()).toBe(expectedValue);
+ });
+ });
});
diff --git a/spec/frontend/user_lists/components/user_lists_table_spec.js b/spec/frontend/user_lists/components/user_lists_table_spec.js
index 96e9705f02b..26b33bcd46d 100644
--- a/spec/frontend/user_lists/components/user_lists_table_spec.js
+++ b/spec/frontend/user_lists/components/user_lists_table_spec.js
@@ -5,6 +5,7 @@ import { nextTick } from 'vue';
import { timeagoLanguageCode } from '~/lib/utils/datetime/timeago_utility';
import UserListsTable from '~/user_lists/components/user_lists_table.vue';
import { userList } from 'jest/feature_flags/mock_data';
+import { localeDateFormat } from '~/lib/utils/datetime/locale_dateformat';
jest.mock('timeago.js', () => ({
format: jest.fn().mockReturnValue('2 weeks ago'),
@@ -33,7 +34,7 @@ describe('User Lists Table', () => {
it('should set the title for a tooltip on the created stamp', () => {
expect(wrapper.find('[data-testid="ffUserListTimestamp"]').attributes('title')).toBe(
- 'Feb 4, 2020 8:13am UTC',
+ localeDateFormat.asDateTimeFull.format(userList.created_at),
);
});
diff --git a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
index c81f4328d2a..c3ed131d6e3 100644
--- a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
@@ -1,11 +1,11 @@
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { GlButton, GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import { createMockSubscription as createMockApolloSubscription } from 'mock-apollo-client';
import approvedByCurrentUser from 'test_fixtures/graphql/merge_requests/approvals/approvals.query.graphql.json';
-import { visitUrl } from '~/lib/utils/url_utility';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { createAlert } from '~/alert';
@@ -29,11 +29,6 @@ jest.mock('~/alert', () => ({
dismiss: mockAlertDismiss,
})),
}));
-jest.mock('~/lib/utils/url_utility', () => ({
- ...jest.requireActual('~/lib/utils/url_utility'),
- visitUrl: jest.fn(),
-}));
-
const TEST_HELP_PATH = 'help/path';
const testApprovedBy = () => [1, 7, 10].map((id) => ({ id }));
const testApprovals = () => ({
@@ -53,6 +48,9 @@ describe('MRWidget approvals', () => {
let wrapper;
let service;
let mr;
+ const submitSpy = jest.fn().mockImplementation((e) => {
+ e.preventDefault();
+ });
const createComponent = (options = {}, responses = { query: approvedByCurrentUser }) => {
mockedSubscription = createMockApolloSubscription();
@@ -68,7 +66,7 @@ describe('MRWidget approvals', () => {
apolloProvider.defaultClient.setRequestHandler(query, stream);
});
- wrapper = shallowMount(Approvals, {
+ wrapper = shallowMountExtended(Approvals, {
apolloProvider,
propsData: {
mr,
@@ -78,7 +76,18 @@ describe('MRWidget approvals', () => {
provide,
stubs: {
GlSprintf,
+ GlForm: {
+ data() {
+ return { submitSpy };
+ },
+ // Workaround jsdom not implementing form submit
+ template: '<form @submit="submitSpy"><slot></slot></form>',
+ },
+ GlButton: stubComponent(GlButton, {
+ template: '<button><slot></slot></button>',
+ }),
},
+ attachTo: document.body,
});
};
@@ -257,11 +266,11 @@ describe('MRWidget approvals', () => {
});
describe('when SAML auth is required and user clicks Approve with SAML', () => {
- const fakeGroupSamlPath = '/example_group_saml';
+ const fakeSamlPath = '/example_group_saml';
beforeEach(async () => {
mr.requireSamlAuthToApprove = true;
- mr.samlApprovalPath = fakeGroupSamlPath;
+ mr.samlApprovalPath = fakeSamlPath;
createComponent({}, { query: createCanApproveResponse() });
await waitForPromises();
@@ -269,9 +278,10 @@ describe('MRWidget approvals', () => {
it('redirects the user to the group SAML path', async () => {
const action = findAction();
- action.vm.$emit('click');
- await nextTick();
- expect(visitUrl).toHaveBeenCalledWith(fakeGroupSamlPath);
+
+ await action.trigger('click');
+
+ expect(submitSpy).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js b/spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js
new file mode 100644
index 00000000000..cc605c8c83d
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js
@@ -0,0 +1,196 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+
+import getStateQueryResponse from 'test_fixtures/graphql/merge_requests/get_state.query.graphql.json';
+
+import { createAlert } from '~/alert';
+
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import MergeRequest from '~/merge_request';
+
+import DraftCheck from '~/vue_merge_request_widget/components/checks/draft.vue';
+import {
+ DRAFT_CHECK_READY,
+ DRAFT_CHECK_ERROR,
+} from '~/vue_merge_request_widget/components/checks/i18n';
+import { FAILURE_REASONS } from '~/vue_merge_request_widget/components/checks/message.vue';
+
+import draftQuery from '~/vue_merge_request_widget/queries/states/draft.query.graphql';
+import getStateQuery from '~/vue_merge_request_widget/queries/get_state.query.graphql';
+import removeDraftMutation from '~/vue_merge_request_widget/queries/toggle_draft.mutation.graphql';
+
+Vue.use(VueApollo);
+
+const TEST_PROJECT_ID = getStateQueryResponse.data.project.id;
+const TEST_MR_ID = getStateQueryResponse.data.project.mergeRequest.id;
+const TEST_MR_IID = '23';
+const TEST_MR_TITLE = 'Test MR Title';
+const TEST_PROJECT_PATH = 'lorem/ipsum';
+
+jest.mock('~/alert');
+jest.mock('~/merge_request', () => ({ toggleDraftStatus: jest.fn() }));
+
+describe('~/vue_merge_request_widget/components/checks/draft.vue', () => {
+ let wrapper;
+ let apolloProvider;
+
+ let draftQuerySpy;
+ let removeDraftMutationSpy;
+
+ const findMarkReadyButton = () => wrapper.findByTestId('mark-as-ready-button');
+
+ const createDraftQueryResponse = (canUpdateMergeRequest) => ({
+ data: {
+ project: {
+ __typename: 'Project',
+ id: TEST_PROJECT_ID,
+ mergeRequest: {
+ __typename: 'MergeRequest',
+ id: TEST_MR_ID,
+ draft: true,
+ title: TEST_MR_TITLE,
+ mergeableDiscussionsState: false,
+ userPermissions: {
+ updateMergeRequest: canUpdateMergeRequest,
+ },
+ },
+ },
+ },
+ });
+ const createRemoveDraftMutationResponse = () => ({
+ data: {
+ mergeRequestSetDraft: {
+ __typename: 'MergeRequestSetWipPayload',
+ errors: [],
+ mergeRequest: {
+ __typename: 'MergeRequest',
+ id: TEST_MR_ID,
+ title: TEST_MR_TITLE,
+ draft: false,
+ mergeableDiscussionsState: true,
+ },
+ },
+ },
+ });
+
+ const createComponent = async () => {
+ wrapper = mountExtended(DraftCheck, {
+ apolloProvider,
+ propsData: {
+ mr: {
+ issuableId: TEST_MR_ID,
+ title: TEST_MR_TITLE,
+ iid: TEST_MR_IID,
+ targetProjectFullPath: TEST_PROJECT_PATH,
+ },
+ check: {
+ identifier: 'draft_status',
+ status: 'FAILED',
+ },
+ },
+ });
+
+ await waitForPromises();
+
+ // why: draft.vue has some coupling that this query has been read before
+ // for some reason this has to happen **after** the component has mounted
+ // or apollo throws errors.
+ apolloProvider.defaultClient.cache.writeQuery({
+ query: getStateQuery,
+ variables: {
+ projectPath: TEST_PROJECT_PATH,
+ iid: TEST_MR_IID,
+ },
+ data: getStateQueryResponse.data,
+ });
+ };
+
+ beforeEach(() => {
+ draftQuerySpy = jest.fn().mockResolvedValue(createDraftQueryResponse(true));
+ removeDraftMutationSpy = jest.fn().mockResolvedValue(createRemoveDraftMutationResponse());
+
+ apolloProvider = createMockApollo([
+ [draftQuery, draftQuerySpy],
+ [removeDraftMutation, removeDraftMutationSpy],
+ ]);
+ });
+
+ describe('when user can update MR', () => {
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ it('renders text', () => {
+ const message = wrapper.text();
+ expect(message).toContain(FAILURE_REASONS.draft_status);
+ });
+
+ it('renders mark ready button', () => {
+ expect(findMarkReadyButton().text()).toBe(DRAFT_CHECK_READY);
+ });
+
+ it('does not call remove draft mutation', () => {
+ expect(removeDraftMutationSpy).not.toHaveBeenCalled();
+ });
+
+ describe('when mark ready button is clicked', () => {
+ beforeEach(async () => {
+ findMarkReadyButton().vm.$emit('click');
+
+ await waitForPromises();
+ });
+
+ it('calls mutation spy', () => {
+ expect(removeDraftMutationSpy).toHaveBeenCalledWith({
+ draft: false,
+ iid: TEST_MR_IID,
+ projectPath: TEST_PROJECT_PATH,
+ });
+ });
+
+ it('does not create alert', () => {
+ expect(createAlert).not.toHaveBeenCalled();
+ });
+
+ it('calls toggleDraftStatus', () => {
+ expect(MergeRequest.toggleDraftStatus).toHaveBeenCalledWith(TEST_MR_TITLE, true);
+ });
+ });
+
+ describe('when mutation fails and ready button is clicked', () => {
+ beforeEach(async () => {
+ removeDraftMutationSpy.mockRejectedValue(new Error('TEST FAIL'));
+ findMarkReadyButton().vm.$emit('click');
+
+ await waitForPromises();
+ });
+
+ it('creates alert', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: DRAFT_CHECK_ERROR,
+ });
+ });
+
+ it('does not call toggleDraftStatus', () => {
+ expect(MergeRequest.toggleDraftStatus).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('when user cannot update MR', () => {
+ beforeEach(async () => {
+ draftQuerySpy.mockResolvedValue(createDraftQueryResponse(false));
+
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('does not render mark ready button', () => {
+ expect(findMarkReadyButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/checks/rebase_spec.js b/spec/frontend/vue_merge_request_widget/components/checks/rebase_spec.js
index d6c01aee3b1..d621999337d 100644
--- a/spec/frontend/vue_merge_request_widget/components/checks/rebase_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/checks/rebase_spec.js
@@ -28,7 +28,7 @@ const mockPipelineNodes = [
const mockQueryHandler = ({
rebaseInProgress = false,
targetBranch = '',
- pushToSourceBranch = false,
+ pushToSourceBranch = true,
nodes = mockPipelineNodes,
} = {}) =>
jest.fn().mockResolvedValue({
@@ -279,7 +279,7 @@ describe('Merge request merge checks rebase component', () => {
await waitForPromises();
- expect(findRebaseWithoutCiButton().exists()).toBe(true);
+ expect(findRebaseWithoutCiButton().exists()).toBe(false);
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js b/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js
index d39098b27c2..b19095cc686 100644
--- a/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js
@@ -138,7 +138,7 @@ describe('Merge request merge checks component', () => {
it.each`
identifier
${'conflict'}
- ${'unresolved_discussions'}
+ ${'discussions_not_resolved'}
${'need_rebase'}
${'default'}
`('renders $identifier merge check', async ({ identifier }) => {
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js
index 8eaed998eb5..5a5d29d3194 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js
@@ -39,7 +39,7 @@ describe('MrWidgetExpanableSection', () => {
const collapse = findCollapse();
expect(collapse.exists()).toBe(true);
- expect(collapse.attributes('visible')).toBeUndefined();
+ expect(collapse.props('visible')).toBe(false);
});
});
@@ -60,7 +60,7 @@ describe('MrWidgetExpanableSection', () => {
const collapse = findCollapse();
expect(collapse.exists()).toBe(true);
- expect(collapse.attributes('visible')).toBe('true');
+ expect(collapse.props('visible')).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
index 35b4e222e01..3f0eb946194 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
@@ -8,6 +8,7 @@ import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import MRWidgetPipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import { SUCCESS } from '~/vue_merge_request_widget/constants';
+import { localeDateFormat } from '~/lib/utils/datetime/locale_dateformat';
import mockData from '../mock_data';
describe('MRWidgetPipeline', () => {
@@ -93,7 +94,7 @@ describe('MRWidgetPipeline', () => {
it('should render pipeline finished timestamp', () => {
expect(findPipelineFinishedAt().attributes()).toMatchObject({
- title: 'Apr 7, 2017 2:00pm UTC',
+ title: localeDateFormat.asDateTimeFull.format(mockData.pipeline.details.finished_at),
datetime: mockData.pipeline.details.finished_at,
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_status_icon_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_status_icon_spec.js
index b210327aa31..65c4970bc76 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_status_icon_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_status_icon_spec.js
@@ -54,5 +54,12 @@ describe('MR widget status icon component', () => {
expect(findIcon().exists()).toBe(true);
expect(findIcon().props().name).toBe('merge-request-close');
});
+
+ it('renders empty status icon', () => {
+ createWrapper({ status: 'empty' });
+
+ expect(findStatusIcon().exists()).toBe(true);
+ expect(findStatusIcon().props().iconName).toBe('neutral');
+ });
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
index ecf4040cbda..ec0af7c8a7b 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
+++ b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
@@ -8,7 +8,7 @@ exports[`New ready to merge state component renders permission text if canMerge
status="success"
/>
<p
- class="gl-font-weight-bold gl-m-0! gl-text-gray-900! media-body"
+ class="gl-font-weight-bold gl-mb-0! gl-mt-1 gl-text-gray-900! media-body"
>
Ready to merge by members who can write to the target branch.
</p>
@@ -23,7 +23,7 @@ exports[`New ready to merge state component renders permission text if canMerge
status="success"
/>
<p
- class="gl-font-weight-bold gl-m-0! gl-text-gray-900! media-body"
+ class="gl-font-weight-bold gl-mb-0! gl-mt-1 gl-text-gray-900! media-body"
>
Ready to merge!
</p>
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_conflicts_spec.js
index 7f0a171d712..af10d7d5eb7 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_conflicts_spec.js
@@ -1,10 +1,17 @@
import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
import { removeBreakLine } from 'helpers/text_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import userPermissionsQuery from '~/vue_merge_request_widget/queries/permissions.query.graphql';
+import conflictsStateQuery from '~/vue_merge_request_widget/queries/states/conflicts.query.graphql';
import ConflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue';
+Vue.use(VueApollo);
+
describe('MRWidgetConflicts', () => {
let wrapper;
const path = '/conflicts';
@@ -20,34 +27,57 @@ describe('MRWidgetConflicts', () => {
const resolveConflictsBtnText = 'Resolve conflicts';
const mergeLocallyBtnText = 'Resolve locally';
- async function createComponent(propsData = {}) {
- wrapper = extendedWrapper(
- mount(ConflictsComponent, {
- propsData,
- data() {
- return {
+ const defaultApolloProvider = (mockData = {}) => {
+ const userData = {
+ data: {
+ project: {
+ id: 234,
+ mergeRequest: {
+ id: 234,
userPermissions: {
- canMerge: propsData.mr.canMerge,
- pushToSourceBranch: propsData.mr.canPushToSourceBranch,
- },
- state: {
- shouldBeRebased: propsData.mr.shouldBeRebased,
- sourceBranchProtected: propsData.mr.sourceBranchProtected,
+ canMerge: mockData.canMerge || false,
+ pushToSourceBranch: mockData.canPushToSourceBranch || false,
},
- };
+ },
},
- mocks: {
- $apollo: {
- queries: {
- userPermissions: { loading: false },
- stateData: { loading: false },
+ },
+ };
+
+ const mrData = {
+ data: {
+ project: {
+ id: 234,
+ mergeRequest: {
+ id: 234,
+ shouldBeRebased: mockData.shouldBeRebased || false,
+ sourceBranchProtected: mockData.sourceBranchProtected || false,
+ userPermissions: {
+ pushToSourceBranch: mockData.canPushToSourceBranch || false,
},
},
},
+ },
+ };
+
+ return createMockApollo([
+ [userPermissionsQuery, jest.fn().mockResolvedValue(userData)],
+ [conflictsStateQuery, jest.fn().mockResolvedValue(mrData)],
+ ]);
+ };
+
+ async function createComponent({
+ propsData,
+ queryData,
+ apolloProvider = defaultApolloProvider(queryData),
+ } = {}) {
+ wrapper = extendedWrapper(
+ mount(ConflictsComponent, {
+ apolloProvider,
+ propsData,
}),
);
- await nextTick();
+ await waitForPromises();
}
// There are two permissions we need to consider:
@@ -62,11 +92,15 @@ describe('MRWidgetConflicts', () => {
describe('when allowed to merge but not allowed to push to source branch', () => {
beforeEach(async () => {
await createComponent({
- mr: {
+ propsData: {
+ mr: {
+ conflictsDocsPath: '',
+ },
+ },
+ queryData: {
canMerge: true,
canPushToSourceBranch: false,
conflictResolutionPath: path,
- conflictsDocsPath: '',
},
});
});
@@ -89,11 +123,15 @@ describe('MRWidgetConflicts', () => {
describe('when not allowed to merge but allowed to push to source branch', () => {
beforeEach(async () => {
await createComponent({
- mr: {
+ propsData: {
+ mr: {
+ conflictResolutionPath: path,
+ conflictsDocsPath: '',
+ },
+ },
+ queryData: {
canMerge: false,
canPushToSourceBranch: true,
- conflictResolutionPath: path,
- conflictsDocsPath: '',
},
});
});
@@ -116,11 +154,15 @@ describe('MRWidgetConflicts', () => {
describe('when allowed to merge and push to source branch', () => {
beforeEach(async () => {
await createComponent({
- mr: {
+ queryData: {
canMerge: true,
canPushToSourceBranch: true,
- conflictResolutionPath: path,
- conflictsDocsPath: '',
+ },
+ propsData: {
+ mr: {
+ conflictResolutionPath: path,
+ conflictsDocsPath: '',
+ },
},
});
});
@@ -144,10 +186,14 @@ describe('MRWidgetConflicts', () => {
describe('when user does not have permission to push to source branch', () => {
it('should show proper message', async () => {
await createComponent({
- mr: {
+ propsData: {
+ mr: {
+ conflictsDocsPath: '',
+ },
+ },
+ queryData: {
canMerge: false,
canPushToSourceBranch: false,
- conflictsDocsPath: '',
},
});
@@ -156,10 +202,14 @@ describe('MRWidgetConflicts', () => {
it('should not have action buttons', async () => {
await createComponent({
- mr: {
+ queryData: {
canMerge: false,
canPushToSourceBranch: false,
- conflictsDocsPath: '',
+ },
+ propsData: {
+ mr: {
+ conflictsDocsPath: '',
+ },
},
});
@@ -169,10 +219,14 @@ describe('MRWidgetConflicts', () => {
it('should not have resolve button when no conflict resolution path', async () => {
await createComponent({
- mr: {
+ propsData: {
+ mr: {
+ conflictResolutionPath: null,
+ conflictsDocsPath: '',
+ },
+ },
+ queryData: {
canMerge: true,
- conflictResolutionPath: null,
- conflictsDocsPath: '',
},
});
@@ -183,9 +237,13 @@ describe('MRWidgetConflicts', () => {
describe('when fast-forward or semi-linear merge enabled', () => {
it('should tell you to rebase locally', async () => {
await createComponent({
- mr: {
+ propsData: {
+ mr: {
+ conflictsDocsPath: '',
+ },
+ },
+ queryData: {
shouldBeRebased: true,
- conflictsDocsPath: '',
},
});
@@ -196,12 +254,16 @@ describe('MRWidgetConflicts', () => {
describe('when source branch protected', () => {
beforeEach(async () => {
await createComponent({
- mr: {
+ propsData: {
+ mr: {
+ conflictResolutionPath: TEST_HOST,
+ conflictsDocsPath: '',
+ },
+ },
+ queryData: {
canMerge: true,
- canPushToSourceBranch: true,
- conflictResolutionPath: TEST_HOST,
sourceBranchProtected: true,
- conflictsDocsPath: '',
+ canPushToSourceBranch: true,
},
});
});
@@ -214,12 +276,16 @@ describe('MRWidgetConflicts', () => {
describe('when source branch not protected', () => {
beforeEach(async () => {
await createComponent({
- mr: {
- canMerge: true,
+ propsData: {
+ mr: {
+ conflictResolutionPath: TEST_HOST,
+ conflictsDocsPath: '',
+ },
+ },
+ queryData: {
canPushToSourceBranch: true,
- conflictResolutionPath: TEST_HOST,
+ canMerge: true,
sourceBranchProtected: false,
- conflictsDocsPath: '',
},
});
});
@@ -229,4 +295,21 @@ describe('MRWidgetConflicts', () => {
expect(findResolveButton().attributes('href')).toEqual(TEST_HOST);
});
});
+
+ describe('error states', () => {
+ it('when project is null due to expired session it does not throw', async () => {
+ const fn = async () => {
+ await createComponent({
+ propsData: { mr: {} },
+ apolloProvider: createMockApollo([
+ [conflictsStateQuery, jest.fn().mockResolvedValue({ data: { project: null } })],
+ [userPermissionsQuery, jest.fn().mockResolvedValue({ data: { project: null } })],
+ ]),
+ });
+ await waitForPromises();
+ };
+
+ await expect(fn()).resolves.not.toThrow();
+ });
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merging_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merging_spec.js
index 85acd5f9a9e..328c0134368 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merging_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merging_spec.js
@@ -1,8 +1,12 @@
import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import simplePoll from '~/lib/utils/simple_poll';
import MrWidgetMerging from '~/vue_merge_request_widget/components/states/mr_widget_merging.vue';
import BoldText from '~/vue_merge_request_widget/components/bold_text.vue';
+import { STATUS_MERGED } from '~/issues/constants';
+import { fetchUserCounts } from '~/super_sidebar/user_counts_fetch';
+jest.mock('~/super_sidebar/user_counts_fetch');
jest.mock('~/lib/utils/simple_poll', () =>
jest.fn().mockImplementation(jest.requireActual('~/lib/utils/simple_poll').default),
);
@@ -13,7 +17,7 @@ describe('MRWidgetMerging', () => {
const pollMock = jest.fn().mockResolvedValue();
const GlEmoji = { template: '<img />' };
- beforeEach(() => {
+ const createComponent = () => {
wrapper = shallowMount(MrWidgetMerging, {
propsData: {
mr: {
@@ -29,14 +33,18 @@ describe('MRWidgetMerging', () => {
GlEmoji,
},
});
- });
+ };
it('renders information about merge request being merged', () => {
+ createComponent();
+
const message = wrapper.findComponent(BoldText).props('message');
expect(message).toContain('Merging!');
});
describe('initiateMergePolling', () => {
+ beforeEach(createComponent);
+
it('should call simplePoll', () => {
expect(simplePoll).toHaveBeenCalledWith(expect.any(Function), { timeout: 0 });
});
@@ -45,4 +53,15 @@ describe('MRWidgetMerging', () => {
expect(pollMock).toHaveBeenCalled();
});
});
+
+ describe('on successful merge', () => {
+ it('should re-fetch user counts', async () => {
+ pollMock.mockResolvedValueOnce({ data: { state: STATUS_MERGED } });
+ createComponent();
+
+ await nextTick();
+
+ expect(fetchUserCounts).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js
index 016eac05727..d8eec165395 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js
@@ -1,5 +1,6 @@
-import { GlSprintf } from '@gitlab/ui';
+import { GlSprintf, GlLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { helpPagePath } from '~/helpers/help_page_helper';
import NothingToMerge from '~/vue_merge_request_widget/components/states/nothing_to_merge.vue';
describe('NothingToMerge', () => {
@@ -14,6 +15,7 @@ describe('NothingToMerge', () => {
};
const findNothingToMergeTextBody = () => wrapper.findByTestId('nothing-to-merge-body');
+ const findHelpLink = () => wrapper.findComponent(GlLink);
describe('With Blob link', () => {
beforeEach(() => {
@@ -26,5 +28,9 @@ describe('NothingToMerge', () => {
'Use merge requests to propose changes to your project and discuss them with your team. To make changes, use the Code dropdown list above, then test them with CI/CD before merging.',
);
});
+
+ it('renders text with link to CI Help Page', () => {
+ expect(findHelpLink().attributes('href')).toBe(helpPagePath('ci/quick_start/index.html'));
+ });
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
index 9239807ae71..1b7338744e8 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -1,9 +1,10 @@
-import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import { GlSprintf } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import produce from 'immer';
+import { createMockSubscription as createMockApolloSubscription } from 'mock-apollo-client';
import readyToMergeResponse from 'test_fixtures/graphql/merge_requests/states/ready_to_merge.query.graphql.json';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import readyToMergeQuery from 'ee_else_ce/vue_merge_request_widget/queries/states/ready_to_merge.query.graphql';
@@ -15,13 +16,11 @@ import SquashBeforeMerge from '~/vue_merge_request_widget/components/states/squa
import MergeFailedPipelineConfirmationDialog from '~/vue_merge_request_widget/components/states/merge_failed_pipeline_confirmation_dialog.vue';
import { MWPS_MERGE_STRATEGY } from '~/vue_merge_request_widget/constants';
import eventHub from '~/vue_merge_request_widget/event_hub';
+import readyToMergeSubscription from '~/vue_merge_request_widget/queries/states/ready_to_merge.subscription.graphql';
jest.mock('~/lib/utils/simple_poll', () =>
jest.fn().mockImplementation(jest.requireActual('~/lib/utils/simple_poll').default),
);
-jest.mock('~/commons/nav/user_merge_requests', () => ({
- refreshUserMergeRequestCounts: jest.fn(),
-}));
const commitMessage = readyToMergeResponse.data.project.mergeRequest.defaultMergeCommitMessage;
const squashCommitMessage =
@@ -82,6 +81,7 @@ Vue.use(VueApollo);
let service;
let wrapper;
let readyToMergeResponseSpy;
+let mockedSubscription;
const createReadyToMergeResponse = (customMr) => {
return produce(readyToMergeResponse, (draft) => {
@@ -90,7 +90,21 @@ const createReadyToMergeResponse = (customMr) => {
};
const createComponent = (customConfig = {}, createState = true) => {
- wrapper = shallowMount(ReadyToMerge, {
+ mockedSubscription = createMockApolloSubscription();
+ const apolloProvider = createMockApollo([[readyToMergeQuery, readyToMergeResponseSpy]]);
+ const subscriptionResponse = {
+ data: { mergeRequestMergeStatusUpdated: { ...readyToMergeResponse.data.project.mergeRequest } },
+ };
+ subscriptionResponse.data.mergeRequestMergeStatusUpdated.defaultMergeCommitMessage =
+ 'New default merge commit message';
+
+ const subscriptionHandlers = [[readyToMergeSubscription, () => mockedSubscription]];
+
+ subscriptionHandlers.forEach(([query, stream]) => {
+ apolloProvider.defaultClient.setRequestHandler(query, stream);
+ });
+
+ wrapper = shallowMountExtended(ReadyToMerge, {
propsData: {
mr: createTestMr(customConfig),
service,
@@ -112,7 +126,7 @@ const createComponent = (customConfig = {}, createState = true) => {
CommitEdit,
GlSprintf,
},
- apolloProvider: createMockApollo([[readyToMergeQuery, readyToMergeResponseSpy]]),
+ apolloProvider,
});
};
@@ -843,4 +857,60 @@ describe('ReadyToMerge', () => {
expect(wrapper.text()).not.toContain('Auto-merge enabled');
});
});
+
+ describe('commit message', () => {
+ it('updates commit message from subscription', async () => {
+ createComponent({ mr: { id: 1 } });
+
+ await waitForPromises();
+
+ await wrapper.findByTestId('widget_edit_commit_message').vm.$emit('input', true);
+
+ expect(wrapper.findByTestId('merge-commit-message').props('value')).not.toEqual(
+ 'Updated commit message',
+ );
+
+ mockedSubscription.next({
+ data: {
+ mergeRequestMergeStatusUpdated: {
+ ...readyToMergeResponse.data.project.mergeRequest,
+ defaultMergeCommitMessage: 'Updated commit message',
+ },
+ },
+ });
+
+ await waitForPromises();
+
+ expect(wrapper.findByTestId('merge-commit-message').props('value')).toEqual(
+ 'Updated commit message',
+ );
+ });
+
+ it('does not update commit message from subscription if commit message has been manually changed', async () => {
+ createComponent({ mr: { id: 1 } });
+
+ await waitForPromises();
+
+ await wrapper.findByTestId('widget_edit_commit_message').vm.$emit('input', true);
+
+ await wrapper
+ .findByTestId('merge-commit-message')
+ .vm.$emit('input', 'Manually updated commit message');
+
+ mockedSubscription.next({
+ data: {
+ mergeRequestMergeStatusUpdated: {
+ ...readyToMergeResponse.data.project.mergeRequest,
+ defaultMergeCommitMessage: 'Updated commit message',
+ },
+ },
+ });
+
+ await waitForPromises();
+
+ expect(wrapper.findByTestId('merge-commit-message').props('value')).toEqual(
+ 'Manually updated commit message',
+ );
+ });
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/work_in_progress_spec.js b/spec/frontend/vue_merge_request_widget/components/states/work_in_progress_spec.js
index f46829539a8..f01df2ca419 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/work_in_progress_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/work_in_progress_spec.js
@@ -42,6 +42,9 @@ describe('~/vue_merge_request_widget/components/states/work_in_progress.vue', ()
mergeRequest: {
__typename: 'MergeRequest',
id: TEST_MR_ID,
+ draft: true,
+ title: TEST_MR_TITLE,
+ mergeableDiscussionsState: false,
userPermissions: {
updateMergeRequest: canUpdateMergeRequest,
},
@@ -179,4 +182,17 @@ describe('~/vue_merge_request_widget/components/states/work_in_progress.vue', ()
expect(findWIPButton().exists()).toBe(false);
});
});
+
+ describe('when project is null', () => {
+ beforeEach(async () => {
+ draftQuerySpy.mockResolvedValue({ data: { project: null } });
+ createComponent();
+ await waitForPromises();
+ });
+
+ // This is to mitigate https://gitlab.com/gitlab-org/gitlab/-/issues/413627
+ it('does not throw any error', () => {
+ expect(wrapper.exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
index d5d3f56e451..f2a66ad2ff2 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
+++ b/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
@@ -49,7 +49,7 @@ exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue render
name="MyWidget"
/>
<div
- class="gl-display-flex gl-w-full"
+ class="gl-display-flex gl-flex-direction-column gl-w-full"
>
<div
class="gl-display-flex gl-flex-grow-1"
@@ -88,8 +88,7 @@ exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue render
>
<li>
<div
- class="gl-align-items-center gl-display-flex"
- data-qa-selector="child_content"
+ class="gl-align-items-baseline gl-display-flex"
>
<div
class="gl-min-w-0 gl-w-full"
@@ -111,7 +110,7 @@ exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue render
class="gl-align-items-baseline gl-display-flex"
>
<div
- class="gl-display-flex gl-w-full"
+ class="gl-display-flex gl-flex-direction-column gl-w-full"
>
<div
class="gl-display-flex gl-flex-grow-1"
diff --git a/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js b/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
deleted file mode 100644
index d5e04c666e0..00000000000
--- a/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
+++ /dev/null
@@ -1,224 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import { GlBadge } from '@gitlab/ui';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { trimText } from 'helpers/text_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import axios from '~/lib/utils/axios_utils';
-import codeQualityExtension from '~/vue_merge_request_widget/extensions/code_quality/index.vue';
-import {
- HTTP_STATUS_INTERNAL_SERVER_ERROR,
- HTTP_STATUS_NO_CONTENT,
- HTTP_STATUS_OK,
-} from '~/lib/utils/http_status';
-import {
- i18n,
- codeQualityPrefixes,
-} from '~/vue_merge_request_widget/extensions/code_quality/constants';
-import {
- codeQualityResponseNewErrors,
- codeQualityResponseResolvedErrors,
- codeQualityResponseResolvedAndNewErrors,
- codeQualityResponseNoErrors,
-} from './mock_data';
-
-describe('Code Quality extension', () => {
- let wrapper;
- let mock;
- const endpoint = '/root/repo/-/merge_requests/4/codequality_reports.json';
-
- const mockApi = (statusCode, data) => {
- mock.onGet(endpoint).reply(statusCode, data);
- };
-
- const findToggleCollapsedButton = () => wrapper.findByTestId('toggle-button');
- const findAllExtensionListItems = () => wrapper.findAllByTestId('extension-list-item');
- const isCollapsable = () => wrapper.findByTestId('toggle-button').exists();
- const getNeutralIcon = () => wrapper.findByTestId('status-neutral-icon').exists();
- const getAlertIcon = () => wrapper.findByTestId('status-alert-icon').exists();
- const getSuccessIcon = () => wrapper.findByTestId('status-success-icon').exists();
-
- const createComponent = () => {
- wrapper = mountExtended(codeQualityExtension, {
- propsData: {
- mr: {
- codequality: endpoint,
- codequalityReportsPath: endpoint,
- blobPath: {
- head_path: 'example/path',
- base_path: 'example/path',
- },
- },
- },
- });
- };
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- describe('summary', () => {
- it('displays loading text', () => {
- mockApi(HTTP_STATUS_OK, codeQualityResponseNewErrors);
-
- createComponent();
-
- expect(wrapper.text()).toBe(i18n.loading);
- });
-
- it('with a 204 response, continues to display loading state', async () => {
- mockApi(HTTP_STATUS_NO_CONTENT, '');
- createComponent();
-
- await waitForPromises();
-
- expect(wrapper.text()).toBe(i18n.loading);
- });
-
- it('displays failed loading text', async () => {
- mockApi(HTTP_STATUS_INTERNAL_SERVER_ERROR);
-
- createComponent();
-
- await waitForPromises();
-
- expect(wrapper.text()).toBe(i18n.error);
- expect(isCollapsable()).toBe(false);
- });
-
- it('displays new Errors finding', async () => {
- mockApi(HTTP_STATUS_OK, codeQualityResponseNewErrors);
-
- createComponent();
-
- await waitForPromises();
- expect(wrapper.text()).toBe(
- i18n
- .singularCopy(
- i18n.findings(codeQualityResponseNewErrors.new_errors, codeQualityPrefixes.new),
- )
- .replace(/%{strong_start}/g, '')
- .replace(/%{strong_end}/g, ''),
- );
- expect(isCollapsable()).toBe(true);
- expect(getAlertIcon()).toBe(true);
- });
-
- it('displays resolved Errors finding', async () => {
- mockApi(HTTP_STATUS_OK, codeQualityResponseResolvedErrors);
-
- createComponent();
-
- await waitForPromises();
- expect(wrapper.text()).toBe(
- i18n
- .singularCopy(
- i18n.findings(
- codeQualityResponseResolvedErrors.resolved_errors,
- codeQualityPrefixes.fixed,
- ),
- )
- .replace(/%{strong_start}/g, '')
- .replace(/%{strong_end}/g, ''),
- );
- expect(isCollapsable()).toBe(true);
- expect(getSuccessIcon()).toBe(true);
- });
-
- it('displays quality improvement and degradation', async () => {
- mockApi(HTTP_STATUS_OK, codeQualityResponseResolvedAndNewErrors);
-
- createComponent();
- await waitForPromises();
-
- // replacing strong tags because they will not be found in the rendered text
- expect(wrapper.text()).toBe(
- i18n
- .improvementAndDegradationCopy(
- i18n.findings(
- codeQualityResponseResolvedAndNewErrors.resolved_errors,
- codeQualityPrefixes.fixed,
- ),
- i18n.findings(
- codeQualityResponseResolvedAndNewErrors.new_errors,
- codeQualityPrefixes.new,
- ),
- )
- .replace(/%{strong_start}/g, '')
- .replace(/%{strong_end}/g, ''),
- );
- expect(isCollapsable()).toBe(true);
- expect(getAlertIcon()).toBe(true);
- });
-
- it('displays no detected errors', async () => {
- mockApi(HTTP_STATUS_OK, codeQualityResponseNoErrors);
-
- createComponent();
-
- await waitForPromises();
-
- expect(wrapper.text()).toBe(i18n.noChanges);
- expect(isCollapsable()).toBe(false);
- expect(getNeutralIcon()).toBe(true);
- });
- });
-
- describe('expanded data', () => {
- beforeEach(async () => {
- mockApi(HTTP_STATUS_OK, codeQualityResponseResolvedAndNewErrors);
-
- createComponent();
-
- await waitForPromises();
-
- findToggleCollapsedButton().trigger('click');
-
- await waitForPromises();
- });
-
- it('displays all report list items in viewport', () => {
- expect(findAllExtensionListItems()).toHaveLength(4);
- });
-
- it('displays report list item formatted', () => {
- const text = {
- newError: trimText(findAllExtensionListItems().at(0).text().replace(/\s+/g, ' ').trim()),
- resolvedError: findAllExtensionListItems().at(2).text().replace(/\s+/g, ' ').trim(),
- };
-
- expect(text.newError).toContain(
- "Minor - Parsing error: 'return' outside of function in index.js:12",
- );
- expect(text.resolvedError).toContain(
- "Minor - Parsing error: 'return' outside of function in index.js:12 Fixed",
- );
- });
-
- it('displays report list item formatted with check_name', () => {
- const text = {
- newError: trimText(findAllExtensionListItems().at(1).text().replace(/\s+/g, ' ').trim()),
- resolvedError: findAllExtensionListItems().at(3).text().replace(/\s+/g, ' ').trim(),
- };
-
- expect(text.newError).toContain(
- 'Minor - Rubocop/Metrics/ParameterLists - Avoid parameter lists longer than 5 parameters. [12/5] in main.rb:3',
- );
- expect(text.resolvedError).toContain(
- 'Minor - Rubocop/Metrics/ParameterLists - Avoid parameter lists longer than 5 parameters. [12/5] in main.rb:3 Fixed',
- );
- });
-
- it('adds fixed indicator (badge) when error is resolved', () => {
- expect(findAllExtensionListItems().at(3).findComponent(GlBadge).exists()).toBe(true);
- expect(findAllExtensionListItems().at(3).findComponent(GlBadge).text()).toEqual(i18n.fixed);
- });
-
- it('should not add fixed indicator (badge) when error is new', () => {
- expect(findAllExtensionListItems().at(0).findComponent(GlBadge).exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js b/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js
deleted file mode 100644
index e66c1521ff5..00000000000
--- a/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js
+++ /dev/null
@@ -1,101 +0,0 @@
-export const codeQualityResponseNewErrors = {
- status: 'failed',
- new_errors: [
- {
- description: "Parsing error: 'return' outside of function",
- severity: 'minor',
- file_path: 'index.js',
- line: 12,
- },
- {
- description: 'TODO found',
- severity: 'minor',
- file_path: '.gitlab-ci.yml',
- line: 73,
- },
- ],
- resolved_errors: [],
- existing_errors: [],
- summary: {
- total: 12235,
- resolved: 0,
- errored: 12235,
- },
-};
-
-export const codeQualityResponseResolvedErrors = {
- status: 'success',
- new_errors: [],
- resolved_errors: [
- {
- description: "Parsing error: 'return' outside of function",
- severity: 'minor',
- file_path: 'index.js',
- line: 12,
- },
- {
- description: 'TODO found',
- severity: 'minor',
- file_path: '.gitlab-ci.yml',
- line: 73,
- },
- ],
- existing_errors: [],
- summary: {
- total: 12235,
- resolved: 0,
- errored: 12235,
- },
-};
-
-export const codeQualityResponseResolvedAndNewErrors = {
- status: 'failed',
- new_errors: [
- {
- description: "Parsing error: 'return' outside of function",
- severity: 'minor',
- file_path: 'index.js',
- line: 12,
- },
- {
- description: 'Avoid parameter lists longer than 5 parameters. [12/5]',
- check_name: 'Rubocop/Metrics/ParameterLists',
- severity: 'minor',
- file_path: 'main.rb',
- line: 3,
- },
- ],
- resolved_errors: [
- {
- description: "Parsing error: 'return' outside of function",
- severity: 'minor',
- file_path: 'index.js',
- line: 12,
- },
- {
- description: 'Avoid parameter lists longer than 5 parameters. [12/5]',
- check_name: 'Rubocop/Metrics/ParameterLists',
- severity: 'minor',
- file_path: 'main.rb',
- line: 3,
- },
- ],
- existing_errors: [],
- summary: {
- total: 12233,
- resolved: 1,
- errored: 12233,
- },
-};
-
-export const codeQualityResponseNoErrors = {
- status: 'failed',
- new_errors: [],
- resolved_errors: [],
- existing_errors: [],
- summary: {
- total: 12234,
- resolved: 0,
- errored: 12234,
- },
-};
diff --git a/spec/frontend/vue_shared/alert_details/alert_details_spec.js b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
index 6c2b21053f0..d2dfb6ee1bf 100644
--- a/spec/frontend/vue_shared/alert_details/alert_details_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
@@ -1,16 +1,19 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { joinPaths } from '~/lib/utils/url_utility';
import Tracking from '~/tracking';
import AlertDetails from '~/vue_shared/alert_details/components/alert_details.vue';
import AlertSummaryRow from '~/vue_shared/alert_details/components/alert_summary_row.vue';
import { PAGE_CONFIG, SEVERITY_LEVELS } from '~/vue_shared/alert_details/constants';
import createIssueMutation from '~/vue_shared/alert_details/graphql/mutations/alert_issue_create.mutation.graphql';
+import alertQuery from '~/vue_shared/alert_details/graphql/queries/alert_sidebar_details.query.graphql';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
import MetricImagesTab from '~/vue_shared/components/metric_images/metric_images_tab.vue';
import createStore from '~/vue_shared/components/metric_images/store/';
@@ -27,20 +30,57 @@ describe('AlertDetails', () => {
let environmentData = { name: environmentName, path: environmentPath };
let mock;
let wrapper;
+ let requestHandlers;
const projectPath = 'root/alerts';
const projectIssuesPath = 'root/alerts/-/issues';
const projectId = '1';
const $router = { push: jest.fn() };
+ const defaultHandlers = {
+ createIssueMutationMock: jest.fn().mockResolvedValue({
+ data: {
+ createAlertIssue: {
+ errors: [],
+ issue: {
+ id: 'id',
+ iid: 'iid',
+ webUrl: 'webUrl',
+ },
+ },
+ },
+ }),
+ alertQueryMock: jest.fn().mockResolvedValue({
+ data: {
+ project: {
+ id: '1',
+ alertManagementAlerts: {
+ nodes: [],
+ },
+ },
+ },
+ }),
+ };
+
+ const createMockApolloProvider = (handlers) => {
+ Vue.use(VueApollo);
+ requestHandlers = handlers;
+
+ return createMockApollo([
+ [alertQuery, handlers.alertQueryMock],
+ [createIssueMutation, handlers.createIssueMutationMock],
+ ]);
+ };
+
function mountComponent({
data,
- loading = false,
mountMethod = shallowMount,
provide = {},
stubs = {},
+ handlers = defaultHandlers,
} = {}) {
wrapper = extendedWrapper(
mountMethod(AlertDetails, {
+ apolloProvider: createMockApolloProvider(handlers),
provide: {
alertId: 'alertId',
projectPath,
@@ -59,15 +99,6 @@ describe('AlertDetails', () => {
};
},
mocks: {
- $apollo: {
- mutate: jest.fn(),
- queries: {
- alert: {
- loading,
- },
- sidebarStatus: {},
- },
- },
$router,
$route: { params: {} },
},
@@ -139,7 +170,6 @@ describe('AlertDetails', () => {
describe('Metrics tab', () => {
it('should mount without errors', () => {
mountComponent({
- mountMethod: mount,
provide: {
canUpdate: true,
iid: '1',
@@ -216,7 +246,6 @@ describe('AlertDetails', () => {
it('should display "Create incident" button when incident doesn\'t exist yet', async () => {
const issue = null;
mountComponent({
- mountMethod: mount,
data: { alert: { ...mockAlert, issue } },
});
@@ -226,23 +255,16 @@ describe('AlertDetails', () => {
});
it('calls `$apollo.mutate` with `createIssueQuery`', () => {
- const issueIid = '10';
mountComponent({
mountMethod: mount,
data: { alert: { ...mockAlert } },
});
- jest
- .spyOn(wrapper.vm.$apollo, 'mutate')
- .mockResolvedValue({ data: { createAlertIssue: { issue: { iid: issueIid } } } });
findCreateIncidentBtn().trigger('click');
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: createIssueMutation,
- variables: {
- iid: mockAlert.iid,
- projectPath,
- },
+ expect(requestHandlers.createIssueMutationMock).toHaveBeenCalledWith({
+ iid: mockAlert.iid,
+ projectPath,
});
});
@@ -251,25 +273,44 @@ describe('AlertDetails', () => {
mountComponent({
mountMethod: mount,
data: { alert: { ...mockAlert, alertIid: 1 } },
+ handlers: {
+ ...defaultHandlers,
+ createIssueMutationMock: jest.fn().mockRejectedValue(new Error(errorMsg)),
+ },
});
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue(errorMsg);
findCreateIncidentBtn().trigger('click');
await waitForPromises();
- expect(findIncidentCreationAlert().text()).toBe(errorMsg);
+ expect(findIncidentCreationAlert().text()).toBe(`Error: ${errorMsg}`);
});
});
describe('View full alert details', () => {
- beforeEach(() => {
- mountComponent({ data: { alert: mockAlert } });
+ beforeEach(async () => {
+ mountComponent({
+ data: { alert: mockAlert },
+ handlers: {
+ ...defaultHandlers,
+ alertQueryMock: jest.fn().mockResolvedValue({
+ data: {
+ project: {
+ id: '1',
+ alertManagementAlerts: {
+ nodes: [{ id: '1' }],
+ },
+ },
+ },
+ }),
+ },
+ });
+ await waitForPromises();
});
it('should display a table of raw alert details data', () => {
- const details = findDetailsTable();
- expect(details.exists()).toBe(true);
- expect(details.props()).toStrictEqual({
+ expect(findDetailsTable().exists()).toBe(true);
+
+ expect(findDetailsTable().props()).toStrictEqual({
alert: mockAlert,
statuses: PAGE_CONFIG.OPERATIONS.STATUSES,
loading: false,
@@ -279,7 +320,7 @@ describe('AlertDetails', () => {
describe('loading state', () => {
beforeEach(() => {
- mountComponent({ loading: true });
+ mountComponent();
});
it('displays a loading state when loading', () => {
diff --git a/spec/frontend/vue_shared/components/ci_icon_spec.js b/spec/frontend/vue_shared/components/ci_icon/ci_icon_spec.js
index cbb725bf9e6..792470c8e89 100644
--- a/spec/frontend/vue_shared/components/ci_icon_spec.js
+++ b/spec/frontend/vue_shared/components/ci_icon/ci_icon_spec.js
@@ -1,6 +1,6 @@
import { GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiIcon from '~/vue_shared/components/ci_icon/ci_icon.vue';
const mockStatus = {
group: 'success',
diff --git a/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js b/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js
index 53218d794c7..b825a578cee 100644
--- a/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js
+++ b/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js
@@ -19,7 +19,7 @@ describe('Confirm Danger Modal', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findConfirmationPhrase = () => wrapper.findByTestId('confirm-danger-phrase');
- const findConfirmationInput = () => wrapper.findByTestId('confirm-danger-input');
+ const findConfirmationInput = () => wrapper.findByTestId('confirm-danger-field');
const findDefaultWarning = () => wrapper.findByTestId('confirm-danger-warning');
const findAdditionalMessage = () => wrapper.findByTestId('confirm-danger-message');
const findPrimaryAction = () => findModal().props('actionPrimary');
diff --git a/spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js b/spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js
index 810269257b6..e2c3fc89525 100644
--- a/spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js
@@ -152,7 +152,8 @@ describe('Diff Stats Dropdown', () => {
});
it('focuses the first item when pressing the down key within the search box', () => {
- const spy = jest.spyOn(wrapper.vm, 'focusFirstItem');
+ const { element } = wrapper.find('.gl-new-dropdown-item');
+ const spy = jest.spyOn(element, 'focus');
findSearchBox().vm.$emit('keydown', new KeyboardEvent({ key: ARROW_DOWN_KEY }));
expect(spy).toHaveBeenCalled();
diff --git a/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js b/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js
index dd5a05a40c6..1a9a08a9656 100644
--- a/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js
+++ b/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js
@@ -1,8 +1,8 @@
import { GlDropdown, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-
import { nextTick } from 'vue';
import DropdownWidget from '~/vue_shared/components/dropdown/dropdown_widget/dropdown_widget.vue';
+import { stubComponent, RENDER_ALL_SLOTS_TEMPLATE } from 'helpers/stub_component';
describe('DropdownWidget component', () => {
let wrapper;
@@ -27,11 +27,14 @@ describe('DropdownWidget component', () => {
...props,
},
stubs: {
- GlDropdown,
+ GlDropdown: stubComponent(GlDropdown, {
+ methods: {
+ hide: jest.fn(),
+ },
+ template: RENDER_ALL_SLOTS_TEMPLATE,
+ }),
},
});
-
- jest.spyOn(findDropdown().vm, 'hide').mockImplementation();
};
beforeEach(() => {
diff --git a/spec/frontend/vue_shared/components/entity_select/entity_select_spec.js b/spec/frontend/vue_shared/components/entity_select/entity_select_spec.js
index 1376133ec37..02da6079466 100644
--- a/spec/frontend/vue_shared/components/entity_select/entity_select_spec.js
+++ b/spec/frontend/vue_shared/components/entity_select/entity_select_spec.js
@@ -8,7 +8,7 @@ import waitForPromises from 'helpers/wait_for_promises';
describe('EntitySelect', () => {
let wrapper;
let fetchItemsMock;
- let fetchInitialSelectionTextMock;
+ let fetchInitialSelectionMock;
// Mocks
const itemMock = {
@@ -96,16 +96,16 @@ describe('EntitySelect', () => {
});
it("fetches the initially selected value's name", async () => {
- fetchInitialSelectionTextMock = jest.fn().mockImplementation(() => itemMock.text);
+ fetchInitialSelectionMock = jest.fn().mockImplementation(() => itemMock);
createComponent({
props: {
- fetchInitialSelectionText: fetchInitialSelectionTextMock,
+ fetchInitialSelection: fetchInitialSelectionMock,
initialSelection: itemMock.value,
},
});
await nextTick();
- expect(fetchInitialSelectionTextMock).toHaveBeenCalledTimes(1);
+ expect(fetchInitialSelectionMock).toHaveBeenCalledTimes(1);
expect(findListbox().props('toggleText')).toBe(itemMock.text);
});
});
@@ -188,7 +188,7 @@ describe('EntitySelect', () => {
findListbox().vm.$emit('reset');
await nextTick();
- expect(Object.keys(wrapper.emitted('input')[2][0]).length).toBe(0);
+ expect(wrapper.emitted('input')[2][0]).toEqual({});
});
});
});
diff --git a/spec/frontend/vue_shared/components/entity_select/organization_select_spec.js b/spec/frontend/vue_shared/components/entity_select/organization_select_spec.js
index ea029ba4f27..6dc38bbd0c6 100644
--- a/spec/frontend/vue_shared/components/entity_select/organization_select_spec.js
+++ b/spec/frontend/vue_shared/components/entity_select/organization_select_spec.js
@@ -1,35 +1,35 @@
import VueApollo from 'vue-apollo';
-import Vue, { nextTick } from 'vue';
-import { GlCollapsibleListbox } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import Vue from 'vue';
+import { GlCollapsibleListbox, GlAlert } from '@gitlab/ui';
+import { chunk } from 'lodash';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import OrganizationSelect from '~/vue_shared/components/entity_select/organization_select.vue';
import EntitySelect from '~/vue_shared/components/entity_select/entity_select.vue';
+import { DEFAULT_PER_PAGE } from '~/api';
import {
ORGANIZATION_TOGGLE_TEXT,
ORGANIZATION_HEADER_TEXT,
FETCH_ORGANIZATIONS_ERROR,
FETCH_ORGANIZATION_ERROR,
} from '~/vue_shared/components/entity_select/constants';
-import resolvers from '~/organizations/shared/graphql/resolvers';
-import organizationsQuery from '~/organizations/index/graphql/organizations.query.graphql';
-import { organizations as organizationsMock } from '~/organizations/mock_data';
+import getCurrentUserOrganizationsQuery from '~/organizations/shared/graphql/queries/organizations.query.graphql';
+import getOrganizationQuery from '~/organizations/shared/graphql/queries/organization.query.graphql';
+import { organizations as nodes, pageInfo, pageInfoEmpty } from '~/organizations/mock_data';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
Vue.use(VueApollo);
-jest.useFakeTimers();
-
describe('OrganizationSelect', () => {
let wrapper;
let mockApollo;
// Mocks
- const [organizationMock] = organizationsMock;
-
- // Stubs
- const GlAlert = {
- template: '<div><slot /></div>',
+ const [organization] = nodes;
+ const organizations = {
+ nodes,
+ pageInfo,
};
// Props
@@ -44,23 +44,26 @@ describe('OrganizationSelect', () => {
const findEntitySelect = () => wrapper.findComponent(EntitySelect);
const findAlert = () => wrapper.findComponent(GlAlert);
+ // Mock handlers
const handleInput = jest.fn();
+ const getCurrentUserOrganizationsQueryHandler = jest.fn().mockResolvedValue({
+ data: { currentUser: { id: 'gid://gitlab/User/1', __typename: 'CurrentUser', organizations } },
+ });
+ const getOrganizationQueryHandler = jest.fn().mockResolvedValue({
+ data: { organization },
+ });
// Helpers
- const createComponent = ({ props = {}, mockResolvers = resolvers, handlers } = {}) => {
- mockApollo = createMockApollo(
- handlers || [
- [
- organizationsQuery,
- jest.fn().mockResolvedValueOnce({
- data: { currentUser: { id: 1, organizations: { nodes: organizationsMock } } },
- }),
- ],
- ],
- mockResolvers,
- );
-
- wrapper = shallowMountExtended(OrganizationSelect, {
+ const createComponent = ({
+ props = {},
+ handlers = [
+ [getCurrentUserOrganizationsQuery, getCurrentUserOrganizationsQueryHandler],
+ [getOrganizationQuery, getOrganizationQueryHandler],
+ ],
+ } = {}) => {
+ mockApollo = createMockApollo(handlers);
+
+ wrapper = mountExtended(OrganizationSelect, {
apolloProvider: mockApollo,
propsData: {
label,
@@ -70,10 +73,6 @@ describe('OrganizationSelect', () => {
toggleClass,
...props,
},
- stubs: {
- GlAlert,
- EntitySelect,
- },
listeners: {
input: handleInput,
},
@@ -81,10 +80,6 @@ describe('OrganizationSelect', () => {
};
const openListbox = () => findListbox().vm.$emit('shown');
- afterEach(() => {
- mockApollo = null;
- });
-
describe('entity_select props', () => {
beforeEach(() => {
createComponent();
@@ -107,40 +102,31 @@ describe('OrganizationSelect', () => {
describe('on mount', () => {
it('fetches organizations when the listbox is opened', async () => {
createComponent();
- await nextTick();
- jest.runAllTimers();
- await waitForPromises();
-
openListbox();
- jest.runAllTimers();
await waitForPromises();
- expect(findListbox().props('items')).toEqual([
- { text: organizationsMock[0].name, value: 1 },
- { text: organizationsMock[1].name, value: 2 },
- { text: organizationsMock[2].name, value: 3 },
- ]);
+
+ const expectedItems = nodes.map((node) => ({
+ ...node,
+ text: node.name,
+ value: getIdFromGraphQLId(node.id),
+ }));
+
+ expect(findListbox().props('items')).toEqual(expectedItems);
});
describe('with an initial selection', () => {
it("fetches the initially selected value's name", async () => {
- createComponent({ props: { initialSelection: organizationMock.id } });
- await nextTick();
- jest.runAllTimers();
+ createComponent({ props: { initialSelection: organization.id } });
await waitForPromises();
- expect(findListbox().props('toggleText')).toBe(organizationMock.name);
+ expect(findListbox().props('toggleText')).toBe(organization.name);
});
it('show an error if fetching initially selected fails', async () => {
- const mockResolvers = {
- Query: {
- organization: jest.fn().mockRejectedValueOnce(new Error()),
- },
- };
-
- createComponent({ props: { initialSelection: organizationMock.id }, mockResolvers });
- await nextTick();
- jest.runAllTimers();
+ createComponent({
+ props: { initialSelection: organization.id },
+ handlers: [[getOrganizationQuery, jest.fn().mockRejectedValueOnce()]],
+ });
expect(findAlert().exists()).toBe(false);
@@ -152,18 +138,59 @@ describe('OrganizationSelect', () => {
});
});
+ describe('when listbox bottom is reached and there are more organizations to load', () => {
+ const [firstPage, secondPage] = chunk(nodes, Math.ceil(nodes.length / 2));
+ const getCurrentUserOrganizationsQueryMultiplePagesHandler = jest
+ .fn()
+ .mockResolvedValueOnce({
+ data: {
+ currentUser: {
+ id: 'gid://gitlab/User/1',
+ __typename: 'CurrentUser',
+ organizations: { nodes: firstPage, pageInfo },
+ },
+ },
+ })
+ .mockResolvedValueOnce({
+ data: {
+ currentUser: {
+ id: 'gid://gitlab/User/1',
+ __typename: 'CurrentUser',
+ organizations: { nodes: secondPage, pageInfo: pageInfoEmpty },
+ },
+ },
+ });
+
+ beforeEach(async () => {
+ createComponent({
+ handlers: [
+ [getCurrentUserOrganizationsQuery, getCurrentUserOrganizationsQueryMultiplePagesHandler],
+ [getOrganizationQuery, getOrganizationQueryHandler],
+ ],
+ });
+ openListbox();
+ await waitForPromises();
+
+ findListbox().vm.$emit('bottom-reached');
+ await waitForPromises();
+ });
+
+ it('calls graphQL query correct `after` variable', () => {
+ expect(getCurrentUserOrganizationsQueryMultiplePagesHandler).toHaveBeenCalledWith({
+ after: pageInfo.endCursor,
+ first: DEFAULT_PER_PAGE,
+ });
+ expect(findListbox().props('infiniteScroll')).toBe(false);
+ });
+ });
+
it('shows an error when fetching organizations fails', async () => {
createComponent({
- handlers: [[organizationsQuery, jest.fn().mockRejectedValueOnce(new Error())]],
+ handlers: [[getCurrentUserOrganizationsQuery, jest.fn().mockRejectedValueOnce()]],
});
- await nextTick();
- jest.runAllTimers();
- await waitForPromises();
-
openListbox();
expect(findAlert().exists()).toBe(false);
- jest.runAllTimers();
await waitForPromises();
expect(findAlert().exists()).toBe(true);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
index bb612a13209..3a5c7d7729f 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
@@ -1,11 +1,4 @@
-import {
- GlFilteredSearch,
- GlButtonGroup,
- GlButton,
- GlDropdown,
- GlDropdownItem,
- GlFormCheckbox,
-} from '@gitlab/ui';
+import { GlDropdownItem, GlSorting, GlFilteredSearch, GlFormCheckbox } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import { nextTick } from 'vue';
@@ -13,7 +6,6 @@ import RecentSearchesService from '~/filtered_search/services/recent_searches_se
import RecentSearchesStore from '~/filtered_search/stores/recent_searches_store';
import {
FILTERED_SEARCH_TERM,
- SORT_DIRECTION,
TOKEN_TYPE_AUTHOR,
TOKEN_TYPE_LABEL,
TOKEN_TYPE_MILESTONE,
@@ -48,6 +40,7 @@ const createComponent = ({
recentSearchesStorageKey = 'requirements',
tokens = mockAvailableTokens,
sortOptions,
+ initialSortBy,
initialFilterValue = [],
showCheckbox = false,
checkboxChecked = false,
@@ -61,6 +54,7 @@ const createComponent = ({
recentSearchesStorageKey,
tokens,
sortOptions,
+ initialSortBy,
initialFilterValue,
showCheckbox,
checkboxChecked,
@@ -72,34 +66,38 @@ const createComponent = ({
describe('FilteredSearchBarRoot', () => {
let wrapper;
- const findGlButton = () => wrapper.findComponent(GlButton);
- const findGlDropdown = () => wrapper.findComponent(GlDropdown);
+ const findGlSorting = () => wrapper.findComponent(GlSorting);
const findGlFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
- beforeEach(() => {
- wrapper = createComponent({ sortOptions: mockSortOptions });
- });
-
describe('data', () => {
- it('initializes `filterValue`, `selectedSortOption` and `selectedSortDirection` data props and displays the sort dropdown', () => {
- expect(wrapper.vm.filterValue).toEqual([]);
- expect(wrapper.vm.selectedSortOption).toBe(mockSortOptions[0]);
- expect(wrapper.vm.selectedSortDirection).toBe(SORT_DIRECTION.descending);
- expect(wrapper.findComponent(GlButtonGroup).exists()).toBe(true);
- expect(wrapper.findComponent(GlButton).exists()).toBe(true);
- expect(wrapper.findComponent(GlDropdown).exists()).toBe(true);
- expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(true);
+ describe('when `sortOptions` are provided', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ sortOptions: mockSortOptions });
+ });
+
+ it('sets a correct initial value for GlFilteredSearch', () => {
+ expect(findGlFilteredSearch().props('value')).toEqual([]);
+ });
+
+ it('emits an event with the selectedSortOption provided by default', async () => {
+ findGlSorting().vm.$emit('sortByChange', mockSortOptions[1].id);
+ await nextTick();
+
+ expect(wrapper.emitted('onSort')[0]).toEqual([mockSortOptions[1].sortDirection.descending]);
+ });
+
+ it('emits an event with the selectedSortDirection provided by default', async () => {
+ findGlSorting().vm.$emit('sortDirectionChange', true);
+ await nextTick();
+
+ expect(wrapper.emitted('onSort')[0]).toEqual([mockSortOptions[0].sortDirection.ascending]);
+ });
});
- it('does not initialize `selectedSortOption` and `selectedSortDirection` when `sortOptions` is not applied and hides the sort dropdown', () => {
- const wrapperNoSort = createComponent();
+ it('does not initialize the sort dropdown when `sortOptions` are not provided', () => {
+ wrapper = createComponent();
- expect(wrapperNoSort.vm.filterValue).toEqual([]);
- expect(wrapperNoSort.vm.selectedSortOption).toBe(undefined);
- expect(wrapperNoSort.findComponent(GlButtonGroup).exists()).toBe(false);
- expect(wrapperNoSort.findComponent(GlButton).exists()).toBe(false);
- expect(wrapperNoSort.findComponent(GlDropdown).exists()).toBe(false);
- expect(wrapperNoSort.findComponent(GlDropdownItem).exists()).toBe(false);
+ expect(findGlSorting().exists()).toBe(false);
});
});
@@ -125,27 +123,27 @@ describe('FilteredSearchBarRoot', () => {
});
describe('sortDirectionIcon', () => {
- it('renders `sort-highest` descending icon by default', () => {
- expect(findGlButton().props('icon')).toBe('sort-highest');
- expect(findGlButton().attributes()).toMatchObject({
- 'aria-label': 'Sort direction: Descending',
- title: 'Sort direction: Descending',
- });
+ beforeEach(() => {
+ wrapper = createComponent({ sortOptions: mockSortOptions });
+ });
+
+ it('passes isAscending=false to GlSorting by default', () => {
+ expect(findGlSorting().props('isAscending')).toBe(false);
});
it('renders `sort-lowest` ascending icon when the sort button is clicked', async () => {
- findGlButton().vm.$emit('click');
+ findGlSorting().vm.$emit('sortDirectionChange', true);
await nextTick();
- expect(findGlButton().props('icon')).toBe('sort-lowest');
- expect(findGlButton().attributes()).toMatchObject({
- 'aria-label': 'Sort direction: Ascending',
- title: 'Sort direction: Ascending',
- });
+ expect(findGlSorting().props('isAscending')).toBe(true);
});
});
describe('filteredRecentSearches', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
it('returns array of recent searches filtering out any string type (unsupported) items', async () => {
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
// eslint-disable-next-line no-restricted-syntax
@@ -227,34 +225,37 @@ describe('FilteredSearchBarRoot', () => {
});
});
- describe('handleSortOptionClick', () => {
- it('emits component event `onSort` with selected sort by value', () => {
- wrapper.vm.handleSortOptionClick(mockSortOptions[1]);
+ describe('handleSortOptionChange', () => {
+ it('emits component event `onSort` with selected sort by value', async () => {
+ wrapper = createComponent({ sortOptions: mockSortOptions });
+
+ findGlSorting().vm.$emit('sortByChange', mockSortOptions[1].id);
+ await nextTick();
expect(wrapper.vm.selectedSortOption).toBe(mockSortOptions[1]);
expect(wrapper.emitted('onSort')[0]).toEqual([mockSortOptions[1].sortDirection.descending]);
});
});
- describe('handleSortDirectionClick', () => {
+ describe('handleSortDirectionChange', () => {
beforeEach(() => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- selectedSortOption: mockSortOptions[0],
+ wrapper = createComponent({
+ sortOptions: mockSortOptions,
+ initialSortBy: mockSortOptions[0].sortDirection.descending,
});
});
- it('sets `selectedSortDirection` to be opposite of its current value', () => {
- expect(wrapper.vm.selectedSortDirection).toBe(SORT_DIRECTION.descending);
+ it('sets sort direction to be opposite of its current value', async () => {
+ expect(findGlSorting().props('isAscending')).toBe(false);
- wrapper.vm.handleSortDirectionClick();
+ findGlSorting().vm.$emit('sortDirectionChange', true);
+ await nextTick();
- expect(wrapper.vm.selectedSortDirection).toBe(SORT_DIRECTION.ascending);
+ expect(findGlSorting().props('isAscending')).toBe(true);
});
it('emits component event `onSort` with opposite of currently selected sort by value', () => {
- wrapper.vm.handleSortDirectionClick();
+ findGlSorting().vm.$emit('sortDirectionChange', true);
expect(wrapper.emitted('onSort')[0]).toEqual([mockSortOptions[0].sortDirection.ascending]);
});
@@ -288,6 +289,8 @@ describe('FilteredSearchBarRoot', () => {
const mockFilters = [tokenValueAuthor, 'foo'];
beforeEach(async () => {
+ wrapper = createComponent();
+
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
@@ -358,19 +361,14 @@ describe('FilteredSearchBarRoot', () => {
});
describe('template', () => {
- beforeEach(async () => {
+ it('renders gl-filtered-search component', async () => {
+ wrapper = createComponent();
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
// eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- selectedSortOption: mockSortOptions[0],
- selectedSortDirection: SORT_DIRECTION.descending,
+ await wrapper.setData({
recentSearches: mockHistoryItems,
});
- await nextTick();
- });
-
- it('renders gl-filtered-search component', () => {
const glFilteredSearchEl = wrapper.findComponent(GlFilteredSearch);
expect(glFilteredSearchEl.props('placeholder')).toBe('Filter requirements');
@@ -454,25 +452,28 @@ describe('FilteredSearchBarRoot', () => {
});
it('renders sort dropdown component', () => {
- expect(wrapper.findComponent(GlButtonGroup).exists()).toBe(true);
- expect(wrapper.findComponent(GlDropdown).exists()).toBe(true);
- expect(wrapper.findComponent(GlDropdown).props('text')).toBe(mockSortOptions[0].title);
- });
-
- it('renders sort dropdown items', () => {
- const dropdownItemsEl = wrapper.findAllComponents(GlDropdownItem);
+ wrapper = createComponent({ sortOptions: mockSortOptions });
- expect(dropdownItemsEl).toHaveLength(mockSortOptions.length);
- expect(dropdownItemsEl.at(0).text()).toBe(mockSortOptions[0].title);
- expect(dropdownItemsEl.at(0).props('isChecked')).toBe(true);
- expect(dropdownItemsEl.at(1).text()).toBe(mockSortOptions[1].title);
+ expect(findGlSorting().exists()).toBe(true);
});
- it('renders sort direction button', () => {
- const sortButtonEl = wrapper.findComponent(GlButton);
-
- expect(sortButtonEl.attributes('title')).toBe('Sort direction: Descending');
- expect(sortButtonEl.props('icon')).toBe('sort-highest');
+ it('renders sort dropdown items', () => {
+ wrapper = createComponent({ sortOptions: mockSortOptions });
+
+ const { sortOptions, sortBy } = findGlSorting().props();
+
+ expect(sortOptions).toEqual([
+ {
+ value: mockSortOptions[0].id,
+ text: mockSortOptions[0].title,
+ },
+ {
+ value: mockSortOptions[1].id,
+ text: mockSortOptions[1].title,
+ },
+ ]);
+
+ expect(sortBy).toBe(mockSortOptions[0].id);
});
});
@@ -483,6 +484,10 @@ describe('FilteredSearchBarRoot', () => {
value: { data: '' },
};
+ beforeEach(() => {
+ wrapper = createComponent({ sortOptions: mockSortOptions });
+ });
+
it('syncs filter value', async () => {
await wrapper.setProps({ initialFilterValue: [tokenValue], syncFilterAndSort: true });
@@ -498,17 +503,33 @@ describe('FilteredSearchBarRoot', () => {
it('syncs sort values', async () => {
await wrapper.setProps({ initialSortBy: 'updated_asc', syncFilterAndSort: true });
- expect(findGlDropdown().props('text')).toBe('Last updated');
- expect(findGlButton().props('icon')).toBe('sort-lowest');
- expect(findGlButton().attributes('aria-label')).toBe('Sort direction: Ascending');
+ expect(findGlSorting().props()).toMatchObject({
+ sortBy: 2,
+ isAscending: true,
+ });
});
it('does not sync sort values when syncFilterAndSort=false', async () => {
await wrapper.setProps({ initialSortBy: 'updated_asc', syncFilterAndSort: false });
- expect(findGlDropdown().props('text')).toBe('Created date');
- expect(findGlButton().props('icon')).toBe('sort-highest');
- expect(findGlButton().attributes('aria-label')).toBe('Sort direction: Descending');
+ expect(findGlSorting().props()).toMatchObject({
+ sortBy: 1,
+ isAscending: false,
+ });
+ });
+
+ it('does not sync sort values when initialSortBy is unset', async () => {
+ // Give initialSort some value which changes the current sort option...
+ await wrapper.setProps({ initialSortBy: 'updated_asc', syncFilterAndSort: true });
+
+ // ... Read the new sort options...
+ const { sortBy, isAscending } = findGlSorting().props();
+
+ // ... Then *unset* initialSortBy...
+ await wrapper.setProps({ initialSortBy: undefined });
+
+ // ... The sort options should not have changed.
+ expect(findGlSorting().props()).toMatchObject({ sortBy, isAscending });
});
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index 88618de6979..1d6834a5604 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -156,9 +156,12 @@ describe('BaseToken', () => {
it('uses last item in list when value is an array', () => {
const mockGetActiveTokenValue = jest.fn();
+ const config = { ...mockConfig, multiSelect: true };
+
wrapper = createComponent({
props: {
- value: { data: mockLabels.map((l) => l.title) },
+ config,
+ value: { data: mockLabels.map((l) => l.title), operator: '||' },
suggestions: mockLabels,
getActiveTokenValue: mockGetActiveTokenValue,
},
@@ -409,8 +412,9 @@ describe('BaseToken', () => {
});
it('emits token-selected event when groupMultiSelectTokens: true', () => {
+ const config = { ...mockConfig, multiSelect: true };
wrapper = createComponent({
- props: { suggestions: mockLabels },
+ props: { suggestions: mockLabels, config, value: { operator: '||' } },
groupMultiSelectTokens: true,
});
@@ -419,9 +423,10 @@ describe('BaseToken', () => {
expect(wrapper.emitted('token-selected')).toEqual([[mockTokenValue.title]]);
});
- it('does not emit token-selected event when groupMultiSelectTokens: true', () => {
+ it('does not emit token-selected event when groupMultiSelectTokens: false', () => {
+ const config = { ...mockConfig, multiSelect: true };
wrapper = createComponent({
- props: { suggestions: mockLabels },
+ props: { suggestions: mockLabels, config, value: { operator: '||' } },
groupMultiSelectTokens: false,
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js
index 56a59790210..34d0c7f0566 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/date_token_spec.js
@@ -42,7 +42,7 @@ describe('DateToken', () => {
findDatepicker().vm.$emit('close');
expect(findGlFilteredSearchToken().emitted()).toEqual({
- complete: [[]],
+ complete: [['2014-10-13']],
select: [['2014-10-13']],
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
index 36e82b39df4..ee54fb5b941 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -5,15 +5,12 @@ import {
GlDropdownDivider,
} from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import axios from '~/lib/utils/axios_utils';
import { sortMilestonesByDueDate } from '~/milestones/utils';
-
import searchMilestonesQuery from '~/issues/list/queries/search_milestones.query.graphql';
import { DEFAULT_MILESTONES } from '~/vue_shared/components/filtered_search_bar/constants';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
@@ -70,7 +67,6 @@ function createComponent(options = {}) {
}
describe('MilestoneToken', () => {
- let mock;
let wrapper;
const findBaseToken = () => wrapper.findComponent(BaseToken);
@@ -80,14 +76,9 @@ describe('MilestoneToken', () => {
};
beforeEach(() => {
- mock = new MockAdapter(axios);
wrapper = createComponent();
});
- afterEach(() => {
- mock.restore();
- });
-
describe('methods', () => {
describe('fetchMilestones', () => {
it('sets loading state', async () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
index 4462d1bfaf5..decf843091e 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
@@ -313,11 +313,11 @@ describe('UserToken', () => {
describe('multiSelect', () => {
it('renders check icons in suggestions when multiSelect is true', async () => {
wrapper = createComponent({
- value: { data: [mockUsers[0].username, mockUsers[1].username], operator: '=' },
+ value: { data: [mockUsers[0].username, mockUsers[1].username], operator: '||' },
data: {
users: mockUsers,
},
- config: { ...mockAuthorToken, multiSelect: true, initialUsers: mockUsers },
+ config: { ...mockAuthorToken, multiSelect: true },
active: true,
stubs: { Portal: true },
groupMultiSelectTokens: true,
@@ -327,18 +327,17 @@ describe('UserToken', () => {
const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
- expect(findIconAtSuggestion(1).exists()).toBe(false);
- expect(findIconAtSuggestion(2).props('name')).toBe('check');
- expect(findIconAtSuggestion(3).props('name')).toBe('check');
+ expect(findIconAtSuggestion(0).props('name')).toBe('check');
+ expect(findIconAtSuggestion(1).props('name')).toBe('check');
+ expect(findIconAtSuggestion(2).exists()).toBe(false);
// test for left padding on unchecked items (so alignment is correct)
- expect(findIconAtSuggestion(4).exists()).toBe(false);
- expect(suggestions.at(4).find('.gl-pl-6').exists()).toBe(true);
+ expect(suggestions.at(2).find('.gl-pl-6').exists()).toBe(true);
});
it('renders multiple users when multiSelect is true', async () => {
wrapper = createComponent({
- value: { data: [mockUsers[0].username, mockUsers[1].username], operator: '=' },
+ value: { data: [mockUsers[0].username, mockUsers[1].username], operator: '||' },
data: {
users: mockUsers,
},
@@ -363,7 +362,7 @@ describe('UserToken', () => {
it('adds new user to multi-select-values', () => {
wrapper = createComponent({
- value: { data: [mockUsers[0].username], operator: '=' },
+ value: { data: [mockUsers[0].username], operator: '||' },
data: {
users: mockUsers,
},
@@ -383,7 +382,7 @@ describe('UserToken', () => {
it('removes existing user from array', () => {
const initialUsers = [mockUsers[0].username, mockUsers[1].username];
wrapper = createComponent({
- value: { data: initialUsers, operator: '=' },
+ value: { data: initialUsers, operator: '||' },
data: {
users: mockUsers,
},
@@ -399,7 +398,7 @@ describe('UserToken', () => {
it('clears input field after token selected', () => {
wrapper = createComponent({
- value: { data: [mockUsers[0].username, mockUsers[1].username], operator: '=' },
+ value: { data: [mockUsers[0].username, mockUsers[1].username], operator: '||' },
data: {
users: mockUsers,
},
@@ -410,7 +409,7 @@ describe('UserToken', () => {
findBaseToken().vm.$emit('token-selected', 'test');
- expect(wrapper.emitted('input')).toEqual([[{ operator: '=', data: '' }]]);
+ expect(wrapper.emitted('input')).toEqual([[{ operator: '||', data: '' }]]);
});
});
diff --git a/spec/frontend/vue_shared/components/keep_alive_slots_spec.js b/spec/frontend/vue_shared/components/keep_alive_slots_spec.js
deleted file mode 100644
index f69a883ee4d..00000000000
--- a/spec/frontend/vue_shared/components/keep_alive_slots_spec.js
+++ /dev/null
@@ -1,118 +0,0 @@
-import { nextTick } from 'vue';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import KeepAliveSlots from '~/vue_shared/components/keep_alive_slots.vue';
-
-const SLOT_1 = {
- slotKey: 'slot-1',
- title: 'Hello 1',
-};
-const SLOT_2 = {
- slotKey: 'slot-2',
- title: 'Hello 2',
-};
-
-describe('~/vue_shared/components/keep_alive_slots.vue', () => {
- let wrapper;
-
- const createSlotContent = ({ slotKey, title }) => `
- <div data-testid="slot-child" data-slot-id="${slotKey}">
- <h1>${title}</h1>
- <input type="text" />
- </div>
- `;
- const createComponent = (props = {}) => {
- wrapper = mountExtended(KeepAliveSlots, {
- propsData: props,
- slots: {
- [SLOT_1.slotKey]: createSlotContent(SLOT_1),
- [SLOT_2.slotKey]: createSlotContent(SLOT_2),
- },
- });
- };
-
- const findRenderedSlots = () =>
- wrapper.findAllByTestId('slot-child').wrappers.map((x) => ({
- title: x.find('h1').text(),
- inputValue: x.find('input').element.value,
- isVisible: x.isVisible(),
- }));
-
- describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('doesnt show anything', () => {
- expect(findRenderedSlots()).toEqual([]);
- });
-
- describe('when slotKey is changed', () => {
- beforeEach(async () => {
- wrapper.setProps({ slotKey: SLOT_1.slotKey });
- await nextTick();
- });
-
- it('shows slot', () => {
- expect(findRenderedSlots()).toEqual([
- {
- title: SLOT_1.title,
- isVisible: true,
- inputValue: '',
- },
- ]);
- });
-
- it('hides everything when slotKey cannot be found', async () => {
- wrapper.setProps({ slotKey: '' });
- await nextTick();
-
- expect(findRenderedSlots()).toEqual([
- {
- title: SLOT_1.title,
- isVisible: false,
- inputValue: '',
- },
- ]);
- });
-
- describe('when user intreracts then slotKey changes again', () => {
- beforeEach(async () => {
- wrapper.find('input').setValue('TEST');
- wrapper.setProps({ slotKey: SLOT_2.slotKey });
- await nextTick();
- });
-
- it('keeps first slot alive but hidden', () => {
- expect(findRenderedSlots()).toEqual([
- {
- title: SLOT_1.title,
- isVisible: false,
- inputValue: 'TEST',
- },
- {
- title: SLOT_2.title,
- isVisible: true,
- inputValue: '',
- },
- ]);
- });
- });
- });
- });
-
- describe('initialized with slotKey', () => {
- beforeEach(() => {
- createComponent({ slotKey: SLOT_2.slotKey });
- });
-
- it('shows slot', () => {
- expect(findRenderedSlots()).toEqual([
- {
- title: SLOT_2.title,
- isVisible: true,
- inputValue: '',
- },
- ]);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/list_selector/deploy_key_item_spec.js b/spec/frontend/vue_shared/components/list_selector/deploy_key_item_spec.js
new file mode 100644
index 00000000000..96be5b345a1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/list_selector/deploy_key_item_spec.js
@@ -0,0 +1,61 @@
+import { GlIcon, GlButton } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import DeployKeyItem from '~/vue_shared/components/list_selector/deploy_key_item.vue';
+
+describe('DeployKeyItem spec', () => {
+ let wrapper;
+
+ const MOCK_DATA = { title: 'Some key', owner: 'root', id: '123' };
+
+ const createComponent = (props) => {
+ wrapper = shallowMountExtended(DeployKeyItem, {
+ propsData: {
+ data: MOCK_DATA,
+ ...props,
+ },
+ });
+ };
+
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findDeleteButton = () => wrapper.findComponent(GlButton);
+ const findWrapper = () => wrapper.findByTestId('deploy-key-wrapper');
+
+ beforeEach(() => createComponent());
+
+ it('renders a key icon component', () => {
+ expect(findIcon().props('name')).toBe('key');
+ });
+
+ it('renders a title and username', () => {
+ expect(wrapper.text()).toContain('Some key');
+ expect(wrapper.text()).toContain('@root');
+ });
+
+ it('does not render a delete button by default', () => {
+ expect(findDeleteButton().exists()).toBe(false);
+ });
+
+ it('emits a select event when the wrapper is clicked', () => {
+ findWrapper().trigger('click');
+
+ expect(wrapper.emitted('select')).toEqual([[MOCK_DATA.id]]);
+ });
+
+ describe('Delete button', () => {
+ beforeEach(() => createComponent({ canDelete: true }));
+
+ it('renders a delete button', () => {
+ expect(findDeleteButton().exists()).toBe(true);
+ expect(findDeleteButton().props('icon')).toBe('remove');
+ });
+
+ it('emits a delete event if the delete button is clicked', () => {
+ const stopPropagation = jest.fn();
+
+ findDeleteButton().vm.$emit('click', { stopPropagation });
+
+ expect(stopPropagation).toHaveBeenCalled();
+ expect(wrapper.emitted('delete')).toEqual([[MOCK_DATA.id]]);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/list_selector/index_spec.js b/spec/frontend/vue_shared/components/list_selector/index_spec.js
index 11e64a91eb0..6de9a77582c 100644
--- a/spec/frontend/vue_shared/components/list_selector/index_spec.js
+++ b/spec/frontend/vue_shared/components/list_selector/index_spec.js
@@ -7,6 +7,7 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import ListSelector from '~/vue_shared/components/list_selector/index.vue';
import UserItem from '~/vue_shared/components/list_selector/user_item.vue';
import GroupItem from '~/vue_shared/components/list_selector/group_item.vue';
+import DeployKeyItem from '~/vue_shared/components/list_selector/deploy_key_item.vue';
import groupsAutocompleteQuery from '~/graphql_shared/queries/groups_autocomplete.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -20,18 +21,21 @@ describe('List Selector spec', () => {
let fakeApollo;
const USERS_MOCK_PROPS = {
- title: 'Users',
projectPath: 'some/project/path',
groupPath: 'some/group/path',
type: 'users',
};
const GROUPS_MOCK_PROPS = {
- title: 'Groups',
projectPath: 'some/project/path',
type: 'groups',
};
+ const DEPLOY_KEYS_MOCK_PROPS = {
+ projectPath: 'some/project/path',
+ type: 'deployKeys',
+ };
+
const groupsAutocompleteQuerySuccess = jest.fn().mockResolvedValue(GROUPS_RESPONSE_MOCK);
const createComponent = async (props) => {
@@ -56,6 +60,7 @@ describe('List Selector spec', () => {
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findAllUserComponents = () => wrapper.findAllComponents(UserItem);
const findAllGroupComponents = () => wrapper.findAllComponents(GroupItem);
+ const findAllDeployKeyComponents = () => wrapper.findAllComponents(DeployKeyItem);
beforeEach(() => {
jest.spyOn(Api, 'projectUsers').mockResolvedValue(USERS_RESPONSE_MOCK);
@@ -254,4 +259,46 @@ describe('List Selector spec', () => {
});
});
});
+
+ describe('Deploy keys type', () => {
+ beforeEach(() => createComponent(DEPLOY_KEYS_MOCK_PROPS));
+
+ it('renders a correct title', () => {
+ expect(findTitle().exists()).toBe(true);
+ expect(findTitle().text()).toContain('Deploy keys');
+ });
+
+ it('renders the correct icon', () => {
+ expect(findIcon().props('name')).toBe('key');
+ });
+
+ describe('selected items', () => {
+ const selectedKey = { title: 'MyKey', owner: 'peter', id: '123' };
+ const selectedItems = [selectedKey];
+ beforeEach(() => createComponent({ ...DEPLOY_KEYS_MOCK_PROPS, selectedItems }));
+
+ it('renders a heading with the total selected items', () => {
+ expect(findTitle().text()).toContain('Deploy keys');
+ expect(findTitle().text()).toContain('1');
+ });
+
+ it('renders a deploy key component for each selected item', () => {
+ expect(findAllDeployKeyComponents().length).toBe(selectedItems.length);
+ expect(findAllDeployKeyComponents().at(0).props()).toMatchObject({
+ data: selectedKey,
+ canDelete: true,
+ });
+ });
+
+ it('emits a delete event when a delete event is emitted from the deploy key component', () => {
+ const id = '123';
+ findAllDeployKeyComponents().at(0).vm.$emit('delete', id);
+
+ expect(wrapper.emitted('delete')).toEqual([[id]]);
+ });
+
+ // TODO - add a test for the select event once we have API integration
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/432494
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/markdown/header_spec.js b/spec/frontend/vue_shared/components/markdown/header_spec.js
index 40875ed5dbc..57f6d751efd 100644
--- a/spec/frontend/vue_shared/components/markdown/header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/header_spec.js
@@ -82,6 +82,14 @@ describe('Markdown field header component', () => {
});
});
+ it('attach file button should have data-button-type attribute', () => {
+ const attachButton = findToolbarButtonByProp('icon', 'paperclip');
+
+ // Used for dropzone_input.js as `clickable` property
+ // to prevent triggers upload file by clicking on the edge of textarea
+ expect(attachButton.attributes('data-button-type')).toBe('attach-file');
+ });
+
it('hides markdown preview when previewMarkdown is false', () => {
expect(findPreviewToggle().text()).toBe('Preview');
});
diff --git a/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js b/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
index 544466a22ca..626b1df5474 100644
--- a/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
@@ -43,7 +43,7 @@ describe('Metrics tab store actions', () => {
it('should call success action when fetching metric images', () => {
service.getMetricImages.mockImplementation(() => Promise.resolve(fileList));
- testAction(actions.fetchImages, null, state, [
+ return testAction(actions.fetchImages, null, state, [
{ type: types.REQUEST_METRIC_IMAGES },
{
type: types.RECEIVE_METRIC_IMAGES_SUCCESS,
@@ -80,7 +80,7 @@ describe('Metrics tab store actions', () => {
it('should call success action when uploading an image', () => {
service.uploadMetricImage.mockImplementation(() => Promise.resolve(fileList[0]));
- testAction(actions.uploadImage, payload, state, [
+ return testAction(actions.uploadImage, payload, state, [
{ type: types.REQUEST_METRIC_UPLOAD },
{
type: types.RECEIVE_METRIC_UPLOAD_SUCCESS,
@@ -112,7 +112,7 @@ describe('Metrics tab store actions', () => {
it('should call success action when updating an image', () => {
service.updateMetricImage.mockImplementation(() => Promise.resolve());
- testAction(actions.updateImage, payload, state, [
+ return testAction(actions.updateImage, payload, state, [
{ type: types.REQUEST_METRIC_UPLOAD },
{
type: types.RECEIVE_METRIC_UPDATE_SUCCESS,
@@ -140,7 +140,7 @@ describe('Metrics tab store actions', () => {
it('should call success action when deleting an image', () => {
service.deleteMetricImage.mockImplementation(() => Promise.resolve());
- testAction(actions.deleteImage, payload, state, [
+ return testAction(actions.deleteImage, payload, state, [
{
type: types.RECEIVE_METRIC_DELETE_SUCCESS,
payload,
@@ -151,7 +151,7 @@ describe('Metrics tab store actions', () => {
describe('initial data', () => {
it('should set the initial data correctly', () => {
- testAction(actions.setInitialData, initialData, state, [
+ return testAction(actions.setInitialData, initialData, state, [
{ type: types.SET_INITIAL_DATA, payload: initialData },
]);
});
diff --git a/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js b/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js
index 7efc0e162b8..a67276ac64a 100644
--- a/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js
@@ -11,6 +11,7 @@ import searchProjectsWithinGroupQuery from '~/issues/list/queries/search_project
import { DASH_SCOPE, joinPaths } from '~/lib/utils/url_utility';
import { DEBOUNCE_DELAY } from '~/vue_shared/components/filtered_search_bar/constants';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import { stubComponent } from 'helpers/stub_component';
import {
emptySearchProjectsQueryResponse,
emptySearchProjectsWithinGroupQueryResponse,
@@ -42,6 +43,7 @@ describe('NewResourceDropdown component', () => {
queryResponse = searchProjectsQueryResponse,
mountFn = shallowMount,
propsData = {},
+ stubs = {},
} = {}) => {
const requestHandlers = [[query, jest.fn().mockResolvedValue(queryResponse)]];
const apolloProvider = createMockApollo(requestHandlers);
@@ -49,6 +51,9 @@ describe('NewResourceDropdown component', () => {
wrapper = mountFn(NewResourceDropdown, {
apolloProvider,
propsData,
+ stubs: {
+ ...stubs,
+ },
});
};
@@ -81,13 +86,18 @@ describe('NewResourceDropdown component', () => {
});
it('focuses on input when dropdown is shown', async () => {
- mountComponent({ mountFn: mount });
-
- const inputSpy = jest.spyOn(findInput().vm, 'focusInput');
+ const inputMock = jest.fn();
+ mountComponent({
+ stubs: {
+ GlSearchBoxByType: stubComponent(GlSearchBoxByType, {
+ methods: { focusInput: inputMock },
+ }),
+ },
+ });
await showDropdown();
- expect(inputSpy).toHaveBeenCalledTimes(1);
+ expect(inputMock).toHaveBeenCalledTimes(1);
});
describe.each`
diff --git a/spec/frontend/vue_shared/components/number_to_human_size/number_to_human_size_spec.js b/spec/frontend/vue_shared/components/number_to_human_size/number_to_human_size_spec.js
new file mode 100644
index 00000000000..6dd22211c96
--- /dev/null
+++ b/spec/frontend/vue_shared/components/number_to_human_size/number_to_human_size_spec.js
@@ -0,0 +1,47 @@
+import { shallowMount } from '@vue/test-utils';
+import NumberToHumanSize from '~/vue_shared/components/number_to_human_size/number_to_human_size.vue';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
+
+describe('NumberToHumanSize', () => {
+ /** @type {import('@vue/test-utils').Wrapper} */
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(NumberToHumanSize, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ it('formats the value', () => {
+ const value = 1024;
+ createComponent({ value });
+
+ const expectedValue = numberToHumanSize(value, 1);
+ expect(wrapper.text()).toBe(expectedValue);
+ });
+
+ it('handles number of fraction digits', () => {
+ const value = 1024 + 254;
+ const fractionDigits = 2;
+ createComponent({ value, fractionDigits });
+
+ const expectedValue = numberToHumanSize(value, fractionDigits);
+ expect(wrapper.text()).toBe(expectedValue);
+ });
+
+ describe('plain-zero', () => {
+ it('hides label for zero values', () => {
+ createComponent({ value: 0, plainZero: true });
+ expect(wrapper.text()).toBe('0');
+ });
+
+ it('shows text for non-zero values', () => {
+ const value = 163;
+ const expectedValue = numberToHumanSize(value, 1);
+ createComponent({ value, plainZero: true });
+ expect(wrapper.text()).toBe(expectedValue);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js
index c7b2363026a..cd18058abec 100644
--- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js
@@ -62,6 +62,7 @@ describe('Chunk component', () => {
it('renders highlighted content', () => {
expect(findContent().text()).toBe(CHUNK_2.highlightedContent);
+ expect(findContent().attributes('style')).toBe('margin-left: 96px;');
});
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js b/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
index 49e3083f8ed..c84a39274f8 100644
--- a/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
@@ -6,6 +6,7 @@ import { LINES_PER_CHUNK, NEWLINE } from '~/vue_shared/components/source_viewer/
jest.mock('highlight.js/lib/core', () => ({
highlight: jest.fn().mockReturnValue({ value: 'highlighted content' }),
registerLanguage: jest.fn(),
+ getLanguage: jest.fn(),
}));
jest.mock('~/vue_shared/components/source_viewer/plugins/index', () => ({
@@ -28,11 +29,37 @@ describe('Highlight utility', () => {
expect(registerPlugins).toHaveBeenCalled();
});
+ describe('sub-languages', () => {
+ const languageDefinition = {
+ subLanguage: 'xml',
+ contains: [{ subLanguage: 'javascript' }, { subLanguage: 'typescript' }],
+ };
+
+ beforeEach(async () => {
+ jest.spyOn(hljs, 'getLanguage').mockReturnValue(languageDefinition);
+ await highlight(fileType, rawContent, language);
+ });
+
+ it('registers the primary sub-language', () => {
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(
+ languageDefinition.subLanguage,
+ expect.any(Function),
+ );
+ });
+
+ it.each(languageDefinition.contains)(
+ 'registers the rest of the sub-languages',
+ ({ subLanguage }) => {
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(subLanguage, expect.any(Function));
+ },
+ );
+ });
+
it('highlights the content', () => {
expect(hljs.highlight).toHaveBeenCalledWith(rawContent, { language });
});
- it('splits the content into chunks', () => {
+ it('splits the content into chunks', async () => {
const contentArray = Array.from({ length: 140 }, () => 'newline'); // simulate 140 lines of code
const chunks = [
@@ -52,7 +79,7 @@ describe('Highlight utility', () => {
},
];
- expect(highlight(fileType, contentArray.join(NEWLINE), language)).toEqual(
+ expect(await highlight(fileType, contentArray.join(NEWLINE), language)).toEqual(
expect.arrayContaining(chunks),
);
});
@@ -71,7 +98,7 @@ describe('unsupported languages', () => {
expect(hljs.highlight).not.toHaveBeenCalled();
});
- it('does not return a result', () => {
- expect(highlight(fileType, rawContent, unsupportedLanguage)).toBe(undefined);
+ it('does not return a result', async () => {
+ expect(await highlight(fileType, rawContent, unsupportedLanguage)).toBe(undefined);
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/mock_data.js b/spec/frontend/vue_shared/components/source_viewer/mock_data.js
index cfff3a15b77..c98f945fc54 100644
--- a/spec/frontend/vue_shared/components/source_viewer/mock_data.js
+++ b/spec/frontend/vue_shared/components/source_viewer/mock_data.js
@@ -79,6 +79,7 @@ export const BLAME_DATA_QUERY_RESPONSE_MOCK = {
titleHtml: 'Upload New File',
message: 'Upload New File',
authoredDate: '2022-10-31T10:38:30+00:00',
+ authorName: 'Peter',
authorGravatar: 'path/to/gravatar',
webPath: '/commit/1234',
author: {},
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
index ee7164515f6..86dc9afaacc 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
@@ -1,11 +1,15 @@
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { setHTMLFixture } from 'helpers/fixtures';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer_new.vue';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk_new.vue';
-import { EVENT_ACTION, EVENT_LABEL_VIEWER } from '~/vue_shared/components/source_viewer/constants';
+import {
+ EVENT_ACTION,
+ EVENT_LABEL_VIEWER,
+ CODEOWNERS_FILE_NAME,
+} from '~/vue_shared/components/source_viewer/constants';
import Tracking from '~/tracking';
import LineHighlighter from '~/blob/line_highlighter';
import addBlobLinksTracking from '~/blob/blob_links_tracking';
@@ -13,6 +17,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import blameDataQuery from '~/vue_shared/components/source_viewer/queries/blame_data.query.graphql';
import Blame from '~/vue_shared/components/source_viewer/components/blame_info.vue';
import * as utils from '~/vue_shared/components/source_viewer/utils';
+import CodeownersValidation from 'ee_component/blob/components/codeowners_validation.vue';
import {
BLOB_DATA_MOCK,
@@ -43,16 +48,17 @@ describe('Source Viewer component', () => {
const blameInfo =
BLAME_DATA_QUERY_RESPONSE_MOCK.data.project.repository.blobs.nodes[0].blame.groups;
- const createComponent = ({ showBlame = true } = {}) => {
+ const createComponent = ({ showBlame = true, blob = {} } = {}) => {
fakeApollo = createMockApollo([[blameDataQuery, blameDataQueryHandlerSuccess]]);
wrapper = shallowMountExtended(SourceViewer, {
apolloProvider: fakeApollo,
mocks: { $route: { hash } },
propsData: {
- blob: BLOB_DATA_MOCK,
+ blob: { ...blob, ...BLOB_DATA_MOCK },
chunks: CHUNKS_MOCK,
projectPath: 'test',
+ currentRef: 'main',
showBlame,
},
});
@@ -111,22 +117,18 @@ describe('Source Viewer component', () => {
});
it('calls the query only once per chunk', async () => {
- jest.spyOn(wrapper.vm.$apollo, 'query');
-
// We trigger the `appear` event multiple times here in order to simulate the user scrolling past the chunk more than once.
// In this scenario we only want to query the backend once.
await triggerChunkAppear();
await triggerChunkAppear();
- expect(wrapper.vm.$apollo.query).toHaveBeenCalledTimes(1);
+ expect(blameDataQueryHandlerSuccess).toHaveBeenCalledTimes(1);
});
it('requests blame information for overlapping chunk', async () => {
- jest.spyOn(wrapper.vm.$apollo, 'query');
-
await triggerChunkAppear(1);
- expect(wrapper.vm.$apollo.query).toHaveBeenCalledTimes(2);
+ expect(blameDataQueryHandlerSuccess).toHaveBeenCalledTimes(2);
expect(blameDataQueryHandlerSuccess).toHaveBeenCalledWith(
expect.objectContaining({ fromLine: 71, toLine: 110 }),
);
@@ -156,4 +158,20 @@ describe('Source Viewer component', () => {
expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
});
});
+
+ describe('Codeowners validation', () => {
+ const findCodeownersValidation = () => wrapper.findComponent(CodeownersValidation);
+
+ it('does not render codeowners validation when file is not CODEOWNERS', async () => {
+ await createComponent();
+ await nextTick();
+ expect(findCodeownersValidation().exists()).toBe(false);
+ });
+
+ it('renders codeowners validation when file is CODEOWNERS', async () => {
+ await createComponent({ blob: { name: CODEOWNERS_FILE_NAME } });
+ await nextTick();
+ expect(findCodeownersValidation().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
index 41cf1d2b2e8..21c58d662e3 100644
--- a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
+++ b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
@@ -2,9 +2,9 @@ import { shallowMount } from '@vue/test-utils';
import { GlTruncate } from '@gitlab/ui';
import timezoneMock from 'timezone-mock';
-import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
-import { DATE_ONLY_FORMAT } from '~/lib/utils/datetime/constants';
+import { getTimeago } from '~/lib/utils/datetime_utility';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import { DATE_ONLY_FORMAT } from '~/lib/utils/datetime/locale_dateformat';
describe('Time ago with tooltip component', () => {
let vm;
@@ -33,7 +33,7 @@ describe('Time ago with tooltip component', () => {
it('should render timeago with a bootstrap tooltip', () => {
buildVm();
- expect(vm.attributes('title')).toEqual(formatDate(timestamp));
+ expect(vm.attributes('title')).toEqual('May 8, 2017 at 2:57:39 PM GMT');
expect(vm.text()).toEqual(timeAgoTimestamp);
});
diff --git a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js
deleted file mode 100644
index 95f557b10c1..00000000000
--- a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import { mount } from '@vue/test-utils';
-import VuexModuleProvider from '~/vue_shared/components/vuex_module_provider.vue';
-
-const TestComponent = {
- inject: ['vuexModule'],
- template: `<div data-testid="vuexModule">{{ vuexModule }}</div> `,
-};
-
-const TEST_VUEX_MODULE = 'testVuexModule';
-
-describe('~/vue_shared/components/vuex_module_provider', () => {
- let wrapper;
-
- const findProvidedVuexModule = () => wrapper.find('[data-testid="vuexModule"]').text();
-
- const createComponent = (extraParams = {}) => {
- wrapper = mount(VuexModuleProvider, {
- propsData: {
- vuexModule: TEST_VUEX_MODULE,
- },
- slots: {
- default: TestComponent,
- },
- ...extraParams,
- });
- };
-
- it('provides "vuexModule" set from prop', () => {
- createComponent();
- expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE);
- });
-
- it('provides "vuexModel" set from "vuex-module" prop when using @vue/compat', () => {
- createComponent({
- propsData: { 'vuex-module': TEST_VUEX_MODULE },
- });
- expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE);
- });
-});
diff --git a/spec/frontend/vue_shared/directives/track_event_spec.js b/spec/frontend/vue_shared/directives/track_event_spec.js
index fc69e884258..8b4a68e394a 100644
--- a/spec/frontend/vue_shared/directives/track_event_spec.js
+++ b/spec/frontend/vue_shared/directives/track_event_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import Tracking from '~/tracking';
import TrackEvent from '~/vue_shared/directives/track_event';
@@ -10,34 +10,53 @@ describe('TrackEvent directive', () => {
const clickButton = () => wrapper.find('button').trigger('click');
- const createComponent = (trackingOptions) =>
- Vue.component('DummyElement', {
- directives: {
- TrackEvent,
+ const DummyTrackComponent = Vue.component('DummyTrackComponent', {
+ directives: {
+ TrackEvent,
+ },
+ props: {
+ category: {
+ type: String,
+ required: false,
+ default: '',
},
- data() {
- return {
- trackingOptions,
- };
+ action: {
+ type: String,
+ required: false,
+ default: '',
},
- template: '<button v-track-event="trackingOptions"></button>',
- });
+ label: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ },
+ template: '<button v-track-event="{ category, action, label }"></button>',
+ });
- const mountComponent = (trackingOptions) => shallowMount(createComponent(trackingOptions));
+ const mountComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMount(DummyTrackComponent, {
+ propsData,
+ });
+ };
it('does not track the event if required arguments are not provided', () => {
- wrapper = mountComponent();
+ mountComponent();
clickButton();
expect(Tracking.event).not.toHaveBeenCalled();
});
- it('tracks event on click if tracking info provided', () => {
- wrapper = mountComponent({
- category: 'Tracking',
- action: 'click_trackable_btn',
- label: 'Trackable Info',
+ it('tracks event on click if tracking info provided', async () => {
+ mountComponent({
+ propsData: {
+ category: 'Tracking',
+ action: 'click_trackable_btn',
+ label: 'Trackable Info',
+ },
});
+
+ await nextTick();
clickButton();
expect(Tracking.event).toHaveBeenCalledWith('Tracking', 'click_trackable_btn', {
diff --git a/spec/frontend/vue_shared/issuable/create/components/issuable_label_selector_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_label_selector_spec.js
index 1a490359040..94234a03664 100644
--- a/spec/frontend/vue_shared/issuable/create/components/issuable_label_selector_spec.js
+++ b/spec/frontend/vue_shared/issuable/create/components/issuable_label_selector_spec.js
@@ -16,6 +16,7 @@ const fullPath = '/full-path';
const labelsFilterBasePath = '/labels-filter-base-path';
const initialLabels = [];
const issuableType = 'issue';
+const issuableSupportsLockOnMerge = false;
const labelType = WORKSPACE_PROJECT;
const variant = VARIANT_EMBEDDED;
const workspaceType = WORKSPACE_PROJECT;
@@ -36,6 +37,7 @@ describe('IssuableLabelSelector', () => {
labelsFilterBasePath,
initialLabels,
issuableType,
+ issuableSupportsLockOnMerge,
labelType,
variant,
workspaceType,
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
index 47da111b604..98a87ddbcce 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
@@ -6,6 +6,7 @@ import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vu
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
import IssuableAssignees from '~/issuable/components/issue_assignees.vue';
+import { localeDateFormat } from '~/lib/utils/datetime/locale_dateformat';
import { mockIssuable, mockRegularLabel } from '../mock_data';
const createComponent = ({
@@ -168,15 +169,20 @@ describe('IssuableItem', () => {
it('returns timestamp based on `issuable.updatedAt` when the issue is open', () => {
wrapper = createComponent();
- expect(findTimestampWrapper().attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
+ expect(findTimestampWrapper().attributes('title')).toBe(
+ localeDateFormat.asDateTimeFull.format(mockIssuable.updatedAt),
+ );
});
it('returns timestamp based on `issuable.closedAt` when the issue is closed', () => {
+ const closedAt = '2020-06-18T11:30:00Z';
wrapper = createComponent({
- issuable: { ...mockIssuable, closedAt: '2020-06-18T11:30:00Z', state: 'closed' },
+ issuable: { ...mockIssuable, closedAt, state: 'closed' },
});
- expect(findTimestampWrapper().attributes('title')).toBe('Jun 18, 2020 11:30am UTC');
+ expect(findTimestampWrapper().attributes('title')).toBe(
+ localeDateFormat.asDateTimeFull.format(closedAt),
+ );
});
it('returns timestamp based on `issuable.updatedAt` when the issue is closed but `issuable.closedAt` is undefined', () => {
@@ -184,7 +190,9 @@ describe('IssuableItem', () => {
issuable: { ...mockIssuable, closedAt: undefined, state: 'closed' },
});
- expect(findTimestampWrapper().attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
+ expect(findTimestampWrapper().attributes('title')).toBe(
+ localeDateFormat.asDateTimeFull.format(mockIssuable.updatedAt),
+ );
});
});
@@ -409,7 +417,9 @@ describe('IssuableItem', () => {
const createdAtEl = wrapper.find('[data-testid="issuable-created-at"]');
expect(createdAtEl.exists()).toBe(true);
- expect(createdAtEl.attributes('title')).toBe('Jun 29, 2020 1:52pm UTC');
+ expect(createdAtEl.attributes('title')).toBe(
+ localeDateFormat.asDateTimeFull.format(mockIssuable.createdAt),
+ );
expect(createdAtEl.text()).toBe(wrapper.vm.createdAt);
});
@@ -535,7 +545,9 @@ describe('IssuableItem', () => {
const timestampEl = wrapper.find('[data-testid="issuable-timestamp"]');
- expect(timestampEl.attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
+ expect(timestampEl.attributes('title')).toBe(
+ localeDateFormat.asDateTimeFull.format(mockIssuable.updatedAt),
+ );
expect(timestampEl.text()).toBe(wrapper.vm.formattedTimestamp);
});
@@ -549,13 +561,16 @@ describe('IssuableItem', () => {
});
it('renders issuable closedAt info and does not render updatedAt info', () => {
+ const closedAt = '2022-06-18T11:30:00Z';
wrapper = createComponent({
- issuable: { ...mockIssuable, closedAt: '2022-06-18T11:30:00Z', state: 'closed' },
+ issuable: { ...mockIssuable, closedAt, state: 'closed' },
});
const timestampEl = wrapper.find('[data-testid="issuable-timestamp"]');
- expect(timestampEl.attributes('title')).toBe('Jun 18, 2022 11:30am UTC');
+ expect(timestampEl.attributes('title')).toBe(
+ localeDateFormat.asDateTimeFull.format(closedAt),
+ );
expect(timestampEl.text()).toBe(wrapper.vm.formattedTimestamp);
});
});
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
index 51aae9b4512..a2a059d5b18 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
@@ -4,6 +4,7 @@ import VueDraggable from 'vuedraggable';
import { nextTick } from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
+import { DRAG_DELAY } from '~/sortable/constants';
import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue';
import IssuableListRoot from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
@@ -476,6 +477,11 @@ describe('IssuableListRoot', () => {
expect(findIssuableItem().classes()).toContain('gl-cursor-grab');
});
+ it('sets delay and delayOnTouchOnly attributes on list', () => {
+ expect(findVueDraggable().vm.$attrs.delay).toBe(DRAG_DELAY);
+ expect(findVueDraggable().vm.$attrs.delayOnTouchOnly).toBe(true);
+ });
+
it('emits a "reorder" event when user updates the issue order', () => {
const oldIndex = 4;
const newIndex = 6;
diff --git a/spec/frontend/vue_shared/issuable/sidebar/components/issuable_sidebar_root_spec.js b/spec/frontend/vue_shared/issuable/sidebar/components/issuable_sidebar_root_spec.js
index f2509aead77..d5c6ece8cb5 100644
--- a/spec/frontend/vue_shared/issuable/sidebar/components/issuable_sidebar_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/sidebar/components/issuable_sidebar_root_spec.js
@@ -1,3 +1,4 @@
+import { GlButton, GlIcon } from '@gitlab/ui';
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import { nextTick } from 'vue';
import Cookies from '~/lib/utils/cookies';
@@ -18,6 +19,10 @@ const createComponent = () => {
<button class="js-todo">Todo</button>
`,
},
+ stubs: {
+ GlButton,
+ GlIcon,
+ },
});
};
@@ -62,9 +67,8 @@ describe('IssuableSidebarRoot', () => {
const buttonEl = findToggleSidebarButton();
expect(buttonEl.exists()).toBe(true);
- expect(buttonEl.attributes('title')).toBe('Toggle sidebar');
- expect(buttonEl.find('span').text()).toBe('Collapse sidebar');
- expect(wrapper.findByTestId('icon-collapse').isVisible()).toBe(true);
+ expect(buttonEl.attributes('title')).toBe('Collapse sidebar');
+ expect(wrapper.findByTestId('chevron-double-lg-right-icon').isVisible()).toBe(true);
});
describe('when collapsing the sidebar', () => {
@@ -116,12 +120,12 @@ describe('IssuableSidebarRoot', () => {
assertPageLayoutClasses({ isExpanded: false });
});
- it('renders sidebar toggle button with text and icon', () => {
+ it('renders sidebar toggle button with title and icon', () => {
const buttonEl = findToggleSidebarButton();
expect(buttonEl.exists()).toBe(true);
- expect(buttonEl.attributes('title')).toBe('Toggle sidebar');
- expect(wrapper.findByTestId('icon-expand').isVisible()).toBe(true);
+ expect(buttonEl.attributes('title')).toBe('Expand sidebar');
+ expect(wrapper.findByTestId('chevron-double-lg-left-icon').isVisible()).toBe(true);
});
});
diff --git a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
index 109b7732539..716de45f4b4 100644
--- a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
+++ b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
@@ -116,21 +116,23 @@ describe('Experimental new namespace creation app', () => {
expect(findLegacyContainer().exists()).toBe(true);
});
- describe.each`
- featureFlag | isSuperSidebarCollapsed | isToggleVisible
- ${true} | ${true} | ${true}
- ${true} | ${false} | ${false}
- ${false} | ${true} | ${false}
- ${false} | ${false} | ${false}
- `('Super sidebar toggle', ({ featureFlag, isSuperSidebarCollapsed, isToggleVisible }) => {
- beforeEach(() => {
- sidebarState.isCollapsed = isSuperSidebarCollapsed;
- gon.use_new_navigation = featureFlag;
- createComponent();
+ describe('SuperSidebarToggle', () => {
+ describe('when collapsed', () => {
+ it('shows sidebar toggle', () => {
+ sidebarState.isCollapsed = true;
+ createComponent();
+
+ expect(findSuperSidebarToggle().exists()).toBe(true);
+ });
});
- it(`${isToggleVisible ? 'is visible' : 'is not visible'}`, () => {
- expect(findSuperSidebarToggle().exists()).toBe(isToggleVisible);
+ describe('when not collapsed', () => {
+ it('does not show sidebar toggle', () => {
+ sidebarState.isCollapsed = false;
+ createComponent();
+
+ expect(findSuperSidebarToggle().exists()).toBe(false);
+ });
});
});
@@ -170,17 +172,10 @@ describe('Experimental new namespace creation app', () => {
});
describe('top bar', () => {
- it('adds "top-bar-fixed" and "container-fluid" classes when new navigation enabled', () => {
- gon.use_new_navigation = true;
+ it('has "top-bar-fixed" and "container-fluid" classes', () => {
createComponent();
expect(findTopBar().classes()).toEqual(['top-bar-fixed', 'container-fluid']);
});
-
- it('does not add classes when new navigation is not enabled', () => {
- createComponent();
-
- expect(findTopBar().classes()).toEqual([]);
- });
});
});
diff --git a/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js b/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js
index f3d0d66cdd1..2b36344cfa8 100644
--- a/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js
+++ b/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js
@@ -2,7 +2,7 @@ import { GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { featureToMutationMap } from 'ee_else_ce/security_configuration/components/constants';
+import { featureToMutationMap } from 'ee_else_ce/security_configuration/constants';
import createMockApollo from 'helpers/mock_apollo_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
diff --git a/spec/frontend/webhooks/components/form_url_app_spec.js b/spec/frontend/webhooks/components/form_url_app_spec.js
index cbeff184e9d..fe8bba68610 100644
--- a/spec/frontend/webhooks/components/form_url_app_spec.js
+++ b/spec/frontend/webhooks/components/form_url_app_spec.js
@@ -1,5 +1,5 @@
import { nextTick } from 'vue';
-import { GlFormGroup, GlFormRadio, GlFormRadioGroup, GlLink } from '@gitlab/ui';
+import { GlFormGroup, GlFormRadio, GlFormRadioGroup, GlLink, GlAlert } from '@gitlab/ui';
import { scrollToElement } from '~/lib/utils/common_utils';
import FormUrlApp from '~/webhooks/components/form_url_app.vue';
@@ -30,6 +30,7 @@ describe('FormUrlApp', () => {
const findFormUrlPreview = () => wrapper.findByTestId('form-url-preview');
const findUrlMaskSection = () => wrapper.findByTestId('url-mask-section');
const findFormEl = () => document.querySelector('.js-webhook-form');
+ const findAlert = () => wrapper.findComponent(GlAlert);
const submitForm = () => findFormEl().dispatchEvent(new Event('submit'));
describe('template', () => {
@@ -156,6 +157,23 @@ describe('FormUrlApp', () => {
});
});
+ describe('token will be cleared warning', () => {
+ beforeEach(() => {
+ createComponent({ initialUrl: 'url' });
+ });
+
+ it('is hidden when URL has not changed', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('is displayed when URL has changed', async () => {
+ findFormUrl().vm.$emit('input', 'another_url');
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+
describe('validations', () => {
const inputRequiredText = FormUrlApp.i18n.inputRequired;
diff --git a/spec/frontend/whats_new/store/actions_spec.js b/spec/frontend/whats_new/store/actions_spec.js
index 5f5e4e53be2..908aa3aaeea 100644
--- a/spec/frontend/whats_new/store/actions_spec.js
+++ b/spec/frontend/whats_new/store/actions_spec.js
@@ -11,8 +11,8 @@ describe('whats new actions', () => {
describe('openDrawer', () => {
useLocalStorageSpy();
- it('should commit openDrawer', () => {
- testAction(actions.openDrawer, 'digest-hash', {}, [{ type: types.OPEN_DRAWER }]);
+ it('should commit openDrawer', async () => {
+ await testAction(actions.openDrawer, 'digest-hash', {}, [{ type: types.OPEN_DRAWER }]);
expect(window.localStorage.setItem).toHaveBeenCalledWith(
'display-whats-new-notification',
@@ -23,7 +23,7 @@ describe('whats new actions', () => {
describe('closeDrawer', () => {
it('should commit closeDrawer', () => {
- testAction(actions.closeDrawer, {}, {}, [{ type: types.CLOSE_DRAWER }]);
+ return testAction(actions.closeDrawer, {}, {}, [{ type: types.CLOSE_DRAWER }]);
});
});
@@ -52,7 +52,7 @@ describe('whats new actions', () => {
.onGet('/-/whats_new', { params: { page: undefined, v: undefined } })
.replyOnce(HTTP_STATUS_OK, [{ title: 'GitLab Stories' }]);
- testAction(
+ return testAction(
actions.fetchItems,
{},
{},
@@ -69,7 +69,7 @@ describe('whats new actions', () => {
.onGet('/-/whats_new', { params: { page: 8, v: 42 } })
.replyOnce(HTTP_STATUS_OK, [{ title: 'GitLab Stories' }]);
- testAction(
+ return testAction(
actions.fetchItems,
{ page: 8, versionDigest: 42 },
{},
@@ -80,11 +80,11 @@ describe('whats new actions', () => {
});
it('if already fetching, does not fetch', () => {
- testAction(actions.fetchItems, {}, { fetching: true }, []);
+ return testAction(actions.fetchItems, {}, { fetching: true }, []);
});
it('should commit fetching, setFeatures and setPagination', () => {
- testAction(actions.fetchItems, {}, {}, [
+ return testAction(actions.fetchItems, {}, {}, [
{ type: types.SET_FETCHING, payload: true },
{ type: types.ADD_FEATURES, payload: [{ title: 'Whats New Drawer', url: 'www.url.com' }] },
{ type: types.SET_PAGE_INFO, payload: { nextPage: 2 } },
@@ -94,8 +94,10 @@ describe('whats new actions', () => {
});
describe('setDrawerBodyHeight', () => {
- testAction(actions.setDrawerBodyHeight, 42, {}, [
- { type: types.SET_DRAWER_BODY_HEIGHT, payload: 42 },
- ]);
+ it('should commit setDrawerBodyHeight', () => {
+ return testAction(actions.setDrawerBodyHeight, 42, {}, [
+ { type: types.SET_DRAWER_BODY_HEIGHT, payload: 42 },
+ ]);
+ });
});
});
diff --git a/spec/frontend/whats_new/utils/notification_spec.js b/spec/frontend/whats_new/utils/notification_spec.js
deleted file mode 100644
index 020d833c578..00000000000
--- a/spec/frontend/whats_new/utils/notification_spec.js
+++ /dev/null
@@ -1,73 +0,0 @@
-import htmlWhatsNewNotification from 'test_fixtures_static/whats_new_notification.html';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import { setNotification, getVersionDigest } from '~/whats_new/utils/notification';
-
-describe('~/whats_new/utils/notification', () => {
- useLocalStorageSpy();
-
- let wrapper;
-
- const findNotificationEl = () => wrapper.querySelector('.header-help');
- const findNotificationCountEl = () => wrapper.querySelector('.js-whats-new-notification-count');
- const getAppEl = () => wrapper.querySelector('.app');
-
- beforeEach(() => {
- setHTMLFixture(htmlWhatsNewNotification);
- wrapper = document.querySelector('.whats-new-notification-fixture-root');
- });
-
- afterEach(() => {
- wrapper.remove();
- resetHTMLFixture();
- });
-
- describe('setNotification', () => {
- const subject = () => setNotification(getAppEl());
-
- it("when storage key doesn't exist it adds notifications class", () => {
- const notificationEl = findNotificationEl();
-
- expect(notificationEl.classList).not.toContain('with-notifications');
-
- subject();
-
- expect(findNotificationCountEl()).not.toBe(null);
- expect(notificationEl.classList).toContain('with-notifications');
- });
-
- it('removes class and count element when storage key has current digest', () => {
- const notificationEl = findNotificationEl();
-
- notificationEl.classList.add('with-notifications');
- localStorage.setItem('display-whats-new-notification', 'version-digest');
-
- expect(findNotificationCountEl()).not.toBe(null);
-
- subject();
-
- expect(findNotificationCountEl()).toBe(null);
- expect(notificationEl.classList).not.toContain('with-notifications');
- });
-
- it('removes class and count element when no records and digest undefined', () => {
- const notificationEl = findNotificationEl();
-
- notificationEl.classList.add('with-notifications');
- localStorage.setItem('display-whats-new-notification', 'version-digest');
-
- expect(findNotificationCountEl()).not.toBe(null);
-
- setNotification(wrapper.querySelector('[data-testid="without-digest"]'));
-
- expect(findNotificationCountEl()).toBe(null);
- expect(notificationEl.classList).not.toContain('with-notifications');
- });
- });
-
- describe('getVersionDigest', () => {
- it('retrieves the storage key data attribute from the el', () => {
- expect(getVersionDigest(getAppEl())).toBe('version-digest');
- });
- });
-});
diff --git a/spec/frontend/work_items/components/item_title_spec.js b/spec/frontend/work_items/components/item_title_spec.js
index 3a84ba4bd5e..660ff671a80 100644
--- a/spec/frontend/work_items/components/item_title_spec.js
+++ b/spec/frontend/work_items/components/item_title_spec.js
@@ -2,11 +2,12 @@ import { shallowMount } from '@vue/test-utils';
import { escape } from 'lodash';
import ItemTitle from '~/work_items/components/item_title.vue';
-const createComponent = ({ title = 'Sample title', disabled = false } = {}) =>
+const createComponent = ({ title = 'Sample title', disabled = false, useH1 = false } = {}) =>
shallowMount(ItemTitle, {
propsData: {
title,
disabled,
+ useH1,
},
});
@@ -27,6 +28,12 @@ describe('ItemTitle', () => {
expect(findInputEl().text()).toBe('Sample title');
});
+ it('renders H1 if useH1 is true, otherwise renders H2', () => {
+ expect(wrapper.element.tagName).toBe('H2');
+ wrapper = createComponent({ useH1: true });
+ expect(wrapper.element.tagName).toBe('H1');
+ });
+
it('renders title contents with editing disabled', () => {
wrapper = createComponent({
disabled: true,
diff --git a/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js b/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
index 596283a9590..97aed1d548e 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlDisclosureDropdown } from '@gitlab/ui';
+import { GlDisclosureDropdown } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -17,7 +17,7 @@ describe('Work Item Note Actions', () => {
const showSpy = jest.fn();
const findReplyButton = () => wrapper.findComponent(ReplyButton);
- const findEditButton = () => wrapper.findComponent(GlButton);
+ const findEditButton = () => wrapper.findByTestId('note-actions-edit');
const findEmojiButton = () => wrapper.findByTestId('note-emoji-button');
const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findDeleteNoteButton = () => wrapper.findByTestId('delete-note-action');
@@ -64,6 +64,7 @@ describe('Work Item Note Actions', () => {
projectName,
},
provide: {
+ isGroup: false,
glFeatures: {
workItemsMvc2: true,
},
diff --git a/spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js b/spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js
index ce915635946..6ce4c09329f 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js
@@ -9,6 +9,7 @@ import AwardsList from '~/vue_shared/components/awards_list.vue';
import WorkItemNoteAwardsList from '~/work_items/components/notes/work_item_note_awards_list.vue';
import addAwardEmojiMutation from '~/work_items/graphql/notes/work_item_note_add_award_emoji.mutation.graphql';
import removeAwardEmojiMutation from '~/work_items/graphql/notes/work_item_note_remove_award_emoji.mutation.graphql';
+import groupWorkItemNotesByIidQuery from '~/work_items/graphql/notes/group_work_item_notes_by_iid.query.graphql';
import workItemNotesByIidQuery from '~/work_items/graphql/notes/work_item_notes_by_iid.query.graphql';
import {
mockWorkItemNotesResponseWithComments,
@@ -45,7 +46,9 @@ describe('Work Item Note Awards List', () => {
const findAwardsList = () => wrapper.findComponent(AwardsList);
const createComponent = ({
+ isGroup = false,
note = firstNote,
+ query = workItemNotesByIidQuery,
addAwardEmojiMutationHandler = addAwardEmojiMutationSuccessHandler,
removeAwardEmojiMutationHandler = removeAwardEmojiMutationSuccessHandler,
} = {}) => {
@@ -55,12 +58,15 @@ describe('Work Item Note Awards List', () => {
]);
apolloProvider.clients.defaultClient.writeQuery({
- query: workItemNotesByIidQuery,
+ query,
variables: { fullPath, iid: workItemIid },
...mockWorkItemNotesResponseWithComments,
});
wrapper = shallowMount(WorkItemNoteAwardsList, {
+ provide: {
+ isGroup,
+ },
propsData: {
fullPath,
workItemIid,
@@ -89,54 +95,58 @@ describe('Work Item Note Awards List', () => {
expect(findAwardsList().props('canAwardEmoji')).toBe(hasAwardEmojiPermission);
});
- it('adds award if not already awarded', async () => {
- createComponent();
- await waitForPromises();
-
- findAwardsList().vm.$emit('award', EMOJI_THUMBSUP);
-
- expect(addAwardEmojiMutationSuccessHandler).toHaveBeenCalledWith({
- awardableId: firstNote.id,
- name: EMOJI_THUMBSUP,
- });
- });
+ it.each`
+ isGroup | query
+ ${true} | ${groupWorkItemNotesByIidQuery}
+ ${false} | ${workItemNotesByIidQuery}
+ `(
+ 'adds award if not already awarded in both group and project contexts',
+ async ({ isGroup, query }) => {
+ createComponent({ isGroup, query });
+ await waitForPromises();
+
+ findAwardsList().vm.$emit('award', EMOJI_THUMBSUP);
+
+ expect(addAwardEmojiMutationSuccessHandler).toHaveBeenCalledWith({
+ awardableId: firstNote.id,
+ name: EMOJI_THUMBSUP,
+ });
+ },
+ );
it('emits error if awarding emoji fails', async () => {
- createComponent({
- addAwardEmojiMutationHandler: jest.fn().mockRejectedValue('oh no'),
- });
- await waitForPromises();
+ createComponent({ addAwardEmojiMutationHandler: jest.fn().mockRejectedValue('oh no') });
findAwardsList().vm.$emit('award', EMOJI_THUMBSUP);
-
await waitForPromises();
expect(wrapper.emitted('error')).toEqual([[__('Failed to add emoji. Please try again')]]);
});
- it('removes award if already awarded', async () => {
- const removeAwardEmojiMutationHandler = removeAwardEmojiMutationSuccessHandler;
-
- createComponent({ removeAwardEmojiMutationHandler });
-
- findAwardsList().vm.$emit('award', EMOJI_THUMBSDOWN);
-
- await waitForPromises();
-
- expect(removeAwardEmojiMutationHandler).toHaveBeenCalledWith({
- awardableId: firstNote.id,
- name: EMOJI_THUMBSDOWN,
- });
- });
+ it.each`
+ isGroup | query
+ ${true} | ${groupWorkItemNotesByIidQuery}
+ ${false} | ${workItemNotesByIidQuery}
+ `(
+ 'removes award if already awarded in both group and project contexts',
+ async ({ isGroup, query }) => {
+ const removeAwardEmojiMutationHandler = removeAwardEmojiMutationSuccessHandler;
+ createComponent({ isGroup, query, removeAwardEmojiMutationHandler });
+
+ findAwardsList().vm.$emit('award', EMOJI_THUMBSDOWN);
+ await waitForPromises();
+
+ expect(removeAwardEmojiMutationHandler).toHaveBeenCalledWith({
+ awardableId: firstNote.id,
+ name: EMOJI_THUMBSDOWN,
+ });
+ },
+ );
it('restores award if remove fails', async () => {
- createComponent({
- removeAwardEmojiMutationHandler: jest.fn().mockRejectedValue('oh no'),
- });
- await waitForPromises();
+ createComponent({ removeAwardEmojiMutationHandler: jest.fn().mockRejectedValue('oh no') });
findAwardsList().vm.$emit('award', EMOJI_THUMBSDOWN);
-
await waitForPromises();
expect(wrapper.emitted('error')).toEqual([[__('Failed to remove emoji. Please try again')]]);
diff --git a/spec/frontend/work_items/components/notes/work_item_notes_activity_header_spec.js b/spec/frontend/work_items/components/notes/work_item_notes_activity_header_spec.js
index daf74f7a93b..dff54fef9fe 100644
--- a/spec/frontend/work_items/components/notes/work_item_notes_activity_header_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_notes_activity_header_spec.js
@@ -9,7 +9,8 @@ import {
describe('Work Item Note Activity Header', () => {
let wrapper;
- const findActivityLabelHeading = () => wrapper.find('h3');
+ const findActivityLabelH2Heading = () => wrapper.find('h2');
+ const findActivityLabelH3Heading = () => wrapper.find('h3');
const findActivityFilterDropdown = () => wrapper.findByTestId('work-item-filter');
const findActivitySortDropdown = () => wrapper.findByTestId('work-item-sort');
@@ -18,6 +19,7 @@ describe('Work Item Note Activity Header', () => {
sortOrder = ASC,
workItemType = 'Task',
discussionFilter = WORK_ITEM_NOTES_FILTER_ALL_NOTES,
+ useH2 = false,
} = {}) => {
wrapper = shallowMountExtended(WorkItemNotesActivityHeader, {
propsData: {
@@ -25,6 +27,7 @@ describe('Work Item Note Activity Header', () => {
sortOrder,
workItemType,
discussionFilter,
+ useH2,
},
});
};
@@ -34,7 +37,18 @@ describe('Work Item Note Activity Header', () => {
});
it('Should have the Activity label', () => {
- expect(findActivityLabelHeading().text()).toBe(WorkItemNotesActivityHeader.i18n.activityLabel);
+ expect(findActivityLabelH3Heading().text()).toBe(
+ WorkItemNotesActivityHeader.i18n.activityLabel,
+ );
+ });
+
+ it('Should render an H2 instead of an H3 if useH2 is true', () => {
+ createComponent();
+ expect(findActivityLabelH3Heading().exists()).toBe(true);
+ expect(findActivityLabelH2Heading().exists()).toBe(false);
+ createComponent({ useH2: true });
+ expect(findActivityLabelH2Heading().exists()).toBe(true);
+ expect(findActivityLabelH3Heading().exists()).toBe(false);
});
it('Should have Activity filtering dropdown', () => {
diff --git a/spec/frontend/work_items/components/work_item_ancestors/disclosure_hierarchy_item_spec.js b/spec/frontend/work_items/components/work_item_ancestors/disclosure_hierarchy_item_spec.js
new file mode 100644
index 00000000000..2cfe61654ad
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_ancestors/disclosure_hierarchy_item_spec.js
@@ -0,0 +1,53 @@
+import { GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+
+import DisclosureHierarchyItem from '~/work_items/components/work_item_ancestors/disclosure_hierarchy_item.vue';
+import { mockDisclosureHierarchyItems } from './mock_data';
+
+describe('DisclosurePathItem', () => {
+ let wrapper;
+
+ const findIcon = () => wrapper.findComponent(GlIcon);
+
+ const createComponent = (props = {}, options = {}) => {
+ return shallowMount(DisclosureHierarchyItem, {
+ propsData: {
+ item: mockDisclosureHierarchyItems[0],
+ ...props,
+ },
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ describe('renders the item', () => {
+ it('renders the inline icon', () => {
+ expect(findIcon().exists()).toBe(true);
+ expect(findIcon().props('name')).toBe(mockDisclosureHierarchyItems[0].icon);
+ });
+ });
+
+ describe('item slot', () => {
+ beforeEach(() => {
+ wrapper = createComponent(null, {
+ scopedSlots: {
+ default: `
+ <div
+ data-testid="item-slot-content">
+ {{ props.item.title }}
+ </div>
+ `,
+ },
+ });
+ });
+
+ it('contains all elements passed into the additional slot', () => {
+ const item = wrapper.find('[data-testid="item-slot-content"]');
+
+ expect(item.text()).toBe(mockDisclosureHierarchyItems[0].title);
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_ancestors/disclosure_hierarchy_spec.js b/spec/frontend/work_items/components/work_item_ancestors/disclosure_hierarchy_spec.js
new file mode 100644
index 00000000000..b808c13c3e7
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_ancestors/disclosure_hierarchy_spec.js
@@ -0,0 +1,99 @@
+import { shallowMount } from '@vue/test-utils';
+
+import { GlDisclosureDropdown, GlTooltip } from '@gitlab/ui';
+import DisclosureHierarchy from '~/work_items/components/work_item_ancestors//disclosure_hierarchy.vue';
+import DisclosureHierarchyItem from '~/work_items/components/work_item_ancestors/disclosure_hierarchy_item.vue';
+import { mockDisclosureHierarchyItems } from './mock_data';
+
+describe('DisclosurePath', () => {
+ let wrapper;
+
+ const createComponent = (props = {}, options = {}) => {
+ return shallowMount(DisclosureHierarchy, {
+ propsData: {
+ items: mockDisclosureHierarchyItems,
+ ...props,
+ },
+ ...options,
+ });
+ };
+
+ const listItems = () => wrapper.findAllComponents(DisclosureHierarchyItem);
+ const itemAt = (index) => listItems().at(index);
+ const itemTextAt = (index) => itemAt(index).props('item').title;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ describe('renders the list of items', () => {
+ it('renders the correct number of items', () => {
+ expect(listItems().length).toBe(mockDisclosureHierarchyItems.length);
+ });
+
+ it('renders the items in the correct order', () => {
+ expect(itemTextAt(0)).toContain(mockDisclosureHierarchyItems[0].title);
+ expect(itemTextAt(4)).toContain(mockDisclosureHierarchyItems[4].title);
+ expect(itemTextAt(9)).toContain(mockDisclosureHierarchyItems[9].title);
+ });
+ });
+
+ describe('slots', () => {
+ beforeEach(() => {
+ wrapper = createComponent(null, {
+ scopedSlots: {
+ default: `
+ <div
+ :data-itemid="props.itemId"
+ data-testid="item-slot-content">
+ {{ props.item.title }}
+ </div>
+ `,
+ },
+ });
+ });
+
+ it('contains all elements passed into the default slot', () => {
+ mockDisclosureHierarchyItems.forEach((item, index) => {
+ const disclosureItem = wrapper.findAll('[data-testid="item-slot-content"]').at(index);
+
+ expect(disclosureItem.text()).toBe(item.title);
+ expect(disclosureItem.attributes('data-itemid')).toContain('disclosure-');
+ });
+ });
+ });
+
+ describe('with ellipsis', () => {
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findTooltip = () => wrapper.findComponent(GlTooltip);
+ const findTooltipText = () => findTooltip().text();
+ const tooltipText = 'Display more items';
+
+ beforeEach(() => {
+ wrapper = createComponent({ withEllipsis: true, ellipsisTooltipLabel: tooltipText });
+ });
+
+ describe('renders items and dropdown', () => {
+ it('renders 2 items', () => {
+ expect(listItems().length).toBe(2);
+ });
+
+ it('renders first and last items', () => {
+ expect(itemTextAt(0)).toContain(mockDisclosureHierarchyItems[0].title);
+ expect(itemTextAt(1)).toContain(
+ mockDisclosureHierarchyItems[mockDisclosureHierarchyItems.length - 1].title,
+ );
+ });
+
+ it('renders dropdown with the rest of the items passed down', () => {
+ expect(findDropdown().exists()).toBe(true);
+ expect(findDropdown().props('items').length).toBe(mockDisclosureHierarchyItems.length - 2);
+ });
+
+ it('renders tooltip with text passed as prop', () => {
+ expect(findTooltip().exists()).toBe(true);
+ expect(findTooltipText()).toBe(tooltipText);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_ancestors/mock_data.js b/spec/frontend/work_items/components/work_item_ancestors/mock_data.js
new file mode 100644
index 00000000000..8e7f99658de
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_ancestors/mock_data.js
@@ -0,0 +1,197 @@
+export const mockDisclosureHierarchyItems = [
+ {
+ title: 'First',
+ icon: 'epic',
+ href: '#',
+ },
+ {
+ title: 'Second',
+ icon: 'epic',
+ href: '#',
+ },
+ {
+ title: 'Third',
+ icon: 'epic',
+ href: '#',
+ },
+ {
+ title: 'Fourth',
+ icon: 'epic',
+ href: '#',
+ },
+ {
+ title: 'Fifth',
+ icon: 'issues',
+ href: '#',
+ },
+ {
+ title: 'Sixth',
+ icon: 'issues',
+ href: '#',
+ },
+ {
+ title: 'Seventh',
+ icon: 'issues',
+ href: '#',
+ },
+ {
+ title: 'Eighth',
+ icon: 'issue-type-task',
+ href: '#',
+ disabled: true,
+ },
+ {
+ title: 'Ninth',
+ icon: 'issue-type-task',
+ href: '#',
+ },
+ {
+ title: 'Tenth',
+ icon: 'issue-type-task',
+ href: '#',
+ },
+];
+
+export const workItemAncestorsQueryResponse = {
+ data: {
+ workItem: {
+ __typename: 'WorkItem',
+ id: 'gid://gitlab/WorkItem/1',
+ title: 'Test',
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetHierarchy',
+ type: 'HIERARCHY',
+ parent: {
+ id: 'gid://gitlab/Issue/1',
+ },
+ ancestors: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/444',
+ iid: '4',
+ reference: '#40',
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
+ confidential: false,
+ title: '123',
+ state: 'OPEN',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/4',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/2',
+ name: 'Issue',
+ iconName: 'issue-type-issue',
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+};
+
+export const workItemThreeAncestorsQueryResponse = {
+ data: {
+ workItem: {
+ __typename: 'WorkItem',
+ id: 'gid://gitlab/WorkItem/1',
+ title: 'Test',
+ workItemType: {
+ __typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/5',
+ name: 'Task',
+ iconName: 'issue-type-task',
+ },
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetHierarchy',
+ type: 'HIERARCHY',
+ parent: {
+ id: 'gid://gitlab/Issue/1',
+ },
+ ancestors: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/444',
+ iid: '4',
+ reference: '#40',
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
+ confidential: false,
+ title: '123',
+ state: 'OPEN',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/4',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/2',
+ name: 'Issue',
+ iconName: 'issue-type-issue',
+ },
+ },
+ {
+ id: 'gid://gitlab/WorkItem/445',
+ iid: '5',
+ reference: '#41',
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
+ confidential: false,
+ title: '1234',
+ state: 'OPEN',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/5',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/2',
+ name: 'Issue',
+ iconName: 'issue-type-issue',
+ },
+ },
+ {
+ id: 'gid://gitlab/WorkItem/446',
+ iid: '6',
+ reference: '#42',
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
+ confidential: false,
+ title: '12345',
+ state: 'OPEN',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/6',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/2',
+ name: 'Issue',
+ iconName: 'issue-type-issue',
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+};
+
+export const workItemEmptyAncestorsQueryResponse = {
+ data: {
+ workItem: {
+ __typename: 'WorkItem',
+ id: 'gid://gitlab/WorkItem/1',
+ title: 'Test',
+ workItemType: {
+ __typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/5',
+ name: 'Task',
+ iconName: 'issue-type-task',
+ },
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetHierarchy',
+ type: 'HIERARCHY',
+ parent: {
+ id: null,
+ },
+ ancestors: {
+ nodes: [],
+ },
+ },
+ ],
+ },
+ },
+};
diff --git a/spec/frontend/work_items/components/work_item_ancestors/work_item_ancestors_spec.js b/spec/frontend/work_items/components/work_item_ancestors/work_item_ancestors_spec.js
new file mode 100644
index 00000000000..a9f66b20f06
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_ancestors/work_item_ancestors_spec.js
@@ -0,0 +1,117 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlPopover } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import { createAlert } from '~/alert';
+import DisclosureHierarchy from '~/work_items/components/work_item_ancestors/disclosure_hierarchy.vue';
+import WorkItemAncestors from '~/work_items/components/work_item_ancestors/work_item_ancestors.vue';
+import workItemAncestorsQuery from '~/work_items/graphql/work_item_ancestors.query.graphql';
+import { formatAncestors } from '~/work_items/utils';
+
+import { workItemTask } from '../../mock_data';
+import {
+ workItemAncestorsQueryResponse,
+ workItemEmptyAncestorsQueryResponse,
+ workItemThreeAncestorsQueryResponse,
+} from './mock_data';
+
+Vue.use(VueApollo);
+jest.mock('~/alert');
+
+describe('WorkItemAncestors', () => {
+ let wrapper;
+ let mockApollo;
+
+ const workItemAncestorsQueryHandler = jest.fn().mockResolvedValue(workItemAncestorsQueryResponse);
+ const workItemEmptyAncestorsQueryHandler = jest
+ .fn()
+ .mockResolvedValue(workItemEmptyAncestorsQueryResponse);
+ const workItemThreeAncestorsQueryHandler = jest
+ .fn()
+ .mockResolvedValue(workItemThreeAncestorsQueryResponse);
+ const workItemAncestorsFailureHandler = jest.fn().mockRejectedValue(new Error());
+
+ const findDisclosureHierarchy = () => wrapper.findComponent(DisclosureHierarchy);
+ const findPopover = () => wrapper.findComponent(GlPopover);
+
+ const createComponent = ({
+ props = {},
+ options = {},
+ ancestorsQueryHandler = workItemAncestorsQueryHandler,
+ } = {}) => {
+ mockApollo = createMockApollo([[workItemAncestorsQuery, ancestorsQueryHandler]]);
+ return mountExtended(WorkItemAncestors, {
+ apolloProvider: mockApollo,
+ propsData: {
+ workItem: workItemTask,
+ ...props,
+ },
+ ...options,
+ });
+ };
+
+ beforeEach(async () => {
+ createAlert.mockClear();
+ wrapper = createComponent();
+ await waitForPromises();
+ });
+
+ it('fetches work item ancestors', () => {
+ expect(workItemAncestorsQueryHandler).toHaveBeenCalled();
+ });
+
+ it('displays DisclosureHierarchy component with ancestors when work item has at least one ancestor', () => {
+ expect(findDisclosureHierarchy().exists()).toBe(true);
+ expect(findDisclosureHierarchy().props('items')).toEqual(
+ expect.objectContaining(formatAncestors(workItemAncestorsQueryResponse.data.workItem)),
+ );
+ });
+
+ it('does not display DisclosureHierarchy component when work item has no ancestor', async () => {
+ wrapper = createComponent({ ancestorsQueryHandler: workItemEmptyAncestorsQueryHandler });
+ await waitForPromises();
+
+ expect(findDisclosureHierarchy().exists()).toBe(false);
+ });
+
+ it('displays work item info in popover on hover and focus', () => {
+ expect(findPopover().exists()).toBe(true);
+ expect(findPopover().props('triggers')).toBe('hover focus');
+
+ const ancestor = findDisclosureHierarchy().props('items')[0];
+
+ expect(findPopover().text()).toContain(ancestor.title);
+ expect(findPopover().text()).toContain(ancestor.reference);
+ });
+
+ describe('when work item has less than 3 ancestors', () => {
+ it('does not activate ellipsis option for DisclosureHierarchy component', () => {
+ expect(findDisclosureHierarchy().props('withEllipsis')).toBe(false);
+ });
+ });
+
+ describe('when work item has at least 3 ancestors', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({ ancestorsQueryHandler: workItemThreeAncestorsQueryHandler });
+ await waitForPromises();
+ });
+
+ it('activates ellipsis option for DisclosureHierarchy component', () => {
+ expect(findDisclosureHierarchy().props('withEllipsis')).toBe(true);
+ });
+ });
+
+ it('creates alert when the query fails', async () => {
+ createComponent({ ancestorsQueryHandler: workItemAncestorsFailureHandler });
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ captureError: true,
+ error: expect.any(Object),
+ message: 'Something went wrong while fetching ancestors.',
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js b/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
index 123cf647674..48ec84ceb85 100644
--- a/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
+++ b/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
@@ -1,11 +1,20 @@
+import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
import WorkItemDueDate from '~/work_items/components/work_item_due_date.vue';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
-
+import WorkItemParentInline from '~/work_items/components/work_item_parent_inline.vue';
+import WorkItemParent from '~/work_items/components/work_item_parent_with_edit.vue';
+import waitForPromises from 'helpers/wait_for_promises';
import WorkItemAttributesWrapper from '~/work_items/components/work_item_attributes_wrapper.vue';
-import { workItemResponseFactory } from '../mock_data';
+import {
+ workItemResponseFactory,
+ taskType,
+ issueType,
+ objectiveType,
+ keyResultType,
+} from '../mock_data';
describe('WorkItemAttributesWrapper component', () => {
let wrapper;
@@ -16,8 +25,13 @@ describe('WorkItemAttributesWrapper component', () => {
const findWorkItemAssignees = () => wrapper.findComponent(WorkItemAssignees);
const findWorkItemLabels = () => wrapper.findComponent(WorkItemLabels);
const findWorkItemMilestone = () => wrapper.findComponent(WorkItemMilestone);
+ const findWorkItemParentInline = () => wrapper.findComponent(WorkItemParentInline);
+ const findWorkItemParent = () => wrapper.findComponent(WorkItemParent);
- const createComponent = ({ workItem = workItemQueryResponse.data.workItem } = {}) => {
+ const createComponent = ({
+ workItem = workItemQueryResponse.data.workItem,
+ workItemsMvc2 = true,
+ } = {}) => {
wrapper = shallowMount(WorkItemAttributesWrapper, {
propsData: {
fullPath: 'group/project',
@@ -29,6 +43,9 @@ describe('WorkItemAttributesWrapper component', () => {
hasOkrsFeature: true,
hasIssuableHealthStatusFeature: true,
projectNamespace: 'namespace',
+ glFeatures: {
+ workItemsMvc2,
+ },
},
stubs: {
WorkItemWeight: true,
@@ -94,4 +111,54 @@ describe('WorkItemAttributesWrapper component', () => {
expect(findWorkItemMilestone().exists()).toBe(exists);
});
});
+
+ describe('parent widget', () => {
+ describe.each`
+ description | workItemType | exists
+ ${'when work item type is task'} | ${taskType} | ${true}
+ ${'when work item type is objective'} | ${objectiveType} | ${true}
+ ${'when work item type is keyresult'} | ${keyResultType} | ${true}
+ ${'when work item type is issue'} | ${issueType} | ${false}
+ `('$description', ({ workItemType, exists }) => {
+ it(`${exists ? 'renders' : 'does not render'} parent component`, async () => {
+ const response = workItemResponseFactory({ workItemType });
+ createComponent({ workItem: response.data.workItem });
+
+ await waitForPromises();
+
+ expect(findWorkItemParent().exists()).toBe(exists);
+ });
+ });
+
+ it('renders WorkItemParent when workItemsMvc2 enabled', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findWorkItemParent().exists()).toBe(true);
+ expect(findWorkItemParentInline().exists()).toBe(false);
+ });
+
+ it('renders WorkItemParentInline when workItemsMvc2 disabled', async () => {
+ createComponent({ workItemsMvc2: false });
+
+ await waitForPromises();
+
+ expect(findWorkItemParent().exists()).toBe(false);
+ expect(findWorkItemParentInline().exists()).toBe(true);
+ });
+
+ it('emits an error event to the wrapper', async () => {
+ const response = workItemResponseFactory({ parentWidgetPresent: true });
+ createComponent({ workItem: response.data.workItem });
+ const updateError = 'Failed to update';
+
+ await waitForPromises();
+
+ findWorkItemParent().vm.$emit('error', updateError);
+ await nextTick();
+
+ expect(wrapper.emitted('error')).toEqual([[updateError]]);
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_detail_modal_spec.js b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
index 6fa3a70c3eb..f77d6c89035 100644
--- a/spec/frontend/work_items/components/work_item_detail_modal_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
@@ -61,7 +61,6 @@ describe('WorkItemDetailModal component', () => {
expect(findWorkItemDetail().props()).toEqual({
isModal: true,
workItemIid: '1',
- workItemParentId: null,
});
});
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
index acfe4571cd2..d63bb94c3f0 100644
--- a/spec/frontend/work_items/components/work_item_detail_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -1,10 +1,4 @@
-import {
- GlAlert,
- GlSkeletonLoader,
- GlButton,
- GlEmptyState,
- GlIntersectionObserver,
-} from '@gitlab/ui';
+import { GlAlert, GlSkeletonLoader, GlEmptyState } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -15,6 +9,7 @@ import setWindowLocation from 'helpers/set_window_location_helper';
import { stubComponent } from 'helpers/stub_component';
import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
import WorkItemActions from '~/work_items/components/work_item_actions.vue';
+import WorkItemAncestors from '~/work_items/components/work_item_ancestors/work_item_ancestors.vue';
import WorkItemDescription from '~/work_items/components/work_item_description.vue';
import WorkItemCreatedUpdated from '~/work_items/components/work_item_created_updated.vue';
import WorkItemAttributesWrapper from '~/work_items/components/work_item_attributes_wrapper.vue';
@@ -23,13 +18,13 @@ import WorkItemTree from '~/work_items/components/work_item_links/work_item_tree
import WorkItemRelationships from '~/work_items/components/work_item_relationships/work_item_relationships.vue';
import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
+import WorkItemStickyHeader from '~/work_items/components/work_item_sticky_header.vue';
import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
import WorkItemTodos from '~/work_items/components/work_item_todos.vue';
import { i18n } from '~/work_items/constants';
import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
-import updateWorkItemTaskMutation from '~/work_items/graphql/update_work_item_task.mutation.graphql';
import workItemUpdatedSubscription from '~/work_items/graphql/work_item_updated.subscription.graphql';
import {
@@ -74,8 +69,7 @@ describe('WorkItemDetail component', () => {
const findCreatedUpdated = () => wrapper.findComponent(WorkItemCreatedUpdated);
const findWorkItemDescription = () => wrapper.findComponent(WorkItemDescription);
const findWorkItemAttributesWrapper = () => wrapper.findComponent(WorkItemAttributesWrapper);
- const findParent = () => wrapper.findByTestId('work-item-parent');
- const findParentButton = () => findParent().findComponent(GlButton);
+ const findAncestors = () => wrapper.findComponent(WorkItemAncestors);
const findCloseButton = () => wrapper.findByTestId('work-item-close');
const findWorkItemType = () => wrapper.findByTestId('work-item-type');
const findHierarchyTree = () => wrapper.findComponent(WorkItemTree);
@@ -84,11 +78,9 @@ describe('WorkItemDetail component', () => {
const findModal = () => wrapper.findComponent(WorkItemDetailModal);
const findAbuseCategorySelector = () => wrapper.findComponent(AbuseCategorySelector);
const findWorkItemTodos = () => wrapper.findComponent(WorkItemTodos);
- const findIntersectionObserver = () => wrapper.findComponent(GlIntersectionObserver);
- const findStickyHeader = () => wrapper.findByTestId('work-item-sticky-header');
+ const findStickyHeader = () => wrapper.findComponent(WorkItemStickyHeader);
const findWorkItemTwoColumnViewContainer = () => wrapper.findByTestId('work-item-overview');
const findRightSidebar = () => wrapper.findByTestId('work-item-overview-right-sidebar');
- const triggerPageScroll = () => findIntersectionObserver().vm.$emit('disappear');
const createComponent = ({
isGroup = false,
@@ -96,7 +88,7 @@ describe('WorkItemDetail component', () => {
updateInProgress = false,
workItemIid = '1',
handler = successHandler,
- confidentialityMock = [updateWorkItemMutation, jest.fn()],
+ mutationHandler,
error = undefined,
workItemsMvc2Enabled = false,
linkedWorkItemsEnabled = false,
@@ -105,8 +97,8 @@ describe('WorkItemDetail component', () => {
apolloProvider: createMockApollo([
[workItemByIidQuery, handler],
[groupWorkItemByIidQuery, groupSuccessHandler],
+ [updateWorkItemMutation, mutationHandler],
[workItemUpdatedSubscription, workItemUpdatedSubscriptionHandler],
- confidentialityMock,
]),
isLoggedIn: isLoggedIn(),
propsData: {
@@ -134,6 +126,7 @@ describe('WorkItemDetail component', () => {
reportAbusePath: '/report/abuse/path',
},
stubs: {
+ WorkItemAncestors: true,
WorkItemWeight: true,
WorkItemIteration: true,
WorkItemHealthStatus: true,
@@ -236,119 +229,52 @@ describe('WorkItemDetail component', () => {
describe('confidentiality', () => {
const errorMessage = 'Mutation failed';
- const confidentialWorkItem = workItemByIidResponseFactory({
- confidential: true,
- });
- const workItem = confidentialWorkItem.data.workspace.workItems.nodes[0];
-
- // Mocks for work item without parent
- const withoutParentExpectedInputVars = { id, confidential: true };
- const toggleConfidentialityWithoutParentHandler = jest.fn().mockResolvedValue({
- data: {
- workItemUpdate: {
- workItem,
- errors: [],
- },
- },
- });
- const withoutParentHandlerMock = jest
- .fn()
- .mockResolvedValue(workItemQueryResponseWithoutParent);
- const confidentialityWithoutParentMock = [
- updateWorkItemMutation,
- toggleConfidentialityWithoutParentHandler,
- ];
- const confidentialityWithoutParentFailureMock = [
- updateWorkItemMutation,
- jest.fn().mockRejectedValue(new Error(errorMessage)),
- ];
-
- // Mocks for work item with parent
- const withParentExpectedInputVars = {
- id: mockParent.parent.id,
- taskData: { id, confidential: true },
- };
- const toggleConfidentialityWithParentHandler = jest.fn().mockResolvedValue({
+ const confidentialWorkItem = workItemByIidResponseFactory({ confidential: true });
+ const mutationHandler = jest.fn().mockResolvedValue({
data: {
workItemUpdate: {
- workItem: {
- id: workItem.id,
- descriptionHtml: workItem.description,
- },
- task: {
- workItem,
- confidential: true,
- },
+ workItem: confidentialWorkItem.data.workspace.workItems.nodes[0],
errors: [],
},
},
});
- const confidentialityWithParentMock = [
- updateWorkItemTaskMutation,
- toggleConfidentialityWithParentHandler,
- ];
- const confidentialityWithParentFailureMock = [
- updateWorkItemTaskMutation,
- jest.fn().mockRejectedValue(new Error(errorMessage)),
- ];
-
- describe.each`
- context | handlerMock | confidentialityMock | confidentialityFailureMock | inputVariables
- ${'no parent'} | ${withoutParentHandlerMock} | ${confidentialityWithoutParentMock} | ${confidentialityWithoutParentFailureMock} | ${withoutParentExpectedInputVars}
- ${'parent'} | ${successHandler} | ${confidentialityWithParentMock} | ${confidentialityWithParentFailureMock} | ${withParentExpectedInputVars}
- `(
- 'when work item has $context',
- ({ handlerMock, confidentialityMock, confidentialityFailureMock, inputVariables }) => {
- it('sends updateInProgress props to child component', async () => {
- createComponent({
- handler: handlerMock,
- confidentialityMock,
- });
-
- await waitForPromises();
-
- findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
- await nextTick();
-
- expect(findCreatedUpdated().props('updateInProgress')).toBe(true);
- });
+ it('sends updateInProgress props to child component', async () => {
+ createComponent({ mutationHandler });
+ await waitForPromises();
- it('emits workItemUpdated when mutation is successful', async () => {
- createComponent({
- handler: handlerMock,
- confidentialityMock,
- });
+ findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
+ await nextTick();
- await waitForPromises();
+ expect(findCreatedUpdated().props('updateInProgress')).toBe(true);
+ });
- findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
- await waitForPromises();
+ it('emits workItemUpdated when mutation is successful', async () => {
+ createComponent({ mutationHandler });
+ await waitForPromises();
- expect(wrapper.emitted('workItemUpdated')).toEqual([[{ confidential: true }]]);
- expect(confidentialityMock[1]).toHaveBeenCalledWith({
- input: inputVariables,
- });
- });
+ findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
+ await waitForPromises();
- it('shows an alert when mutation fails', async () => {
- createComponent({
- handler: handlerMock,
- confidentialityMock: confidentialityFailureMock,
- });
+ expect(wrapper.emitted('workItemUpdated')).toEqual([[{ confidential: true }]]);
+ expect(mutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/WorkItem/1',
+ confidential: true,
+ },
+ });
+ });
- await waitForPromises();
- findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
- await waitForPromises();
- expect(wrapper.emitted('workItemUpdated')).toBeUndefined();
+ it('shows an alert when mutation fails', async () => {
+ createComponent({ mutationHandler: jest.fn().mockRejectedValue(new Error(errorMessage)) });
+ await waitForPromises();
- await nextTick();
+ findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
+ await waitForPromises();
- expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(errorMessage);
- });
- },
- );
+ expect(wrapper.emitted('workItemUpdated')).toBeUndefined();
+ expect(findAlert().text()).toBe(errorMessage);
+ });
});
describe('description', () => {
@@ -366,19 +292,19 @@ describe('WorkItemDetail component', () => {
});
});
- describe('secondary breadcrumbs', () => {
- it('does not show secondary breadcrumbs by default', () => {
+ describe('ancestors widget', () => {
+ it('does not show ancestors widget by default', () => {
createComponent();
- expect(findParent().exists()).toBe(false);
+ expect(findAncestors().exists()).toBe(false);
});
- it('does not show secondary breadcrumbs if there is not a parent', async () => {
+ it('does not show ancestors widget if there is not a parent', async () => {
createComponent({ handler: jest.fn().mockResolvedValue(workItemQueryResponseWithoutParent) });
await waitForPromises();
- expect(findParent().exists()).toBe(false);
+ expect(findAncestors().exists()).toBe(false);
});
it('shows title in the header when there is no parent', async () => {
@@ -396,45 +322,8 @@ describe('WorkItemDetail component', () => {
return waitForPromises();
});
- it('shows secondary breadcrumbs if there is a parent', () => {
- expect(findParent().exists()).toBe(true);
- });
-
- it('shows parent breadcrumb icon', () => {
- expect(findParentButton().props('icon')).toBe(mockParent.parent.workItemType.iconName);
- });
-
- it('shows parent title and iid', () => {
- expect(findParentButton().text()).toBe(
- `${mockParent.parent.title} #${mockParent.parent.iid}`,
- );
- });
-
- it('sets the parent breadcrumb URL pointing to issue page when parent type is `Issue`', () => {
- expect(findParentButton().attributes().href).toBe('../../-/issues/5');
- });
-
- it('sets the parent breadcrumb URL based on parent webUrl when parent type is not `Issue`', async () => {
- const mockParentObjective = {
- parent: {
- ...mockParent.parent,
- workItemType: {
- id: mockParent.parent.workItemType.id,
- name: 'Objective',
- iconName: 'issue-type-objective',
- },
- },
- };
- const parentResponse = workItemByIidResponseFactory(mockParentObjective);
- createComponent({ handler: jest.fn().mockResolvedValue(parentResponse) });
- await waitForPromises();
-
- expect(findParentButton().attributes().href).toBe(mockParentObjective.parent.webUrl);
- });
-
- it('shows work item type and iid', () => {
- const { iid } = workItemQueryResponse.data.workspace.workItems.nodes[0];
- expect(findParent().text()).toContain(`#${iid}`);
+ it('shows ancestors widget if there is a parent', () => {
+ expect(findAncestors().exists()).toBe(true);
});
it('does not show title in the header when parent exists', () => {
@@ -769,8 +658,7 @@ describe('WorkItemDetail component', () => {
expect(findWorkItemTwoColumnViewContainer().classes()).not.toContain('work-item-overview');
});
- it('does not have sticky header', () => {
- expect(findIntersectionObserver().exists()).toBe(false);
+ it('does not have sticky header component', () => {
expect(findStickyHeader().exists()).toBe(false);
});
@@ -789,18 +677,7 @@ describe('WorkItemDetail component', () => {
expect(findWorkItemTwoColumnViewContainer().classes()).toContain('work-item-overview');
});
- it('does not show sticky header by default', () => {
- expect(findStickyHeader().exists()).toBe(false);
- });
-
- it('has the sticky header when the page is scrolled', async () => {
- expect(findIntersectionObserver().exists()).toBe(true);
-
- global.pageYOffset = 100;
- triggerPageScroll();
-
- await nextTick();
-
+ it('renders the work item sticky header component', () => {
expect(findStickyHeader().exists()).toBe(true);
});
diff --git a/spec/frontend/work_items/components/work_item_links/okr_actions_split_button_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_actions_split_button_spec.js
index 55d5b34ae70..630ffa1a699 100644
--- a/spec/frontend/work_items/components/work_item_links/okr_actions_split_button_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_actions_split_button_spec.js
@@ -1,12 +1,40 @@
import { GlDisclosureDropdown, GlDisclosureDropdownGroup } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import OkrActionsSplitButton from '~/work_items/components/work_item_links/okr_actions_split_button.vue';
+import WorkItemActionsSplitButton from '~/work_items/components/work_item_links/work_item_actions_split_button.vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+const okrActions = [
+ {
+ name: 'Objective',
+ items: [
+ {
+ text: 'New objective',
+ },
+ {
+ text: 'Existing objective',
+ },
+ ],
+ },
+ {
+ name: 'Key result',
+ items: [
+ {
+ text: 'New key result',
+ },
+ {
+ text: 'Existing key result',
+ },
+ ],
+ },
+];
+
const createComponent = () => {
return extendedWrapper(
- shallowMount(OkrActionsSplitButton, {
+ shallowMount(WorkItemActionsSplitButton, {
+ propsData: {
+ actions: okrActions,
+ },
stubs: {
GlDisclosureDropdown,
},
@@ -21,7 +49,7 @@ describe('RelatedItemsTree', () => {
wrapper = createComponent();
});
- describe('OkrActionsSplitButton', () => {
+ describe('WorkItemActionsSplitButton', () => {
describe('template', () => {
it('renders objective and key results sections', () => {
expect(wrapper.findAllComponents(GlDisclosureDropdownGroup).at(0).props('group').name).toBe(
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
index 6c1d1035c3d..49a674e73c8 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
@@ -1,28 +1,36 @@
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import { GlToggle } from '@gitlab/ui';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import WidgetWrapper from '~/work_items/components/widget_wrapper.vue';
import WorkItemTree from '~/work_items/components/work_item_links/work_item_tree.vue';
import WorkItemChildrenWrapper from '~/work_items/components/work_item_links/work_item_children_wrapper.vue';
import WorkItemLinksForm from '~/work_items/components/work_item_links/work_item_links_form.vue';
-import OkrActionsSplitButton from '~/work_items/components/work_item_links/okr_actions_split_button.vue';
+import WorkItemActionsSplitButton from '~/work_items/components/work_item_links/work_item_actions_split_button.vue';
+import getAllowedWorkItemChildTypes from '~/work_items//graphql/work_item_allowed_children.query.graphql';
import {
FORM_TYPES,
WORK_ITEM_TYPE_ENUM_OBJECTIVE,
WORK_ITEM_TYPE_ENUM_KEY_RESULT,
} from '~/work_items/constants';
-import { childrenWorkItems } from '../../mock_data';
+import { childrenWorkItems, allowedChildrenTypesResponse } from '../../mock_data';
+
+Vue.use(VueApollo);
describe('WorkItemTree', () => {
let wrapper;
const findEmptyState = () => wrapper.findByTestId('tree-empty');
- const findToggleFormSplitButton = () => wrapper.findComponent(OkrActionsSplitButton);
+ const findToggleFormSplitButton = () => wrapper.findComponent(WorkItemActionsSplitButton);
const findForm = () => wrapper.findComponent(WorkItemLinksForm);
const findWidgetWrapper = () => wrapper.findComponent(WidgetWrapper);
const findWorkItemLinkChildrenWrapper = () => wrapper.findComponent(WorkItemChildrenWrapper);
const findShowLabelsToggle = () => wrapper.findComponent(GlToggle);
+ const allowedChildrenTypesHandler = jest.fn().mockResolvedValue(allowedChildrenTypesResponse);
+
const createComponent = ({
workItemType = 'Objective',
parentWorkItemType = 'Objective',
@@ -31,6 +39,9 @@ describe('WorkItemTree', () => {
canUpdate = true,
} = {}) => {
wrapper = shallowMountExtended(WorkItemTree, {
+ apolloProvider: createMockApollo([
+ [getAllowedWorkItemChildTypes, allowedChildrenTypesHandler],
+ ]),
propsData: {
fullPath: 'test/project',
workItemType,
@@ -79,18 +90,25 @@ describe('WorkItemTree', () => {
expect(findWidgetWrapper().props('error')).toBe(errorMessage);
});
+ it('fetches allowed children types for current work item', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(allowedChildrenTypesHandler).toHaveBeenCalled();
+ });
+
it.each`
- option | event | formType | childType
- ${'New objective'} | ${'showCreateObjectiveForm'} | ${FORM_TYPES.create} | ${WORK_ITEM_TYPE_ENUM_OBJECTIVE}
- ${'Existing objective'} | ${'showAddObjectiveForm'} | ${FORM_TYPES.add} | ${WORK_ITEM_TYPE_ENUM_OBJECTIVE}
- ${'New key result'} | ${'showCreateKeyResultForm'} | ${FORM_TYPES.create} | ${WORK_ITEM_TYPE_ENUM_KEY_RESULT}
- ${'Existing key result'} | ${'showAddKeyResultForm'} | ${FORM_TYPES.add} | ${WORK_ITEM_TYPE_ENUM_KEY_RESULT}
+ option | formType | childType
+ ${'New objective'} | ${FORM_TYPES.create} | ${WORK_ITEM_TYPE_ENUM_OBJECTIVE}
+ ${'Existing objective'} | ${FORM_TYPES.add} | ${WORK_ITEM_TYPE_ENUM_OBJECTIVE}
+ ${'New key result'} | ${FORM_TYPES.create} | ${WORK_ITEM_TYPE_ENUM_KEY_RESULT}
+ ${'Existing key result'} | ${FORM_TYPES.add} | ${WORK_ITEM_TYPE_ENUM_KEY_RESULT}
`(
- 'when selecting $option from split button, renders the form passing $formType and $childType',
- async ({ event, formType, childType }) => {
+ 'when triggering action $option, renders the form passing $formType and $childType',
+ async ({ formType, childType }) => {
createComponent();
- findToggleFormSplitButton().vm.$emit(event);
+ wrapper.vm.showAddForm(formType, childType);
await nextTick();
expect(findForm().exists()).toBe(true);
@@ -122,7 +140,7 @@ describe('WorkItemTree', () => {
it('emits `addChild` event when form emits `addChild` event', async () => {
createComponent();
- findToggleFormSplitButton().vm.$emit('showCreateObjectiveForm');
+ wrapper.vm.showAddForm(FORM_TYPES.create, WORK_ITEM_TYPE_ENUM_OBJECTIVE);
await nextTick();
findForm().vm.$emit('addChild');
diff --git a/spec/frontend/work_items/components/work_item_notes_spec.js b/spec/frontend/work_items/components/work_item_notes_spec.js
index 9e02e0708d4..2620242000e 100644
--- a/spec/frontend/work_items/components/work_item_notes_spec.js
+++ b/spec/frontend/work_items/components/work_item_notes_spec.js
@@ -10,6 +10,7 @@ import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
import WorkItemDiscussion from '~/work_items/components/notes/work_item_discussion.vue';
import WorkItemAddNote from '~/work_items/components/notes/work_item_add_note.vue';
import WorkItemNotesActivityHeader from '~/work_items/components/notes/work_item_notes_activity_header.vue';
+import groupWorkItemNotesByIidQuery from '~/work_items/graphql/notes/group_work_item_notes_by_iid.query.graphql';
import workItemNotesByIidQuery from '~/work_items/graphql/notes/work_item_notes_by_iid.query.graphql';
import deleteWorkItemNoteMutation from '~/work_items/graphql/notes/delete_work_item_notes.mutation.graphql';
import workItemNoteCreatedSubscription from '~/work_items/graphql/notes/work_item_note_created.subscription.graphql';
@@ -63,6 +64,9 @@ describe('WorkItemNotes component', () => {
const findWorkItemCommentNoteAtIndex = (index) => findAllWorkItemCommentNotes().at(index);
const findDeleteNoteModal = () => wrapper.findComponent(GlModal);
+ const groupWorkItemNotesQueryHandler = jest
+ .fn()
+ .mockResolvedValue(mockWorkItemNotesByIidResponse);
const workItemNotesQueryHandler = jest.fn().mockResolvedValue(mockWorkItemNotesByIidResponse);
const workItemMoreNotesQueryHandler = jest.fn().mockResolvedValue(mockMoreWorkItemNotesResponse);
const workItemNotesWithCommentsQueryHandler = jest
@@ -87,17 +91,22 @@ describe('WorkItemNotes component', () => {
workItemIid = mockWorkItemIid,
defaultWorkItemNotesQueryHandler = workItemNotesQueryHandler,
deleteWINoteMutationHandler = deleteWorkItemNoteMutationSuccessHandler,
+ isGroup = false,
isModal = false,
isWorkItemConfidential = false,
} = {}) => {
wrapper = shallowMount(WorkItemNotes, {
apolloProvider: createMockApollo([
[workItemNotesByIidQuery, defaultWorkItemNotesQueryHandler],
+ [groupWorkItemNotesByIidQuery, groupWorkItemNotesQueryHandler],
[deleteWorkItemNoteMutation, deleteWINoteMutationHandler],
[workItemNoteCreatedSubscription, notesCreateSubscriptionHandler],
[workItemNoteUpdatedSubscription, notesUpdateSubscriptionHandler],
[workItemNoteDeletedSubscription, notesDeleteSubscriptionHandler],
]),
+ provide: {
+ isGroup,
+ },
propsData: {
fullPath: 'test-path',
workItemId,
@@ -354,4 +363,22 @@ describe('WorkItemNotes component', () => {
expect(findWorkItemCommentNoteAtIndex(0).props('isWorkItemConfidential')).toBe(true);
});
+
+ describe('when project context', () => {
+ it('calls the project work item query', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(workItemNotesQueryHandler).toHaveBeenCalled();
+ });
+ });
+
+ describe('when group context', () => {
+ it('calls the group work item query', async () => {
+ createComponent({ isGroup: true });
+ await waitForPromises();
+
+ expect(groupWorkItemNotesQueryHandler).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_parent_spec.js b/spec/frontend/work_items/components/work_item_parent_inline_spec.js
index 11fe6dffbfa..3e4f99d5935 100644
--- a/spec/frontend/work_items/components/work_item_parent_spec.js
+++ b/spec/frontend/work_items/components/work_item_parent_inline_spec.js
@@ -6,7 +6,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
-import WorkItemParent from '~/work_items/components/work_item_parent.vue';
+import WorkItemParentInline from '~/work_items/components/work_item_parent_inline.vue';
import { removeHierarchyChild } from '~/work_items/graphql/cache_utils';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import groupWorkItemsQuery from '~/work_items/graphql/group_work_items.query.graphql';
@@ -26,7 +26,7 @@ jest.mock('~/work_items/graphql/cache_utils', () => ({
removeHierarchyChild: jest.fn(),
}));
-describe('WorkItemParent component', () => {
+describe('WorkItemParentInline component', () => {
Vue.use(VueApollo);
let wrapper;
@@ -50,7 +50,7 @@ describe('WorkItemParent component', () => {
mutationHandler = successUpdateWorkItemMutationHandler,
isGroup = false,
} = {}) => {
- wrapper = shallowMountExtended(WorkItemParent, {
+ wrapper = shallowMountExtended(WorkItemParentInline, {
apolloProvider: createMockApollo([
[projectWorkItemsQuery, searchQueryHandler],
[groupWorkItemsQuery, groupWorkItemsSuccessHandler],
diff --git a/spec/frontend/work_items/components/work_item_parent_with_edit_spec.js b/spec/frontend/work_items/components/work_item_parent_with_edit_spec.js
new file mode 100644
index 00000000000..61e43456479
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_parent_with_edit_spec.js
@@ -0,0 +1,409 @@
+import { GlForm, GlCollapsibleListbox } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { __ } from '~/locale';
+import * as Sentry from '~/sentry/sentry_browser_wrapper';
+import WorkItemParent from '~/work_items/components/work_item_parent_with_edit.vue';
+import { removeHierarchyChild } from '~/work_items/graphql/cache_utils';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import groupWorkItemsQuery from '~/work_items/graphql/group_work_items.query.graphql';
+import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
+import { WORK_ITEM_TYPE_ENUM_OBJECTIVE } from '~/work_items/constants';
+
+import {
+ availableObjectivesResponse,
+ mockParentWidgetResponse,
+ updateWorkItemMutationResponseFactory,
+ searchedObjectiveResponse,
+ updateWorkItemMutationErrorResponse,
+} from '../mock_data';
+
+jest.mock('~/sentry/sentry_browser_wrapper');
+jest.mock('~/work_items/graphql/cache_utils', () => ({
+ removeHierarchyChild: jest.fn(),
+}));
+
+describe('WorkItemParent component', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+
+ const workItemId = 'gid://gitlab/WorkItem/1';
+ const workItemType = 'Objective';
+ const mockFullPath = 'full-path';
+
+ const groupWorkItemsSuccessHandler = jest.fn().mockResolvedValue(availableObjectivesResponse);
+ const availableWorkItemsSuccessHandler = jest.fn().mockResolvedValue(availableObjectivesResponse);
+ const availableWorkItemsFailureHandler = jest.fn().mockRejectedValue(new Error());
+
+ const findHeader = () => wrapper.find('h3');
+ const findEditButton = () => wrapper.find('[data-testid="edit-parent"]');
+ const findApplyButton = () => wrapper.find('[data-testid="apply-parent"]');
+
+ const findLoadingIcon = () => wrapper.find('[data-testid="loading-icon-parent"]');
+ const findLabel = () => wrapper.find('label');
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+
+ const successUpdateWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemMutationResponseFactory({ parent: mockParentWidgetResponse }));
+
+ const createComponent = ({
+ canUpdate = true,
+ parent = null,
+ searchQueryHandler = availableWorkItemsSuccessHandler,
+ mutationHandler = successUpdateWorkItemMutationHandler,
+ isEditing = false,
+ isGroup = false,
+ } = {}) => {
+ wrapper = mountExtended(WorkItemParent, {
+ apolloProvider: createMockApollo([
+ [projectWorkItemsQuery, searchQueryHandler],
+ [groupWorkItemsQuery, groupWorkItemsSuccessHandler],
+ [updateWorkItemMutation, mutationHandler],
+ ]),
+ provide: {
+ fullPath: mockFullPath,
+ isGroup,
+ },
+ propsData: {
+ canUpdate,
+ parent,
+ workItemId,
+ workItemType,
+ },
+ });
+
+ if (isEditing) {
+ findEditButton().trigger('click');
+ }
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('label', () => {
+ it('shows header when not editing', () => {
+ createComponent();
+
+ expect(findHeader().exists()).toBe(true);
+ expect(findHeader().classes('gl-sr-only')).toBe(false);
+ expect(findLabel().exists()).toBe(false);
+ });
+
+ it('shows label and hides header while editing', async () => {
+ createComponent({ isEditing: true });
+
+ await nextTick();
+
+ expect(findLabel().exists()).toBe(true);
+ expect(findHeader().classes('gl-sr-only')).toBe(true);
+ });
+ });
+
+ describe('edit button', () => {
+ it('is not shown if user cannot edit', () => {
+ createComponent({ canUpdate: false });
+
+ expect(findEditButton().exists()).toBe(false);
+ });
+
+ it('is shown if user can edit', () => {
+ createComponent({ canUpdate: true });
+
+ expect(findEditButton().exists()).toBe(true);
+ });
+
+ it('triggers edit mode on click', async () => {
+ createComponent();
+
+ findEditButton().trigger('click');
+
+ await nextTick();
+
+ expect(findLabel().exists()).toBe(true);
+ expect(findForm().exists()).toBe(true);
+ });
+
+ it('is replaced by Apply button while editing', async () => {
+ createComponent();
+
+ findEditButton().trigger('click');
+
+ await nextTick();
+
+ expect(findEditButton().exists()).toBe(false);
+ expect(findApplyButton().exists()).toBe(true);
+ });
+ });
+
+ describe('loading icon', () => {
+ const selectWorkItem = async (workItem) => {
+ await findCollapsibleListbox().vm.$emit('select', workItem);
+ };
+
+ it('shows loading icon while update is in progress', async () => {
+ createComponent();
+ findEditButton().trigger('click');
+
+ await nextTick();
+
+ selectWorkItem('gid://gitlab/WorkItem/716');
+
+ await nextTick();
+ expect(findLoadingIcon().exists()).toBe(true);
+ await waitForPromises();
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('shows loading icon when unassign is clicked', async () => {
+ createComponent({ parent: mockParentWidgetResponse });
+ findEditButton().trigger('click');
+
+ await nextTick();
+
+ findCollapsibleListbox().vm.$emit('reset');
+
+ await nextTick();
+ expect(findLoadingIcon().exists()).toBe(true);
+ await waitForPromises();
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('value', () => {
+ it('shows None when no parent is set', () => {
+ createComponent();
+
+ expect(wrapper.text()).toContain(__('None'));
+ });
+
+ it('shows parent when parent is set', () => {
+ createComponent({ parent: mockParentWidgetResponse });
+
+ expect(wrapper.text()).not.toContain(__('None'));
+ expect(wrapper.text()).toContain(mockParentWidgetResponse.title);
+ });
+ });
+
+ describe('form', () => {
+ it('is not shown while not editing', async () => {
+ await createComponent();
+
+ expect(findForm().exists()).toBe(false);
+ });
+
+ it('is shown while editing', async () => {
+ await createComponent({ isEditing: true });
+
+ expect(findForm().exists()).toBe(true);
+ });
+ });
+
+ describe('Parent Input', () => {
+ it('is not shown while not editing', async () => {
+ await createComponent();
+
+ expect(findCollapsibleListbox().exists()).toBe(false);
+ });
+
+ it('renders the collapsible listbox with required props', async () => {
+ await createComponent({ isEditing: true });
+
+ expect(findCollapsibleListbox().exists()).toBe(true);
+ expect(findCollapsibleListbox().props()).toMatchObject({
+ items: [],
+ headerText: 'Assign parent',
+ category: 'primary',
+ loading: false,
+ isCheckCentered: true,
+ searchable: true,
+ searching: false,
+ infiniteScroll: false,
+ noResultsText: 'No matching results',
+ toggleText: 'None',
+ searchPlaceholder: 'Search',
+ resetButtonLabel: 'Unassign',
+ });
+ });
+ it('shows loading while searching', async () => {
+ await createComponent({ isEditing: true });
+
+ await findCollapsibleListbox().vm.$emit('shown');
+ expect(findCollapsibleListbox().props('searching')).toBe(true);
+ });
+ });
+
+ describe('work items query', () => {
+ it('loads work items in the listbox', async () => {
+ await createComponent({ isEditing: true });
+ await findCollapsibleListbox().vm.$emit('shown');
+
+ await waitForPromises();
+
+ expect(findCollapsibleListbox().props('searching')).toBe(false);
+ expect(findCollapsibleListbox().props('items')).toStrictEqual([
+ { text: 'Objective 101', value: 'gid://gitlab/WorkItem/716' },
+ { text: 'Objective 103', value: 'gid://gitlab/WorkItem/712' },
+ { text: 'Objective 102', value: 'gid://gitlab/WorkItem/711' },
+ ]);
+ expect(availableWorkItemsSuccessHandler).toHaveBeenCalled();
+ });
+
+ it('emits error when the query fails', async () => {
+ await createComponent({
+ searchQueryHandler: availableWorkItemsFailureHandler,
+ isEditing: true,
+ });
+
+ await findCollapsibleListbox().vm.$emit('shown');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([
+ ['Something went wrong while fetching items. Please try again.'],
+ ]);
+ });
+
+ it('searches item when input data is entered', async () => {
+ const searchedItemQueryHandler = jest.fn().mockResolvedValue(searchedObjectiveResponse);
+ await createComponent({
+ searchQueryHandler: searchedItemQueryHandler,
+ isEditing: true,
+ });
+
+ await findCollapsibleListbox().vm.$emit('shown');
+
+ await waitForPromises();
+
+ expect(searchedItemQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'full-path',
+ searchTerm: '',
+ types: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
+ in: undefined,
+ iid: null,
+ isNumber: false,
+ });
+
+ await findCollapsibleListbox().vm.$emit('search', 'Objective 101');
+
+ expect(searchedItemQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'full-path',
+ searchTerm: 'Objective 101',
+ types: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
+ in: 'TITLE',
+ iid: null,
+ isNumber: false,
+ });
+
+ await nextTick();
+
+ expect(findCollapsibleListbox().props('items')).toStrictEqual([
+ { text: 'Objective 101', value: 'gid://gitlab/WorkItem/716' },
+ ]);
+ });
+ });
+
+ describe('listbox', () => {
+ const selectWorkItem = async (workItem) => {
+ await findCollapsibleListbox().vm.$emit('select', workItem);
+ };
+
+ it('calls mutation when item is selected', async () => {
+ await createComponent({ isEditing: true });
+ selectWorkItem('gid://gitlab/WorkItem/716');
+
+ await waitForPromises();
+
+ expect(successUpdateWorkItemMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/WorkItem/1',
+ hierarchyWidget: {
+ parentId: 'gid://gitlab/WorkItem/716',
+ },
+ },
+ });
+
+ expect(removeHierarchyChild).toHaveBeenCalledWith({
+ cache: expect.anything(Object),
+ fullPath: mockFullPath,
+ iid: undefined,
+ isGroup: false,
+ workItem: { id: 'gid://gitlab/WorkItem/1' },
+ });
+ });
+
+ it('calls mutation when item is unassigned', async () => {
+ const unAssignParentWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemMutationResponseFactory({ parent: null }));
+ await createComponent({
+ parent: {
+ iid: '1',
+ },
+ mutationHandler: unAssignParentWorkItemMutationHandler,
+ });
+
+ findEditButton().trigger('click');
+
+ await nextTick();
+
+ findCollapsibleListbox().vm.$emit('reset');
+
+ await waitForPromises();
+
+ expect(unAssignParentWorkItemMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/WorkItem/1',
+ hierarchyWidget: {
+ parentId: null,
+ },
+ },
+ });
+ expect(removeHierarchyChild).toHaveBeenCalledWith({
+ cache: expect.anything(Object),
+ fullPath: mockFullPath,
+ iid: '1',
+ isGroup: false,
+ workItem: { id: 'gid://gitlab/WorkItem/1' },
+ });
+ });
+
+ it('emits error when mutation fails', async () => {
+ await createComponent({
+ mutationHandler: jest.fn().mockResolvedValue(updateWorkItemMutationErrorResponse),
+ isEditing: true,
+ });
+
+ selectWorkItem('gid://gitlab/WorkItem/716');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([['Error!']]);
+ });
+
+ it('emits error and captures exception in sentry when network request fails', async () => {
+ const error = new Error('error');
+ await createComponent({
+ mutationHandler: jest.fn().mockRejectedValue(error),
+ isEditing: true,
+ });
+
+ selectWorkItem('gid://gitlab/WorkItem/716');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([
+ ['Something went wrong while updating the objective. Please try again.'],
+ ]);
+ expect(Sentry.captureException).toHaveBeenCalledWith(error);
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_state_toggle_button_spec.js b/spec/frontend/work_items/components/work_item_state_toggle_spec.js
index a210bd50422..a210bd50422 100644
--- a/spec/frontend/work_items/components/work_item_state_toggle_button_spec.js
+++ b/spec/frontend/work_items/components/work_item_state_toggle_spec.js
diff --git a/spec/frontend/work_items/components/work_item_sticky_header_spec.js b/spec/frontend/work_items/components/work_item_sticky_header_spec.js
new file mode 100644
index 00000000000..4b7818044b1
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_sticky_header_spec.js
@@ -0,0 +1,59 @@
+import { GlIntersectionObserver } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { STATE_OPEN } from '~/work_items/constants';
+import { workItemResponseFactory } from 'jest/work_items/mock_data';
+import WorkItemStickyHeader from '~/work_items/components/work_item_sticky_header.vue';
+import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
+import WorkItemActions from '~/work_items/components/work_item_actions.vue';
+import WorkItemTodos from '~/work_items/components/work_item_todos.vue';
+
+describe('WorkItemStickyHeader', () => {
+ let wrapper;
+
+ const workItemResponse = workItemResponseFactory({ canUpdate: true, confidential: true }).data
+ .workItem;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(WorkItemStickyHeader, {
+ propsData: {
+ workItem: workItemResponse,
+ fullPath: '/test',
+ isStickyHeaderShowing: true,
+ workItemNotificationsSubscribed: true,
+ updateInProgress: false,
+ parentWorkItemConfidentiality: false,
+ showWorkItemCurrentUserTodos: true,
+ isModal: false,
+ currentUserTodos: [],
+ workItemState: STATE_OPEN,
+ },
+ });
+ };
+ const findStickyHeader = () => wrapper.findByTestId('work-item-sticky-header');
+ const findConfidentialityBadge = () => wrapper.findComponent(ConfidentialityBadge);
+ const findWorkItemActions = () => wrapper.findComponent(WorkItemActions);
+ const findWorkItemTodos = () => wrapper.findComponent(WorkItemTodos);
+ const findIntersectionObserver = () => wrapper.findComponent(GlIntersectionObserver);
+ const triggerPageScroll = () => findIntersectionObserver().vm.$emit('disappear');
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('has the sticky header when the page is scrolled', async () => {
+ global.pageYOffset = 100;
+ triggerPageScroll();
+
+ await nextTick();
+
+ expect(findStickyHeader().exists()).toBe(true);
+ });
+
+ it('has the components of confidentiality, actions, todos and title', () => {
+ expect(findConfidentialityBadge().exists()).toBe(true);
+ expect(findWorkItemActions().exists()).toBe(true);
+ expect(findWorkItemTodos().exists()).toBe(true);
+ expect(wrapper.findByText(workItemResponse.title).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_title_spec.js b/spec/frontend/work_items/components/work_item_title_spec.js
index 0f466bcf691..de740e5fbc5 100644
--- a/spec/frontend/work_items/components/work_item_title_spec.js
+++ b/spec/frontend/work_items/components/work_item_title_spec.js
@@ -8,7 +8,6 @@ import ItemTitle from '~/work_items/components/item_title.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
-import updateWorkItemTaskMutation from '~/work_items/graphql/update_work_item_task.mutation.graphql';
import { updateWorkItemMutationResponse, workItemQueryResponse } from '../mock_data';
describe('WorkItemTitle component', () => {
@@ -20,22 +19,14 @@ describe('WorkItemTitle component', () => {
const findItemTitle = () => wrapper.findComponent(ItemTitle);
- const createComponent = ({
- workItemParentId,
- mutationHandler = mutationSuccessHandler,
- canUpdate = true,
- } = {}) => {
+ const createComponent = ({ mutationHandler = mutationSuccessHandler, canUpdate = true } = {}) => {
const { id, title, workItemType } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemTitle, {
- apolloProvider: createMockApollo([
- [updateWorkItemMutation, mutationHandler],
- [updateWorkItemTaskMutation, mutationHandler],
- ]),
+ apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
propsData: {
workItemId: id,
workItemTitle: title,
workItemType: workItemType.name,
- workItemParentId,
canUpdate,
},
});
@@ -77,27 +68,6 @@ describe('WorkItemTitle component', () => {
});
});
- it('calls WorkItemTaskUpdate if passed workItemParentId prop', () => {
- const title = 'new title!';
- const workItemParentId = '1234';
-
- createComponent({
- workItemParentId,
- });
-
- findItemTitle().vm.$emit('title-changed', title);
-
- expect(mutationSuccessHandler).toHaveBeenCalledWith({
- input: {
- id: workItemParentId,
- taskData: {
- id: workItemQueryResponse.data.workItem.id,
- title,
- },
- },
- });
- });
-
it('does not call a mutation when the title has not changed', () => {
createComponent();
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index 8df46403b90..9d4606eb95a 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -445,7 +445,7 @@ export const descriptionHtmlWithCheckboxes = `
</ul>
`;
-const taskType = {
+export const taskType = {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/5',
name: 'Task',
@@ -459,6 +459,20 @@ export const objectiveType = {
iconName: 'issue-type-objective',
};
+export const keyResultType = {
+ __typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/2411',
+ name: 'Key Result',
+ iconName: 'issue-type-keyresult',
+};
+
+export const issueType = {
+ __typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/2411',
+ name: 'Issue',
+ iconName: 'issue-type-issue',
+};
+
export const mockEmptyLinkedItems = {
type: WIDGET_TYPE_LINKED_ITEMS,
blocked: false,
@@ -3703,5 +3717,40 @@ export const updateWorkItemNotificationsMutationResponse = (subscribed) => ({
},
});
+export const allowedChildrenTypesResponse = {
+ data: {
+ workItem: {
+ id: 'gid://gitlab/WorkItem/634',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/6',
+ name: 'Objective',
+ widgetDefinitions: [
+ {
+ type: 'HIERARCHY',
+ allowedChildTypes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItems::Type/7',
+ name: 'Key Result',
+ __typename: 'WorkItemType',
+ },
+ {
+ id: 'gid://gitlab/WorkItems::Type/6',
+ name: 'Objective',
+ __typename: 'WorkItemType',
+ },
+ ],
+ __typename: 'WorkItemTypeConnection',
+ },
+ __typename: 'WorkItemWidgetDefinitionHierarchy',
+ },
+ ],
+ __typename: 'WorkItemType',
+ },
+ __typename: 'WorkItem',
+ },
+ },
+};
+
export const generateWorkItemsListWithId = (count) =>
Array.from({ length: count }, (_, i) => ({ id: `gid://gitlab/WorkItem/${i + 1}` }));
diff --git a/spec/frontend/work_items/notes/award_utils_spec.js b/spec/frontend/work_items/notes/award_utils_spec.js
index 8ae32ce5f40..43eceb13b67 100644
--- a/spec/frontend/work_items/notes/award_utils_spec.js
+++ b/spec/frontend/work_items/notes/award_utils_spec.js
@@ -2,6 +2,7 @@ import { getMutation, optimisticAwardUpdate } from '~/work_items/notes/award_uti
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import mockApollo from 'helpers/mock_apollo_helper';
import { __ } from '~/locale';
+import groupWorkItemNotesByIidQuery from '~/work_items/graphql/notes/group_work_item_notes_by_iid.query.graphql';
import workItemNotesByIidQuery from '~/work_items/graphql/notes/work_item_notes_by_iid.query.graphql';
import addAwardEmojiMutation from '~/work_items/graphql/notes/work_item_note_add_award_emoji.mutation.graphql';
import removeAwardEmojiMutation from '~/work_items/graphql/notes/work_item_note_remove_award_emoji.mutation.graphql';
@@ -105,5 +106,22 @@ describe('Work item note award utils', () => {
expect(updatedNote.awardEmoji.nodes).toEqual([]);
});
+
+ it.each`
+ description | isGroup | query
+ ${'calls project query when in project context'} | ${false} | ${workItemNotesByIidQuery}
+ ${'calls group query when in group context'} | ${true} | ${groupWorkItemNotesByIidQuery}
+ `('$description', ({ isGroup, query }) => {
+ const note = firstNote;
+ const { name } = mockAwardEmojiThumbsUp;
+ const cacheSpy = { updateQuery: jest.fn() };
+
+ optimisticAwardUpdate({ note, name, fullPath, isGroup, workItemIid })(cacheSpy);
+
+ expect(cacheSpy.updateQuery).toHaveBeenCalledWith(
+ { query, variables: { fullPath, iid: workItemIid } },
+ expect.any(Function),
+ );
+ });
});
});
diff --git a/spec/frontend/work_items/pages/create_work_item_spec.js b/spec/frontend/work_items/pages/create_work_item_spec.js
index 527f5890338..2c898f97ee9 100644
--- a/spec/frontend/work_items/pages/create_work_item_spec.js
+++ b/spec/frontend/work_items/pages/create_work_item_spec.js
@@ -8,7 +8,6 @@ import CreateWorkItem from '~/work_items/pages/create_work_item.vue';
import ItemTitle from '~/work_items/components/item_title.vue';
import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
import createWorkItemMutation from '~/work_items/graphql/create_work_item.mutation.graphql';
-import createWorkItemFromTaskMutation from '~/work_items/graphql/create_work_item_from_task.mutation.graphql';
import { projectWorkItemTypesQueryResponse, createWorkItemMutationResponse } from '../mock_data';
jest.mock('~/lib/utils/uuids', () => ({ uuids: () => ['testuuid'] }));
@@ -42,7 +41,6 @@ describe('Create work item component', () => {
[
[projectWorkItemTypesQuery, queryHandler],
[createWorkItemMutation, mutationHandler],
- [createWorkItemFromTaskMutation, mutationHandler],
],
{},
{ typePolicies: { Project: { merge: true } } },
diff --git a/spec/frontend/work_items/pages/work_item_root_spec.js b/spec/frontend/work_items/pages/work_item_root_spec.js
index 84b10f30418..4854b5bfb77 100644
--- a/spec/frontend/work_items/pages/work_item_root_spec.js
+++ b/spec/frontend/work_items/pages/work_item_root_spec.js
@@ -49,7 +49,6 @@ describe('Work items root component', () => {
expect(findWorkItemDetail().props()).toEqual({
isModal: false,
- workItemParentId: null,
workItemIid: '1',
});
});
diff --git a/spec/frontend_integration/fly_out_nav_browser_spec.js b/spec/frontend_integration/fly_out_nav_browser_spec.js
deleted file mode 100644
index 07ddc0220e6..00000000000
--- a/spec/frontend_integration/fly_out_nav_browser_spec.js
+++ /dev/null
@@ -1,366 +0,0 @@
-import { GlBreakpointInstance } from '@gitlab/ui/dist/utils';
-import { SIDEBAR_COLLAPSED_CLASS } from '~/contextual_sidebar';
-import {
- calculateTop,
- showSubLevelItems,
- canShowSubItems,
- canShowActiveSubItems,
- mouseEnterTopItems,
- mouseLeaveTopItem,
- getOpenMenu,
- setOpenMenu,
- mousePos,
- getHideSubItemsInterval,
- documentMouseMove,
- getHeaderHeight,
- setSidebar,
- subItemsMouseLeave,
-} from '~/fly_out_nav';
-
-describe('Fly out sidebar navigation', () => {
- let el;
- let breakpointSize = 'lg';
-
- const OLD_SIDEBAR_WIDTH = 200;
- const CONTAINER_INITIAL_BOUNDING_RECT = {
- x: 8,
- y: 8,
- width: 769,
- height: 0,
- top: 8,
- right: 777,
- bottom: 8,
- left: 8,
- };
- const SUB_ITEMS_INITIAL_BOUNDING_RECT = {
- x: 148,
- y: 8,
- width: 0,
- height: 150,
- top: 8,
- right: 148,
- bottom: 158,
- left: 148,
- };
- const mockBoundingClientRect = (elem, rect) => {
- jest.spyOn(elem, 'getBoundingClientRect').mockReturnValue(rect);
- };
-
- const findSubItems = () => document.querySelector('.sidebar-sub-level-items');
- const mockBoundingRects = () => {
- const subItems = findSubItems();
- mockBoundingClientRect(el, CONTAINER_INITIAL_BOUNDING_RECT);
- mockBoundingClientRect(subItems, SUB_ITEMS_INITIAL_BOUNDING_RECT);
- };
- const mockSidebarFragment = (styleProps = '') =>
- `<div class="sidebar-sub-level-items" style="${styleProps}"></div>`;
-
- beforeEach(() => {
- el = document.createElement('div');
- el.style.position = 'relative';
- document.body.appendChild(el);
-
- jest.spyOn(GlBreakpointInstance, 'getBreakpointSize').mockImplementation(() => breakpointSize);
- });
-
- afterEach(() => {
- document.body.innerHTML = '';
- breakpointSize = 'lg';
- mousePos.length = 0;
-
- setSidebar(null);
- });
-
- describe('calculateTop', () => {
- it('returns boundingRect top', () => {
- const boundingRect = {
- top: 100,
- height: 100,
- };
-
- expect(calculateTop(boundingRect, 100)).toBe(100);
- });
- });
-
- describe('getHideSubItemsInterval', () => {
- beforeEach(() => {
- el.innerHTML = mockSidebarFragment('position: fixed; top: 0; left: 100px; height: 150px;');
- mockBoundingRects();
- });
-
- it('returns 0 if currentOpenMenu is nil', () => {
- setOpenMenu(null);
- expect(getHideSubItemsInterval()).toBe(0);
- });
-
- it('returns 0 if mousePos is empty', () => {
- expect(getHideSubItemsInterval()).toBe(0);
- });
-
- it('returns 0 when mouse above sub-items', () => {
- showSubLevelItems(el);
- documentMouseMove({
- clientX: el.getBoundingClientRect().left,
- clientY: el.getBoundingClientRect().top,
- });
- documentMouseMove({
- clientX: el.getBoundingClientRect().left,
- clientY: el.getBoundingClientRect().top - 50,
- });
-
- expect(getHideSubItemsInterval()).toBe(0);
- });
-
- it('returns 0 when mouse is below sub-items', () => {
- const subItems = findSubItems();
-
- showSubLevelItems(el);
- documentMouseMove({
- clientX: el.getBoundingClientRect().left,
- clientY: el.getBoundingClientRect().top,
- });
- documentMouseMove({
- clientX: el.getBoundingClientRect().left,
- clientY: el.getBoundingClientRect().top - subItems.getBoundingClientRect().height + 50,
- });
-
- expect(getHideSubItemsInterval()).toBe(0);
- });
-
- it('returns 300 when mouse is moved towards sub-items', () => {
- documentMouseMove({
- clientX: el.getBoundingClientRect().left,
- clientY: el.getBoundingClientRect().top,
- });
-
- showSubLevelItems(el);
- documentMouseMove({
- clientX: el.getBoundingClientRect().left + 20,
- clientY: el.getBoundingClientRect().top + 10,
- });
-
- expect(getHideSubItemsInterval()).toBe(300);
- });
- });
-
- describe('mouseLeaveTopItem', () => {
- beforeEach(() => {
- jest.spyOn(el.classList, 'remove');
- });
-
- it('removes is-over class if currentOpenMenu is null', () => {
- setOpenMenu(null);
-
- mouseLeaveTopItem(el);
-
- expect(el.classList.remove).toHaveBeenCalledWith('is-over');
- });
-
- it('removes is-over class if currentOpenMenu is null & there are sub-items', () => {
- setOpenMenu(null);
- el.innerHTML = mockSidebarFragment('position: absolute');
-
- mouseLeaveTopItem(el);
-
- expect(el.classList.remove).toHaveBeenCalledWith('is-over');
- });
-
- it('does not remove is-over class if currentOpenMenu is the passed in sub-items', () => {
- setOpenMenu(null);
- el.innerHTML = mockSidebarFragment('position: absolute');
-
- setOpenMenu(findSubItems());
- mouseLeaveTopItem(el);
-
- expect(el.classList.remove).not.toHaveBeenCalled();
- });
- });
-
- describe('mouseEnterTopItems', () => {
- beforeEach(() => {
- el.innerHTML = mockSidebarFragment(
- `position: absolute; top: 0; left: 100px; height: ${OLD_SIDEBAR_WIDTH}px;`,
- );
- mockBoundingRects();
- });
-
- it('shows sub-items after 0ms if no menu is open', () => {
- const subItems = findSubItems();
- mouseEnterTopItems(el);
-
- expect(getHideSubItemsInterval()).toBe(0);
-
- return new Promise((resolve) => {
- setTimeout(() => {
- expect(subItems.style.display).toBe('block');
- resolve();
- });
- });
- });
-
- it('shows sub-items after 300ms if a menu is currently open', () => {
- const subItems = findSubItems();
-
- documentMouseMove({
- clientX: el.getBoundingClientRect().left,
- clientY: el.getBoundingClientRect().top,
- });
-
- setOpenMenu(subItems);
-
- documentMouseMove({
- clientX: el.getBoundingClientRect().left + 20,
- clientY: el.getBoundingClientRect().top + 10,
- });
-
- mouseEnterTopItems(el, 0);
-
- return new Promise((resolve) => {
- setTimeout(() => {
- expect(subItems.style.display).toBe('block');
- resolve();
- });
- });
- });
- });
-
- describe('showSubLevelItems', () => {
- beforeEach(() => {
- el.innerHTML = mockSidebarFragment('position: absolute');
- });
-
- it('adds is-over class to el', () => {
- jest.spyOn(el.classList, 'add');
-
- showSubLevelItems(el);
-
- expect(el.classList.add).toHaveBeenCalledWith('is-over');
- });
-
- it('does not show sub-items on mobile', () => {
- breakpointSize = 'xs';
-
- showSubLevelItems(el);
-
- expect(findSubItems().style.display).not.toBe('block');
- });
-
- it('shows sub-items', () => {
- showSubLevelItems(el);
-
- expect(findSubItems().style.display).toBe('block');
- });
-
- it('shows collapsed only sub-items if icon only sidebar', () => {
- const subItems = findSubItems();
- const sidebar = document.createElement('div');
- sidebar.classList.add(SIDEBAR_COLLAPSED_CLASS);
- subItems.classList.add('is-fly-out-only');
-
- setSidebar(sidebar);
-
- showSubLevelItems(el);
-
- expect(findSubItems().style.display).toBe('block');
- });
-
- it('does not show collapsed only sub-items if icon only sidebar', () => {
- const subItems = findSubItems();
- subItems.classList.add('is-fly-out-only');
-
- showSubLevelItems(el);
-
- expect(subItems.style.display).not.toBe('block');
- });
-
- it('sets transform of sub-items', () => {
- const sidebar = document.createElement('div');
- const subItems = findSubItems();
-
- sidebar.style.width = `${OLD_SIDEBAR_WIDTH}px`;
-
- document.body.appendChild(sidebar);
-
- setSidebar(sidebar);
- showSubLevelItems(el);
-
- expect(subItems.style.transform).toBe(
- `translate3d(${OLD_SIDEBAR_WIDTH}px, ${
- Math.floor(el.getBoundingClientRect().top) - getHeaderHeight()
- }px, 0)`,
- );
- });
-
- it('sets is-above when element is above', () => {
- const subItems = findSubItems();
- mockBoundingRects();
-
- subItems.style.height = `${window.innerHeight + el.offsetHeight}px`;
- el.style.top = `${window.innerHeight - el.offsetHeight}px`;
-
- jest.spyOn(subItems.classList, 'add');
-
- showSubLevelItems(el);
-
- expect(subItems.classList.add).toHaveBeenCalledWith('is-above');
- });
- });
-
- describe('canShowSubItems', () => {
- it('returns true if on desktop size', () => {
- expect(canShowSubItems()).toBe(true);
- });
-
- it('returns false if on mobile size', () => {
- breakpointSize = 'xs';
-
- expect(canShowSubItems()).toBe(false);
- });
- });
-
- describe('canShowActiveSubItems', () => {
- it('returns true by default', () => {
- expect(canShowActiveSubItems(el)).toBe(true);
- });
-
- it('returns false when active & expanded sidebar', () => {
- const sidebar = document.createElement('div');
- el.classList.add('active');
-
- setSidebar(sidebar);
-
- expect(canShowActiveSubItems(el)).toBe(false);
- });
-
- it('returns true when active & collapsed sidebar', () => {
- const sidebar = document.createElement('div');
- sidebar.classList.add(SIDEBAR_COLLAPSED_CLASS);
- el.classList.add('active');
-
- setSidebar(sidebar);
-
- expect(canShowActiveSubItems(el)).toBe(true);
- });
- });
-
- describe('subItemsMouseLeave', () => {
- beforeEach(() => {
- el.innerHTML = mockSidebarFragment('position: absolute');
-
- setOpenMenu(findSubItems());
- });
-
- it('hides subMenu if element is not hovered', () => {
- subItemsMouseLeave(el);
-
- expect(getOpenMenu()).toBeNull();
- });
-
- it('does not hide subMenu if element is hovered', () => {
- el.classList.add('is-over');
- subItemsMouseLeave(el);
-
- expect(getOpenMenu()).not.toBeNull();
- });
- });
-});
diff --git a/spec/graphql/mutations/ci/runner/bulk_delete_spec.rb b/spec/graphql/mutations/ci/runner/bulk_delete_spec.rb
index aaa74fa78aa..0dcfaa9b8ac 100644
--- a/spec/graphql/mutations/ci/runner/bulk_delete_spec.rb
+++ b/spec/graphql/mutations/ci/runner/bulk_delete_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::Ci::Runner::BulkDelete, feature_category: :runner_fleet do
+RSpec.describe Mutations::Ci::Runner::BulkDelete, feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:admin_user) { create(:user, :admin) }
diff --git a/spec/graphql/mutations/ci/runner/delete_spec.rb b/spec/graphql/mutations/ci/runner/delete_spec.rb
index beff18e1dfd..3617c751165 100644
--- a/spec/graphql/mutations/ci/runner/delete_spec.rb
+++ b/spec/graphql/mutations/ci/runner/delete_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::Ci::Runner::Delete, feature_category: :runner_fleet do
+RSpec.describe Mutations::Ci::Runner::Delete, feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:runner) { create(:ci_runner) }
diff --git a/spec/graphql/mutations/ci/runner/update_spec.rb b/spec/graphql/mutations/ci/runner/update_spec.rb
index 03bfd4d738b..32c4e6952e4 100644
--- a/spec/graphql/mutations/ci/runner/update_spec.rb
+++ b/spec/graphql/mutations/ci/runner/update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::Ci::Runner::Update, feature_category: :runner_fleet do
+RSpec.describe Mutations::Ci::Runner::Update, feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/graphql/mutations/design_management/delete_spec.rb b/spec/graphql/mutations/design_management/delete_spec.rb
index 1b78529fbc7..7f499301543 100644
--- a/spec/graphql/mutations/design_management/delete_spec.rb
+++ b/spec/graphql/mutations/design_management/delete_spec.rb
@@ -86,46 +86,47 @@ RSpec.describe Mutations::DesignManagement::Delete do
end
end
- it 'runs no more than 31 queries' do
+ it 'runs no more than 34 queries' do
allow(Gitlab::Tracking).to receive(:event) # rubocop:disable RSpec/ExpectGitlabTracking
filenames.each(&:present?) # ignore setup
- # Queries: as of 2022-09-08
+ # Queries: as of 2022-12-01
# -------------
- # 01. routing query
- # 02. policy query: find namespace by type and id
- # 03. policy query: find namespace by id
- # 04. policy query: project.project_feature
- # 05,06. project.authorizations for user (same query twice)
- # 07. find issue by iid
- # 08. find project by id
- # 09. find namespace by id
- # 10. find group namespace by id
- # 11. policy query: find namespace by id (same query as 3)
- # 12. project.authorizations for user (same query as 5)
- # 13. find user by id
- # 14. project.project_features (same query as 3)
- # 15. project.authorizations for user (same query as 5)
- # 16. current designs by filename and issue
- # 17, 18 project.authorizations for user (same query as 5)
- # 19. find design_management_repository for project
- # 20. find route by id and source_type
+ # 01. for routes to find routes.source_id of projects matching paths
+ # 02. Find projects with the above source id.
+ # 03. preload routes of the above projects
+ # 04. policy query: find namespace by type and id
+ # 05. policy query: namespace_bans
+ # 06. policy query: project.project_feature
+ # 07,08. project.authorizations for user (same query twice)
+ # 09. find issue by iid
+ # 10. find project by id
+ # 11. find namespace by id
+ # 12. policy query: find namespace by type and id (same query as 4)
+ # 13. project.authorizations for user (same query as 7)
+ # 14. find user by id
+ # 15. project.project_features (same query as 6)
+ # 16. project.authorizations for user (same query as 7)
+ # 17. current designs by filename and issue
+ # 18, 19 project.authorizations for user (same query as 7)
+ # 20. find design_management_repository for project
+ # 21. find route by source_id and source_type
# ------------- our queries are below:
- # 21. start transaction
- # 22. create version with sha and issue
- # 23. create design-version links
- # 24. validate version.actions.present?
- # 25. validate version.sha is unique
- # 26. validate version.issue.present?
- # 27. leave transaction
- # 28. find project by id (same query as 8)
- # 29. find namespace by id (same query as 9)
- # 30. find project by id (same query as 8)
- # 31. find project by id (same query as 8)
- # 32. create event
- # 33. find plan for standard context
+ # 22. start transaction
+ # 23. create version with sha and issue
+ # 24. create design-version links
+ # 25. validate version.actions.present?
+ # 26. validate version.sha is unique
+ # 27. validate version.issue.present?
+ # 28. leave transaction
+ # 29. find project by id (same query as 10)
+ # 30. find namespace by id (same query as 11)
+ # 31. find project by id (same query as 10)
+ # 32. find project by id (same query as 10)
+ # 33. create event
+ # 34. find plan for standard context
#
- expect { run_mutation }.not_to exceed_query_limit(33)
+ expect { run_mutation }.not_to exceed_query_limit(34)
end
end
diff --git a/spec/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
index f4e79481d44..b184baaca3e 100644
--- a/spec/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -38,7 +38,8 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
npm_package_requests_forwarding: nil,
lock_npm_package_requests_forwarding: false,
pypi_package_requests_forwarding: nil,
- lock_pypi_package_requests_forwarding: false
+ lock_pypi_package_requests_forwarding: false,
+ nuget_symbol_server_enabled: false
}, to: {
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'RELEASE',
@@ -51,7 +52,8 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
npm_package_requests_forwarding: true,
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
- lock_pypi_package_requests_forwarding: true
+ lock_pypi_package_requests_forwarding: true,
+ nuget_symbol_server_enabled: true
}
it_behaves_like 'returning a success'
@@ -106,7 +108,8 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
npm_package_requests_forwarding: true,
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
- lock_pypi_package_requests_forwarding: true
+ lock_pypi_package_requests_forwarding: true,
+ nuget_symbol_server_enabled: true
}
end
diff --git a/spec/graphql/mutations/projects/star_spec.rb b/spec/graphql/mutations/projects/star_spec.rb
new file mode 100644
index 00000000000..6b1811dcd39
--- /dev/null
+++ b/spec/graphql/mutations/projects/star_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Projects::Star, feature_category: :groups_and_projects do
+ describe '#resolve' do
+ let_it_be(:user, freeze: true) { create(:user) }
+
+ subject(:mutation) do
+ described_class
+ .new(object: nil, context: { current_user: user }, field: nil)
+ .resolve(project_id: project.to_global_id, starred: starred)
+ end
+
+ context 'when the user has read access to the project' do
+ let_it_be_with_reload(:project) { create(:project, :public) }
+
+ context 'and the project is not starred' do
+ context 'and the user stars the project' do
+ let(:starred) { true }
+
+ it 'stars the project for the current user' do
+ expect(mutation).to include(count: 1)
+ expect(project.reset.starrers).to include(user)
+ end
+ end
+
+ context 'and the user unstars the project' do
+ let(:starred) { false }
+
+ it 'does not raise an error or change the number of stars' do
+ expect(mutation).to include(count: 0)
+ expect(project.reset.starrers).not_to include(user)
+ end
+ end
+ end
+
+ context 'and the project is starred' do
+ before do
+ user.toggle_star(project)
+ end
+
+ context 'and the user stars the project' do
+ let(:starred) { true }
+
+ it 'does not raise an error or change the number of stars' do
+ expect(mutation).to include(count: 1)
+ expect(project.reset.starrers).to include(user)
+ end
+ end
+
+ context 'and the user unstars the project' do
+ let(:starred) { false }
+
+ it 'unstars the project for the current user' do
+ expect(mutation).to include(count: 0)
+ expect(project.reset.starrers).not_to include(user)
+ end
+ end
+ end
+ end
+
+ context 'when the user does not have read access to the project' do
+ let_it_be(:project, freeze: true) { create(:project, :private) }
+ let(:starred) { true }
+
+ it 'raises an error' do
+ expect { mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect(project.starrers).not_to include(user)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/blame_resolver_spec.rb b/spec/graphql/resolvers/blame_resolver_spec.rb
index a3344132928..ff6bfa97def 100644
--- a/spec/graphql/resolvers/blame_resolver_spec.rb
+++ b/spec/graphql/resolvers/blame_resolver_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Resolvers::BlameResolver, feature_category: :source_code_manageme
let(:path) { 'files/ruby/popen.rb' }
let(:commit) { project.commit('master') }
let(:blob) { project.repository.blob_at(commit.id, path) }
- let(:args) { { from_line: 1, to_line: 2 } }
+ let(:args) { { from_line: 1, to_line: 100 } }
subject(:resolve_blame) { resolve(described_class, obj: blob, args: args, ctx: { current_user: user }) }
@@ -39,10 +39,10 @@ RSpec.describe Resolvers::BlameResolver, feature_category: :source_code_manageme
end
end
- shared_examples 'argument error' do
+ shared_examples 'argument error' do |error_message|
it 'raises an ArgumentError' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError,
- '`from_line` and `to_line` must be greater than or equal to 1') do
+ error_message) do
resolve_blame
end
end
@@ -52,23 +52,33 @@ RSpec.describe Resolvers::BlameResolver, feature_category: :source_code_manageme
context 'when from_line is below 1' do
let(:args) { { from_line: 0, to_line: 2 } }
- it_behaves_like 'argument error'
+ it_behaves_like 'argument error', '`from_line` and `to_line` must be greater than or equal to 1'
end
context 'when to_line is below 1' do
let(:args) { { from_line: 1, to_line: 0 } }
- it_behaves_like 'argument error'
+ it_behaves_like 'argument error', '`from_line` and `to_line` must be greater than or equal to 1'
end
context 'when to_line less than from_line' do
let(:args) { { from_line: 3, to_line: 1 } }
+ it_behaves_like 'argument error', '`to_line` must be greater than or equal to `from_line`'
+ end
+
+ context 'when difference between to_line and from_line is greater then 99' do
+ let(:args) { { from_line: 3, to_line: 103 } }
+
+ it_behaves_like 'argument error',
+ '`to_line` must be greater than or equal to `from_line` and smaller than `from_line` + 100'
+ end
+
+ context 'when to_line and from_line are the same' do
+ let(:args) { { from_line: 1, to_line: 1 } }
+
it 'returns blame object' do
- expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError,
- '`to_line` must be greater than or equal to `from_line`') do
- resolve_blame
- end
+ expect(resolve_blame).to be_an_instance_of(Gitlab::Blame)
end
end
diff --git a/spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb b/spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb
index 19fc0c7fc4c..313d1d337da 100644
--- a/spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
let_it_be(:namespace) { create(:group) }
let_it_be(:project) { create(:project, :private, namespace: namespace) }
- let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+ let_it_be(:resource) { create(:ci_catalog_resource, :published, project: project) }
let_it_be(:user) { create(:user) }
describe '#resolve' do
@@ -20,7 +20,7 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
context 'when resource is found' do
it 'returns a single CI/CD Catalog resource' do
result = resolve(described_class, ctx: { current_user: user },
- args: { id: resource.to_global_id.to_s })
+ args: { id: resource.to_global_id })
expect(result.id).to eq(resource.id)
expect(result.class).to eq(Ci::Catalog::Resource)
@@ -30,7 +30,9 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
context 'when resource is not found' do
it 'raises ResourceNotAvailable error' do
result = resolve(described_class, ctx: { current_user: user },
- args: { id: "gid://gitlab/Ci::Catalog::Resource/not-a-real-id" })
+ args: { id: GlobalID.new(
+ ::Gitlab::GlobalId.build(model_name: '::Ci::Catalog::Resource', id: "not-a-real-id")
+ ) })
expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
end
@@ -40,7 +42,7 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
context 'when user is not authorised to view the resource' do
it 'raises ResourceNotAvailable error' do
result = resolve(described_class, ctx: { current_user: user },
- args: { id: resource.to_global_id.to_s })
+ args: { id: resource.to_global_id })
expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
end
@@ -115,7 +117,7 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
expect_graphql_error_to_be_created(::Gitlab::Graphql::Errors::ArgumentError,
"Exactly one of 'id' or 'full_path' arguments is required.") do
resolve(described_class, ctx: { current_user: user },
- args: { full_path: resource.project.full_path, id: resource.to_global_id.to_s })
+ args: { full_path: resource.project.full_path, id: resource.to_global_id })
end
end
end
diff --git a/spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb b/spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb
new file mode 100644
index 00000000000..1ce0e91765f
--- /dev/null
+++ b/spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::Catalog::Resources::VersionsResolver, feature_category: :pipeline_composition do
+ include GraphqlHelpers
+
+ include_context 'when there are catalog resources with versions'
+
+ let(:sort) { nil }
+ let(:args) { { sort: sort }.compact }
+ let(:ctx) { { current_user: current_user } }
+
+ subject(:result) { resolve(described_class, ctx: ctx, obj: resource1, args: args) }
+
+ describe '#resolve' do
+ context 'when the user is authorized to read project releases' do
+ before_all do
+ resource1.project.add_guest(current_user)
+ end
+
+ context 'when sort argument is not provided' do
+ it 'returns versions ordered by released_at descending' do
+ expect(result.items).to eq([v1_1, v1_0])
+ end
+ end
+
+ context 'when sort argument is provided' do
+ context 'when sort is CREATED_ASC' do
+ let(:sort) { 'CREATED_ASC' }
+
+ it 'returns versions ordered by created_at ascending' do
+ expect(result.items.to_a).to eq([v1_1, v1_0])
+ end
+ end
+
+ context 'when sort is CREATED_DESC' do
+ let(:sort) { 'CREATED_DESC' }
+
+ it 'returns versions ordered by created_at descending' do
+ expect(result.items).to eq([v1_0, v1_1])
+ end
+ end
+
+ context 'when sort is RELEASED_AT_ASC' do
+ let(:sort) { 'RELEASED_AT_ASC' }
+
+ it 'returns versions ordered by released_at ascending' do
+ expect(result.items).to eq([v1_0, v1_1])
+ end
+ end
+
+ context 'when sort is RELEASED_AT_DESC' do
+ let(:sort) { 'RELEASED_AT_DESC' }
+
+ it 'returns versions ordered by released_at descending' do
+ expect(result.items).to eq([v1_1, v1_0])
+ end
+ end
+ end
+ end
+
+ context 'when the user is not authorized to read project releases' do
+ it 'returns empty response' do
+ expect(result).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/ci/catalog/resources_resolver_spec.rb b/spec/graphql/resolvers/ci/catalog/resources_resolver_spec.rb
index 97105db686f..a55724b5611 100644
--- a/spec/graphql/resolvers/ci/catalog/resources_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/catalog/resources_resolver_spec.rb
@@ -6,23 +6,31 @@ RSpec.describe Resolvers::Ci::Catalog::ResourcesResolver, feature_category: :pip
include GraphqlHelpers
let_it_be(:namespace) { create(:group) }
- let_it_be(:project_1) { create(:project, name: 'Z', namespace: namespace) }
- let_it_be(:project_2) { create(:project, name: 'A_Test', namespace: namespace) }
- let_it_be(:project_3) { create(:project, name: 'L', description: 'Test', namespace: namespace) }
- let_it_be(:resource_1) { create(:ci_catalog_resource, project: project_1) }
- let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2) }
- let_it_be(:resource_3) { create(:ci_catalog_resource, project: project_3) }
+ let_it_be(:private_namespace_project) { create(:project, :private, name: 'z private test', namespace: namespace) }
+ let_it_be(:private_namespace_project_2) { create(:project, :private, name: 'a private test', namespace: namespace) }
+ let_it_be(:public_namespace_project) do
+ create(:project, :public, name: 'public', description: 'Test', namespace: namespace)
+ end
+
+ let_it_be(:internal_project) { create(:project, :internal, name: 'internal') }
+ let_it_be(:private_resource) { create(:ci_catalog_resource, :published, project: private_namespace_project) }
+ let_it_be(:private_resource_2) { create(:ci_catalog_resource, project: private_namespace_project_2) }
+ let_it_be(:public_resource) { create(:ci_catalog_resource, :published, project: public_namespace_project) }
+ let_it_be(:internal_resource) { create(:ci_catalog_resource, :published, project: internal_project) }
let_it_be(:user) { create(:user) }
let(:ctx) { { current_user: user } }
let(:search) { nil }
let(:sort) { nil }
+ let(:scope) { nil }
+ let(:project_path) { nil }
let(:args) do
{
- project_path: project_1.full_path,
+ project_path: project_path,
sort: sort,
- search: search
+ search: search,
+ scope: scope
}.compact
end
@@ -31,40 +39,89 @@ RSpec.describe Resolvers::Ci::Catalog::ResourcesResolver, feature_category: :pip
describe '#resolve' do
context 'with an authorized user' do
before_all do
- namespace.add_owner(user)
+ namespace.add_reporter(user)
+ internal_project.add_reporter(user)
end
- it 'returns all catalog resources visible to the current user in the namespace' do
- expect(result.items.count).to be(3)
- expect(result.items.pluck(:name)).to contain_exactly('Z', 'A_Test', 'L')
+ context 'when the project path argument is provided' do
+ let(:project_path) { private_namespace_project.full_path }
+
+ it 'returns all catalog resources visible to the current user in the namespace' do
+ expect(result.items.count).to be(2)
+ expect(result.items.pluck(:name)).to contain_exactly('z private test', 'public')
+ end
end
- context 'when the sort parameter is not provided' do
+ context 'when sort argument is not provided' do
it 'returns all catalog resources sorted by descending created date' do
- expect(result.items.pluck(:name)).to eq(%w[L A_Test Z])
+ expect(result.items.pluck(:name)).to eq(['internal', 'public', 'z private test'])
end
end
- context 'when the sort parameter is provided' do
+ context 'when the sort argument is provided' do
let(:sort) { 'NAME_DESC' }
- it 'returns all catalog resources sorted by descending name' do
- expect(result.items.pluck(:name)).to eq(%w[Z L A_Test])
+ it 'returns all published catalog resources sorted by descending name' do
+ expect(result.items.pluck(:name)).to eq(['z private test', 'public', 'internal'])
end
end
- context 'when the search parameter is provided' do
+ context 'when the search argument is provided' do
let(:search) { 'test' }
- it 'returns the catalog resources that match the search term' do
- expect(result.items.pluck(:name)).to contain_exactly('A_Test', 'L')
+ it 'returns published catalog resources that match the search term' do
+ expect(result.items.pluck(:name)).to contain_exactly('z private test', 'public')
+ end
+ end
+
+ context 'with scope argument' do
+ it 'defaults to :all and returns all catalog resources' do
+ expect(result.items.count).to be(3)
+ expect(result.items.pluck(:name)).to contain_exactly('public', 'internal', 'z private test')
+ end
+
+ context 'when the scope argument is :namespaces' do
+ let(:scope) { 'NAMESPACES' }
+
+ it 'returns projects of the namespaces the user is a member of' do
+ namespace = create(:namespace, owner: user)
+ internal_public_project = create(:project, :internal, name: 'internal public', namespace: namespace)
+ create(:ci_catalog_resource, :published, project: internal_public_project)
+
+ expect(result.items.count).to be(4)
+ expect(result.items.pluck(:name)).to contain_exactly('public', 'internal public', 'internal',
+ 'z private test')
+ end
+ end
+
+ context 'and the ci_guard_for_catalog_resource_scope FF is disabled' do
+ before do
+ stub_feature_flags(ci_guard_for_catalog_resource_scope: false)
+ end
+
+ it 'returns all the catalog resources' do
+ expect(result.items.count).to be(3)
+ expect(result.items.pluck(:name)).to contain_exactly('public', 'internal', 'z private test')
+ end
+ end
+
+ context 'when the scope is invalid' do
+ let(:scope) { 'INVALID' }
+
+ it 'defaults to :all and returns all catalog resources' do
+ expect(result.items.count).to be(3)
+ expect(result.items.pluck(:name)).to contain_exactly('public', 'internal', 'z private test')
+ end
end
end
end
- context 'when the current user cannot read the namespace catalog' do
- it 'returns empty response' do
- expect(result).to be_empty
+ context 'when the user is anonymous' do
+ let_it_be(:user) { nil }
+
+ it 'returns only public projects' do
+ expect(result.items.count).to be(1)
+ expect(result.items.pluck(:name)).to contain_exactly('public')
end
end
end
diff --git a/spec/graphql/resolvers/ci/catalog/versions_resolver_spec.rb b/spec/graphql/resolvers/ci/catalog/versions_resolver_spec.rb
deleted file mode 100644
index 02fb3dfaee4..00000000000
--- a/spec/graphql/resolvers/ci/catalog/versions_resolver_spec.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# In this context, a `version` is equivalent to a `release`
-RSpec.describe Resolvers::Ci::Catalog::VersionsResolver, feature_category: :pipeline_composition do
- include GraphqlHelpers
-
- let_it_be(:today) { Time.now }
- let_it_be(:yesterday) { today - 1.day }
- let_it_be(:tomorrow) { today + 1.day }
-
- let_it_be(:project) { create(:project, :private) }
- # rubocop: disable Layout/LineLength
- let_it_be(:version1) { create(:release, project: project, tag: 'v1.0.0', released_at: yesterday, created_at: tomorrow) }
- let_it_be(:version2) { create(:release, project: project, tag: 'v2.0.0', released_at: today, created_at: yesterday) }
- let_it_be(:version3) { create(:release, project: project, tag: 'v3.0.0', released_at: tomorrow, created_at: today) }
- # rubocop: enable Layout/LineLength
- let_it_be(:developer) { create(:user) }
- let_it_be(:public_user) { create(:user) }
-
- let(:args) { { sort: :released_at_desc } }
- let(:all_releases) { [version1, version2, version3] }
-
- before_all do
- project.add_developer(developer)
- end
-
- describe '#resolve' do
- it_behaves_like 'releases and group releases resolver'
-
- describe 'when order_by is created_at' do
- let(:current_user) { developer }
-
- context 'with sort: desc' do
- let(:args) { { sort: :created_desc } }
-
- it 'returns the releases ordered by created_at in descending order' do
- expect(resolve_releases.to_a)
- .to match_array(all_releases)
- .and be_sorted(:created_at, :desc)
- end
- end
-
- context 'with sort: asc' do
- let(:args) { { sort: :created_asc } }
-
- it 'returns the releases ordered by created_at in ascending order' do
- expect(resolve_releases.to_a)
- .to match_array(all_releases)
- .and be_sorted(:created_at, :asc)
- end
- end
- end
- end
-
- private
-
- def resolve_versions
- context = { current_user: current_user }
- resolve(described_class, obj: project, args: args, ctx: context, arg_style: :internal)
- end
-
- # Required for shared examples
- alias_method :resolve_releases, :resolve_versions
-end
diff --git a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
index e0fc3b96b93..d1eec0baeea 100644
--- a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
describe '#resolve' do
diff --git a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
index 9d9f0fee04a..85b55521174 100644
--- a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
describe '#resolve' do
diff --git a/spec/graphql/resolvers/ci/runner_groups_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_groups_resolver_spec.rb
index 9272689ef0b..f535f6e415d 100644
--- a/spec/graphql/resolvers/ci/runner_groups_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_groups_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::RunnerGroupsResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::RunnerGroupsResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:group1) { create(:group) }
diff --git a/spec/graphql/resolvers/ci/runner_job_count_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_job_count_resolver_spec.rb
index 6336ea883f7..18501d4add5 100644
--- a/spec/graphql/resolvers/ci/runner_job_count_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_job_count_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::RunnerJobCountResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::RunnerJobCountResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
index 322bead0d3c..4af87b6882f 100644
--- a/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::RunnerJobsResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::RunnerJobsResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb
index da6a84cec44..e6238c41445 100644
--- a/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::RunnerPlatformsResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::RunnerPlatformsResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
describe '#resolve' do
diff --git a/spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb
index 44203fb2912..c75d7fb831c 100644
--- a/spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::RunnerProjectsResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::RunnerProjectsResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:project1) { create(:project, description: 'Project1.1') }
diff --git a/spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb
index 734337f7c92..1724623e5c4 100644
--- a/spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::RunnerSetupResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::RunnerSetupResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
describe '#resolve' do
diff --git a/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb
index 97a10a7da33..d541bbddfe5 100644
--- a/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::RunnerStatusResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::RunnerStatusResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
describe '#resolve' do
diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
index 7d37d13366c..85a90924384 100644
--- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet do
+RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibility do
include GraphqlHelpers
describe '#resolve' do
diff --git a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
index 48be1c29184..5f12e8649b7 100644
--- a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
+++ b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
@@ -114,14 +114,6 @@ RSpec.describe Resolvers::ContainerRepositoryTagsResolver, feature_category: :co
expect(resolver(args)).is_a? Gitlab::Graphql::ExternallyPaginatedArray
end
-
- context 'when feature use_repository_list_tags_on_graphql is disabled' do
- before do
- stub_feature_flags(use_repository_list_tags_on_graphql: false)
- end
-
- it_behaves_like 'fetching via tags and filter in place'
- end
end
context 'when Gitlab API is not supported' do
diff --git a/spec/graphql/resolvers/group_milestones_resolver_spec.rb b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
index b9b8ef1870b..e9caf91ecb7 100644
--- a/spec/graphql/resolvers/group_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
@@ -102,76 +102,7 @@ RSpec.describe Resolvers::GroupMilestonesResolver, feature_category: :team_plann
end
end
- context 'when including descendant milestones in a public group' do
- let_it_be(:group) { create(:group, :public) }
-
- let(:args) { { include_descendants: true } }
-
- it 'finds milestones only in accessible projects and groups' do
- accessible_group = create(:group, :private, parent: group)
- accessible_project = create(:project, group: accessible_group)
- accessible_group.add_developer(current_user)
- inaccessible_group = create(:group, :private, parent: group)
- inaccessible_project = create(:project, :private, group: group)
- milestone1 = create(:milestone, group: group)
- milestone2 = create(:milestone, group: accessible_group)
- milestone3 = create(:milestone, project: accessible_project)
- create(:milestone, group: inaccessible_group)
- create(:milestone, project: inaccessible_project)
-
- expect(resolve_group_milestones(args: args)).to match_array([milestone1, milestone2, milestone3])
- end
- end
-
- describe 'include_descendants and include_ancestors' do
- let_it_be(:parent_group) { create(:group, :public) }
- let_it_be(:group) { create(:group, :public, parent: parent_group) }
- let_it_be(:accessible_group) { create(:group, :private, parent: group) }
- let_it_be(:accessible_project) { create(:project, group: accessible_group) }
- let_it_be(:inaccessible_group) { create(:group, :private, parent: group) }
- let_it_be(:inaccessible_project) { create(:project, :private, group: group) }
- let_it_be(:milestone1) { create(:milestone, group: group) }
- let_it_be(:milestone2) { create(:milestone, group: accessible_group) }
- let_it_be(:milestone3) { create(:milestone, project: accessible_project) }
- let_it_be(:milestone4) { create(:milestone, group: inaccessible_group) }
- let_it_be(:milestone5) { create(:milestone, project: inaccessible_project) }
- let_it_be(:milestone6) { create(:milestone, group: parent_group) }
-
- before do
- accessible_group.add_developer(current_user)
- end
-
- context 'when including neither ancestor or descendant milestones in a public group' do
- let(:args) { {} }
-
- it 'finds milestones only in accessible projects and groups' do
- expect(resolve_group_milestones(args: args)).to match_array([milestone1])
- end
- end
-
- context 'when including descendant milestones in a public group' do
- let(:args) { { include_descendants: true } }
-
- it 'finds milestones only in accessible projects and groups' do
- expect(resolve_group_milestones(args: args)).to match_array([milestone1, milestone2, milestone3])
- end
- end
-
- context 'when including ancestor milestones in a public group' do
- let(:args) { { include_ancestors: true } }
-
- it 'finds milestones only in accessible projects and groups' do
- expect(resolve_group_milestones(args: args)).to match_array([milestone1, milestone6])
- end
- end
-
- context 'when including both ancestor or descendant milestones in a public group' do
- let(:args) { { include_descendants: true, include_ancestors: true } }
-
- it 'finds milestones only in accessible projects and groups' do
- expect(resolve_group_milestones(args: args)).to match_array([milestone1, milestone2, milestone3, milestone6])
- end
- end
- end
+ # testing for include_descendants and include_ancestors moved into
+ # `spec/requests/api/graphql/milestone_spec.rb`
end
end
diff --git a/spec/graphql/resolvers/group_resolver_spec.rb b/spec/graphql/resolvers/group_resolver_spec.rb
index ed406d14772..c04961b4804 100644
--- a/spec/graphql/resolvers/group_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_resolver_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Resolvers::GroupResolver do
it 'batch-resolves groups by full path' do
paths = [group1.full_path, group2.full_path]
- result = batch_sync(max_queries: 1) do
+ result = batch_sync(max_queries: 3) do
paths.map { |path| resolve_group(path) }
end
diff --git a/spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb b/spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb
index fe6509bcb3c..58333037e4c 100644
--- a/spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb
+++ b/spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Resolvers::Kas::AgentConnectionsResolver do
let(:connection2) { double(agent_id: agent1.id) }
let(:connection3) { double(agent_id: agent2.id) }
let(:connected_agents) { [connection1, connection2, connection3] }
- let(:kas_client) { instance_double(Gitlab::Kas::Client, get_connected_agents: connected_agents) }
+ let(:kas_client) { instance_double(Gitlab::Kas::Client, get_connected_agents_by_agent_ids: connected_agents) }
subject do
batch_sync do
@@ -37,7 +37,7 @@ RSpec.describe Resolvers::Kas::AgentConnectionsResolver do
end
it 'queries KAS once when multiple agents are requested' do
- expect(kas_client).to receive(:get_connected_agents).once
+ expect(kas_client).to receive(:get_connected_agents_by_agent_ids).once
response = batch_sync do
resolve(described_class, obj: agent1, ctx: ctx)
@@ -49,7 +49,7 @@ RSpec.describe Resolvers::Kas::AgentConnectionsResolver do
context 'an error is returned from the KAS client' do
before do
- allow(kas_client).to receive(:get_connected_agents).and_raise(GRPC::DeadlineExceeded)
+ allow(kas_client).to receive(:get_connected_agents_by_agent_ids).and_raise(GRPC::DeadlineExceeded)
end
it 'raises a graphql error' do
diff --git a/spec/graphql/resolvers/ml/model_detail_resolver_spec.rb b/spec/graphql/resolvers/ml/model_detail_resolver_spec.rb
new file mode 100644
index 00000000000..1da208eb4d8
--- /dev/null
+++ b/spec/graphql/resolvers/ml/model_detail_resolver_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ml::ModelDetailResolver, feature_category: :mlops do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:model) { create(:ml_models, project: project) }
+ let_it_be(:user) { project.owner }
+
+ let(:args) { { id: global_id_of(model) } }
+ let(:read_model_registry) { true }
+
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_model_registry, project)
+ .and_return(read_model_registry)
+ end
+
+ subject { force(resolve(described_class, ctx: { current_user: user }, args: args)) }
+
+ context 'when user is allowed and model exists' do
+ it { is_expected.to eq(model) }
+ end
+
+ context 'when user does not have permission' do
+ let(:read_model_registry) { false }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when model does not exist' do
+ let(:args) { { id: global_id_of(id: non_existing_record_id, model_name: 'Ml::Model') } }
+
+ it { is_expected.to be_nil }
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/project_resolver_spec.rb b/spec/graphql/resolvers/project_resolver_spec.rb
index dec9d4701e1..03febc75d3f 100644
--- a/spec/graphql/resolvers/project_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_resolver_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Resolvers::ProjectResolver do
it 'batch-resolves projects by full path' do
paths = [project1.full_path, project2.full_path]
- result = batch_sync(max_queries: 1) do
+ result = batch_sync(max_queries: 3) do
paths.map { |path| resolve_project(path) }
end
diff --git a/spec/graphql/resolvers/timelog_resolver_spec.rb b/spec/graphql/resolvers/timelog_resolver_spec.rb
index 798d8a56cf5..c4253f4b9bc 100644
--- a/spec/graphql/resolvers/timelog_resolver_spec.rb
+++ b/spec/graphql/resolvers/timelog_resolver_spec.rb
@@ -29,6 +29,14 @@ RSpec.describe Resolvers::TimelogResolver, feature_category: :team_planning do
expect(timelogs).to contain_exactly(timelog1)
end
+ context 'when the project does not exist' do
+ let(:extra_args) { { project_id: "gid://gitlab/Project/#{non_existing_record_id}" } }
+
+ it 'returns an empty set' do
+ expect(timelogs).to be_empty
+ end
+ end
+
context 'when no dates specified' do
let(:args) { {} }
@@ -137,6 +145,20 @@ RSpec.describe Resolvers::TimelogResolver, feature_category: :team_planning do
expect(timelogs).to contain_exactly(timelog1)
end
+ context 'when the group does not exist' do
+ let_it_be(:error_class) { Gitlab::Graphql::Errors::ResourceNotAvailable }
+
+ let(:extra_args) { { group_id: "gid://gitlab/Group/#{non_existing_record_id}" } }
+
+ it 'returns an error' do
+ expect_graphql_error_to_be_created(error_class,
+ "The resource that you are attempting to access does not exist or " \
+ "you don't have permission to perform this action") do
+ timelogs
+ end
+ end
+ end
+
context 'when only start_date is present' do
let(:args) { { start_date: short_time_ago } }
diff --git a/spec/graphql/types/analytics/cycle_analytics/value_stream_type_spec.rb b/spec/graphql/types/analytics/cycle_analytics/value_stream_type_spec.rb
index 5e2638210d3..1d5a8dbebd6 100644
--- a/spec/graphql/types/analytics/cycle_analytics/value_stream_type_spec.rb
+++ b/spec/graphql/types/analytics/cycle_analytics/value_stream_type_spec.rb
@@ -7,5 +7,5 @@ RSpec.describe Types::Analytics::CycleAnalytics::ValueStreamType, feature_catego
specify { expect(described_class).to require_graphql_authorizations(:read_cycle_analytics) }
- specify { expect(described_class).to have_graphql_fields(:id, :name, :namespace, :project) }
+ specify { expect(described_class).to have_graphql_fields(:id, :name, :namespace, :project, :stages) }
end
diff --git a/spec/graphql/types/analytics/cycle_analytics/value_streams/stage_type_spec.rb b/spec/graphql/types/analytics/cycle_analytics/value_streams/stage_type_spec.rb
new file mode 100644
index 00000000000..92276647e1b
--- /dev/null
+++ b/spec/graphql/types/analytics/cycle_analytics/value_streams/stage_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Analytics::CycleAnalytics::ValueStreams::StageType, feature_category: :value_stream_management do
+ let(:fields) do
+ %i[
+ name start_event_identifier
+ end_event_identifier hidden custom
+ ]
+ end
+
+ specify { expect(described_class.graphql_name).to eq('ValueStreamStage') }
+ specify { expect(described_class).to have_graphql_fields(fields).at_least }
+end
diff --git a/spec/graphql/types/ci/catalog/resource_type_spec.rb b/spec/graphql/types/ci/catalog/resource_type_spec.rb
index 5f5732c5237..7e15efca644 100644
--- a/spec/graphql/types/ci/catalog/resource_type_spec.rb
+++ b/spec/graphql/types/ci/catalog/resource_type_spec.rb
@@ -16,8 +16,6 @@ RSpec.describe Types::Ci::Catalog::ResourceType, feature_category: :pipeline_com
latest_version
latest_released_at
star_count
- forks_count
- root_namespace
readme_html
open_issues_count
open_merge_requests_count
diff --git a/spec/graphql/types/ci/catalog/resources/component_type_spec.rb b/spec/graphql/types/ci/catalog/resources/component_type_spec.rb
new file mode 100644
index 00000000000..93ab926d406
--- /dev/null
+++ b/spec/graphql/types/ci/catalog/resources/component_type_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::Catalog::Resources::ComponentType, feature_category: :pipeline_composition do
+ specify { expect(described_class.graphql_name).to eq('CiCatalogResourceComponent') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ id
+ inputs
+ name
+ path
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/catalog/resources/components/input_type_spec.rb b/spec/graphql/types/ci/catalog/resources/components/input_type_spec.rb
new file mode 100644
index 00000000000..cb716cfff8c
--- /dev/null
+++ b/spec/graphql/types/ci/catalog/resources/components/input_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::Catalog::Resources::Components::InputType, feature_category: :pipeline_composition do
+ specify { expect(described_class.graphql_name).to eq('CiCatalogResourceComponentInput') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ name
+ default
+ required
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/catalog/resources/version_sort_enum_spec.rb b/spec/graphql/types/ci/catalog/resources/version_sort_enum_spec.rb
new file mode 100644
index 00000000000..fd0f1a1e553
--- /dev/null
+++ b/spec/graphql/types/ci/catalog/resources/version_sort_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiCatalogResourceVersionSort'], feature_category: :pipeline_composition do
+ it { expect(described_class.graphql_name).to eq('CiCatalogResourceVersionSort') }
+
+ it 'exposes all the existing catalog resource version sort options' do
+ expect(described_class.values.keys).to include(
+ *%w[RELEASED_AT_ASC RELEASED_AT_DESC CREATED_ASC CREATED_DESC]
+ )
+ end
+end
diff --git a/spec/graphql/types/ci/catalog/resources/version_type_spec.rb b/spec/graphql/types/ci/catalog/resources/version_type_spec.rb
new file mode 100644
index 00000000000..088973cf8f7
--- /dev/null
+++ b/spec/graphql/types/ci/catalog/resources/version_type_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::Catalog::Resources::VersionType, feature_category: :pipeline_composition do
+ specify { expect(described_class.graphql_name).to eq('CiCatalogResourceVersion') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ id
+ created_at
+ released_at
+ tag_name
+ tag_path
+ author
+ commit
+ components
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/job_base_field_spec.rb b/spec/graphql/types/ci/job_base_field_spec.rb
index ec7d2a7d33a..c2bf73526d2 100644
--- a/spec/graphql/types/ci/job_base_field_spec.rb
+++ b/spec/graphql/types/ci/job_base_field_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::Ci::JobBaseField, feature_category: :runner_fleet do
+RSpec.describe Types::Ci::JobBaseField, feature_category: :fleet_visibility do
describe 'authorized?' do
let_it_be(:current_user) { create(:user) }
diff --git a/spec/graphql/types/ci/runner_countable_connection_type_spec.rb b/spec/graphql/types/ci/runner_countable_connection_type_spec.rb
index 49254ed0f93..9f24909e110 100644
--- a/spec/graphql/types/ci/runner_countable_connection_type_spec.rb
+++ b/spec/graphql/types/ci/runner_countable_connection_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::Ci::RunnerCountableConnectionType, feature_category: :runner_fleet do
+RSpec.describe Types::Ci::RunnerCountableConnectionType, feature_category: :fleet_visibility do
it 'contains attributes related to a runner connection' do
expected_fields = %w[count]
diff --git a/spec/graphql/types/ci/runner_manager_type_spec.rb b/spec/graphql/types/ci/runner_manager_type_spec.rb
index ff7297b0a0e..ce55d6fee03 100644
--- a/spec/graphql/types/ci/runner_manager_type_spec.rb
+++ b/spec/graphql/types/ci/runner_manager_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['CiRunnerManager'], feature_category: :runner_fleet do
+RSpec.describe GitlabSchema.types['CiRunnerManager'], feature_category: :fleet_visibility do
specify { expect(described_class.graphql_name).to eq('CiRunnerManager') }
specify { expect(described_class).to require_graphql_authorizations(:read_runner_manager) }
diff --git a/spec/graphql/types/ci/runner_platform_type_spec.rb b/spec/graphql/types/ci/runner_platform_type_spec.rb
index 1b0f5a5ec71..f4acfc1f8ca 100644
--- a/spec/graphql/types/ci/runner_platform_type_spec.rb
+++ b/spec/graphql/types/ci/runner_platform_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::Ci::RunnerPlatformType, feature_category: :runner_fleet do
+RSpec.describe Types::Ci::RunnerPlatformType, feature_category: :fleet_visibility do
specify { expect(described_class.graphql_name).to eq('RunnerPlatform') }
it 'exposes the expected fields' do
diff --git a/spec/graphql/types/ci/runner_setup_type_spec.rb b/spec/graphql/types/ci/runner_setup_type_spec.rb
index d3e47b52a80..66469a35a94 100644
--- a/spec/graphql/types/ci/runner_setup_type_spec.rb
+++ b/spec/graphql/types/ci/runner_setup_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::Ci::RunnerSetupType, feature_category: :runner_fleet do
+RSpec.describe Types::Ci::RunnerSetupType, feature_category: :fleet_visibility do
specify { expect(described_class.graphql_name).to eq('RunnerSetup') }
it 'exposes the expected fields' do
diff --git a/spec/graphql/types/ci/runner_upgrade_status_enum_spec.rb b/spec/graphql/types/ci/runner_upgrade_status_enum_spec.rb
index 4aa9ad094a6..3f6a867de39 100644
--- a/spec/graphql/types/ci/runner_upgrade_status_enum_spec.rb
+++ b/spec/graphql/types/ci/runner_upgrade_status_enum_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::Ci::RunnerUpgradeStatusEnum, feature_category: :runner_fleet do
+RSpec.describe Types::Ci::RunnerUpgradeStatusEnum, feature_category: :fleet_visibility do
let(:model_only_enum_values) { %w[not_processed] }
let(:expected_graphql_source_values) do
Ci::RunnerVersion.statuses.keys - model_only_enum_values
diff --git a/spec/graphql/types/ci/runner_web_url_edge_spec.rb b/spec/graphql/types/ci/runner_web_url_edge_spec.rb
index 07a9655b3e1..fc4e5428360 100644
--- a/spec/graphql/types/ci/runner_web_url_edge_spec.rb
+++ b/spec/graphql/types/ci/runner_web_url_edge_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::Ci::RunnerWebUrlEdge, feature_category: :runner_fleet do
+RSpec.describe Types::Ci::RunnerWebUrlEdge, feature_category: :fleet_visibility do
specify { expect(described_class.graphql_name).to eq('RunnerWebUrlEdge') }
it 'contains URL attributes' do
diff --git a/spec/graphql/types/container_registry/protection/rule_type_spec.rb b/spec/graphql/types/container_registry/protection/rule_type_spec.rb
index 58b53af80fb..40a45609345 100644
--- a/spec/graphql/types/container_registry/protection/rule_type_spec.rb
+++ b/spec/graphql/types/container_registry/protection/rule_type_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe GitlabSchema.types['ContainerRegistryProtectionRule'], feature_ca
it { is_expected.to have_non_null_graphql_type(::Types::GlobalIDType[::ContainerRegistry::Protection::Rule]) }
end
- describe 'container_path_pattern' do
- subject { described_class.fields['containerPathPattern'] }
+ describe 'repository_path_pattern' do
+ subject { described_class.fields['repositoryPathPattern'] }
it { is_expected.to have_non_null_graphql_type(GraphQL::Types::String) }
end
diff --git a/spec/graphql/types/container_repository_details_type_spec.rb b/spec/graphql/types/container_repository_details_type_spec.rb
index 62e72089e09..2253b07f5c8 100644
--- a/spec/graphql/types/container_repository_details_type_spec.rb
+++ b/spec/graphql/types/container_repository_details_type_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositoryDetails'] do
fields = %i[id name path location created_at updated_at expiration_policy_started_at
status tags_count can_delete expiration_policy_cleanup_status tags size
- project migration_state last_cleanup_deleted_tags_count]
+ project migration_state last_cleanup_deleted_tags_count userPermissions]
it { expect(described_class.graphql_name).to eq('ContainerRepositoryDetails') }
diff --git a/spec/graphql/types/container_repository_tag_type_spec.rb b/spec/graphql/types/container_repository_tag_type_spec.rb
index 1d1a76d6916..596a221b5c0 100644
--- a/spec/graphql/types/container_repository_tag_type_spec.rb
+++ b/spec/graphql/types/container_repository_tag_type_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['ContainerRepositoryTag'] do
- fields = %i[name path location digest revision short_revision total_size created_at can_delete]
+RSpec.describe GitlabSchema.types['ContainerRepositoryTag'], feature_category: :container_registry do
+ fields = %i[name path location digest revision short_revision total_size created_at can_delete user_permissions]
it { expect(described_class.graphql_name).to eq('ContainerRepositoryTag') }
@@ -12,4 +12,6 @@ RSpec.describe GitlabSchema.types['ContainerRepositoryTag'] do
it { expect(described_class).to require_graphql_authorizations(:read_container_image) }
it { expect(described_class).to have_graphql_fields(fields) }
+
+ it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::ContainerRepositoryTag) }
end
diff --git a/spec/graphql/types/container_repository_type_spec.rb b/spec/graphql/types/container_repository_type_spec.rb
index bc92fa24050..0c7879c3f2a 100644
--- a/spec/graphql/types/container_repository_type_spec.rb
+++ b/spec/graphql/types/container_repository_type_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['ContainerRepository'] do
+RSpec.describe GitlabSchema.types['ContainerRepository'], feature_category: :container_registry do
fields = %i[id name path location created_at updated_at expiration_policy_started_at
status tags_count can_delete expiration_policy_cleanup_status project
- migration_state last_cleanup_deleted_tags_count]
+ migration_state last_cleanup_deleted_tags_count user_permissions]
it { expect(described_class.graphql_name).to eq('ContainerRepository') }
@@ -15,6 +15,8 @@ RSpec.describe GitlabSchema.types['ContainerRepository'] do
it { expect(described_class).to have_graphql_fields(fields) }
+ it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::ContainerRepository) }
+
describe 'status field' do
subject { described_class.fields['status'] }
diff --git a/spec/graphql/types/current_user_type_spec.rb b/spec/graphql/types/current_user_type_spec.rb
new file mode 100644
index 00000000000..ff7a529a057
--- /dev/null
+++ b/spec/graphql/types/current_user_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CurrentUser'], feature_category: :user_profile do
+ specify { expect(described_class.graphql_name).to eq('CurrentUser') }
+
+ it "inherits authorization policies from the UserType superclass" do
+ expect(described_class).to require_graphql_authorizations(:read_user)
+ end
+end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index 6622551f063..d3f9053faf3 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -125,4 +125,37 @@ RSpec.describe GitlabSchema.types['Group'] do
expect { clean_state_query }.not_to exceed_all_query_limit(control)
end
end
+
+ describe 'custom emoji' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:custom_emoji) { create(:custom_emoji, group: group) }
+ let_it_be(:custom_emoji_subgroup) { create(:custom_emoji, group: subgroup) }
+ let(:query) do
+ %(
+ query {
+ group(fullPath: "#{subgroup.full_path}") {
+ customEmoji(includeAncestorGroups: true) {
+ nodes {
+ id
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before_all do
+ group.add_reporter(user)
+ end
+
+ describe 'when includeAncestorGroups is true' do
+ it 'returns emoji from ancestor groups' do
+ result = GitlabSchema.execute(query, context: { current_user: user }).as_json
+
+ expect(result.dig('data', 'group', 'customEmoji', 'nodes').count).to eq(2)
+ end
+ end
+ end
end
diff --git a/spec/graphql/types/issue_type_enum_spec.rb b/spec/graphql/types/issue_type_enum_spec.rb
index 5b1bc9c3d9c..f0370e275cd 100644
--- a/spec/graphql/types/issue_type_enum_spec.rb
+++ b/spec/graphql/types/issue_type_enum_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Types::IssueTypeEnum, feature_category: :team_planning do
it 'exposes all the existing issue type values except epic' do
expect(described_class.values.keys).to match_array(
- %w[ISSUE INCIDENT TEST_CASE REQUIREMENT TASK OBJECTIVE KEY_RESULT]
+ %w[ISSUE INCIDENT TEST_CASE REQUIREMENT TASK OBJECTIVE KEY_RESULT EPIC]
)
end
end
diff --git a/spec/graphql/types/ml/candidate_links_type_spec.rb b/spec/graphql/types/ml/candidate_links_type_spec.rb
new file mode 100644
index 00000000000..489079cc717
--- /dev/null
+++ b/spec/graphql/types/ml/candidate_links_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MLCandidateLinks'], feature_category: :mlops do
+ it 'has the expected fields' do
+ expected_fields = %w[showPath artifact_path]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ml/candidate_type_spec.rb b/spec/graphql/types/ml/candidate_type_spec.rb
new file mode 100644
index 00000000000..e000ada1309
--- /dev/null
+++ b/spec/graphql/types/ml/candidate_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MlCandidate'], feature_category: :mlops do
+ specify { expect(described_class.description).to eq('Candidate for a model version in the model registry') }
+
+ it 'includes all the package fields' do
+ expected_fields = %w[id name created_at _links]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ml/model_type_spec.rb b/spec/graphql/types/ml/model_type_spec.rb
new file mode 100644
index 00000000000..ee0473ccafe
--- /dev/null
+++ b/spec/graphql/types/ml/model_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MlModel'], feature_category: :mlops do
+ specify { expect(described_class.description).to eq('Machine learning model in the model registry') }
+
+ it 'includes all the package fields' do
+ expected_fields = %w[id name versions candidates]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ml/model_version_links_type_spec.rb b/spec/graphql/types/ml/model_version_links_type_spec.rb
new file mode 100644
index 00000000000..d2a11643c35
--- /dev/null
+++ b/spec/graphql/types/ml/model_version_links_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MLModelVersionLinks'], feature_category: :mlops do
+ it 'has the expected fields' do
+ expected_fields = %w[showPath]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ml/model_version_type_spec.rb b/spec/graphql/types/ml/model_version_type_spec.rb
new file mode 100644
index 00000000000..03652c55e20
--- /dev/null
+++ b/spec/graphql/types/ml/model_version_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MlModelVersion'], feature_category: :mlops do
+ specify { expect(described_class.description).to eq('Version of a machine learning model') }
+
+ it 'includes all the package fields' do
+ expected_fields = %w[id version created_at _links]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/namespace/package_settings_type_spec.rb b/spec/graphql/types/namespace/package_settings_type_spec.rb
index d823f2017b6..0e731c1e2bf 100644
--- a/spec/graphql/types/namespace/package_settings_type_spec.rb
+++ b/spec/graphql/types/namespace/package_settings_type_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe GitlabSchema.types['PackageSettings'], feature_category: :package
maven_package_requests_forwarding_locked
npm_package_requests_forwarding_locked
pypi_package_requests_forwarding_locked
+ nuget_symbol_server_enabled
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/organizations/organization_type_spec.rb b/spec/graphql/types/organizations/organization_type_spec.rb
index 62787ad220d..6bc4bac6ba2 100644
--- a/spec/graphql/types/organizations/organization_type_spec.rb
+++ b/spec/graphql/types/organizations/organization_type_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['Organization'], feature_category: :cell do
- let(:expected_fields) { %w[groups id name organization_users path web_url] }
+ let(:expected_fields) { %w[avatar_url description description_html groups id name organization_users path web_url] }
specify { expect(described_class.graphql_name).to eq('Organization') }
specify { expect(described_class).to require_graphql_authorizations(:read_organization) }
diff --git a/spec/graphql/types/permission_types/abuse_report_spec.rb b/spec/graphql/types/permission_types/abuse_report_spec.rb
deleted file mode 100644
index 399df137a78..00000000000
--- a/spec/graphql/types/permission_types/abuse_report_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Types::PermissionTypes::AbuseReport, feature_category: :insider_threat do
- it do
- expected_permissions = [
- :read_abuse_report, :create_note
- ]
-
- expected_permissions.each do |permission|
- expect(described_class).to have_graphql_field(permission)
- end
- end
-end
diff --git a/spec/graphql/types/permission_types/container_repository_spec.rb b/spec/graphql/types/permission_types/container_repository_spec.rb
new file mode 100644
index 00000000000..1d8d9e994ed
--- /dev/null
+++ b/spec/graphql/types/permission_types/container_repository_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ContainerRepositoryPermissions'], feature_category: :container_registry do
+ it 'has the expected fields' do
+ expected_permissions = [:destroy_container_repository]
+
+ expect(described_class).to have_graphql_fields(expected_permissions).only
+ end
+end
diff --git a/spec/graphql/types/permission_types/container_repository_tag_spec.rb b/spec/graphql/types/permission_types/container_repository_tag_spec.rb
new file mode 100644
index 00000000000..69d60ba621b
--- /dev/null
+++ b/spec/graphql/types/permission_types/container_repository_tag_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ContainerRepositoryTagPermissions'], feature_category: :container_registry do
+ it 'has the expected fields' do
+ expected_permissions = [:destroy_container_repository_tag]
+
+ expect(described_class).to have_graphql_fields(expected_permissions).only
+ end
+end
diff --git a/spec/graphql/types/project_feature_access_level_enum_spec.rb b/spec/graphql/types/project_feature_access_level_enum_spec.rb
new file mode 100644
index 00000000000..a13b3be3f8f
--- /dev/null
+++ b/spec/graphql/types/project_feature_access_level_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ProjectFeatureAccessLevel'], feature_category: :groups_and_projects do
+ specify { expect(described_class.graphql_name).to eq('ProjectFeatureAccessLevel') }
+
+ it 'exposes all the existing access levels' do
+ expect(described_class.values.keys).to include(*%w[DISABLED PRIVATE ENABLED])
+ end
+end
diff --git a/spec/graphql/types/project_feature_access_level_type_spec.rb b/spec/graphql/types/project_feature_access_level_type_spec.rb
new file mode 100644
index 00000000000..fae9de63d93
--- /dev/null
+++ b/spec/graphql/types/project_feature_access_level_type_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ProjectFeatureAccess'], feature_category: :groups_and_projects do
+ specify { expect(described_class.graphql_name).to eq('ProjectFeatureAccess') }
+ specify { expect(described_class).to require_graphql_authorizations(nil) }
+
+ it 'has expected fields' do
+ expected_fields = [:integer_value, :string_value]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 7b4bcf4b1b0..3965312316b 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -2,9 +2,10 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['Project'] do
+RSpec.describe GitlabSchema.types['Project'], feature_category: :groups_and_projects do
include GraphqlHelpers
include ProjectForksHelper
+ using RSpec::Parameterized::TableSyntax
specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Project) }
@@ -21,7 +22,8 @@ RSpec.describe GitlabSchema.types['Project'] do
container_registry_enabled shared_runners_enabled
lfs_enabled merge_requests_ff_only_enabled avatar_url
issues_enabled merge_requests_enabled wiki_enabled
- snippets_enabled jobs_enabled public_jobs open_issues_count import_status
+ forking_access_level issues_access_level merge_requests_access_level
+ snippets_enabled jobs_enabled public_jobs open_issues_count open_merge_requests_count import_status
only_allow_merge_if_pipeline_succeeds request_access_enabled
only_allow_merge_if_all_discussions_are_resolved printing_merge_request_link_enabled
namespace group statistics statistics_details_paths repository merge_requests merge_request issues
@@ -39,7 +41,7 @@ RSpec.describe GitlabSchema.types['Project'] do
recent_issue_boards ci_config_path_or_default packages_cleanup_policy ci_variables
timelog_categories fork_targets branch_rules ci_config_variables pipeline_schedules languages
incident_management_timeline_event_tags visible_forks inherited_ci_variables autocomplete_users
- ci_cd_settings detailed_import_status
+ ci_cd_settings detailed_import_status value_streams
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -704,6 +706,63 @@ RSpec.describe GitlabSchema.types['Project'] do
end
end
+ describe 'project features access level' do
+ let_it_be(:project) { create(:project, :public) }
+
+ where(project_feature: %w[forkingAccessLevel issuesAccessLevel mergeRequestsAccessLevel])
+
+ with_them do
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ #{project_feature} {
+ integerValue
+ stringValue
+ }
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query).as_json.dig('data', 'project', project_feature) }
+
+ it { is_expected.to eq({ "integerValue" => ProjectFeature::ENABLED, "stringValue" => "ENABLED" }) }
+ end
+ end
+
+ describe 'open_merge_requests_count' do
+ let_it_be(:project, reload: true) { create(:project, :public) }
+ let_it_be(:open_merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:closed_merge_request) { create(:merge_request, :closed, source_project: project) }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ openMergeRequestsCount
+ }
+ }
+ )
+ end
+
+ subject(:open_merge_requests_count) do
+ GitlabSchema.execute(query).as_json.dig('data', 'project', 'openMergeRequestsCount')
+ end
+
+ context 'when the user can access merge requests' do
+ it { is_expected.to eq(1) }
+ end
+
+ context 'when the user cannot access merge requests' do
+ before do
+ project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe 'branch_rules' do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public) }
diff --git a/spec/graphql/types/projects/service_type_enum_spec.rb b/spec/graphql/types/projects/service_type_enum_spec.rb
index a5b1ba24a44..40376afc7f7 100644
--- a/spec/graphql/types/projects/service_type_enum_spec.rb
+++ b/spec/graphql/types/projects/service_type_enum_spec.rb
@@ -38,7 +38,6 @@ RSpec.describe GitlabSchema.types['ServiceType'] do
PUMBLE_SERVICE
PUSHOVER_SERVICE
REDMINE_SERVICE
- SHIMO_SERVICE
SLACK_SERVICE
SLACK_SLASH_COMMANDS_SERVICE
TEAMCITY_SERVICE
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 8bda738751d..0b5739be9a1 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -13,6 +13,14 @@ RSpec.describe GitlabSchema.types['Query'], feature_category: :shared do
expect(described_class).to have_graphql_fields(*expected_foss_fields).at_least
end
+ describe 'current_user field' do
+ subject { described_class.fields['currentUser'] }
+
+ it 'returns current user' do
+ is_expected.to have_graphql_type(Types::CurrentUserType)
+ end
+ end
+
describe 'namespace field' do
subject { described_class.fields['namespace'] }
@@ -137,4 +145,14 @@ RSpec.describe GitlabSchema.types['Query'], feature_category: :shared do
is_expected.to have_graphql_resolver(Resolvers::BoardListResolver)
end
end
+
+ describe 'mlModel field' do
+ subject { described_class.fields['mlModel'] }
+
+ it 'returns metadata', :aggregate_failures do
+ is_expected.to have_graphql_type(Types::Ml::ModelType)
+ is_expected.to have_graphql_arguments(:id)
+ is_expected.to have_graphql_resolver(Resolvers::Ml::ModelDetailResolver)
+ end
+ end
end
diff --git a/spec/graphql/types/root_storage_statistics_type_spec.rb b/spec/graphql/types/root_storage_statistics_type_spec.rb
index 00f4092baf4..8ac3b32948f 100644
--- a/spec/graphql/types/root_storage_statistics_type_spec.rb
+++ b/spec/graphql/types/root_storage_statistics_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['RootStorageStatistics'] do
+RSpec.describe GitlabSchema.types['RootStorageStatistics'], feature_category: :consumables_cost_management do
specify { expect(described_class.graphql_name).to eq('RootStorageStatistics') }
it 'has the expected fields' do
diff --git a/spec/graphql/types/user_preferences_type_spec.rb b/spec/graphql/types/user_preferences_type_spec.rb
index 06749dda239..87fac17a5ba 100644
--- a/spec/graphql/types/user_preferences_type_spec.rb
+++ b/spec/graphql/types/user_preferences_type_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Types::UserPreferencesType, feature_category: :user_profile do
expected_fields = %i[
issues_sort
visibility_pipeline_id_type
+ use_web_ide_extension_marketplace
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/work_item_state_counts_type_spec.rb b/spec/graphql/types/work_item_state_counts_type_spec.rb
new file mode 100644
index 00000000000..bab2e124222
--- /dev/null
+++ b/spec/graphql/types/work_item_state_counts_type_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['WorkItemStateCountsType'], feature_category: :portfolio_management do
+ specify { expect(described_class.graphql_name).to eq('WorkItemStateCountsType') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[all opened closed]
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/work_items/widget_definition_interface_spec.rb b/spec/graphql/types/work_items/widget_definition_interface_spec.rb
new file mode 100644
index 00000000000..59320a75eba
--- /dev/null
+++ b/spec/graphql/types/work_items/widget_definition_interface_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::WidgetDefinitionInterface, feature_category: :team_planning do
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ type
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ describe '.resolve_type' do
+ subject { described_class.resolve_type(object, {}) }
+
+ context 'for assignees widget' do
+ let(:object) { WorkItems::Widgets::Assignees }
+
+ it { is_expected.to eq(Types::WorkItems::WidgetDefinitions::AssigneesType) }
+ end
+
+ context 'for hierarchy widget' do
+ let(:object) { WorkItems::Widgets::Hierarchy }
+
+ it { is_expected.to eq(Types::WorkItems::WidgetDefinitions::HierarchyType) }
+ end
+
+ context 'for other widgets' do
+ let(:object) { WorkItems::Widgets::Description }
+
+ it { is_expected.to eq(Types::WorkItems::WidgetDefinitions::GenericType) }
+ end
+ end
+end
diff --git a/spec/graphql/types/work_items/widget_definitions/assignees_type_spec.rb b/spec/graphql/types/work_items/widget_definitions/assignees_type_spec.rb
new file mode 100644
index 00000000000..3a1feee960c
--- /dev/null
+++ b/spec/graphql/types/work_items/widget_definitions/assignees_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::WidgetDefinitions::AssigneesType, feature_category: :team_planning do
+ it 'exposes the expected fields' do
+ expected_fields = %i[type can_invite_members]
+
+ expected_fields.each do |field|
+ expect(described_class).to have_graphql_field(field)
+ end
+ end
+end
diff --git a/spec/graphql/types/work_items/widget_definitions/generic_type_spec.rb b/spec/graphql/types/work_items/widget_definitions/generic_type_spec.rb
new file mode 100644
index 00000000000..19e962e71fd
--- /dev/null
+++ b/spec/graphql/types/work_items/widget_definitions/generic_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::WidgetDefinitions::GenericType, feature_category: :team_planning do
+ it 'exposes the expected fields' do
+ expected_fields = %i[type]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index d67d07a4f1e..2445689bf9f 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -701,55 +701,35 @@ RSpec.describe ApplicationHelper do
end
describe 'with-header' do
- using RSpec::Parameterized::TableSyntax
+ context 'when current_user' do
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
- before do
- allow(helper).to receive(:show_super_sidebar?).and_return(show_super_sidebar)
- allow(helper).to receive(:current_user).and_return(current_user)
+ it { is_expected.not_to include('with-header') }
end
- where(:show_super_sidebar, :current_user) do
- true | nil
- false | ref(:user)
- false | nil
- end
+ context 'when no current_user' do
+ before do
+ allow(helper).to receive(:current_user).and_return(nil)
+ end
- with_them do
it { is_expected.to include('with-header') }
end
-
- context 'when with-header should not be shown' do
- let(:show_super_sidebar) { true }
- let(:current_user) { user }
-
- it { is_expected.not_to include('with-header') }
- end
end
describe 'with-top-bar' do
- context 'when show_super_sidebar? is true' do
- context 'when @hide_top_bar_padding is false' do
- before do
- allow(helper).to receive(:show_super_sidebar?).and_return(true)
- helper.instance_variable_set(:@hide_top_bar_padding, false)
- end
-
- it { is_expected.to include('with-top-bar') }
+ context 'when @hide_top_bar_padding is false' do
+ before do
+ helper.instance_variable_set(:@hide_top_bar_padding, false)
end
- context 'when @hide_top_bar_padding is true' do
- before do
- allow(helper).to receive(:show_super_sidebar?).and_return(true)
- helper.instance_variable_set(:@hide_top_bar_padding, true)
- end
-
- it { is_expected.not_to include('with-top-bar') }
- end
+ it { is_expected.to include('with-top-bar') }
end
- context 'when show_super_sidebar? is false' do
+ context 'when @hide_top_bar_padding is true' do
before do
- allow(helper).to receive(:show_super_sidebar?).and_return(false)
+ helper.instance_variable_set(:@hide_top_bar_padding, true)
end
it { is_expected.not_to include('with-top-bar') }
diff --git a/spec/helpers/artifacts_helper_spec.rb b/spec/helpers/artifacts_helper_spec.rb
index 30f9421954e..ed400ad4111 100644
--- a/spec/helpers/artifacts_helper_spec.rb
+++ b/spec/helpers/artifacts_helper_spec.rb
@@ -17,7 +17,8 @@ RSpec.describe ArtifactsHelper, feature_category: :build_artifacts do
it 'returns expected data' do
expect(subject).to include({
project_path: project.full_path,
- project_id: project.id
+ project_id: project.id,
+ job_artifacts_count_limit: 100
})
end
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 2c12513c8ac..18cbbaf2008 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe AvatarsHelper, feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
- describe '#project_icon, #group_icon, #topic_icon' do
+ describe '#group_icon, #topic_icon' do
shared_examples 'resource with a default avatar' do |source_type|
it 'returns a default avatar div' do
expect(public_send("#{source_type}_icon", *helper_args))
@@ -27,37 +27,7 @@ RSpec.describe AvatarsHelper, feature_category: :source_code_management do
allow(resource).to receive(:avatar_url).and_raise(error_class)
end
- it 'handles Gitaly exception gracefully' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- an_instance_of(error_class), source_type: 'Project', source_id: resource.id
- )
- expect { project_icon(resource) }.not_to raise_error
- end
-
- it_behaves_like 'resource with a default avatar', 'project'
- end
-
- context 'when providing a project' do
- let(:helper_args) { [resource] }
- let(:resource) { create(:project, name: 'foo') }
-
it_behaves_like 'resource with a default avatar', 'project'
-
- it_behaves_like 'resource with a custom avatar', 'project' do
- let(:resource) { create(:project, :public, avatar: File.open(uploaded_image_temp_path)) }
- end
-
- context 'when Gitaly is unavailable' do
- let(:error_class) { GRPC::Unavailable }
-
- include_examples 'Gitaly exception handling'
- end
-
- context 'when Gitaly request is taking too long' do
- let(:error_class) { GRPC::DeadlineExceeded }
-
- include_examples 'Gitaly exception handling'
- end
end
context 'when providing a group' do
diff --git a/spec/helpers/ci/jobs_helper_spec.rb b/spec/helpers/ci/jobs_helper_spec.rb
index af369f7d420..1394f536c72 100644
--- a/spec/helpers/ci/jobs_helper_spec.rb
+++ b/spec/helpers/ci/jobs_helper_spec.rb
@@ -19,15 +19,16 @@ RSpec.describe Ci::JobsHelper, feature_category: :continuous_integration do
it 'returns jobs data' do
expect(helper.jobs_data(project, job)).to include({
- "endpoint" => "/#{project.full_path}/-/jobs/#{job.id}.json",
+ "job_endpoint" => "/#{project.full_path}/-/jobs/#{job.id}.json",
+ "log_endpoint" => "/#{project.full_path}/-/jobs/#{job.id}/trace",
+ "test_report_summary_url" => "/#{project.full_path}/-/jobs/#{job.id}/test_report_summary.json",
"page_path" => "/#{project.full_path}/-/jobs/#{job.id}",
"project_path" => project.full_path,
"artifact_help_url" => "/help/user/gitlab_com/index.md#gitlab-cicd",
"deployment_help_url" => "/help/user/project/clusters/deploy_to_cluster.md#troubleshooting",
"runner_settings_url" => "/#{project.full_path}/-/runners#js-runners-settings",
- "build_status" => "pending",
- "build_stage" => "test",
- "retry_outdated_job_docs_url" => "/help/ci/pipelines/settings#retry-outdated-jobs"
+ "retry_outdated_job_docs_url" => "/help/ci/pipelines/settings#retry-outdated-jobs",
+ "pipeline_test_report_url" => "/#{project.full_path}/-/pipelines/#{job.pipeline.id}/test_report"
})
end
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index dd7d602e2a5..7a80aff27c1 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe Ci::PipelineEditorHelper, feature_category: :pipeline_composition
let(:project) { create(:project, :repository) }
let(:default_helper_data) do
{
+ "ci-catalog-path" => explore_catalog_index_path,
"ci-config-path": project.ci_config_path_or_default,
"ci-examples-help-page-path" => help_page_path('ci/examples/index'),
"ci-help-page-path" => help_page_path('ci/index'),
@@ -61,7 +62,7 @@ RSpec.describe Ci::PipelineEditorHelper, feature_category: :pipeline_composition
allow(helper)
.to receive(:image_path)
- .with('illustrations/project-run-CICD-pipelines-sm.svg')
+ .with('illustrations/empty-state/empty-devops-md.svg')
.and_return('illustrations/validate.svg')
allow(helper)
diff --git a/spec/helpers/ci/pipelines_helper_spec.rb b/spec/helpers/ci/pipelines_helper_spec.rb
index 1a5c036b4f1..cb72fd5b156 100644
--- a/spec/helpers/ci/pipelines_helper_spec.rb
+++ b/spec/helpers/ci/pipelines_helper_spec.rb
@@ -97,7 +97,8 @@ RSpec.describe Ci::PipelinesHelper, feature_category: :continuous_integration do
:pipeline_editor_path,
:suggested_ci_templates,
:full_path,
- :visibility_pipeline_id_type])
+ :visibility_pipeline_id_type,
+ :show_jenkins_ci_prompt])
end
end
@@ -123,4 +124,39 @@ RSpec.describe Ci::PipelinesHelper, feature_category: :continuous_integration do
end
end
end
+
+ describe '#show_jenkins_ci_prompt' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { helper.pipelines_list_data(project, 'list_url')[:show_jenkins_ci_prompt] }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository }
+
+ before do
+ sign_in(user)
+ project.send(add_role_method, user)
+
+ allow(repository).to receive(:gitlab_ci_yml).and_return(has_gitlab_ci?)
+ allow(repository).to receive(:jenkinsfile?).and_return(has_jenkinsfile?)
+ end
+
+ where(:add_role_method, :has_gitlab_ci?, :has_jenkinsfile?, :result) do
+ # Test permissions
+ :add_owner | false | true | "true"
+ :add_maintainer | false | true | "true"
+ :add_developer | false | true | "true"
+ :add_guest | false | true | "false"
+
+ # Test combination of presence of ci files
+ :add_owner | false | false | "false"
+ :add_owner | true | true | "false"
+ :add_owner | true | false | "false"
+ end
+
+ with_them do
+ it { expect(subject).to eq(result) }
+ end
+ end
end
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index febdc3bab65..583bbba1b6d 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::RunnersHelper, feature_category: :runner_fleet do
+RSpec.describe Ci::RunnersHelper, feature_category: :fleet_visibility do
let_it_be(:user) { create(:user) }
before do
@@ -46,42 +46,6 @@ RSpec.describe Ci::RunnersHelper, feature_category: :runner_fleet do
end
end
- describe '#runner_contacted_at' do
- let(:contacted_at_stored) { 1.hour.ago.change(usec: 0) }
- let(:contacted_at_cached) { 1.second.ago.change(usec: 0) }
- let(:runner) { create(:ci_runner, contacted_at: contacted_at_stored) }
-
- before do
- runner.cache_attributes(contacted_at: contacted_at_cached)
- end
-
- context 'without sorting' do
- it 'returns cached value' do
- expect(helper.runner_contacted_at(runner)).to eq(contacted_at_cached)
- end
- end
-
- context 'with sorting set to created_date' do
- before do
- controller.params[:sort] = 'created_date'
- end
-
- it 'returns cached value' do
- expect(helper.runner_contacted_at(runner)).to eq(contacted_at_cached)
- end
- end
-
- context 'with sorting set to contacted_asc' do
- before do
- controller.params[:sort] = 'contacted_asc'
- end
-
- it 'returns stored value' do
- expect(helper.runner_contacted_at(runner)).to eq(contacted_at_stored)
- end
- end
- end
-
describe '#admin_runners_data_attributes' do
subject { helper.admin_runners_data_attributes }
diff --git a/spec/helpers/dashboard_helper_spec.rb b/spec/helpers/dashboard_helper_spec.rb
index 8a76771be0a..d52b3c9abb3 100644
--- a/spec/helpers/dashboard_helper_spec.rb
+++ b/spec/helpers/dashboard_helper_spec.rb
@@ -10,20 +10,6 @@ RSpec.describe DashboardHelper do
allow(helper).to receive(:can?) { true }
end
- describe '#dashboard_nav_links' do
- it 'has all the expected links by default' do
- menu_items = [:projects, :groups, :activity, :milestones, :snippets]
-
- expect(helper.dashboard_nav_links).to include(*menu_items)
- end
-
- it 'does not contain cross project elements when the user cannot read cross project' do
- expect(helper).to receive(:can?).with(user, :read_cross_project) { false }
-
- expect(helper.dashboard_nav_links).not_to include(:activity, :milestones)
- end
- end
-
describe '#feature_entry' do
shared_examples "a feature is enabled" do
it { is_expected.to include('<p aria-label="Demo: status on">') }
@@ -89,10 +75,4 @@ RSpec.describe DashboardHelper do
it { is_expected.to eq(false) }
end
-
- describe '#reviewer_mrs_dashboard_path' do
- subject { helper.reviewer_mrs_dashboard_path }
-
- it { is_expected.to eq(merge_requests_dashboard_path(reviewer_username: user.username)) }
- end
end
diff --git a/spec/helpers/explore_helper_spec.rb b/spec/helpers/explore_helper_spec.rb
index 68c5289a85f..22884c60740 100644
--- a/spec/helpers/explore_helper_spec.rb
+++ b/spec/helpers/explore_helper_spec.rb
@@ -10,14 +10,6 @@ RSpec.describe ExploreHelper do
allow(helper).to receive(:can?) { true }
end
- describe '#explore_nav_links' do
- it 'has all the expected links by default' do
- menu_items = [:projects, :groups, :topics, :snippets]
-
- expect(helper.explore_nav_links).to contain_exactly(*menu_items)
- end
- end
-
describe '#public_visibility_restricted?' do
it 'delegates to Gitlab::VisibilityLevel' do
expect(Gitlab::VisibilityLevel).to receive(:public_visibility_restricted?).and_call_original
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 93191cc956b..8aee337f51c 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -93,7 +93,7 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
shared_examples 'correct ancestor order' do
it 'outputs the groups in the correct order' do
expect(subject)
- .to match(%r{<li><a.*>#{deep_nested_group.name}.*</li>.*<a.*>#{very_deep_nested_group.name}</a>}m)
+ .to match(%r{<li.*><a.*>#{deep_nested_group.name}.*</li>.*<a.*>#{very_deep_nested_group.name}</a>}m)
end
end
@@ -134,7 +134,7 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
subject(:link) { document.css('.group-path').first }
it 'uses the group name as innerText' do
- expect(link.inner_text).to eq(group.name)
+ expect(link.inner_text).to match(group.name)
end
it 'links to the group path' do
@@ -150,7 +150,7 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
end
it 'uses the group\'s avatar_url' do
- expect(icon.attr('src')).to eq(group.avatar_url)
+ expect(icon.attr('src')).to match(group.avatar_url)
end
end
end
@@ -656,6 +656,41 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
end
end
+ context 'when a user has different access for different groups in the hierarchy' do
+ let_it_be(:grand_parent) { create(:group) }
+ let_it_be(:parent) { create(:group, parent: grand_parent) }
+ let_it_be(:child) { create(:group, parent: parent) }
+ let_it_be(:grand_child) { create(:group, parent: child) }
+
+ before_all do
+ parent.add_developer(user)
+ child.add_maintainer(user)
+ grand_child.add_owner(user)
+ end
+
+ it 'returns the access levels that are peers or lower' do
+ expect(helper.access_level_roles_user_can_assign(grand_parent)).to be_empty
+ expect(helper.access_level_roles_user_can_assign(parent)).to eq({
+ 'Guest' => ::Gitlab::Access::GUEST,
+ 'Reporter' => ::Gitlab::Access::REPORTER,
+ 'Developer' => ::Gitlab::Access::DEVELOPER
+ })
+ expect(helper.access_level_roles_user_can_assign(child)).to eq(::Gitlab::Access.options)
+ expect(helper.access_level_roles_user_can_assign(grand_child)).to eq(::Gitlab::Access.options_with_owner)
+ end
+ end
+
+ context 'when a group is linked to another' do
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:group_link) { create(:group_group_link, shared_group: group, shared_with_group: other_group, group_access: Gitlab::Access::MAINTAINER) }
+
+ before_all do
+ other_group.add_owner(user)
+ end
+
+ it { is_expected.to eq(::Gitlab::Access.options) }
+ end
+
context 'when user is not provided' do
before do
allow(helper).to receive(:current_user).and_return(nil)
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 6abce4c5983..f2e88bc311d 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -121,27 +121,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
- describe '#assigned_open_issues_count_text', feature_category: :team_planning do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project).tap { |p| p.add_developer(user) } }
-
- subject { helper.assigned_open_issues_count_text }
-
- before do
- allow(helper).to receive(:current_user).and_return(user)
- end
-
- context 'when assigned issues count is over MAX_LIMIT_FOR_ASSIGNEED_ISSUES_COUNT' do
- before do
- stub_const('User::MAX_LIMIT_FOR_ASSIGNEED_ISSUES_COUNT', 2)
- end
-
- let_it_be(:issues) { create_list(:issue, 3, project: project, assignees: [user]) }
-
- it { is_expected.to eq '1+' }
- end
- end
-
describe '#issuables_state_counter_text' do
let_it_be(:user) { create(:user) }
@@ -600,7 +579,8 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
initial_labels: '[]',
issuable_type: issuable.issuable_type,
labels_filter_base_path: project_issues_path(project),
- labels_manage_path: project_labels_path(project)
+ labels_manage_path: project_labels_path(project),
+ supports_lock_on_merge: issuable.supports_lock_on_merge?.to_s
})
end
end
@@ -620,7 +600,8 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
title: label.title,
description: label.description,
color: label.color,
- text_color: label.text_color
+ text_color: label.text_color,
+ lock_on_merge: label.lock_on_merge
},
{
__typename: "Label",
@@ -628,7 +609,8 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
title: label2.title,
description: label2.description,
color: label2.color,
- text_color: label2.text_color
+ text_color: label2.text_color,
+ lock_on_merge: label2.lock_on_merge
}
]
@@ -638,7 +620,8 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
initial_labels: initial_labels.to_json,
issuable_type: issuable.issuable_type,
labels_filter_base_path: project_merge_requests_path(project),
- labels_manage_path: project_labels_path(project)
+ labels_manage_path: project_labels_path(project),
+ supports_lock_on_merge: issuable.supports_lock_on_merge?.to_s
})
end
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 62d94b59c2a..6a6808cd8d2 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe IssuesHelper, feature_category: :team_planning do
include Features::MergeRequestHelpers
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let_it_be_with_reload(:issue) { create(:issue, project: project) }
@@ -47,7 +49,6 @@ RSpec.describe IssuesHelper, feature_category: :team_planning do
describe '#award_state_class' do
let!(:upvote) { create(:award_emoji) }
let(:awardable) { upvote.awardable }
- let(:user) { upvote.user }
before do
allow(helper).to receive(:can?) do |*args|
@@ -264,7 +265,6 @@ RSpec.describe IssuesHelper, feature_category: :team_planning do
end
describe '#group_issues_list_data' do
- let(:group) { create(:group) }
let(:current_user) { double.as_null_object }
it 'returns expected result' do
@@ -303,6 +303,7 @@ RSpec.describe IssuesHelper, feature_category: :team_planning do
allow(helper).to receive(:current_user).and_return(current_user)
allow(helper).to receive(:image_path).and_return('#')
allow(helper).to receive(:url_for).and_return('#')
+ stub_feature_flags(issue_date_filter: false)
expected = {
autocomplete_award_emojis_path: autocomplete_award_emojis_path,
@@ -311,6 +312,7 @@ RSpec.describe IssuesHelper, feature_category: :team_planning do
dashboard_milestones_path: dashboard_milestones_path(format: :json),
empty_state_with_filter_svg_path: '#',
empty_state_without_filter_svg_path: '#',
+ has_issue_date_filter_feature: 'false',
initial_sort: current_user&.user_preference&.issues_sort,
is_public_visibility_restricted: Gitlab::CurrentSettings.restricted_visibility_levels ? 'false' : '',
is_signed_in: current_user.present?.to_s,
@@ -433,4 +435,96 @@ RSpec.describe IssuesHelper, feature_category: :team_planning do
end
end
end
+
+ describe '#has_issue_date_filter_feature?' do
+ subject(:has_issue_date_filter_feature) { helper.has_issue_date_filter_feature?(namespace, namespace.owner) }
+
+ context 'when namespace is a group project' do
+ let_it_be(:namespace) { create(:project, namespace: group) }
+
+ it { is_expected.to be_truthy }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(issue_date_filter: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when feature flag enabled for group' do
+ before do
+ stub_feature_flags(issue_date_filter: [group])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature flag enabled for user' do
+ before do
+ stub_feature_flags(issue_date_filter: [namespace.owner])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'when namespace is a group' do
+ let_it_be(:namespace) { group }
+
+ subject(:has_issue_date_filter_feature) { helper.has_issue_date_filter_feature?(namespace, user) }
+
+ before_all do
+ namespace.add_reporter(user)
+ end
+
+ it { is_expected.to be_truthy }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(issue_date_filter: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when feature flag enabled for group' do
+ before do
+ stub_feature_flags(issue_date_filter: [group])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature flag enabled for user' do
+ before do
+ stub_feature_flags(issue_date_filter: [user])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'when namespace is a user project' do
+ let_it_be(:namespace) { project }
+
+ it { is_expected.to be_truthy }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(issue_date_filter: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when feature flag enabled for user' do
+ before do
+ stub_feature_flags(issue_date_filter: [project.owner])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
end
diff --git a/spec/helpers/json_helper_spec.rb b/spec/helpers/json_helper_spec.rb
deleted file mode 100644
index b9dfabb1b23..00000000000
--- a/spec/helpers/json_helper_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-RSpec.describe JsonHelper do
- let(:hash) { { "foo" => "bar" } }
- let(:json) { '{"foo":"bar"}' }
-
- describe ".json_generate" do
- subject { helper.json_generate(hash) }
-
- it "generates JSON" do
- expect(subject).to eq(json)
- end
-
- it "calls the Gitlab::Json class" do
- expect(Gitlab::Json).to receive(:generate).with(hash)
-
- subject
- end
- end
-
- describe ".json_parse" do
- subject { helper.json_parse(json) }
-
- it "parses JSON" do
- expect(subject).to eq(hash)
- end
-
- it "calls the Gitlab::Json class" do
- expect(Gitlab::Json).to receive(:parse).with(json)
-
- subject
- end
- end
-end
diff --git a/spec/helpers/nav/new_dropdown_helper_spec.rb b/spec/helpers/nav/new_dropdown_helper_spec.rb
index 4252e10c922..a69ac8b3c19 100644
--- a/spec/helpers/nav/new_dropdown_helper_spec.rb
+++ b/spec/helpers/nav/new_dropdown_helper_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Nav::NewDropdownHelper, feature_category: :navigation do
track_action: 'click_link_new_project',
track_label: 'plus_menu_dropdown',
track_property: 'navigation_top',
- testid: 'global_new_project_link'
+ testid: 'global-new-project-link'
}
)
)
@@ -107,7 +107,7 @@ RSpec.describe Nav::NewDropdownHelper, feature_category: :navigation do
track_action: 'click_link_new_group',
track_label: 'plus_menu_dropdown',
track_property: 'navigation_top',
- testid: 'global_new_group_link'
+ testid: 'global-new-group-link'
}
)
)
@@ -130,7 +130,7 @@ RSpec.describe Nav::NewDropdownHelper, feature_category: :navigation do
track_action: 'click_link_new_snippet_parent',
track_label: 'plus_menu_dropdown',
track_property: 'navigation_top',
- testid: 'global_new_snippet_link'
+ testid: 'global-new-snippet-link'
}
)
)
diff --git a/spec/helpers/nav/top_nav_helper_spec.rb b/spec/helpers/nav/top_nav_helper_spec.rb
deleted file mode 100644
index 6ffc2cbf694..00000000000
--- a/spec/helpers/nav/top_nav_helper_spec.rb
+++ /dev/null
@@ -1,487 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Nav::TopNavHelper do
- let_it_be(:user) { build_stubbed(:user) }
- let_it_be(:admin) { build_stubbed(:user, :admin) }
- let_it_be(:external_user) { build_stubbed(:user, :external, can_create_group: false) }
-
- let(:current_user) { nil }
-
- before do
- stub_application_setting(snowplow_enabled: true)
- allow(helper).to receive(:current_user) { current_user }
- end
-
- describe '#top_nav_view_model' do
- let(:current_project) { nil }
- let(:current_group) { nil }
- let(:with_current_settings_admin_mode) { false }
- let(:with_header_link_admin_mode) { false }
- let(:with_projects) { false }
- let(:with_groups) { false }
- let(:with_milestones) { false }
- let(:with_snippets) { false }
- let(:with_activity) { false }
-
- let(:subject) { helper.top_nav_view_model(project: current_project, group: current_group) }
-
- let(:menu_tooltip) { 'Main menu' }
-
- before do
- allow(Gitlab::CurrentSettings).to receive(:admin_mode) { with_current_settings_admin_mode }
- allow(helper).to receive(:header_link?).with(:admin_mode) { with_header_link_admin_mode }
-
- # Defaulting all `dashboard_nav_link?` calls to false ensures the EE-specific behavior
- # is not enabled in this CE spec
- allow(helper).to receive(:dashboard_nav_link?).with(anything) { false }
-
- allow(helper).to receive(:dashboard_nav_link?).with(:projects) { with_projects }
- allow(helper).to receive(:dashboard_nav_link?).with(:groups) { with_groups }
- allow(helper).to receive(:dashboard_nav_link?).with(:milestones) { with_milestones }
- allow(helper).to receive(:dashboard_nav_link?).with(:snippets) { with_snippets }
- allow(helper).to receive(:dashboard_nav_link?).with(:activity) { with_activity }
- end
-
- it 'has :menuTooltip' do
- expect(subject[:menuTooltip]).to eq(menu_tooltip)
- end
-
- context 'when current_user is nil (anonymous)' do
- it 'has expected :primary' do
- expected_header = ::Gitlab::Nav::TopNavMenuHeader.build(
- title: 'Explore'
- )
- expected_primary = [
- { href: '/explore', icon: 'project', id: 'project', title: 'Projects' },
- { href: '/explore/groups', icon: 'group', id: 'groups', title: 'Groups' },
- { href: '/explore/projects/topics', icon: 'labels', id: 'topics', title: 'Topics' },
- { href: '/explore/snippets', icon: 'snippet', id: 'snippets', title: 'Snippets' }
- ].map do |item|
- ::Gitlab::Nav::TopNavMenuItem.build(**item)
- end
-
- expect(subject[:primary]).to eq([expected_header, *expected_primary])
- end
-
- it 'has expected :shortcuts' do
- expected_shortcuts = [
- {
- href: '/explore',
- id: 'project-shortcut',
- title: 'Projects',
- css_class: 'dashboard-shortcuts-projects'
- },
- {
- href: '/explore/groups',
- id: 'groups-shortcut',
- title: 'Groups',
- css_class: 'dashboard-shortcuts-groups'
- },
- {
- href: '/explore/projects/topics',
- id: 'topics-shortcut',
- title: 'Topics',
- css_class: 'dashboard-shortcuts-topics'
- },
- {
- href: '/explore/snippets',
- id: 'snippets-shortcut',
- title: 'Snippets',
- css_class: 'dashboard-shortcuts-snippets'
- }
- ].map do |item|
- ::Gitlab::Nav::TopNavMenuItem.build(**item)
- end
-
- expect(subject[:shortcuts]).to eq(expected_shortcuts)
- end
-
- context 'with current nav as project' do
- before do
- helper.nav('project')
- end
-
- it 'has expected :active' do
- expect(subject[:primary].detect { |entry| entry[:id] == 'project' }[:active]).to eq(true)
- end
- end
- end
-
- context 'when current_user is non-admin' do
- let(:current_user) { user }
-
- it 'has no menu items or views by default' do
- expect(subject).to eq({ menuTooltip: menu_tooltip,
- primary: [],
- secondary: [],
- shortcuts: [],
- views: {} })
- end
-
- context 'with projects' do
- let(:with_projects) { true }
- let(:projects_view) { subject[:views][:projects] }
-
- it 'has expected :primary' do
- expected_header = ::Gitlab::Nav::TopNavMenuHeader.build(
- title: 'Switch to'
- )
- expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
- data: {
- track_action: 'click_dropdown',
- track_label: 'projects_dropdown',
- track_property: 'navigation_top',
- testid: 'projects_dropdown'
- },
- icon: 'project',
- id: 'project',
- title: 'Projects',
- view: 'projects'
- )
- expect(subject[:primary]).to eq([expected_header, expected_primary])
- end
-
- it 'has expected :shortcuts' do
- expected_shortcuts = ::Gitlab::Nav::TopNavMenuItem.build(
- id: 'project-shortcut',
- title: 'Projects',
- href: '/dashboard/projects',
- css_class: 'dashboard-shortcuts-projects'
- )
- expect(subject[:shortcuts]).to eq([expected_shortcuts])
- end
-
- context 'projects' do
- it 'has expected :currentUserName' do
- expect(projects_view[:currentUserName]).to eq(current_user.username)
- end
-
- it 'has expected :namespace' do
- expect(projects_view[:namespace]).to eq('projects')
- end
-
- it 'has expected :linksPrimary' do
- expected_links_primary = [
- ::Gitlab::Nav::TopNavMenuItem.build(
- data: {
- testid: 'menu_item_link',
- qa_title: 'View all projects',
- **menu_data_tracking_attrs('view_all_projects')
- },
- href: '/dashboard/projects',
- id: 'your',
- title: 'View all projects'
- )
- ]
- expect(projects_view[:linksPrimary]).to eq(expected_links_primary)
- end
-
- it 'does not have any :linksSecondary' do
- expect(projects_view[:linksSecondary]).to eq([])
- end
-
- context 'with current nav as project' do
- before do
- helper.nav('project')
- end
-
- it 'has expected :active' do
- expect(subject[:primary].detect { |entry| entry[:id] == 'project' }[:active]).to eq(true)
- end
- end
-
- context 'with persisted project' do
- let_it_be(:project) { build_stubbed(:project) }
-
- let(:current_project) { project }
- let(:avatar_url) { 'project_avatar_url' }
-
- before do
- allow(project).to receive(:persisted?) { true }
- allow(project).to receive(:avatar_url) { avatar_url }
- end
-
- it 'has project as :container' do
- expected_container = {
- avatarUrl: avatar_url,
- id: project.id,
- name: project.name,
- namespace: project.full_name,
- webUrl: project_path(project)
- }
-
- expect(projects_view[:currentItem]).to eq(expected_container)
- end
- end
- end
- end
-
- context 'with groups' do
- let(:with_groups) { true }
- let(:groups_view) { subject[:views][:groups] }
-
- it 'has expected :primary' do
- expected_header = ::Gitlab::Nav::TopNavMenuHeader.build(
- title: 'Switch to'
- )
- expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
- data: {
- track_action: 'click_dropdown',
- track_label: 'groups_dropdown',
- track_property: 'navigation_top',
- testid: 'groups_dropdown'
- },
- icon: 'group',
- id: 'groups',
- title: 'Groups',
- view: 'groups'
- )
- expect(subject[:primary]).to eq([expected_header, expected_primary])
- end
-
- it 'has expected :shortcuts' do
- expected_shortcuts = ::Gitlab::Nav::TopNavMenuItem.build(
- id: 'groups-shortcut',
- title: 'Groups',
- href: '/dashboard/groups',
- css_class: 'dashboard-shortcuts-groups'
- )
- expect(subject[:shortcuts]).to eq([expected_shortcuts])
- end
-
- context 'groups' do
- it 'has expected :currentUserName' do
- expect(groups_view[:currentUserName]).to eq(current_user.username)
- end
-
- it 'has expected :namespace' do
- expect(groups_view[:namespace]).to eq('groups')
- end
-
- it 'has expected :linksPrimary' do
- expected_links_primary = [
- ::Gitlab::Nav::TopNavMenuItem.build(
- data: {
- testid: 'menu_item_link',
- qa_title: 'View all groups',
- **menu_data_tracking_attrs('view_all_groups')
- },
- href: '/dashboard/groups',
- id: 'your',
- title: 'View all groups'
- )
- ]
- expect(groups_view[:linksPrimary]).to eq(expected_links_primary)
- end
-
- it 'does not have any :linksSecondary' do
- expect(groups_view[:linksSecondary]).to eq([])
- end
-
- context 'with external user' do
- let(:current_user) { external_user }
-
- it 'does not have create group link' do
- expect(groups_view[:linksSecondary]).to eq([])
- end
- end
-
- context 'with current nav as group' do
- before do
- helper.nav('group')
- end
-
- it 'has expected :active' do
- expect(subject[:primary].detect { |entry| entry[:id] == 'groups' }[:active]).to eq(true)
- end
- end
-
- context 'with persisted group' do
- let_it_be(:group) { build_stubbed(:group) }
-
- let(:current_group) { group }
- let(:avatar_url) { 'group_avatar_url' }
-
- before do
- allow(group).to receive(:persisted?) { true }
- allow(group).to receive(:avatar_url) { avatar_url }
- end
-
- it 'has expected :container' do
- expected_container = {
- avatarUrl: avatar_url,
- id: group.id,
- name: group.name,
- namespace: group.full_name,
- webUrl: group_path(group)
- }
-
- expect(groups_view[:currentItem]).to eq(expected_container)
- end
- end
- end
- end
-
- context 'with milestones' do
- let(:with_milestones) { true }
-
- it 'has expected :shortcuts' do
- expected_shortcuts = ::Gitlab::Nav::TopNavMenuItem.build(
- id: 'milestones-shortcut',
- title: 'Milestones',
- href: '/dashboard/milestones',
- css_class: 'dashboard-shortcuts-milestones'
- )
- expect(subject[:shortcuts]).to eq([expected_shortcuts])
- end
- end
-
- context 'with snippets' do
- let(:with_snippets) { true }
-
- it 'has expected :shortcuts' do
- expected_shortcuts = ::Gitlab::Nav::TopNavMenuItem.build(
- id: 'snippets-shortcut',
- title: 'Snippets',
- href: '/dashboard/snippets',
- css_class: 'dashboard-shortcuts-snippets'
- )
- expect(subject[:shortcuts]).to eq([expected_shortcuts])
- end
- end
-
- context 'with activity' do
- let(:with_activity) { true }
-
- it 'has expected :shortcuts' do
- expected_shortcuts = ::Gitlab::Nav::TopNavMenuItem.build(
- id: 'activity-shortcut',
- title: 'Activity',
- href: '/dashboard/activity',
- css_class: 'dashboard-shortcuts-activity'
- )
- expect(subject[:shortcuts]).to eq([expected_shortcuts])
- end
- end
- end
-
- context 'when current_user is admin' do
- let_it_be(:current_user) { admin }
-
- let(:with_current_settings_admin_mode) { true }
-
- it 'has admin as first :secondary item' do
- expected_admin_item = ::Gitlab::Nav::TopNavMenuItem.build(
- data: {
- qa_selector: 'admin_area_link',
- **menu_data_tracking_attrs('admin')
- },
- id: 'admin',
- title: 'Admin',
- icon: 'admin',
- href: '/admin'
- )
-
- expect(subject[:secondary].first).to eq(expected_admin_item)
- end
-
- context 'with header link admin_mode true' do
- let(:with_header_link_admin_mode) { true }
-
- it 'has leave_admin_mode as last :secondary item' do
- expected_leave_admin_mode_item = ::Gitlab::Nav::TopNavMenuItem.build(
- id: 'leave_admin_mode',
- title: 'Leave admin mode',
- icon: 'lock-open',
- href: '/admin/session/destroy',
- data: { method: 'post', **menu_data_tracking_attrs('leave_admin_mode') }
- )
- expect(subject[:secondary].last).to eq(expected_leave_admin_mode_item)
- end
- end
-
- context 'with header link admin_mode false' do
- let(:with_header_link_admin_mode) { false }
-
- it 'has enter_admin_mode as last :secondary item' do
- expected_enter_admin_mode_item = ::Gitlab::Nav::TopNavMenuItem.build(
- data: {
- testid: 'menu_item_link',
- qa_title: 'Enter admin mode',
- **menu_data_tracking_attrs('enter_admin_mode')
- },
- id: 'enter_admin_mode',
- title: 'Enter admin mode',
- icon: 'lock',
- href: '/admin/session/new'
- )
- expect(subject[:secondary].last).to eq(expected_enter_admin_mode_item)
- end
- end
- end
- end
-
- describe '#top_nav_responsive_view_model' do
- let_it_be(:project) { create(:project) }
- let_it_be(:group) { create(:group) }
-
- let(:with_search) { false }
- let(:with_new_view_model) { nil }
-
- let(:subject) { helper.top_nav_responsive_view_model(project: project, group: group) }
-
- before do
- allow(helper).to receive(:header_link?).with(:search) { with_search }
- allow(helper).to receive(:new_dropdown_view_model).with(project: project, group: group) { with_new_view_model }
- end
-
- it 'has nil new subview' do
- expect(subject[:views][:new]).to be_nil
- end
-
- it 'has nil search subview' do
- expect(subject[:views][:search]).to be_nil
- end
-
- context 'with search' do
- let(:with_search) { true }
-
- it 'has search subview' do
- expect(subject[:views][:search]).to eq(
- ::Gitlab::Nav::TopNavMenuItem.build(
- id: 'search',
- title: 'Search',
- icon: 'search',
- href: search_path
- )
- )
- end
- end
-
- context 'with new' do
- let(:with_new_view_model) { { menu_sections: [{ id: 'test-new-view-model' }] } }
-
- it 'has new subview' do
- expect(subject[:views][:new]).to eq(with_new_view_model)
- end
- end
-
- context 'with new and no menu_sections' do
- let(:with_new_view_model) { { menu_sections: [] } }
-
- it 'has new subview' do
- expect(subject[:views][:new]).to be_nil
- end
- end
- end
-
- def menu_data_tracking_attrs(label)
- {
- track_label: "menu_#{label}",
- track_action: 'click_dropdown',
- track_property: 'navigation_top'
- }
- end
-end
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index 62c0d1b1ff7..58d39caa90c 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -55,23 +55,6 @@ RSpec.describe NotesHelper, feature_category: :team_planning do
end
end
- describe "#notes_max_access_for_users" do
- it 'returns access levels' do
- expect(helper.note_max_access_for_user(owner_note)).to eq(Gitlab::Access::OWNER)
- expect(helper.note_max_access_for_user(maintainer_note)).to eq(Gitlab::Access::MAINTAINER)
- expect(helper.note_max_access_for_user(reporter_note)).to eq(Gitlab::Access::REPORTER)
- end
-
- it 'handles access in different projects' do
- second_project = create(:project)
- second_project.add_reporter(maintainer)
- other_note = create(:note, author: maintainer, project: second_project)
-
- expect(helper.note_max_access_for_user(maintainer_note)).to eq(Gitlab::Access::MAINTAINER)
- expect(helper.note_max_access_for_user(other_note)).to eq(Gitlab::Access::REPORTER)
- end
- end
-
describe '#discussion_path' do
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/helpers/notifications_helper_spec.rb b/spec/helpers/notifications_helper_spec.rb
index a5338659659..8acd72b76a1 100644
--- a/spec/helpers/notifications_helper_spec.rb
+++ b/spec/helpers/notifications_helper_spec.rb
@@ -13,12 +13,6 @@ RSpec.describe NotificationsHelper do
it { expect(notification_icon(:custom)).to equal('') }
end
- describe 'notification_title' do
- it { expect(notification_title(:watch)).to match('Watch') }
- it { expect(notification_title(:mention)).to match('On mention') }
- it { expect(notification_title(:global)).to match('Global') }
- end
-
describe '#notification_icon_level' do
let(:user) { create(:user) }
let(:global_setting) { user.global_notification_setting }
diff --git a/spec/helpers/organizations/organization_helper_spec.rb b/spec/helpers/organizations/organization_helper_spec.rb
index 9d55d2a84f8..594013d515b 100644
--- a/spec/helpers/organizations/organization_helper_spec.rb
+++ b/spec/helpers/organizations/organization_helper_spec.rb
@@ -94,9 +94,12 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
describe '#home_organization_setting_app_data' do
it 'returns expected json' do
+ current_user = build_stubbed(:user)
+ allow(helper).to receive(:current_user).and_return(current_user)
+
expect(Gitlab::Json.parse(helper.home_organization_setting_app_data)).to eq(
{
- 'initial_selection' => 1
+ 'initial_selection' => current_user.user_preference.home_organization_id
}
)
end
@@ -119,10 +122,13 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
end
describe '#organization_user_app_data' do
- it 'returns expected data object' do
- expect(helper.organization_user_app_data(organization)).to eq(
+ it 'returns expected json' do
+ expect(Gitlab::Json.parse(helper.organization_user_app_data(organization))).to eq(
{
- organization_gid: organization.to_global_id
+ 'organization_gid' => organization.to_global_id.to_s,
+ 'paths' => {
+ 'admin_user' => admin_user_path(:id)
+ }
}
)
end
diff --git a/spec/helpers/projects/pipeline_helper_spec.rb b/spec/helpers/projects/pipeline_helper_spec.rb
index 7e117fe0cce..7b8b349c3e5 100644
--- a/spec/helpers/projects/pipeline_helper_spec.rb
+++ b/spec/helpers/projects/pipeline_helper_spec.rb
@@ -48,22 +48,8 @@ RSpec.describe Projects::PipelineHelper do
graphql_resource_etag: graphql_etag_pipeline_path(pipeline),
pipeline_iid: pipeline.iid,
pipelines_path: project_pipelines_path(project),
- name: pipeline.name,
- total_jobs: pipeline.total_size,
yaml_errors: pipeline.yaml_errors,
- failure_reason: pipeline.failure_reason,
- triggered_by_path: '',
- schedule: pipeline.schedule?.to_s,
- trigger: pipeline.trigger?.to_s,
- child: pipeline.child?.to_s,
- latest: pipeline.latest?.to_s,
- merge_train_pipeline: pipeline.merge_train_pipeline?.to_s,
- invalid: pipeline.has_yaml_errors?.to_s,
- failed: pipeline.failure_reason?.to_s,
- auto_devops: pipeline.auto_devops_source?.to_s,
- detached: pipeline.detached_merge_request_pipeline?.to_s,
- stuck: pipeline.stuck?.to_s,
- ref_text: pipeline.ref_text
+ trigger: pipeline.trigger?.to_s
})
end
end
diff --git a/spec/helpers/projects/terraform_helper_spec.rb b/spec/helpers/projects/terraform_helper_spec.rb
index 9c2f009be26..49a49e19ea9 100644
--- a/spec/helpers/projects/terraform_helper_spec.rb
+++ b/spec/helpers/projects/terraform_helper_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Projects::TerraformHelper do
end
it 'includes access token path' do
- expect(subject[:access_tokens_path]).to eq(profile_personal_access_tokens_path)
+ expect(subject[:access_tokens_path]).to eq(user_settings_personal_access_tokens_path)
end
it 'includes username' do
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 1e05cf6a7ac..7ce1a7ef2ea 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -276,7 +276,7 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
it 'returns message prompting user to set password or set up a PAT' do
stub_application_setting(password_authentication_enabled_for_git?: true)
- expect(helper.no_password_message).to eq('Your account is authenticated with SSO or SAML. To <a href="/help/topics/git/terminology#pull-and-push" target="_blank" rel="noopener noreferrer">push and pull</a> over HTTP with Git using this account, you must <a href="/-/profile/password/edit">set a password</a> or <a href="/-/profile/personal_access_tokens">set up a Personal Access Token</a> to use instead of a password. For more information, see <a href="/help/gitlab-basics/start-using-git#clone-with-https" target="_blank" rel="noopener noreferrer">Clone with HTTPS</a>.')
+ expect(helper.no_password_message).to eq('Your account is authenticated with SSO or SAML. To <a href="/help/topics/git/terminology#pull-and-push" target="_blank" rel="noopener noreferrer">push and pull</a> over HTTP with Git using this account, you must <a href="/-/user_settings/password/edit">set a password</a> or <a href="/-/user_settings/personal_access_tokens">set up a Personal Access Token</a> to use instead of a password. For more information, see <a href="/help/gitlab-basics/start-using-git#clone-with-https" target="_blank" rel="noopener noreferrer">Clone with HTTPS</a>.')
end
end
@@ -284,7 +284,7 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
it 'returns message prompting user to set up a PAT' do
stub_application_setting(password_authentication_enabled_for_git?: false)
- expect(helper.no_password_message).to eq('Your account is authenticated with SSO or SAML. To <a href="/help/topics/git/terminology#pull-and-push" target="_blank" rel="noopener noreferrer">push and pull</a> over HTTP with Git using this account, you must <a href="/-/profile/personal_access_tokens">set up a Personal Access Token</a> to use instead of a password. For more information, see <a href="/help/gitlab-basics/start-using-git#clone-with-https" target="_blank" rel="noopener noreferrer">Clone with HTTPS</a>.')
+ expect(helper.no_password_message).to eq('Your account is authenticated with SSO or SAML. To <a href="/help/topics/git/terminology#pull-and-push" target="_blank" rel="noopener noreferrer">push and pull</a> over HTTP with Git using this account, you must <a href="/-/user_settings/personal_access_tokens">set up a Personal Access Token</a> to use instead of a password. For more information, see <a href="/help/gitlab-basics/start-using-git#clone-with-https" target="_blank" rel="noopener noreferrer">Clone with HTTPS</a>.')
end
end
end
@@ -892,7 +892,8 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
featureFlagsAccessLevel: project.project_feature.feature_flags_access_level,
releasesAccessLevel: project.project_feature.releases_access_level,
infrastructureAccessLevel: project.project_feature.infrastructure_access_level,
- modelExperimentsAccessLevel: project.project_feature.model_experiments_access_level
+ modelExperimentsAccessLevel: project.project_feature.model_experiments_access_level,
+ modelRegistryAccessLevel: project.project_feature.model_registry_access_level
)
end
diff --git a/spec/helpers/sidebars_helper_spec.rb b/spec/helpers/sidebars_helper_spec.rb
index c9131ca518f..421b1c178aa 100644
--- a/spec/helpers/sidebars_helper_spec.rb
+++ b/spec/helpers/sidebars_helper_spec.rb
@@ -131,8 +131,10 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
it 'returns sidebar values from user', :use_clean_rails_memory_store_caching do
expect(subject).to include({
is_logged_in: true,
+ is_admin: false,
name: user.name,
username: user.username,
+ admin_url: admin_root_url,
avatar_url: user.avatar_url,
has_link_to_profile: helper.current_user_menu?(:profile),
link_to_profile: user_path(user),
@@ -174,6 +176,14 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
})
end
+ context 'when user is admin' do
+ before do
+ allow(user).to receive(:can_admin_all_resources?).and_return(true)
+ end
+
+ it { is_expected.to include({ is_admin: true }) }
+ end
+
describe "shortcut links" do
describe "as the anonymous user" do
let_it_be(:user) { nil }
diff --git a/spec/helpers/stat_anchors_helper_spec.rb b/spec/helpers/stat_anchors_helper_spec.rb
index f3830bf4172..41ac7509c39 100644
--- a/spec/helpers/stat_anchors_helper_spec.rb
+++ b/spec/helpers/stat_anchors_helper_spec.rb
@@ -8,6 +8,10 @@ RSpec.describe StatAnchorsHelper do
describe '#stat_anchor_attrs' do
subject { helper.stat_anchor_attrs(anchor) }
+ before do
+ stub_feature_flags(project_overview_reorg: false)
+ end
+
context 'when anchor is a link' do
let(:anchor) { anchor_klass.new(true) }
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index b2da9fa8801..e840dddbedd 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe StorageHelper do
+RSpec.describe StorageHelper, feature_category: :consumables_cost_management do
describe "#storage_counter" do
it "formats bytes to one decimal place" do
expect(helper.storage_counter(1.23.megabytes)).to eq("1.2 MiB")
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index 4680a43058d..bffb240dae4 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -48,19 +48,6 @@ RSpec.describe TodosHelper do
create(:todo, target: project, action: Todo::MEMBER_ACCESS_REQUESTED)
end
- describe '#todos_count_format' do
- it 'shows fuzzy count for 100 or more items' do
- expect(helper.todos_count_format(100)).to eq '99+'
- expect(helper.todos_count_format(1000)).to eq '99+'
- end
-
- it 'shows exact count for 99 or fewer items' do
- expect(helper.todos_count_format(99)).to eq '99'
- expect(helper.todos_count_format(50)).to eq '50'
- expect(helper.todos_count_format(1)).to eq '1'
- end
- end
-
describe '#todo_target_name' do
context 'when given a design' do
let(:todo) { design_todo }
diff --git a/spec/helpers/webpack_helper_spec.rb b/spec/helpers/webpack_helper_spec.rb
index 23585c47239..8cbc4db9108 100644
--- a/spec/helpers/webpack_helper_spec.rb
+++ b/spec/helpers/webpack_helper_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe WebpackHelper do
stub_feature_flags(vite: true)
allow(helper).to receive(:vite_javascript_tag).and_return('vite')
- allow(helper).to receive(:vite_running).and_return(true)
+ allow(helper).to receive(:vite_enabled?).and_return(true)
end
describe '#webpack_bundle_tag' do
diff --git a/spec/initializers/6_validations_spec.rb b/spec/initializers/6_validations_spec.rb
index 4d317a7583e..6422dccc6d5 100644
--- a/spec/initializers/6_validations_spec.rb
+++ b/spec/initializers/6_validations_spec.rb
@@ -7,10 +7,10 @@ RSpec.describe '6_validations' do
describe 'validate_storages_config' do
context 'with correct settings' do
before do
- mock_storages(
- 'storage' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c'),
- 'storage.with_VALID-chars01' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/d'),
- 'gitaly.c.gitlab-prd-164c.internal' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/e')
+ stub_storage_settings(
+ 'storage' => {},
+ 'storage.with_VALID-chars01' => {},
+ 'gitaly.c.gitlab-prd-164c.internal' => {}
)
end
@@ -21,7 +21,7 @@ RSpec.describe '6_validations' do
context 'with invalid storage names' do
before do
- mock_storages('name with spaces' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/paths/a/b/c'))
+ stub_storage_settings('name with spaces' => {})
end
it 'throws an error' do
@@ -29,8 +29,4 @@ RSpec.describe '6_validations' do
end
end
end
-
- def mock_storages(storages)
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
- end
end
diff --git a/spec/initializers/circuitbox_spec.rb b/spec/initializers/circuitbox_spec.rb
index 64e384e4d22..d0a6d6c9f91 100644
--- a/spec/initializers/circuitbox_spec.rb
+++ b/spec/initializers/circuitbox_spec.rb
@@ -3,12 +3,7 @@
require 'spec_helper'
RSpec.describe 'circuitbox', feature_category: :shared do
- it 'does not configure Circuitbox', unless: Gitlab.ee? do
- expect(Circuitbox.default_circuit_store).to be_a(Circuitbox::MemoryStore)
- expect(Circuitbox.default_notifier).to be_a(Circuitbox::Notifier::ActiveSupport)
- end
-
- it 'configures Circuitbox', if: Gitlab.ee? do
+ it 'configures Circuitbox' do
expect(Circuitbox.default_circuit_store).to be_a(Gitlab::CircuitBreaker::Store)
expect(Circuitbox.default_notifier).to be_a(Gitlab::CircuitBreaker::Notifier)
end
diff --git a/spec/initializers/forbid_sidekiq_in_transactions_spec.rb b/spec/initializers/forbid_sidekiq_in_transactions_spec.rb
index 7b1907a7451..a8affbf8c69 100644
--- a/spec/initializers/forbid_sidekiq_in_transactions_spec.rb
+++ b/spec/initializers/forbid_sidekiq_in_transactions_spec.rb
@@ -46,6 +46,10 @@ RSpec.describe 'Sidekiq::Worker' do
context 'for mailers' do
let(:mailer_class) do
Class.new(ApplicationMailer) do
+ def self.name
+ 'Notify'
+ end
+
def test_mail; end
end
end
diff --git a/spec/keeps/helpers/postgres_ai_spec.rb b/spec/keeps/helpers/postgres_ai_spec.rb
new file mode 100644
index 00000000000..e3003be3460
--- /dev/null
+++ b/spec/keeps/helpers/postgres_ai_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require './keeps/helpers/postgres_ai'
+
+RSpec.describe Keeps::Helpers::PostgresAi, feature_category: :tooling do
+ let(:connection_string) { 'host=localhost port=1234 user=user dbname=dbname' }
+ let(:password) { 'password' }
+ let(:pg_client) { instance_double(PG::Connection) }
+
+ before do
+ stub_env('POSTGRES_AI_CONNECTION_STRING', connection_string)
+ stub_env('POSTGRES_AI_PASSWORD', password)
+
+ allow(PG).to receive(:connect).with(connection_string, password: password).and_return(pg_client)
+ end
+
+ describe '#initialize' do
+ shared_examples 'no credentials supplied' do
+ it do
+ expect { described_class.new }.to raise_error(described_class::Error, "No credentials supplied")
+ end
+ end
+
+ context 'with no connection string' do
+ let(:connection_string) { '' }
+
+ include_examples 'no credentials supplied'
+ end
+
+ context 'with no password' do
+ let(:password) { '' }
+
+ include_examples 'no credentials supplied'
+ end
+ end
+
+ describe '#fetch_background_migration_status' do
+ let(:job_class_name) { 'ExampleJob' }
+ let(:query) do
+ <<~SQL
+ SELECT id, created_at, updated_at, finished_at, started_at, status, job_class_name
+ FROM batched_background_migrations
+ WHERE job_class_name = $1::text
+ SQL
+ end
+
+ let(:query_response) { double }
+
+ subject(:result) { described_class.new.fetch_background_migration_status(job_class_name) }
+
+ it 'fetches background migration data from Postgres AI' do
+ expect(pg_client).to receive(:exec_params).with(query, [job_class_name]).and_return(query_response)
+ expect(result).to eq(query_response)
+ end
+ end
+end
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index b74f5bf2de8..ee0a58a4e53 100644
--- a/spec/lib/api/ci/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe API::Ci::Helpers::Runner do
end
end
- describe '#current_runner_manager', :freeze_time, feature_category: :runner_fleet do
+ describe '#current_runner_manager', :freeze_time, feature_category: :fleet_visibility do
let(:runner) { create(:ci_runner, token: 'foo') }
let(:runner_manager) { create(:ci_runner_machine, runner: runner, system_xid: 'bar', contacted_at: 1.hour.ago) }
diff --git a/spec/lib/api/entities/ci/job_request/image_spec.rb b/spec/lib/api/entities/ci/job_request/image_spec.rb
index 14d4a074fce..666ec31d3d9 100644
--- a/spec/lib/api/entities/ci/job_request/image_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/image_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
RSpec.describe API::Entities::Ci::JobRequest::Image do
let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }] }
- let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports, pull_policy: ['if-not-present']) }
+ let(:image) do
+ double(name: 'image_name', entrypoint: ['foo'], executor_opts: {}, ports: ports, pull_policy: ['if-not-present'])
+ end
+
let(:entity) { described_class.new(image) }
subject { entity.as_json }
@@ -29,6 +32,10 @@ RSpec.describe API::Entities::Ci::JobRequest::Image do
end
end
+ it 'returns the executor_opts options' do
+ expect(subject[:executor_opts]).to eq({})
+ end
+
it 'returns the pull policy' do
expect(subject[:pull_policy]).to eq(['if-not-present'])
end
diff --git a/spec/lib/api/entities/ci/job_request/service_spec.rb b/spec/lib/api/entities/ci/job_request/service_spec.rb
index 11350f7c41b..c2331799314 100644
--- a/spec/lib/api/entities/ci/job_request/service_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/service_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::Entities::Ci::JobRequest::Service do
::Gitlab::Ci::Build::Image,
name: 'image_name',
entrypoint: ['foo'],
+ executor_opts: {},
ports: ports,
pull_policy: ['if-not-present'],
alias: 'alias',
@@ -25,6 +26,7 @@ RSpec.describe API::Entities::Ci::JobRequest::Service do
expect(result).to eq(
name: 'image_name',
entrypoint: ['foo'],
+ executor_opts: {},
ports: ports,
pull_policy: ['if-not-present'],
alias: 'alias',
diff --git a/spec/lib/api/entities/hook_spec.rb b/spec/lib/api/entities/hook_spec.rb
new file mode 100644
index 00000000000..45648d6fb64
--- /dev/null
+++ b/spec/lib/api/entities/hook_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Hook, feature_category: :webhooks do
+ let(:hook) { create(:project_hook) }
+ let(:with_url_variables) { true }
+ let(:entity) { described_class.new(hook, with_url_variables: with_url_variables) }
+
+ subject(:json) { entity.as_json }
+
+ it 'exposes correct attributes' do
+ expect(json.keys).to contain_exactly(:alert_status, :created_at, :disabled_until, :enable_ssl_verification, :id,
+ :merge_requests_events, :push_events, :repository_update_events, :tag_push_events, :url, :url_variables
+ )
+ end
+
+ context 'when `with_url_variables` is set to false' do
+ let(:with_url_variables) { false }
+
+ it 'does not expose `with_url_variables` field' do
+ expect(json.keys).not_to include(:url_variables)
+ end
+ end
+end
diff --git a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
index 1664d9f18d2..f631a9cb803 100644
--- a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
+++ b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do
- let_it_be(:candidate) { build(:ml_candidates) }
+ let_it_be(:candidate) { build_stubbed(:ml_candidates, internal_id: 1) }
- subject { described_class.new(candidate, packages_url: 'http://example.com').as_json }
+ subject { described_class.new(candidate).as_json }
context 'when start_time is nil' do
it { expect(subject[:start_time]).to eq(0) }
@@ -66,8 +66,19 @@ RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do
end
describe 'artifact_uri' do
- it 'is not implemented' do
- expect(subject[:artifact_uri]).to eq("http://example.com#{candidate.artifact_root}")
+ context 'when candidate does not belong to a model version' do
+ it 'returns the generic package (legacy) format of the artifact_uri' do
+ expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/generic#{candidate.artifact_root}")
+ end
+ end
+
+ context 'when candidate belongs to a model version' do
+ let!(:version) { create(:ml_model_versions, :with_package) }
+ let!(:candidate) { version.candidate }
+
+ it 'returns the model version format of the artifact_uri' do
+ expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/ml_models/#{version.model.name}/#{version.version}")
+ end
end
end
diff --git a/spec/lib/api/entities/project_import_status_spec.rb b/spec/lib/api/entities/project_import_status_spec.rb
index 5d7f06dc78e..8c397c30b78 100644
--- a/spec/lib/api/entities/project_import_status_spec.rb
+++ b/spec/lib/api/entities/project_import_status_spec.rb
@@ -105,6 +105,7 @@ RSpec.describe API::Entities::ProjectImportStatus, :aggregate_failures, feature_
let(:entity) { described_class.new(project) }
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
::Gitlab::GithubImport::ObjectCounter.increment(project, :issues, :fetched, value: 10)
::Gitlab::GithubImport::ObjectCounter.increment(project, :issues, :imported, value: 8)
end
diff --git a/spec/lib/api/entities/projects/repository_storage_move_spec.rb b/spec/lib/api/entities/projects/repository_storage_move_spec.rb
index 81f5d98b713..ae6c469bb64 100644
--- a/spec/lib/api/entities/projects/repository_storage_move_spec.rb
+++ b/spec/lib/api/entities/projects/repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::Projects::RepositoryStorageMove do
+RSpec.describe API::Entities::Projects::RepositoryStorageMove, feature_category: :source_code_management do
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/lib/api/entities/snippets/repository_storage_move_spec.rb b/spec/lib/api/entities/snippets/repository_storage_move_spec.rb
index a848afbcff9..ceeae230b25 100644
--- a/spec/lib/api/entities/snippets/repository_storage_move_spec.rb
+++ b/spec/lib/api/entities/snippets/repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::Snippets::RepositoryStorageMove do
+RSpec.describe API::Entities::Snippets::RepositoryStorageMove, feature_category: :source_code_management do
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/lib/api/helpers/import_github_helpers_spec.rb b/spec/lib/api/helpers/import_github_helpers_spec.rb
index 3324e38660c..7f8fbad1273 100644
--- a/spec/lib/api/helpers/import_github_helpers_spec.rb
+++ b/spec/lib/api/helpers/import_github_helpers_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe API::Helpers::ImportGithubHelpers, feature_category: :importers d
helper = Class.new.include(described_class).new
def helper.params = {
personal_access_token: 'foo',
- additional_access_tokens: 'bar',
github_hostname: 'github.example.com'
}
helper
@@ -21,7 +20,7 @@ RSpec.describe API::Helpers::ImportGithubHelpers, feature_category: :importers d
describe '#access_params' do
it 'makes the passed in personal access token and extra tokens accessible' do
- expect(subject.access_params).to eq({ github_access_token: 'foo', additional_access_tokens: 'bar' })
+ expect(subject.access_params).to eq({ github_access_token: 'foo' })
end
end
diff --git a/spec/lib/api/ml/mlflow/api_helpers_spec.rb b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
index 757a73ed612..3e7a0187d86 100644
--- a/spec/lib/api/ml/mlflow/api_helpers_spec.rb
+++ b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
@@ -5,39 +5,6 @@ require 'spec_helper'
RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
include described_class
- describe '#packages_url' do
- subject { packages_url }
-
- let_it_be(:user_project) { build_stubbed(:project) }
-
- context 'with an empty relative URL root' do
- before do
- allow(Gitlab::Application.routes).to receive(:default_url_options)
- .and_return(protocol: 'http', host: 'localhost', script_name: '')
- end
-
- it { is_expected.to eql("http://localhost/api/v4/projects/#{user_project.id}/packages/generic") }
- end
-
- context 'with a forward slash relative URL root' do
- before do
- allow(Gitlab::Application.routes).to receive(:default_url_options)
- .and_return(protocol: 'http', host: 'localhost', script_name: '/')
- end
-
- it { is_expected.to eql("http://localhost/api/v4/projects/#{user_project.id}/packages/generic") }
- end
-
- context 'with a relative URL root' do
- before do
- allow(Gitlab::Application.routes).to receive(:default_url_options)
- .and_return(protocol: 'http', host: 'localhost', script_name: '/gitlab/root')
- end
-
- it { is_expected.to eql("http://localhost/gitlab/root/api/v4/projects/#{user_project.id}/packages/generic") }
- end
- end
-
describe '#candidates_order_params' do
using RSpec::Parameterized::TableSyntax
@@ -61,4 +28,47 @@ RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
end
end
end
+
+ describe '#model_order_params' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { model_order_params(params) }
+
+ where(:input, :order_by, :sort) do
+ '' | 'name' | 'asc'
+ 'name' | 'name' | 'asc'
+ 'name DESC' | 'name' | 'desc'
+ 'last_updated_timestamp' | 'updated_at' | 'asc'
+ 'last_updated_timestamp asc' | 'updated_at' | 'asc'
+ 'last_updated_timestamp DESC' | 'updated_at' | 'desc'
+ end
+ with_them do
+ let(:params) { { order_by: input } }
+
+ it 'is correct' do
+ is_expected.to include({ order_by: order_by, sort: sort })
+ end
+ end
+ end
+
+ describe '#model_filter_params' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { model_filter_params(params) }
+
+ where(:input, :output) do
+ '' | {}
+ 'name=""' | { name: '' }
+ 'name=foo' | { name: 'foo' }
+ 'name="foo"' | { name: 'foo' }
+ 'invalid="foo"' | {}
+ end
+ with_them do
+ let(:params) { { filter: input } }
+
+ it 'is correct' do
+ is_expected.to eq(output)
+ end
+ end
+ end
end
diff --git a/spec/lib/backup/database_configuration_spec.rb b/spec/lib/backup/database_configuration_spec.rb
new file mode 100644
index 00000000000..b7fa9f161c1
--- /dev/null
+++ b/spec/lib/backup/database_configuration_spec.rb
@@ -0,0 +1,239 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::DatabaseConfiguration, :reestablished_active_record_base, feature_category: :backup_restore do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:connection_name) { 'main' }
+
+ subject(:config) { described_class.new(connection_name) }
+
+ describe '#initialize' do
+ it 'initializes with the provided connection_name' do
+ expect_next_instance_of(described_class) do |config|
+ expect(config.connection_name).to eq(connection_name)
+ end
+
+ config
+ end
+ end
+
+ describe '#activerecord_configuration' do
+ it 'returns a ActiveRecord::DatabaseConfigurations::HashConfig' do
+ expect(config.activerecord_configuration).to be_a ActiveRecord::DatabaseConfigurations::HashConfig
+ end
+ end
+
+ context 'with configuration override feature' do
+ let(:application_config) do
+ {
+ adapter: 'postgresql',
+ host: 'some_host',
+ port: '5432'
+ }
+ end
+
+ let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
+
+ before do
+ allow(config).to receive(:original_activerecord_config).and_return(application_config)
+ end
+
+ shared_context 'with generic database with overridden values' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_PGPORT' | '1543'
+ 'GITLAB_BACKUP_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ shared_context 'with generic database with overridden values using current database prefix' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_MAIN_PGPORT' | '1543'
+ 'GITLAB_BACKUP_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_MAIN_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ shared_context 'with generic database with overridden values for a different database prefix' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_CI_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_CI_PGPORT' | '1543'
+ 'GITLAB_BACKUP_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_CI_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_CI_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_CI_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ describe('#pg_env_variables') do
+ context 'with provided ENV variables' do
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ context 'when generic database configuration is overridden' do
+ include_context "with generic database with overridden values"
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
+
+ it 'PostgreSQL ENV overrides application configuration' do
+ expect(config.pg_env_variables).to include({ pg_env => overridden_value })
+ end
+ end
+ end
+
+ context 'when specific database configuration is overridden' do
+ context 'and environment variables are for the current database name' do
+ include_context 'with generic database with overridden values using current database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
+
+ it 'PostgreSQL ENV overrides application configuration' do
+ expect(config.pg_env_variables).to include({ pg_env => overridden_value })
+ end
+ end
+ end
+
+ context 'and environment variables are for another database' do
+ include_context 'with generic database with overridden values for a different database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
+
+ it 'PostgreSQL ENV is expected to equal application configuration' do
+ expect(config.pg_env_variables).to eq(
+ {
+ 'PGHOST' => application_config[:host],
+ 'PGPORT' => application_config[:port]
+ }
+ )
+ end
+ end
+ end
+ end
+ end
+
+ context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
+ it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
+ stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
+ stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specific_user')
+
+ expect(config.pg_env_variables['PGUSER']).to eq('specific_user')
+ end
+ end
+ end
+
+ describe('#activerecord_variables') do
+ context 'with provided ENV variables' do
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ context 'when generic database configuration is overridden' do
+ include_context "with generic database with overridden values"
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
+
+ it 'ActiveRecord backup configuration overrides application configuration' do
+ expect(config.activerecord_variables).to eq(
+ application_config.merge(active_record_key => overridden_value)
+ )
+ end
+ end
+ end
+
+ context 'when specific database configuration is overridden' do
+ context 'and environment variables are for the current database name' do
+ include_context 'with generic database with overridden values using current database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
+
+ it 'ActiveRecord backup configuration overrides application configuration' do
+ expect(config.activerecord_variables).to eq(
+ application_config.merge(active_record_key => overridden_value)
+ )
+ end
+ end
+ end
+
+ context 'and environment variables are for another database' do
+ include_context 'with generic database with overridden values for a different database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
+
+ it 'ActiveRecord backup configuration is expected to equal application configuration' do
+ expect(config.activerecord_variables).to eq(application_config)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
+ with_them do
+ it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
+ stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
+ stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specific_user')
+
+ expect(config.activerecord_variables[:username]).to eq('specific_user')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/backup/database_connection_spec.rb b/spec/lib/backup/database_connection_spec.rb
new file mode 100644
index 00000000000..b56da3d99f7
--- /dev/null
+++ b/spec/lib/backup/database_connection_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::DatabaseConnection, :reestablished_active_record_base, feature_category: :backup_restore do
+ let(:connection_name) { 'main' }
+ let(:snapshot_id_pattern) { /[A-Z0-9]{8}-[A-Z0-9]{8}-[0-9]/ }
+
+ subject(:backup_connection) { described_class.new(connection_name) }
+
+ describe '#initialize' do
+ it 'initializes database_configuration with the provided connection_name' do
+ expect(Backup::DatabaseConfiguration).to receive(:new).with(connection_name).and_call_original
+
+ backup_connection
+ end
+ end
+
+ describe '#connection_name' do
+ it 'returns the same connection name used during initialization' do
+ expect(backup_connection.connection_name).to eq(connection_name)
+ end
+ end
+
+ describe '#connection' do
+ it 'is an instance of a ActiveRecord::Base.connection' do
+ backup_connection.connection.is_a? Gitlab::Database::LoadBalancing::ConnectionProxy
+ end
+ end
+
+ describe '#database_configuration' do
+ it 'returns database configuration' do
+ expect(backup_connection.database_configuration).to be_a(Backup::DatabaseConfiguration)
+ end
+ end
+
+ describe '#snapshot_id' do
+ it "returns nil when snapshot has not been triggered" do
+ expect(backup_connection.snapshot_id).to be_nil
+ end
+
+ context 'when a snapshot transaction is open', :delete do
+ let!(:snapshot_id) { backup_connection.export_snapshot! }
+
+ it 'returns the snapshot_id in the expected format' do
+ expect(backup_connection.snapshot_id).to match(snapshot_id_pattern)
+ end
+
+ it 'returns the snapshot_id equal to the one returned by #export_snapshot!' do
+ expect(backup_connection.snapshot_id).to eq(snapshot_id)
+ end
+
+ it "returns nil after a snapshot is released" do
+ backup_connection.release_snapshot!
+
+ expect(backup_connection.snapshot_id).to be_nil
+ end
+ end
+ end
+
+ describe '#export_snapshot!', :delete do
+ it 'returns a snapshot_id in the expected format' do
+ expect(backup_connection.export_snapshot!).to match(snapshot_id_pattern)
+ end
+
+ it 'opens a transaction with correct isolation format and triggers a snapshot generation' do
+ expect(backup_connection.connection).to receive(:begin_transaction).with(
+ isolation: :repeatable_read
+ ).and_call_original
+
+ expect(backup_connection.connection).to receive(:select_value).with(
+ "SELECT pg_export_snapshot()"
+ ).and_call_original
+
+ backup_connection.export_snapshot!
+ end
+
+ it 'disables transaction time out' do
+ expect_next_instance_of(Gitlab::Database::TransactionTimeoutSettings) do |transaction_settings|
+ expect(transaction_settings).to receive(:disable_timeouts).and_call_original
+ end
+
+ backup_connection.export_snapshot!
+ end
+ end
+
+ describe '#release_snapshot!', :delete do
+ it 'clears out existing snapshot_id' do
+ snapshot_id = backup_connection.export_snapshot!
+
+ expect { backup_connection.release_snapshot! }.to change { backup_connection.snapshot_id }
+ .from(snapshot_id).to(nil)
+ end
+
+ it 'executes a transaction rollback' do
+ backup_connection.export_snapshot!
+
+ expect(backup_connection.connection).to receive(:rollback_transaction).and_call_original
+
+ backup_connection.release_snapshot!
+ end
+ end
+end
diff --git a/spec/lib/backup/database_model_spec.rb b/spec/lib/backup/database_model_spec.rb
index c9d036b37f8..9fab5cbc1c0 100644
--- a/spec/lib/backup/database_model_spec.rb
+++ b/spec/lib/backup/database_model_spec.rb
@@ -8,10 +8,10 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
let(:gitlab_database_name) { 'main' }
describe '#connection' do
- subject { described_class.new(gitlab_database_name).connection }
+ subject(:connection) { described_class.new(gitlab_database_name).connection }
it 'an instance of a ActiveRecord::Base.connection' do
- subject.is_a? ActiveRecord::Base.connection.class # rubocop:disable Database/MultipleDatabases
+ connection.is_a? ActiveRecord::Base.connection.class # rubocop:disable Database/MultipleDatabases -- We actually need an ActiveRecord::Base here
end
end
@@ -24,7 +24,7 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
}
end
- subject { described_class.new(gitlab_database_name).config }
+ subject(:config) { described_class.new(gitlab_database_name).config }
before do
allow(
@@ -34,11 +34,11 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
shared_examples 'no configuration is overridden' do
it 'ActiveRecord backup configuration is expected to equal application configuration' do
- expect(subject[:activerecord]).to eq(application_config)
+ expect(config[:activerecord]).to eq(application_config)
end
it 'PostgreSQL ENV is expected to equal application configuration' do
- expect(subject[:pg_env]).to eq(
+ expect(config[:pg_env]).to eq(
{
'PGHOST' => application_config[:host],
'PGPORT' => application_config[:port]
@@ -51,11 +51,11 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
it 'ActiveRecord backup configuration overrides application configuration' do
- expect(subject[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
+ expect(config[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
end
it 'PostgreSQL ENV overrides application configuration' do
- expect(subject[:pg_env]).to include({ pg_env => overridden_value })
+ expect(config[:pg_env]).to include({ pg_env => overridden_value })
end
end
@@ -63,7 +63,7 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
it_behaves_like 'no configuration is overridden'
end
- context 'when GITLAB_BACKUP_PG* variables are set' do
+ context 'when generic database configuration is overridden' do
where(:env_variable, :overridden_value) do
'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
'GITLAB_BACKUP_PGUSER' | 'some_user'
@@ -75,10 +75,20 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_PGSSLCOMPRESSION' | '1'
end
with_them do
- let(:pg_env) { env_variable[/GITLAB_BACKUP_(\w+)/, 1] }
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
before do
stub_env(env_variable, overridden_value)
@@ -88,7 +98,7 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
end
end
- context 'when GITLAB_BACKUP_<DBNAME>_PG* variables are set' do
+ context 'when specific database configuration is overridden' do
context 'and environment variables are for the current database name' do
where(:env_variable, :overridden_value) do
'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
@@ -101,10 +111,20 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_MAIN_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCOMPRESSION' | '1'
end
with_them do
- let(:pg_env) { env_variable[/GITLAB_BACKUP_MAIN_(\w+)/, 1] }
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
before do
stub_env(env_variable, overridden_value)
@@ -126,10 +146,20 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_CI_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_CI_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_CI_PGSSLCOMPRESSION' | '1'
end
with_them do
- let(:pg_env) { env_variable[/GITLAB_BACKUP_CI_(\w+)/, 1] }
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 2] }
before do
stub_env(env_variable, overridden_value)
@@ -146,7 +176,6 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
end
it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
- config = subject
expect(config.dig(:activerecord, :username)).to eq('specfic_user')
expect(config.dig(:pg_env, 'PGUSER')).to eq('specfic_user')
end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 073efbbbfcc..86468689f76 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Backup::Database, :reestablished_active_record_base, feature_category: :backup_restore do
let(:progress) { StringIO.new }
- let(:output) { progress.string }
+ let(:progress_output) { progress.string }
let(:backup_id) { 'some_id' }
let(:one_database_configured?) { base_models_for_backup.one? }
let(:timeout_service) do
@@ -48,28 +48,16 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
it 'uses snapshots' do
Dir.mktmpdir do |dir|
- expect_next_instances_of(Backup::DatabaseModel, 2) do |adapter|
- expect(adapter.connection).to receive(:begin_transaction).with(
- isolation: :repeatable_read
- ).and_call_original
- expect(adapter.connection).to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_call_original
- expect(adapter.connection).to receive(:rollback_transaction).and_call_original
- end
+ expect_next_instances_of(Backup::DatabaseConnection, 2) do |backup_connection|
+ expect(backup_connection).to receive(:export_snapshot!).and_call_original
- subject.dump(dir, backup_id)
- end
- end
+ expect_next_instance_of(::Gitlab::Backup::Cli::Utils::PgDump) do |pgdump|
+ expect(pgdump.snapshot_id).to eq(backup_connection.snapshot_id)
+ end
- it 'disables transaction time out' do
- number_of_databases = base_models_for_backup.count
- expect(Gitlab::Database::TransactionTimeoutSettings)
- .to receive(:new).exactly(2 * number_of_databases).times.and_return(timeout_service)
- expect(timeout_service).to receive(:disable_timeouts).exactly(number_of_databases).times
- expect(timeout_service).to receive(:restore_timeouts).exactly(number_of_databases).times
+ expect(backup_connection).to receive(:release_snapshot!).and_call_original
+ end
- Dir.mktmpdir do |dir|
subject.dump(dir, backup_id)
end
end
@@ -82,79 +70,18 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
it 'does not use snapshots' do
Dir.mktmpdir do |dir|
- base_model = Backup::DatabaseModel.new('main')
- expect(base_model.connection).not_to receive(:begin_transaction).with(
- isolation: :repeatable_read
- ).and_call_original
- expect(base_model.connection).not_to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_call_original
- expect(base_model.connection).not_to receive(:rollback_transaction).and_call_original
-
- subject.dump(dir, backup_id)
- end
- end
- end
-
- describe 'pg_dump arguments' do
- let(:snapshot_id) { 'fake_id' }
- let(:default_pg_args) do
- args = [
- '--clean',
- '--if-exists'
- ]
-
- if Gitlab::Database.database_mode == Gitlab::Database::MODE_MULTIPLE_DATABASES
- args + ["--snapshot=#{snapshot_id}"]
- else
- args
- end
- end
-
- let(:dumper) { double }
- let(:destination_dir) { 'tmp' }
-
- before do
- allow(Backup::Dump::Postgres).to receive(:new).and_return(dumper)
- allow(dumper).to receive(:dump).with(any_args).and_return(true)
- end
+ expect_next_instance_of(Backup::DatabaseConnection) do |backup_connection|
+ expect(backup_connection).not_to receive(:export_snapshot!)
- shared_examples 'pg_dump arguments' do
- it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do
- number_of_databases = base_models_for_backup.count
- if number_of_databases > 1
- expect_next_instances_of(Backup::DatabaseModel, number_of_databases) do |model|
- expect(model.connection).to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_return(snapshot_id)
+ expect_next_instance_of(::Gitlab::Backup::Cli::Utils::PgDump) do |pgdump|
+ expect(pgdump.snapshot_id).to be_nil
end
- end
-
- expect(dumper).to receive(:dump).with(anything, anything, expected_pg_args)
-
- subject.dump(destination_dir, backup_id)
- end
- end
-
- context 'when no PostgreSQL schemas are specified' do
- let(:expected_pg_args) { default_pg_args }
- include_examples 'pg_dump arguments'
- end
-
- context 'when a PostgreSQL schema is used' do
- let(:schema) { 'gitlab' }
- let(:expected_pg_args) do
- default_pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.flat_map do |schema|
- ['-n', schema.to_s]
+ expect(backup_connection).not_to receive(:release_snapshot!)
end
- end
- before do
- allow(Gitlab.config.backup).to receive(:pg_schema).and_return(schema)
+ subject.dump(dir, backup_id)
end
-
- include_examples 'pg_dump arguments'
end
end
@@ -223,7 +150,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
subject.restore(backup_dir, backup_id)
- expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
+ expect(progress_output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
end
it 'has a pre restore warning' do
@@ -241,9 +168,21 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
subject.restore(backup_dir, backup_id)
- expect(output).to include("Restoring PostgreSQL database")
- expect(output).to include("[DONE]")
- expect(output).not_to include("ERRORS")
+ expect(progress_output).to include("Restoring PostgreSQL database")
+ expect(progress_output).to include("[DONE]")
+ expect(progress_output).not_to include("ERRORS")
+ end
+
+ context 'when DECOMPRESS_CMD is set to tee' do
+ before do
+ stub_env('DECOMPRESS_CMD', 'tee')
+ end
+
+ it 'outputs a message about DECOMPRESS_CMD' do
+ expect do
+ subject.restore(backup_dir, backup_id)
+ end.to output(/Using custom DECOMPRESS_CMD 'tee'/).to_stdout
+ end
end
end
@@ -277,9 +216,9 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
subject.restore(backup_dir, backup_id)
- expect(output).to include("ERRORS")
- expect(output).not_to include(noise)
- expect(output).to include(visible_error)
+ expect(progress_output).to include("ERRORS")
+ expect(progress_output).not_to include(noise)
+ expect(progress_output).to include(visible_error)
expect(subject.post_restore_warning).not_to be_nil
end
end
diff --git a/spec/lib/backup/dump/postgres_spec.rb b/spec/lib/backup/dump/postgres_spec.rb
index f6a68ab6db9..1da2ee950db 100644
--- a/spec/lib/backup/dump/postgres_spec.rb
+++ b/spec/lib/backup/dump/postgres_spec.rb
@@ -3,17 +3,22 @@
require 'spec_helper'
RSpec.describe Backup::Dump::Postgres, feature_category: :backup_restore do
- describe '#dump' do
- let(:pg_database) { 'gitlabhq_test' }
- let(:destination_dir) { Dir.mktmpdir }
- let(:db_file_name) { File.join(destination_dir, 'output.gz') }
+ let(:pg_database) { 'gitlabhq_test' }
+ let(:pg_dump) { ::Gitlab::Backup::Cli::Utils::PgDump.new(database_name: pg_database) }
+ let(:default_compression_cmd) { 'gzip -c -1' }
- let(:pipes) { IO.pipe }
- let(:gzip_pid) { spawn('gzip -c -1', in: pipes[0], out: [db_file_name, 'w', 0o600]) }
- let(:pg_dump_pid) { Process.spawn('pg_dump', *args, pg_database, out: pipes[1]) }
- let(:args) { ['--help'] }
+ subject(:postgres) { described_class.new }
- subject { described_class.new }
+ describe '#compress_cmd' do
+ it 'returns default compression command' do
+ expect(postgres.compress_cmd).to eq(default_compression_cmd)
+ end
+ end
+
+ describe '#dump' do
+ let(:pipes) { IO.pipe }
+ let(:destination_dir) { Dir.mktmpdir }
+ let(:dump_file_name) { File.join(destination_dir, 'output.gz') }
before do
allow(IO).to receive(:pipe).and_return(pipes)
@@ -23,14 +28,55 @@ RSpec.describe Backup::Dump::Postgres, feature_category: :backup_restore do
FileUtils.remove_entry destination_dir
end
- it 'creates gzipped dump using supplied arguments' do
- expect(subject).to receive(:spawn).with('gzip -c -1', in: pipes.first,
- out: [db_file_name, 'w', 0o600]).and_return(gzip_pid)
- expect(Process).to receive(:spawn).with('pg_dump', *args, pg_database, out: pipes[1]).and_return(pg_dump_pid)
+ context 'with default compression method' do
+ it 'creates a dump file' do
+ postgres.dump(dump_file_name, pg_dump)
+
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
+
+ it 'default compression command is used' do
+ compressor_pid = spawn(default_compression_cmd, in: pipes[0], out: [dump_file_name, 'w', 0o600])
+
+ expect(postgres).to receive(:spawn).with(
+ default_compression_cmd,
+ in: pipes.first,
+ out: [dump_file_name, 'w', 0o600]).and_return(compressor_pid)
+
+ postgres.dump(dump_file_name, pg_dump)
+
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
+ end
+
+ context 'when COMPRESS_CMD is set to tee' do
+ let(:tee_pid) { spawn('tee', in: pipes[0], out: [dump_file_name, 'w', 0o600]) }
+
+ before do
+ stub_env('COMPRESS_CMD', 'tee')
+ end
+
+ it 'creates a dump file' do
+ postgres.dump(dump_file_name, pg_dump)
+
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
+
+ it 'passes through tee instead of gzip' do
+ custom_compression_command = 'tee'
+ compressor_pid = spawn(custom_compression_command, in: pipes[0], out: [dump_file_name, 'w', 0o600])
+
+ expect(postgres).to receive(:spawn).with(
+ custom_compression_command,
+ in: pipes.first,
+ out: [dump_file_name, 'w', 0o600]).and_return(compressor_pid)
- subject.dump(pg_database, db_file_name, args)
+ expect do
+ postgres.dump(dump_file_name, pg_dump)
+ end.to output(/Using custom COMPRESS_CMD 'tee'/).to_stdout
- expect(File.exist?(db_file_name)).to eq(true)
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
end
end
end
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index 48c89e06dfa..f0fc829764a 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
it 'calls tar command with unlink' do
expect(subject).to receive(:tar).and_return('blabla-tar')
- expect(subject).to receive(:run_pipeline!).with([%w[gzip -cd], %w[blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -]], any_args)
+ expect(subject).to receive(:run_pipeline!).with(["gzip -cd", %w[blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -]], any_args)
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
subject.restore('registry.tar.gz', 'backup_id')
end
@@ -107,6 +107,21 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
expect { subject.restore('registry.tar.gz', 'backup_id') }.to raise_error(/is a mountpoint/)
end
end
+
+ describe 'with DECOMPRESS_CMD' do
+ before do
+ stub_env('DECOMPRESS_CMD', 'tee')
+ allow(subject).to receive(:pipeline_succeeded?).and_return(true)
+ end
+
+ it 'passes through tee instead of gzip' do
+ expect(subject).to receive(:run_pipeline!).with(['tee', anything], any_args).and_return([[true, true], ''])
+
+ expect do
+ subject.restore('registry.tar.gz', 'backup_id')
+ end.to output(/Using custom DECOMPRESS_CMD 'tee'/).to_stdout
+ end
+ end
end
describe '#dump' do
@@ -173,6 +188,37 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
.and raise_error(/Failed to create compressed file/)
end
end
+
+ describe 'with COMPRESS_CMD' do
+ before do
+ stub_env('COMPRESS_CMD', 'tee')
+ end
+
+ it 'passes through tee instead of gzip' do
+ expect(subject).to receive(:run_pipeline!).with([anything, 'tee'], any_args)
+ expect do
+ subject.dump('registry.tar.gz', 'backup_id')
+ end.to output(/Using custom COMPRESS_CMD 'tee'/).to_stdout
+ end
+ end
+
+ context 'when GZIP_RSYNCABLE is "yes"' do
+ before do
+ stub_env('GZIP_RSYNCABLE', 'yes')
+ end
+
+ it 'gzips the files with rsyncable option' do
+ expect(subject).to receive(:run_pipeline!).with([anything, 'gzip --rsyncable -c -1'], any_args)
+ subject.dump('registry.tar.gz', 'backup_id')
+ end
+ end
+
+ context 'when GZIP_RSYNCABLE is not set' do
+ it 'gzips the files without the rsyncable option' do
+ expect(subject).to receive(:run_pipeline!).with([anything, 'gzip -c -1'], any_args)
+ subject.dump('registry.tar.gz', 'backup_id')
+ end
+ end
end
describe '#exclude_dirs' do
@@ -226,13 +272,13 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
it 'returns true if both tar and gzip succeeeded' do
expect(
- subject.pipeline_succeeded?(tar_status: status_0, gzip_status: status_0, output: 'any_output')
+ subject.pipeline_succeeded?(tar_status: status_0, compress_status: status_0, output: 'any_output')
).to be_truthy
end
it 'returns false if gzip failed' do
expect(
- subject.pipeline_succeeded?(tar_status: status_1, gzip_status: status_1, output: 'any_output')
+ subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_1, output: 'any_output')
).to be_falsey
end
@@ -243,7 +289,7 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
it 'returns true' do
expect(
- subject.pipeline_succeeded?(tar_status: status_1, gzip_status: status_0, output: 'any_output')
+ subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_0, output: 'any_output')
).to be_truthy
end
end
@@ -255,7 +301,7 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
it 'returns false' do
expect(
- subject.pipeline_succeeded?(tar_status: status_1, gzip_status: status_0, output: 'any_output')
+ subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_0, output: 'any_output')
).to be_falsey
end
end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index ad5fb8ea84e..024f6c5db96 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -90,10 +90,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
let_it_be(:project) { create(:project_with_design, :repository) }
before do
- stub_storage_settings('test_second_storage' => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- })
+ stub_storage_settings('test_second_storage' => {})
end
it 'calls enqueue for all repositories on the specified storage', :aggregate_failures do
@@ -249,44 +246,11 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
end
- context 'cleanup snippets' do
- before do
- error_response = ServiceResponse.error(message: "Repository has more than one branch")
- allow(Snippets::RepositoryValidationService).to receive_message_chain(:new, :execute).and_return(error_response)
- end
-
- it 'shows the appropriate error' do
- subject.restore(destination, backup_id)
-
- expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch")
- expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch")
- end
-
- it 'removes the snippets from the DB' do
- expect { subject.restore(destination, backup_id) }.to change(PersonalSnippet, :count).by(-1)
- .and change(ProjectSnippet, :count).by(-1)
- .and change(SnippetRepository, :count).by(-2)
- end
-
- it 'removes the repository from disk' do
- gitlab_shell = Gitlab::Shell.new
- shard_name = personal_snippet.repository.shard
- path = personal_snippet.disk_path + '.git'
-
- subject.restore(destination, backup_id)
-
- expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false
- end
- end
-
context 'storages' do
let(:storages) { %w[default] }
before do
- stub_storage_settings('test_second_storage' => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- })
+ stub_storage_settings('test_second_storage' => {})
end
it 'calls enqueue for all repositories on the specified storage', :aggregate_failures do
diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
index 7fd25eac81b..4fc9d9dd4f6 100644
--- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
+++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
@@ -55,4 +55,12 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter, feature_category: :team_planni
filter('<p>:tanuki:</p> <p>:party-parrot:</p>')
end.not_to exceed_all_query_limit(control_count.count)
end
+
+ it 'uses custom emoji from ancestor group' do
+ subgroup = create(:group, parent: group)
+
+ doc = filter('<p>:tanuki:</p>', group: subgroup)
+
+ expect(doc.css('gl-emoji').size).to eq 1
+ end
end
diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb
index 251e6efe50b..b4fb715b8f0 100644
--- a/spec/lib/banzai/filter/markdown_filter_spec.rb
+++ b/spec/lib/banzai/filter/markdown_filter_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning do
+ using RSpec::Parameterized::TableSyntax
include FilterSpecHelper
describe 'markdown engine from context' do
@@ -22,6 +23,21 @@ RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning
end
end
+ describe 'parse_sourcepos' do
+ where(:sourcepos, :expected) do
+ '1:1-1:4' | { start: { row: 0, col: 0 }, end: { row: 0, col: 3 } }
+ '12:22-1:456' | { start: { row: 11, col: 21 }, end: { row: 0, col: 455 } }
+ '0:0-0:0' | { start: { row: 0, col: 0 }, end: { row: 0, col: 0 } }
+ '-1:2-3:-4' | nil
+ end
+
+ with_them do
+ it 'correctly parses' do
+ expect(described_class.parse_sourcepos(sourcepos)).to eq expected
+ end
+ end
+ end
+
describe 'code block' do
it 'adds language to lang attribute when specified' do
result = filter("```html\nsome code\n```", no_sourcepos: true)
diff --git a/spec/lib/banzai/filter/quick_action_filter_spec.rb b/spec/lib/banzai/filter/quick_action_filter_spec.rb
new file mode 100644
index 00000000000..a2a300d157c
--- /dev/null
+++ b/spec/lib/banzai/filter/quick_action_filter_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::QuickActionFilter, feature_category: :team_planning do
+ let(:result) { {} }
+
+ it 'detects action in paragraph' do
+ described_class.call('<p data-sourcepos="1:1-2:3">/quick</p>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to match_array [{ start_line: 0, end_line: 1 }]
+ end
+
+ it 'does not detect action in paragraph if no sourcepos' do
+ described_class.call('<p>/quick</p>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+
+ it 'does not detect action in blockquote' do
+ described_class.call('<blockquote data-sourcepos="1:1-1:1">/quick</blockquote>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+
+ it 'does not detect action in html block' do
+ described_class.call('<li data-sourcepos="1:1-1:1">/quick</li>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+
+ it 'does not detect action in code block' do
+ described_class.call('<code data-sourcepos="1:1-1:1">/quick</code>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+end
diff --git a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
index 9723e9b39f1..9a2e68aaae0 100644
--- a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
@@ -240,9 +240,15 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
# Since we're not batching alert queries across projects,
# we have to account for that.
- # 1 for both projects, 1 for alerts in each project == 3
+ # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # 1x2 for alerts in each project
+ # Total == 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- max_count += 2
+ max_count += 6
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
index 6e0f9eda0e2..35a3f20f7b7 100644
--- a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
@@ -287,12 +287,18 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
reference_filter(markdown)
end.count
- markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}"
+ expect(max_count).to eq 0
+
+ markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}"
# Commits are not DB entries, they are on the project itself.
- # So adding commits from two more projects to the markdown should
- # only increase by 1 query
- max_count += 1
+ # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # Total = 5
+ max_count += 5
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index a4587b70dfa..81b08a4c516 100644
--- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -747,10 +747,16 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
# Since we're not batching label queries across projects/groups,
# queries increase when a new project/group is added.
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- # first reference to already loaded project (1),
- # second reference requires project and namespace (2), and label (1)
+ # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # 1 for the group
+ # 1x2 for labels
+ # Total == 8
markdown = "#{project_reference} #{group2_reference}"
- max_count = control_count + 3
+ max_count = control_count + 7
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
index 1fa62d70b72..e778f07227c 100644
--- a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
@@ -522,7 +522,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
# queries increase when a new project/group is added.
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
markdown = "#{project_reference} #{group2_reference}"
- control_count += 5
+ control_count += 9
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index 9433862ac8a..c55fff78756 100644
--- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter, feature_categ
reference_filter(markdown)
end.count
- expect(max_count).to eq 1
+ expect(max_count).to eq 2
markdown = "#{normal_project_reference} #{invalidate_reference(normal_project_reference)} #{group_project_reference} #{nested_project_reference}"
diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb
index 577e4471433..04877931610 100644
--- a/spec/lib/banzai/filter/references/reference_cache_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb
@@ -79,8 +79,16 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te
expect(control_count).to eq 3
# Since this is an issue filter that is not batching issue queries
# across projects, we have to account for that.
- # 1 for original issue, 2 for second route/project, 1 for other issue
- max_count = control_count + 4
+ # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # 1x2 for issues
+ # 1x2 for groups
+ # 1x2 for work_item_types
+ # Total = 11
+ max_count = control_count + 8
expect do
cache.load_references_per_parent(filter.nodes)
diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
index b196d85ba8a..00eac7262f4 100644
--- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
@@ -239,9 +239,15 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
# Since we're not batching snippet queries across projects,
# we have to account for that.
- # 1 for both projects, 1 for snippets in each project == 3
+ # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # 1x2 for snippets in each project == 2
+ # Total = 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- max_count = control_count + 2
+ max_count = control_count + 6
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/pipeline/quick_action_pipeline_spec.rb b/spec/lib/banzai/pipeline/quick_action_pipeline_spec.rb
new file mode 100644
index 00000000000..cce69b9baf0
--- /dev/null
+++ b/spec/lib/banzai/pipeline/quick_action_pipeline_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Pipeline::QuickActionPipeline, feature_category: :team_planning do
+ using RSpec::Parameterized::TableSyntax
+
+ it 'does not detect a quick action' do
+ markdown = <<~MD.strip
+ <!-- HTML comment -->
+ A paragraph
+
+ > a blockquote
+ MD
+ result = described_class.call(markdown, project: nil)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+
+ it 'does detect a quick action' do
+ markdown = <<~MD.strip
+ <!-- HTML comment -->
+ /quick
+
+ > a blockquote
+ MD
+ result = described_class.call(markdown, project: nil)
+
+ expect(result[:quick_action_paragraphs]).to match_array [{ start_line: 1, end_line: 1 }]
+ end
+
+ it 'does detect a multiple quick actions but not in a multi-line blockquote' do
+ markdown = <<~MD.strip
+ Lorem ipsum
+ /quick
+ /action
+
+ >>>
+ /quick
+ >>>
+
+ /action
+ MD
+ result = described_class.call(markdown, project: nil)
+
+ expect(result[:quick_action_paragraphs])
+ .to match_array [{ start_line: 0, end_line: 2 }, { start_line: 8, end_line: 8 }]
+ end
+
+ it 'does not a quick action in a code block' do
+ markdown = <<~MD.strip
+ ```
+ Lorem ipsum
+ /quick
+ /action
+ ```
+ MD
+ result = described_class.call(markdown, project: nil)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+end
diff --git a/spec/lib/bitbucket/connection_spec.rb b/spec/lib/bitbucket/connection_spec.rb
index 2b35a37558c..6cf010f2eed 100644
--- a/spec/lib/bitbucket/connection_spec.rb
+++ b/spec/lib/bitbucket/connection_spec.rb
@@ -19,6 +19,10 @@ RSpec.describe Bitbucket::Connection, feature_category: :integrations do
token_url: OmniAuth::Strategies::Bitbucket.default_options[:client_options]['token_url']
}
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:retry_with_exponential_backoff).and_call_original
+ end
+
expect(OAuth2::Client)
.to receive(:new)
.with(anything, anything, expected_client_options)
@@ -31,6 +35,47 @@ RSpec.describe Bitbucket::Connection, feature_category: :integrations do
connection.get('/users')
end
+
+ context 'when the API returns an error' do
+ before do
+ allow_next_instance_of(OAuth2::AccessToken) do |instance|
+ allow(instance).to receive(:get).and_raise(OAuth2::Error, 'some error')
+ end
+
+ stub_const('Bitbucket::ExponentialBackoff::INITIAL_DELAY', 0.0)
+ allow(Random).to receive(:rand).and_return(0.001)
+ end
+
+ it 'logs the retries and raises an error if it does not succeed on retry' do
+ expect(Gitlab::BitbucketImport::Logger).to receive(:info)
+ .with(message: 'Retrying in 0.0 seconds due to some error')
+ .twice
+
+ connection = described_class.new({ token: token })
+
+ expect { connection.get('/users') }.to raise_error(Bitbucket::ExponentialBackoff::RateLimitError)
+ end
+ end
+
+ context 'when the bitbucket_importer_exponential_backoff feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_importer_exponential_backoff: false)
+ end
+
+ it 'does not run with exponential backoff' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).not_to receive(:retry_with_exponential_backoff).and_call_original
+ end
+
+ expect_next_instance_of(OAuth2::AccessToken) do |instance|
+ expect(instance).to receive(:get).and_return(double(parsed: true))
+ end
+
+ connection = described_class.new({ token: token })
+
+ connection.get('/users')
+ end
+ end
end
describe '#expired?' do
diff --git a/spec/lib/bitbucket/exponential_backoff_spec.rb b/spec/lib/bitbucket/exponential_backoff_spec.rb
new file mode 100644
index 00000000000..b52a83731f4
--- /dev/null
+++ b/spec/lib/bitbucket/exponential_backoff_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Bitbucket::ExponentialBackoff, feature_category: :importers do
+ let(:service) { dummy_class.new }
+ let(:body) { 'test' }
+ let(:parsed_response) { instance_double(Net::HTTPResponse, body: body.to_json) }
+ let(:response) { double(Faraday::Response, body: body, parsed: parsed_response) }
+ let(:response_caller) { -> { response } }
+
+ let(:dummy_class) do
+ Class.new do
+ def logger
+ @logger ||= Logger.new(File::NULL)
+ end
+
+ def dummy_method(response_caller)
+ retry_with_exponential_backoff do
+ response_caller.call
+ end
+ end
+
+ include Bitbucket::ExponentialBackoff
+ end
+ end
+
+ subject(:execute) { service.dummy_method(response_caller) }
+
+ describe '.retry_with_exponential_backoff' do
+ let(:max_retries) { described_class::MAX_RETRIES }
+
+ context 'when the function succeeds on the first try' do
+ it 'calls the function once and returns its result' do
+ expect(response_caller).to receive(:call).once.and_call_original
+
+ expect(Gitlab::Json.parse(execute.parsed.body)).to eq(body)
+ end
+ end
+
+ context 'when the function response is an error' do
+ let(:error) { 'Rate limit for this resource has been exceeded' }
+
+ before do
+ stub_const("#{described_class.name}::INITIAL_DELAY", 0.0)
+ allow(Random).to receive(:rand).and_return(0.001)
+ end
+
+ it 'raises a RateLimitError if the maximum number of retries is exceeded' do
+ allow(response_caller).to receive(:call).and_raise(OAuth2::Error, error)
+
+ message = "Maximum number of retries (#{max_retries}) exceeded. #{error}"
+
+ expect do
+ execute
+ end.to raise_error(described_class::RateLimitError, message)
+
+ expect(response_caller).to have_received(:call).exactly(max_retries).times
+ end
+ end
+ end
+end
diff --git a/spec/lib/bitbucket/representation/pull_request_spec.rb b/spec/lib/bitbucket/representation/pull_request_spec.rb
index 9ebf59ecf82..9aeeef320ed 100644
--- a/spec/lib/bitbucket/representation/pull_request_spec.rb
+++ b/spec/lib/bitbucket/representation/pull_request_spec.rb
@@ -74,11 +74,13 @@ RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :import
'title' => 'title',
'source' => {
'branch' => { 'name' => 'source-branch-name' },
- 'commit' => { 'hash' => 'source-commit-hash' }
+ 'commit' => { 'hash' => 'source-commit-hash' },
+ 'repository' => { 'uuid' => 'uuid' }
},
'destination' => {
'branch' => { 'name' => 'destination-branch-name' },
- 'commit' => { 'hash' => 'destination-commit-hash' }
+ 'commit' => { 'hash' => 'destination-commit-hash' },
+ 'repository' => { 'uuid' => 'uuid' }
},
'merge_commit' => { 'hash' => 'merge-commit-hash' },
'reviewers' => [
@@ -101,6 +103,7 @@ RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :import
target_branch_sha: 'destination-commit-hash',
title: 'title',
updated_at: 'updated-at',
+ source_and_target_project_different: false,
reviewers: ['user-2']
}
diff --git a/spec/lib/bitbucket_server/client_spec.rb b/spec/lib/bitbucket_server/client_spec.rb
index cd3179f19d4..0d027234a0d 100644
--- a/spec/lib/bitbucket_server/client_spec.rb
+++ b/spec/lib/bitbucket_server/client_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BitbucketServer::Client do
+RSpec.describe BitbucketServer::Client, feature_category: :importers do
let(:base_uri) { 'https://test:7990/stash/' }
let(:options) { { base_uri: base_uri, user: 'bitbucket', password: 'mypassword' } }
let(:project) { 'SOME-PROJECT' }
@@ -80,6 +80,25 @@ RSpec.describe BitbucketServer::Client do
end
end
+ describe '#users' do
+ let(:path) { "/projects/#{project}/permissions/users" }
+
+ it 'requests a collection' do
+ expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :user, page_offset: 0, limit: nil)
+
+ subject.users(project)
+ end
+
+ it 'requests a collection with offset and limit' do
+ offset = 10
+ limit = 100
+
+ expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :user, page_offset: offset, limit: limit)
+
+ subject.users(project, page_offset: offset, limit: limit)
+ end
+ end
+
describe '#create_branch' do
let(:branch) { 'test-branch' }
let(:sha) { '12345678' }
diff --git a/spec/lib/bitbucket_server/connection_spec.rb b/spec/lib/bitbucket_server/connection_spec.rb
index 8341ca10f43..59eda91285f 100644
--- a/spec/lib/bitbucket_server/connection_spec.rb
+++ b/spec/lib/bitbucket_server/connection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BitbucketServer::Connection do
+RSpec.describe BitbucketServer::Connection, feature_category: :importers do
let(:options) { { base_uri: 'https://test:7990', user: 'bitbucket', password: 'mypassword' } }
let(:payload) { { 'test' => 1 } }
let(:headers) { { "Content-Type" => "application/json" } }
@@ -11,83 +11,162 @@ RSpec.describe BitbucketServer::Connection do
subject { described_class.new(options) }
describe '#get' do
- it 'returns JSON body' do
- WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 200, headers: headers)
+ before do
+ WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' })
+ .to_return(body: payload.to_json, status: 200, headers: headers)
+ end
+
+ it 'runs with retry_with_delay' do
+ expect(subject).to receive(:retry_with_delay).and_call_original.once
- expect(subject.get(url, { something: 1 })).to eq(payload)
+ subject.get(url)
end
- it 'throws an exception if the response is not 200' do
- WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
+ shared_examples 'handles get requests' do
+ it 'returns JSON body' do
+ expect(subject.get(url, { something: 1 })).to eq(payload)
+ end
+
+ it 'throws an exception if the response is not 200' do
+ WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
+
+ expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception if the response is not JSON' do
+ WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
- expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception upon a network error' do
+ WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
+
+ expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ end
end
- it 'throws an exception if the response is not JSON' do
- WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
+ it_behaves_like 'handles get requests'
+
+ context 'when the response is a 429 rate limit reached error' do
+ let(:response) do
+ instance_double(HTTParty::Response, parsed_response: payload, code: 429, headers: headers.merge('retry-after' => '0'))
+ end
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get).and_return(response)
+ end
- expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ it 'sleeps, retries and if the error persists it fails' do
+ expect(Gitlab::BitbucketServerImport::Logger).to receive(:info)
+ .with(message: 'Retrying in 0 seconds due to 429 Too Many Requests')
+ .once
+
+ expect { subject.get(url) }.to raise_error(BitbucketServer::Connection::ConnectionError)
+ end
end
- it 'throws an exception upon a network error' do
- WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
+ context 'when the bitbucket_server_importer_exponential_backoff feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
+ end
- expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ it_behaves_like 'handles get requests'
end
end
describe '#post' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
- it 'returns JSON body' do
+ before do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
-
- expect(subject.post(url, payload)).to eq(payload)
end
- it 'throws an exception if the response is not 200' do
- WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
+ it 'runs with retry_with_delay' do
+ expect(subject).to receive(:retry_with_delay).and_call_original.once
- expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ subject.post(url, payload)
end
- it 'throws an exception upon a network error' do
- WebMock.stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
+ shared_examples 'handles post requests' do
+ it 'returns JSON body' do
+ expect(subject.post(url, payload)).to eq(payload)
+ end
+
+ it 'throws an exception if the response is not 200' do
+ WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
- expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception upon a network error' do
+ WebMock.stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
+
+ expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception if the URI is invalid' do
+ stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(URI::InvalidURIError)
+
+ expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ end
end
- it 'throws an exception if the URI is invalid' do
- stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(URI::InvalidURIError)
+ it_behaves_like 'handles post requests'
- expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ context 'when the bitbucket_server_importer_exponential_backoff feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
+ end
+
+ it_behaves_like 'handles post requests'
end
end
describe '#delete' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
+ before do
+ WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
+ end
+
context 'branch API' do
let(:branch_path) { '/projects/foo/repos/bar/branches' }
let(:branch_url) { 'https://test:7990/rest/branch-utils/1.0/projects/foo/repos/bar/branches' }
let(:path) {}
- it 'returns JSON body' do
- WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
+ it 'runs with retry_with_delay' do
+ expect(subject).to receive(:retry_with_delay).and_call_original.once
- expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
+ subject.delete(:branches, branch_path, payload)
end
- it 'throws an exception if the response is not 200' do
- WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
+ shared_examples 'handles delete requests' do
+ it 'returns JSON body' do
+ expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
+ end
- expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
+ it 'throws an exception if the response is not 200' do
+ WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
+
+ expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception upon a network error' do
+ WebMock.stub_request(:delete, branch_url).with(headers: headers).to_raise(OpenSSL::SSL::SSLError)
+
+ expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
+ end
end
- it 'throws an exception upon a network error' do
- WebMock.stub_request(:delete, branch_url).with(headers: headers).to_raise(OpenSSL::SSL::SSLError)
+ it_behaves_like 'handles delete requests'
+
+ context 'with the bitbucket_server_importer_exponential_backoff feature flag disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
+ end
- expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
+ it_behaves_like 'handles delete requests'
end
end
end
diff --git a/spec/lib/bitbucket_server/representation/activity_spec.rb b/spec/lib/bitbucket_server/representation/activity_spec.rb
index 0b7e4542cbe..ee0486ab210 100644
--- a/spec/lib/bitbucket_server/representation/activity_spec.rb
+++ b/spec/lib/bitbucket_server/representation/activity_spec.rb
@@ -2,11 +2,12 @@
require 'spec_helper'
-RSpec.describe BitbucketServer::Representation::Activity do
+RSpec.describe BitbucketServer::Representation::Activity, feature_category: :importers do
let(:activities) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:inline_comment) { activities.first }
let(:comment) { activities[3] }
let(:merge_event) { activities[4] }
+ let(:approved_event) { activities[8] }
describe 'regular comment' do
subject { described_class.new(comment) }
@@ -37,4 +38,17 @@ RSpec.describe BitbucketServer::Representation::Activity do
it { expect(subject.created_at).to be_a(Time) }
it { expect(subject.merge_commit).to eq('839fa9a2d434eb697815b8fcafaecc51accfdbbc') }
end
+
+ describe 'approved event' do
+ subject { described_class.new(approved_event) }
+
+ it { expect(subject.id).to eq(15) }
+ it { expect(subject.comment?).to be_falsey }
+ it { expect(subject.inline_comment?).to be_falsey }
+ it { expect(subject.merge_event?).to be_falsey }
+ it { expect(subject.approved_event?).to be_truthy }
+ it { expect(subject.approver_username).to eq('slug') }
+ it { expect(subject.approver_email).to eq('test.user@example.com') }
+ it { expect(subject.created_at).to be_a(Time) }
+ end
end
diff --git a/spec/lib/bitbucket_server/representation/user_spec.rb b/spec/lib/bitbucket_server/representation/user_spec.rb
new file mode 100644
index 00000000000..32470e3a12f
--- /dev/null
+++ b/spec/lib/bitbucket_server/representation/user_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BitbucketServer::Representation::User, feature_category: :importers do
+ let(:email) { 'test@email.com' }
+ let(:username) { 'test_user' }
+ let(:sample_data) { { 'user' => { 'emailAddress' => email, 'slug' => username } } }
+
+ subject(:user) { described_class.new(sample_data) }
+
+ describe '#email' do
+ it { expect(user.email).to eq(email) }
+ end
+
+ describe '#username' do
+ it { expect(user.username).to eq(username) }
+ end
+end
diff --git a/spec/lib/bitbucket_server/retry_with_delay_spec.rb b/spec/lib/bitbucket_server/retry_with_delay_spec.rb
new file mode 100644
index 00000000000..99685b08299
--- /dev/null
+++ b/spec/lib/bitbucket_server/retry_with_delay_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BitbucketServer::RetryWithDelay, feature_category: :importers do
+ let(:service) { dummy_class.new }
+ let(:body) { 'test' }
+ let(:response) { instance_double(HTTParty::Response, body: body, code: 200) }
+ let(:response_caller) { -> { response } }
+
+ let(:dummy_class) do
+ Class.new do
+ def logger
+ @logger ||= Logger.new(File::NULL)
+ end
+
+ def dummy_method(response_caller)
+ retry_with_delay do
+ response_caller.call
+ end
+ end
+
+ include BitbucketServer::RetryWithDelay
+ end
+ end
+
+ subject(:execute) { service.dummy_method(response_caller) }
+
+ describe '.retry_with_delay' do
+ context 'when the function succeeds on the first try' do
+ it 'calls the function once and returns its result' do
+ expect(response_caller).to receive(:call).once.and_call_original
+
+ execute
+ end
+ end
+
+ context 'when the request has a status code of 429' do
+ let(:headers) { { 'retry-after' => '0' } }
+ let(:body) { 'HTTP Status 429 - Too Many Requests' }
+ let(:response) { instance_double(HTTParty::Response, body: body, code: 429, headers: headers) }
+
+ before do
+ stub_const("#{described_class}::MAXIMUM_DELAY", 0)
+ end
+
+ it 'calls the function again after a delay' do
+ expect(response_caller).to receive(:call).twice.and_call_original
+
+ expect_next_instance_of(Logger) do |logger|
+ expect(logger).to receive(:info)
+ .with(message: 'Retrying in 0 seconds due to 429 Too Many Requests')
+ .once
+ end
+
+ execute
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 08d0509b54f..2eceefe3091 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -250,9 +250,9 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
.to_return(status: 401, body: "", headers: { 'Content-Type' => 'application/json' })
- expect { subject.instance_version }.to raise_exception(BulkImports::Error,
- "Personal access token does not have the required 'api' scope or " \
- "is no longer valid.")
+ expect { subject.instance_version }
+ .to raise_exception(BulkImports::Error,
+ "Check that the source instance base URL and the personal access token meet the necessary requirements.")
end
end
@@ -262,9 +262,9 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
.to_return(status: 403, body: "", headers: { 'Content-Type' => 'application/json' })
- expect { subject.instance_version }.to raise_exception(BulkImports::Error,
- "Personal access token does not have the required 'api' scope or " \
- "is no longer valid.")
+ expect { subject.instance_version }
+ .to raise_exception(BulkImports::Error,
+ "Check that the source instance base URL and the personal access token meet the necessary requirements.")
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
index b96ea20c676..e1ad9c75dcb 100644
--- a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
@@ -10,16 +10,13 @@ RSpec.describe BulkImports::Common::Pipelines::EntityFinisher, feature_category:
subject = described_class.new(context)
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger)
.to receive(:info)
.with(
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
pipeline_class: described_class.name,
- message: 'Entity finished',
- source_version: entity.bulk_import.source_version_info.to_s
+ message: 'Entity finished'
)
end
diff --git a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
index 5ba9bd08009..5662c4d7bdc 100644
--- a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
@@ -192,7 +192,7 @@ RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline, feature_categ
allow(object).to receive(:persisted?).and_return(false)
end
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger)
.to receive(:warn)
.with(project_id: portable.id,
diff --git a/spec/lib/bulk_imports/logger_spec.rb b/spec/lib/bulk_imports/logger_spec.rb
new file mode 100644
index 00000000000..889e5573c66
--- /dev/null
+++ b/spec/lib/bulk_imports/logger_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Logger, feature_category: :importers do
+ describe '#with_entity' do
+ subject(:logger) { described_class.new('/dev/null').with_entity(entity) }
+
+ let(:entity) { build(:bulk_import_entity) }
+
+ it 'records the entity information' do
+ output = logger.format_message('INFO', Time.zone.now, 'test', 'Hello world')
+ data = Gitlab::Json.parse(output)
+
+ expect(data).to include(
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'source_version' => entity.bulk_import.source_version_info.to_s
+ )
+ end
+ end
+
+ describe '#with_tracker' do
+ subject(:logger) { described_class.new('/dev/null').with_tracker(tracker) }
+
+ let_it_be(:tracker) { build(:bulk_import_tracker) }
+
+ it 'records the tracker information' do
+ output = logger.format_message('INFO', Time.zone.now, 'test', 'Hello world')
+ data = Gitlab::Json.parse(output)
+
+ expect(data).to include(
+ 'tracker_id' => tracker.id,
+ 'pipeline_class' => tracker.pipeline_name,
+ 'tracker_state' => tracker.human_status_name
+ )
+ end
+
+ it 'also loads the entity data' do
+ expect_next_instance_of(described_class) do |logger|
+ expect(logger).to receive(:with_entity).once
+ end
+
+ logger
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/network_error_spec.rb b/spec/lib/bulk_imports/network_error_spec.rb
index d5e2b739c8f..7a7250416f7 100644
--- a/spec/lib/bulk_imports/network_error_spec.rb
+++ b/spec/lib/bulk_imports/network_error_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache, feature_category: :importers do
let(:tracker) { double(id: 1, stage: 2, entity: double(id: 3)) }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
describe '.new' do
it 'requires either a message or a HTTP response' do
expect { described_class.new }
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index 4540408990c..5482068204d 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -55,13 +55,11 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
shared_examples 'failed pipeline' do |exception_class, exception_message|
it 'logs import failure' do
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(context.entity).and_call_original
expect(logger).to receive(:error)
.with(
a_hash_including(
- 'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
'pipeline_step' => :extractor,
'pipeline_class' => 'BulkImports::MyPipeline',
'exception.class' => exception_class,
@@ -69,8 +67,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
'correlation_id' => anything,
'class' => 'BulkImports::MyPipeline',
'message' => 'An object of a pipeline failed to import',
- 'exception.backtrace' => anything,
- 'source_version' => entity.bulk_import.source_version_info.to_s
+ 'exception.backtrace' => anything
)
)
end
@@ -94,6 +91,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
it 'logs a warn message and marks entity and tracker as failed' do
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(context.entity).and_call_original
expect(logger).to receive(:warn)
.with(
log_params(
@@ -195,8 +193,11 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
end
expect(subject).to receive(:on_finish)
+ expect(context.bulk_import).to receive(:touch)
+ expect(context.entity).to receive(:touch)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(context.entity).and_call_original
expect(logger).to receive(:info)
.with(
log_params(
@@ -305,14 +306,14 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
allow(extractor).to receive(:extract).with(context).and_raise(
BulkImports::NetworkError.new(
'Net::ReadTimeout',
- response: instance_double(HTTParty::Response, code: reponse_status_code, headers: {})
+ response: instance_double(HTTParty::Response, code: response_status_code, headers: {})
)
)
end
end
context 'when exception is retriable' do
- let(:reponse_status_code) { 429 }
+ let(:response_status_code) { 429 }
it 'raises the exception BulkImports::RetryPipelineError' do
expect { subject.run }.to raise_error(BulkImports::RetryPipelineError)
@@ -320,7 +321,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
end
context 'when exception is not retriable' do
- let(:reponse_status_code) { 503 }
+ let(:response_status_code) { 505 }
it_behaves_like 'failed pipeline', 'BulkImports::NetworkError', 'Net::ReadTimeout'
end
@@ -417,6 +418,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
entity.fail_op!
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(context.entity).and_call_original
expect(logger).to receive(:warn)
.with(
log_params(
@@ -434,10 +436,6 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
def log_params(context, extra = {})
{
bulk_import_id: context.bulk_import_id,
- bulk_import_entity_id: context.entity.id,
- bulk_import_entity_type: context.entity.source_type,
- source_full_path: entity.source_full_path,
- source_version: context.entity.bulk_import.source_version_info.to_s,
context_extra: context.extra
}.merge(extra)
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
index e2b99fe4db4..96247329cc2 100644
--- a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
+
let_it_be(:entity) do
create(
:bulk_import_entity,
@@ -19,250 +19,55 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- let(:issue) { create(:issue, project: project, description: 'https://my.gitlab.com/source/full/path/-/issues/1') }
- let(:mr) do
- create(
- :merge_request,
- source_project: project,
- description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!'
- )
- end
- let(:issue_note) do
- create(
- :note,
- project: project,
- noteable: issue,
- note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.'
- )
- end
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:issue_note) { create(:note, noteable: issue, project: project) }
+ let_it_be(:merge_request_note) { create(:note, noteable: merge_request, project: project) }
+ let_it_be(:system_note) { create(:note, project: project, system: true, noteable: issue) }
- let(:mr_note) do
- create(
- :note,
- project: project,
- noteable: mr,
- note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username'
- )
- end
+ let_it_be(:random_project) { create(:project) }
+ let_it_be(:random_issue) { create(:issue, project: random_project) }
+ let_it_be(:random_merge_request) { create(:merge_request, source_project: random_project) }
+ let_it_be(:random_issue_note) { create(:note, noteable: random_issue, project: random_project) }
+ let_it_be(:random_mr_note) { create(:note, noteable: random_merge_request, project: random_project) }
+ let_it_be(:random_system_note) { create(:note, system: true, noteable: random_issue, project: random_project) }
- let(:interchanged_usernames) do
- create(
- :note,
- project: project,
- noteable: mr,
- note: '@manuelgrabowski-admin, @boaty-mc-boatface'
- )
- end
-
- let(:old_note_html) { 'old note_html' }
- let(:system_note) do
- create(
- :note,
- project: project,
- system: true,
- noteable: issue,
- note: "mentioned in merge request !#{mr.iid} created by @old_username",
- note_html: old_note_html
- )
- end
-
- let(:username_system_note) do
- create(
- :note,
- project: project,
- system: true,
- noteable: issue,
- note: "mentioned in merge request created by @source_username.",
- note_html: 'empty'
- )
- end
+ let(:delay) { described_class::DELAY }
subject(:pipeline) { described_class.new(context) }
- before do
- project.add_owner(user)
-
- allow(Gitlab::Cache::Import::Caching)
- .to receive(:values_from_hash)
- .and_return({
- 'old_username' => 'new_username',
- 'older_username' => 'newer_username',
- 'source_username' => 'destination_username',
- 'bob' => 'alice-gdk',
- 'alice' => 'bob-gdk',
- 'manuelgrabowski' => 'manuelgrabowski-admin',
- 'manuelgrabowski-admin' => 'manuelgrabowski',
- 'boaty-mc-boatface' => 'boatymcboatface',
- 'boatymcboatface' => 'boaty-mc-boatface'
- })
- end
-
- def create_project_data
- [issue, mr, issue_note, mr_note, system_note, username_system_note]
- end
-
- def create_username_project_data
- [username_system_note]
- end
-
- describe '#extract' do
- it 'returns ExtractedData containing issues, mrs & their notes' do
- create_project_data
+ describe '#run' do
+ it "enqueues TransformReferencesWorker for the project's issues, mrs and their notes" do
+ expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
+ .with(delay, [issue.id], 'Issue', tracker.id)
- extracted_data = subject.extract(context)
+ expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
+ .with(delay * 2, array_including([issue_note.id, system_note.id]), 'Note', tracker.id)
- expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
- expect(extracted_data.data).to contain_exactly(issue, mr, issue_note, system_note, username_system_note, mr_note)
- expect(system_note.note_html).not_to eq(old_note_html)
- expect(system_note.note_html)
- .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}</a>")
- .and include(project.full_path.to_s)
- .and include("@old_username")
- expect(username_system_note.note_html)
- .to include("@source_username")
- end
-
- context 'when object body is nil' do
- let(:issue) { create(:issue, project: project, description: nil) }
+ expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
+ .with(delay * 3, [merge_request.id], 'MergeRequest', tracker.id)
- it 'returns ExtractedData not containing the object' do
- extracted_data = subject.extract(context)
+ expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
+ .with(delay * 4, [merge_request_note.id], 'Note', tracker.id)
- expect(extracted_data.data).to contain_exactly(issue_note, mr, mr_note)
- end
+ subject.run
end
- end
-
- describe '#transform', :clean_gitlab_redis_cache do
- it 'updates matching urls and usernames with new ones' do
- transformed_mr = subject.transform(context, mr)
- transformed_note = subject.transform(context, mr_note)
- transformed_issue = subject.transform(context, issue)
- transformed_issue_note = subject.transform(context, issue_note)
- transformed_system_note = subject.transform(context, system_note)
- transformed_username_system_note = subject.transform(context, username_system_note)
-
- expected_url = URI('')
- expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
- expected_url.host = ::Gitlab.config.gitlab.host
- expected_url.port = ::Gitlab.config.gitlab.port
- expected_url.path = "/#{project.full_path}/-/merge_requests/#{mr.iid}"
- expect(transformed_issue_note.note).not_to include("@older_username")
- expect(transformed_mr.description).not_to include("@source_username")
- expect(transformed_system_note.note).not_to include("@old_username")
- expect(transformed_username_system_note.note).not_to include("@source_username")
+ it 'does not enqueue objects that do not belong to the project' do
+ expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
+ .with(anything, [random_issue.id], 'Issue', tracker.id)
- expect(transformed_issue.description)
- .to eq("http://localhost:80/#{transformed_issue.namespace.full_path}/-/issues/1")
- expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
- expect(transformed_note.note).to eq("#{expected_url} @same_username")
- expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")
- expect(transformed_system_note.note).to eq("mentioned in merge request !#{mr.iid} created by @new_username")
- expect(transformed_username_system_note.note).to include("@destination_username.")
- end
-
- it 'handles situations where old usernames are substrings of new usernames' do
- transformed_mr = subject.transform(context, mr)
-
- expect(transformed_mr.description).to include("@alice-gdk")
- expect(transformed_mr.description).not_to include("@bob-gdk-gdk")
- end
-
- it 'handles situations where old and new usernames are interchanged' do
- # e.g
- # |------------------------|-------------------------|
- # | old_username | new_username |
- # |------------------------|-------------------------|
- # | @manuelgrabowski-admin | @manuelgrabowski |
- # | @manuelgrabowski | @manuelgrabowski-admin |
- # |------------------------|-------------------------|
-
- transformed_interchanged_usernames = subject.transform(context, interchanged_usernames)
-
- expect(transformed_interchanged_usernames.note).to include("@manuelgrabowski")
- expect(transformed_interchanged_usernames.note).to include("@boatymcboatface")
- expect(transformed_interchanged_usernames.note).not_to include("@manuelgrabowski-admin")
- expect(transformed_interchanged_usernames.note).not_to include("@boaty-mc-boatface")
- end
-
- context 'when object does not have reference or username' do
- it 'returns object unchanged' do
- issue.update!(description: 'foo')
-
- transformed_issue = subject.transform(context, issue)
-
- expect(transformed_issue.description).to eq('foo')
- end
- end
+ expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
+ .with(anything, array_including([random_issue_note.id, random_system_note.id]), 'Note', tracker.id)
- context 'when there are not matched urls or usernames' do
- let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
-
- shared_examples 'returns object unchanged' do
- it 'returns object unchanged' do
- issue.update!(description: description)
-
- transformed_issue = subject.transform(context, issue)
-
- expect(transformed_issue.description).to eq(description)
- end
- end
-
- include_examples 'returns object unchanged'
-
- context 'when url path does not start with source full path' do
- let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
-
- include_examples 'returns object unchanged'
- end
-
- context 'when host does not match and url path starts with source full path' do
- let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
-
- include_examples 'returns object unchanged'
- end
-
- context 'when url does not match at all' do
- let(:description) { 'https://website.example/foo/bar' }
-
- include_examples 'returns object unchanged'
- end
- end
- end
-
- describe '#load' do
- it 'saves the object when object body changed' do
- transformed_issue = subject.transform(context, issue)
- transformed_note = subject.transform(context, mr_note)
- transformed_mr = subject.transform(context, mr)
- transformed_issue_note = subject.transform(context, issue_note)
- transformed_system_note = subject.transform(context, system_note)
-
- expect(transformed_issue).to receive(:save!)
- expect(transformed_note).to receive(:save!)
- expect(transformed_mr).to receive(:save!)
- expect(transformed_issue_note).to receive(:save!)
- expect(transformed_system_note).to receive(:save!)
-
- subject.load(context, transformed_issue)
- subject.load(context, transformed_note)
- subject.load(context, transformed_mr)
- subject.load(context, transformed_issue_note)
- subject.load(context, transformed_system_note)
- end
+ expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
+ .with(anything, [random_merge_request.id], 'MergeRequest', tracker.id)
- context 'when object body is not changed' do
- it 'does not save the object' do
- expect(mr).not_to receive(:save!)
- expect(mr_note).not_to receive(:save!)
- expect(system_note).not_to receive(:save!)
+ expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
+ .with(anything, [random_mr_note.id], 'Note', tracker.id)
- subject.load(context, mr)
- subject.load(context, mr_note)
- subject.load(context, system_note)
- end
+ subject.run
end
end
end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index fc670d10655..8b06e0bb2ee 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Projects::Stage do
+RSpec.describe BulkImports::Projects::Stage, feature_category: :importers do
subject do
entity = build(:bulk_import_entity, :project_entity)
@@ -15,7 +15,8 @@ RSpec.describe BulkImports::Projects::Stage do
expect(pipelines).to include(
hash_including({ stage: 0, pipeline: BulkImports::Projects::Pipelines::ProjectPipeline }),
- hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline })
+ hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline }),
+ hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline })
)
expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
end
diff --git a/spec/lib/click_house/connection_spec.rb b/spec/lib/click_house/connection_spec.rb
new file mode 100644
index 00000000000..dda736dfaa8
--- /dev/null
+++ b/spec/lib/click_house/connection_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::Connection, click_house: :without_migrations, feature_category: :database do
+ let(:connection) { described_class.new(:main) }
+
+ describe '#select' do
+ it 'proxies select to client' do
+ expect(
+ connection.select('SELECT 1')
+ ).to eq([{ '1' => 1 }])
+ end
+ end
+
+ describe '#execute' do
+ it 'proxies execute to client' do
+ create_test_table
+
+ connection.execute(
+ <<~SQL
+ INSERT INTO test_table VALUES (1), (2), (3)
+ SQL
+ )
+
+ expect(connection.select('SELECT id FROM test_table')).to eq(
+ [{ 'id' => 1 }, { 'id' => 2 }, { 'id' => 3 }]
+ )
+ end
+ end
+
+ describe '#table_exists?' do
+ it "return false when table doesn't exist" do
+ expect(connection.table_exists?('test_table')).to eq(false)
+ end
+
+ it 'returns true when table exists' do
+ create_test_table
+
+ expect(connection.table_exists?('test_table')).to eq(true)
+ end
+ end
+
+ def create_test_table
+ connection.execute(
+ <<~SQL
+ CREATE TABLE test_table (
+ id UInt64
+ ) ENGINE = MergeTree
+ PRIMARY KEY(id)
+ SQL
+ )
+ end
+end
diff --git a/spec/lib/click_house/iterator_spec.rb b/spec/lib/click_house/iterator_spec.rb
new file mode 100644
index 00000000000..fd054c0afe5
--- /dev/null
+++ b/spec/lib/click_house/iterator_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::Iterator, :click_house, feature_category: :database do
+ let(:query_builder) { ClickHouse::QueryBuilder.new('event_authors') }
+ let(:connection) { ClickHouse::Connection.new(:main) }
+ let(:iterator) { described_class.new(query_builder: query_builder, connection: connection) }
+
+ before do
+ connection.execute('INSERT INTO event_authors (author_id) SELECT number + 1 FROM numbers(10)')
+ end
+
+ def collect_ids_with_batch_size(of)
+ [].tap do |ids|
+ iterator.each_batch(column: :author_id, of: of) do |scope|
+ query = scope.select(Arel.sql('DISTINCT author_id')).to_sql
+ ids.concat(connection.select(query).pluck('author_id'))
+ end
+ end
+ end
+
+ it 'iterates correctly' do
+ expected_values = (1..10).to_a
+
+ expect(collect_ids_with_batch_size(3)).to match_array(expected_values)
+ expect(collect_ids_with_batch_size(5)).to match_array(expected_values)
+ expect(collect_ids_with_batch_size(10)).to match_array(expected_values)
+ expect(collect_ids_with_batch_size(15)).to match_array(expected_values)
+ end
+
+ context 'when there are no records for the given query' do
+ let(:query_builder) do
+ ClickHouse::QueryBuilder
+ .new('event_authors')
+ .where(author_id: 0)
+ end
+
+ it 'returns no data' do
+ expect(collect_ids_with_batch_size(3)).to match_array([])
+ end
+ end
+end
diff --git a/spec/lib/click_house/migration_support/exclusive_lock_spec.rb b/spec/lib/click_house/migration_support/exclusive_lock_spec.rb
new file mode 100644
index 00000000000..5176cc75266
--- /dev/null
+++ b/spec/lib/click_house/migration_support/exclusive_lock_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::MigrationSupport::ExclusiveLock, feature_category: :database do
+ include ExclusiveLeaseHelpers
+
+ let(:worker_class) do
+ # This worker will be active longer than the ClickHouse worker TTL
+ Class.new do
+ def self.name
+ 'TestWorker'
+ end
+
+ include ::ApplicationWorker
+ include ::ClickHouseWorker
+
+ def perform(*); end
+ end
+ end
+
+ before do
+ stub_const('TestWorker', worker_class)
+ end
+
+ describe '.register_running_worker' do
+ before do
+ TestWorker.click_house_migration_lock(10.seconds)
+ end
+
+ it 'yields without arguments' do
+ expect { |b| described_class.register_running_worker(worker_class, 'test', &b) }.to yield_with_no_args
+ end
+
+ it 'registers worker for a limited period of time', :freeze_time, :aggregate_failures do
+ expect(described_class.active_sidekiq_workers?).to eq false
+
+ described_class.register_running_worker(worker_class, 'test') do
+ expect(described_class.active_sidekiq_workers?).to eq true
+ travel 9.seconds
+ expect(described_class.active_sidekiq_workers?).to eq true
+ travel 2.seconds
+ expect(described_class.active_sidekiq_workers?).to eq false
+ end
+ end
+ end
+
+ describe '.pause_workers?' do
+ subject(:pause_workers?) { described_class.pause_workers? }
+
+ it { is_expected.to eq false }
+
+ context 'with lock taken' do
+ let!(:lease) { stub_exclusive_lease_taken(described_class::MIGRATION_LEASE_KEY) }
+
+ it { is_expected.to eq true }
+ end
+ end
+
+ describe '.execute_migration' do
+ it 'yields without raising error' do
+ expect { |b| described_class.execute_migration(&b) }.to yield_with_no_args
+ end
+
+ context 'when migration lock is taken' do
+ let!(:lease) { stub_exclusive_lease_taken(described_class::MIGRATION_LEASE_KEY) }
+
+ it 'raises LockError' do
+ expect do
+ expect { |b| described_class.execute_migration(&b) }.not_to yield_control
+ end.to raise_error ::ClickHouse::MigrationSupport::Errors::LockError
+ end
+ end
+
+ context 'when ClickHouse workers are still active', :freeze_time do
+ let(:sleep_time) { described_class::WORKERS_WAIT_SLEEP }
+ let!(:started_at) { Time.current }
+
+ def migration
+ expect { |b| described_class.execute_migration(&b) }.to yield_with_no_args
+ end
+
+ around do |example|
+ described_class.register_running_worker(worker_class, anything) do
+ example.run
+ end
+ end
+
+ it 'waits for workers and raises ClickHouse::MigrationSupport::LockError if workers do not stop in time' do
+ expect(described_class).to receive(:sleep).at_least(1).with(sleep_time) { travel(sleep_time) }
+
+ expect { migration }.to raise_error(ClickHouse::MigrationSupport::Errors::LockError,
+ /Timed out waiting for active workers/)
+ expect(Time.current - started_at).to eq(described_class::DEFAULT_CLICKHOUSE_WORKER_TTL)
+ end
+
+ context 'when wait_for_clickhouse_workers_during_migration FF is disabled' do
+ before do
+ stub_feature_flags(wait_for_clickhouse_workers_during_migration: false)
+ end
+
+ it 'runs migration without waiting for workers' do
+ expect { migration }.not_to raise_error
+ expect(Time.current - started_at).to eq(0.0)
+ end
+ end
+
+ it 'ignores expired workers' do
+ travel(described_class::DEFAULT_CLICKHOUSE_WORKER_TTL + 1.second)
+
+ migration
+ end
+
+ context 'when worker registration is almost expiring' do
+ let(:worker_class) do
+ # This worker will be active for less than the ClickHouse worker TTL
+ Class.new do
+ def self.name
+ 'TestWorker'
+ end
+
+ include ::ApplicationWorker
+ include ::ClickHouseWorker
+
+ click_house_migration_lock(
+ ClickHouse::MigrationSupport::ExclusiveLock::DEFAULT_CLICKHOUSE_WORKER_TTL - 1.second)
+
+ def perform(*); end
+ end
+ end
+
+ it 'completes migration' do
+ expect(described_class).to receive(:sleep).at_least(1).with(sleep_time) { travel(sleep_time) }
+
+ expect { migration }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/click_house/migration_support/migration_context_spec.rb b/spec/lib/click_house/migration_support/migration_context_spec.rb
new file mode 100644
index 00000000000..0f70e1e3f94
--- /dev/null
+++ b/spec/lib/click_house/migration_support/migration_context_spec.rb
@@ -0,0 +1,203 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
+ click_house: :without_migrations, feature_category: :database do
+ include ClickHouseTestHelpers
+
+ # We don't need to delete data since we don't modify Postgres data
+ self.use_transactional_tests = false
+
+ let(:connection) { ::ClickHouse::Connection.new(:main) }
+ let(:schema_migration) { ClickHouse::MigrationSupport::SchemaMigration.new(connection) }
+
+ let(:migrations_base_dir) { 'click_house/migrations' }
+ let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
+ let(:migration_context) { described_class.new(connection, migrations_dir, schema_migration) }
+ let(:target_version) { nil }
+
+ after do
+ unload_click_house_migration_classes(expand_fixture_path(migrations_base_dir))
+ end
+
+ describe 'performs migrations' do
+ include ExclusiveLeaseHelpers
+
+ subject(:migration) { migrate(migration_context, target_version) }
+
+ describe 'when creating a table' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+ let(:lease_key) { 'click_house:migrations' }
+ let(:lease_timeout) { 1.hour }
+
+ it 'executes migration through ClickHouse::MigrationSupport::ExclusiveLock.execute_migration' do
+ expect(ClickHouse::MigrationSupport::ExclusiveLock).to receive(:execute_migration)
+
+ # Test that not running execute_migration will not execute migrations
+ expect { migration }.not_to change { active_schema_migrations_count }
+ end
+
+ it 'creates a table' do
+ expect(ClickHouse::MigrationSupport::ExclusiveLock).to receive(:execute_migration).and_call_original
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
+
+ table_schema = describe_table('some')
+ expect(schema_migrations).to contain_exactly(a_hash_including(version: '1', active: 1))
+ expect(table_schema).to match({
+ id: a_hash_including(type: 'UInt64'),
+ date: a_hash_including(type: 'Date')
+ })
+ end
+
+ context 'when a migration is already running' do
+ let(:migration_name) { 'create_some_table' }
+
+ before do
+ stub_exclusive_lease_taken(lease_key)
+ end
+
+ it 'raises error after timeout when migration is executing concurrently' do
+ expect { migration }.to raise_error(ClickHouse::MigrationSupport::Errors::LockError)
+ .and not_change { active_schema_migrations_count }
+ end
+ end
+ end
+
+ describe 'when dropping a table' do
+ let(:migrations_dirname) { 'drop_table' }
+ let(:target_version) { 2 }
+
+ it 'drops table' do
+ migrate(migration_context, 1)
+ expect(table_names).to include('some')
+
+ migration
+ expect(table_names).not_to include('some')
+ end
+ end
+
+ context 'when a migration raises an error' do
+ let(:migrations_dirname) { 'migration_with_error' }
+
+ it 'passes the error to caller as a StandardError' do
+ expect { migration }.to raise_error StandardError,
+ "An error has occurred, all later migrations canceled:\n\nA migration error happened"
+ expect(schema_migrations).to be_empty
+ end
+ end
+
+ context 'when connecting to not-existing database' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+ let(:connection) { ::ClickHouse::Connection.new(:unknown_database) }
+
+ it 'raises ConfigurationError' do
+ expect { migration }.to raise_error ClickHouse::Client::ConfigurationError,
+ "The database 'unknown_database' is not configured"
+ end
+ end
+
+ context 'when target_version is incorrect' do
+ let(:target_version) { 2 }
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'raises UnknownMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::Errors::UnknownMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+
+ context 'when migrations with duplicate name exist' do
+ let(:migrations_dirname) { 'duplicate_name' }
+
+ it 'raises DuplicateMigrationNameError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::Errors::DuplicateMigrationNameError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+
+ context 'when migrations with duplicate version exist' do
+ let(:migrations_dirname) { 'duplicate_version' }
+
+ it 'raises DuplicateMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::Errors::DuplicateMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+ end
+
+ describe 'performs rollbacks' do
+ subject(:migration) { rollback(migration_context, target_version) }
+
+ before do
+ # Ensure that all migrations are up
+ migrate(migration_context, nil)
+ end
+
+ context 'when down method is present' do
+ let(:migrations_dirname) { 'table_creation_with_down_method' }
+
+ context 'when specifying target_version' do
+ it 'removes migrations and performs down method' do
+ expect(table_names).to include('some', 'another')
+
+ # test that target_version is prioritized over step
+ expect { rollback(migration_context, 1, 10000) }.to change { active_schema_migrations_count }.from(2).to(1)
+ expect(table_names).not_to include('another')
+ expect(table_names).to include('some')
+ expect(schema_migrations).to contain_exactly(
+ a_hash_including(version: '1', active: 1),
+ a_hash_including(version: '2', active: 0)
+ )
+
+ expect { rollback(migration_context, nil) }.to change { active_schema_migrations_count }.to(0)
+ expect(table_names).not_to include('some', 'another')
+
+ expect(schema_migrations).to contain_exactly(
+ a_hash_including(version: '1', active: 0),
+ a_hash_including(version: '2', active: 0)
+ )
+ end
+ end
+
+ context 'when specifying step' do
+ it 'removes migrations and performs down method' do
+ expect(table_names).to include('some', 'another')
+
+ expect { rollback(migration_context, nil, 1) }.to change { active_schema_migrations_count }.from(2).to(1)
+ expect(table_names).not_to include('another')
+ expect(table_names).to include('some')
+
+ expect { rollback(migration_context, nil, 2) }.to change { active_schema_migrations_count }.to(0)
+ expect(table_names).not_to include('some', 'another')
+ end
+ end
+ end
+
+ context 'when down method is missing' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+ let(:target_version) { 0 }
+
+ it 'removes migration ignoring missing down method' do
+ expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
+ .and not_change { table_names & %w[some] }.from(%w[some])
+ end
+ end
+
+ context 'when target_version is incorrect' do
+ let(:target_version) { -1 }
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'raises UnknownMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::Errors::UnknownMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 1
+ end
+ end
+ end
+end
diff --git a/spec/lib/click_house/migration_support/sidekiq_middleware_spec.rb b/spec/lib/click_house/migration_support/sidekiq_middleware_spec.rb
new file mode 100644
index 00000000000..03c9edfabaa
--- /dev/null
+++ b/spec/lib/click_house/migration_support/sidekiq_middleware_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::MigrationSupport::SidekiqMiddleware, feature_category: :database do
+ let(:worker_with_click_house_worker) do
+ Class.new do
+ def self.name
+ 'TestWorker'
+ end
+ include ApplicationWorker
+ include ClickHouseWorker
+ end
+ end
+
+ let(:worker_without_click_house_worker) do
+ Class.new do
+ def self.name
+ 'TestWorkerWithoutClickHouseWorker'
+ end
+ include ApplicationWorker
+ end
+ end
+
+ subject(:middleware) { described_class.new }
+
+ before do
+ stub_const('TestWorker', worker_with_click_house_worker)
+ stub_const('TestWorkerWithoutClickHouseWorker', worker_without_click_house_worker)
+ end
+
+ describe '#call' do
+ let(:worker) { worker_class.new }
+ let(:job) { { 'jid' => 123, 'class' => worker_class.name } }
+ let(:queue) { 'test_queue' }
+
+ context 'when worker does not include ClickHouseWorker' do
+ let(:worker_class) { worker_without_click_house_worker }
+
+ it 'yields control without registering running worker' do
+ expect(ClickHouse::MigrationSupport::ExclusiveLock).not_to receive(:register_running_worker)
+ expect { |b| middleware.call(worker, job, queue, &b) }.to yield_with_no_args
+ end
+ end
+
+ context 'when worker includes ClickHouseWorker' do
+ let(:worker_class) { worker_with_click_house_worker }
+
+ it 'registers running worker and yields control' do
+ expect(ClickHouse::MigrationSupport::ExclusiveLock)
+ .to receive(:register_running_worker)
+ .with(worker_class, 'test_queue:123')
+ .and_wrap_original do |method, worker_class, worker_id|
+ expect { |b| method.call(worker_class, worker_id, &b) }.to yield_with_no_args
+ end
+
+ middleware.call(worker, job, queue)
+ end
+ end
+ end
+end
diff --git a/spec/lib/extracts_ref_spec.rb b/spec/lib/extracts_ref_spec.rb
index 9ff11899e89..c7186011654 100644
--- a/spec/lib/extracts_ref_spec.rb
+++ b/spec/lib/extracts_ref_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ExtractsRef do
+RSpec.describe ExtractsRef, feature_category: :source_code_management do
include described_class
include RepoHelpers
@@ -98,6 +98,12 @@ RSpec.describe ExtractsRef do
it { is_expected.to eq(nil) }
end
+
+ context 'when ref_type is a hash' do
+ let(:ref_type) { { 'just' => 'hash' } }
+
+ it { is_expected.to eq(nil) }
+ end
end
it_behaves_like 'extracts refs'
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 7860d85457a..64c249770b7 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -5,1069 +5,1123 @@ require 'spec_helper'
RSpec.describe Feature, :clean_gitlab_redis_feature_flag, stub_feature_flags: false, feature_category: :shared do
include StubVersion
- before do
- # reset Flipper AR-engine
- Feature.reset
- skip_feature_flags_yaml_validation
- end
+ # Pick a long-lasting real feature flag to test that we can check feature flags in the load balancer
+ let(:load_balancer_test_feature_flag) { :require_email_verification }
- describe '.current_request' do
- it 'returns a FlipperRequest with a flipper_id' do
- flipper_request = described_class.current_request
+ where(:bypass_load_balancer) { [true, false] }
- expect(flipper_request.flipper_id).to include("FlipperRequest:")
+ with_them do
+ def wrap_all_methods_with_flag_check(lb, flag)
+ lb.methods(false).each do |meth|
+ allow(lb).to receive(meth).and_wrap_original do |m, *args, **kwargs, &block|
+ Feature.enabled?(flag)
+ m.call(*args, **kwargs, &block)
+ end
+ end
end
-
- context 'when request store is inactive' do
- it 'does not cache flipper_id' do
- previous_id = described_class.current_request.flipper_id
-
- expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ before do
+ if bypass_load_balancer
+ stub_env(Feature::BypassLoadBalancer::FLAG, 'true')
+ wrap_all_methods_with_flag_check(ApplicationRecord.load_balancer, load_balancer_test_feature_flag)
end
+
+ # reset Flipper AR-engine
+ Feature.reset
+ skip_feature_flags_yaml_validation
end
- context 'when request store is active', :request_store do
- it 'caches flipper_id when request store is active' do
- previous_id = described_class.current_request.flipper_id
+ describe '.current_request' do
+ it 'returns a FlipperRequest with a flipper_id' do
+ flipper_request = described_class.current_request
- expect(described_class.current_request.flipper_id).to eq(previous_id)
+ expect(flipper_request.flipper_id).to include("FlipperRequest:")
end
- it 'returns a new flipper_id when request ends' do
- previous_id = described_class.current_request.flipper_id
-
- RequestStore.end!
+ context 'when request store is inactive' do
+ it 'does not cache flipper_id' do
+ previous_id = described_class.current_request.flipper_id
- expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ end
end
- end
- end
- describe '.get' do
- let(:feature) { double(:feature) }
- let(:key) { 'my_feature' }
+ context 'when request store is active', :request_store do
+ it 'caches flipper_id when request store is active' do
+ previous_id = described_class.current_request.flipper_id
- it 'returns the Flipper feature' do
- expect_any_instance_of(Flipper::DSL).to receive(:feature).with(key)
- .and_return(feature)
+ expect(described_class.current_request.flipper_id).to eq(previous_id)
+ end
- expect(described_class.get(key)).to eq(feature)
- end
- end
+ it 'returns a new flipper_id when request ends' do
+ previous_id = described_class.current_request.flipper_id
- describe '.persisted_names' do
- it 'returns the names of the persisted features' do
- Feature.enable('foo')
+ RequestStore.end!
- expect(described_class.persisted_names).to contain_exactly('foo')
+ expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ end
+ end
end
- it 'returns an empty Array when no features are presisted' do
- expect(described_class.persisted_names).to be_empty
- end
+ describe '.gitlab_instance' do
+ it 'returns a FlipperGitlabInstance with a flipper_id' do
+ flipper_request = described_class.gitlab_instance
- it 'caches the feature names when request store is active',
- :request_store, :use_clean_rails_memory_store_caching do
- Feature.enable('foo')
+ expect(flipper_request.flipper_id).to include("FlipperGitlabInstance:")
+ end
- expect(Gitlab::ProcessMemoryCache.cache_backend)
- .to receive(:fetch)
- .once
- .with('flipper/v1/features', { expires_in: 1.minute })
- .and_call_original
+ it 'caches flipper_id' do
+ previous_id = described_class.gitlab_instance.flipper_id
- 2.times do
- expect(described_class.persisted_names).to contain_exactly('foo')
+ expect(described_class.gitlab_instance.flipper_id).to eq(previous_id)
end
end
- it 'fetches all flags once in a single query', :request_store do
- Feature.enable('foo1')
- Feature.enable('foo2')
-
- queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do
- expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ describe '.get' do
+ let(:feature) { double(:feature) }
+ let(:key) { 'my_feature' }
- RequestStore.clear!
+ it 'returns the Flipper feature' do
+ expect_any_instance_of(Flipper::DSL).to receive(:feature).with(key)
+ .and_return(feature)
- expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ expect(described_class.get(key)).to eq(feature)
end
-
- expect(queries.count).to eq(1)
end
- end
- describe '.persisted_name?' do
- context 'when the feature is persisted' do
- it 'returns true when feature name is a string' do
+ describe '.persisted_names' do
+ it 'returns the names of the persisted features' do
Feature.enable('foo')
- expect(described_class.persisted_name?('foo')).to eq(true)
+ expect(described_class.persisted_names).to contain_exactly('foo')
end
- it 'returns true when feature name is a symbol' do
+ it 'returns an empty Array when no features are presisted' do
+ expect(described_class.persisted_names).to be_empty
+ end
+
+ it 'caches the feature names when request store is active',
+ :request_store, :use_clean_rails_memory_store_caching do
Feature.enable('foo')
- expect(described_class.persisted_name?(:foo)).to eq(true)
- end
- end
+ expect(Gitlab::ProcessMemoryCache.cache_backend)
+ .to receive(:fetch)
+ .once
+ .with('flipper/v1/features', { expires_in: 1.minute })
+ .and_call_original
- context 'when the feature is not persisted' do
- it 'returns false when feature name is a string' do
- expect(described_class.persisted_name?('foo')).to eq(false)
+ 2.times do
+ expect(described_class.persisted_names).to contain_exactly('foo')
+ end
end
- it 'returns false when feature name is a symbol' do
- expect(described_class.persisted_name?(:bar)).to eq(false)
- end
- end
- end
+ it 'fetches all flags once in a single query', :request_store do
+ Feature.enable('foo1')
+ Feature.enable('foo2')
- describe '.all' do
- let(:features) { Set.new }
+ queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
- it 'returns the Flipper features as an array' do
- expect_any_instance_of(Flipper::DSL).to receive(:features)
- .and_return(features)
+ RequestStore.clear!
- expect(described_class.all).to eq(features.to_a)
+ expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ end
+
+ expect(queries.count).to eq(1)
+ end
end
- end
- describe '.flipper' do
- context 'when request store is inactive' do
- it 'memoizes the Flipper instance but does not not enable Flipper memoization' do
- expect(Flipper).to receive(:new).once.and_call_original
+ describe '.persisted_name?' do
+ context 'when the feature is persisted' do
+ it 'returns true when feature name is a string' do
+ Feature.enable('foo')
- 2.times do
- described_class.flipper
+ expect(described_class.persisted_name?('foo')).to eq(true)
end
- expect(described_class.flipper.adapter.memoizing?).to eq(false)
- end
- end
+ it 'returns true when feature name is a symbol' do
+ Feature.enable('foo')
- context 'when request store is active', :request_store do
- it 'memoizes the Flipper instance' do
- expect(Flipper).to receive(:new).once.and_call_original
+ expect(described_class.persisted_name?(:foo)).to eq(true)
+ end
+ end
- described_class.flipper
- described_class.instance_variable_set(:@flipper, nil)
- described_class.flipper
+ context 'when the feature is not persisted' do
+ it 'returns false when feature name is a string' do
+ expect(described_class.persisted_name?('foo')).to eq(false)
+ end
- expect(described_class.flipper.adapter.memoizing?).to eq(true)
+ it 'returns false when feature name is a symbol' do
+ expect(described_class.persisted_name?(:bar)).to eq(false)
+ end
end
end
- end
- describe '.enabled?' do
- before do
- allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+ describe '.all' do
+ let(:features) { Set.new }
- stub_feature_flag_definition(:disabled_feature_flag)
- stub_feature_flag_definition(:enabled_feature_flag, default_enabled: true)
- end
+ it 'returns the Flipper features as an array' do
+ expect_any_instance_of(Flipper::DSL).to receive(:features)
+ .and_return(features)
- context 'when using redis cache', :use_clean_rails_redis_caching do
- it 'does not make recursive feature-flag calls' do
- expect(described_class).to receive(:enabled?).once.and_call_original
- described_class.enabled?(:disabled_feature_flag)
+ expect(described_class.all).to eq(features.to_a)
end
end
- context 'when self-recursive' do
- before do
- allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
- original.call(name) do |ff|
- Feature.enabled?(name)
- block.call(ff)
+ describe '.flipper' do
+ context 'when request store is inactive' do
+ it 'memoizes the Flipper instance but does not not enable Flipper memoization' do
+ expect(Flipper).to receive(:new).once.and_call_original
+
+ 2.times do
+ described_class.flipper
end
+
+ expect(described_class.flipper.adapter.memoizing?).to eq(false)
end
end
- it 'returns the default value' do
- expect(described_class.enabled?(:enabled_feature_flag)).to eq true
- end
+ context 'when request store is active', :request_store do
+ it 'memoizes the Flipper instance' do
+ expect(Flipper).to receive(:new).once.and_call_original
- it 'detects self recursion' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(have_attributes(message: 'self recursion'), { stack: [:enabled_feature_flag] })
+ described_class.flipper
+ described_class.instance_variable_set(:@flipper, nil)
+ described_class.flipper
- described_class.enabled?(:enabled_feature_flag)
+ expect(described_class.flipper.adapter.memoizing?).to eq(true)
+ end
end
end
- context 'when deeply recursive' do
+ describe '.enabled?' do
before do
- allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
- original.call(name) do |ff|
- Feature.enabled?(:"deeper_#{name}", type: :undefined, default_enabled_if_undefined: true)
- block.call(ff)
- end
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+
+ stub_feature_flag_definition(:disabled_feature_flag)
+ stub_feature_flag_definition(:enabled_feature_flag, default_enabled: true)
+ end
+
+ context 'when using redis cache', :use_clean_rails_redis_caching do
+ it 'does not make recursive feature-flag calls' do
+ expect(described_class).to receive(:enabled?).once.and_call_original
+ described_class.enabled?(:disabled_feature_flag)
end
end
- it 'detects deep recursion' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(have_attributes(message: 'deep recursion'), stack: have_attributes(size: be > 10))
+ context 'when self-recursive' do
+ before do
+ allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
+ original.call(name) do |ff|
+ Feature.enabled?(name)
+ block.call(ff)
+ end
+ end
+ end
- described_class.enabled?(:enabled_feature_flag)
- end
- end
+ it 'returns the default value' do
+ expect(described_class.enabled?(:enabled_feature_flag)).to eq true
+ end
- it 'returns false (and tracks / raises exception for dev) for undefined feature' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ it 'detects self recursion' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(have_attributes(message: 'self recursion'), { stack: [:enabled_feature_flag] })
- expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey
- end
+ described_class.enabled?(:enabled_feature_flag)
+ end
+ end
- it 'returns false for undefined feature with default_enabled_if_undefined: false' do
- expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_falsey
- end
+ context 'when deeply recursive' do
+ before do
+ allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
+ original.call(name) do |ff|
+ Feature.enabled?(:"deeper_#{name}", type: :undefined, default_enabled_if_undefined: true)
+ block.call(ff)
+ end
+ end
+ end
- it 'returns true for undefined feature with default_enabled_if_undefined: true' do
- expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_truthy
- end
+ it 'detects deep recursion' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(have_attributes(message: 'deep recursion'), stack: have_attributes(size: be > 10))
- it 'returns false for existing disabled feature in the database' do
- described_class.disable(:disabled_feature_flag)
+ described_class.enabled?(:enabled_feature_flag)
+ end
+ end
- expect(described_class.enabled?(:disabled_feature_flag)).to be_falsey
- end
+ it 'returns false (and tracks / raises exception for dev) for undefined feature' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- it 'returns true for existing enabled feature in the database' do
- described_class.enable(:enabled_feature_flag)
+ expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey
+ end
- expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy
- end
+ it 'returns false for undefined feature with default_enabled_if_undefined: false' do
+ expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_falsey
+ end
- it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
- it { expect(described_class.send(:l2_cache_backend)).to eq(Gitlab::Redis::FeatureFlag.cache_store) }
+ it 'returns true for undefined feature with default_enabled_if_undefined: true' do
+ expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_truthy
+ end
- it 'caches the status in L1 and L2 caches',
- :request_store, :use_clean_rails_memory_store_caching do
- described_class.enable(:disabled_feature_flag)
- flipper_key = "flipper/v1/feature/disabled_feature_flag"
+ it 'returns false for existing disabled feature in the database' do
+ described_class.disable(:disabled_feature_flag)
- expect(described_class.send(:l2_cache_backend))
- .to receive(:fetch)
- .once
- .with(flipper_key, { expires_in: 1.hour })
- .and_call_original
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_falsey
+ end
- expect(described_class.send(:l1_cache_backend))
- .to receive(:fetch)
- .once
- .with(flipper_key, { expires_in: 1.minute })
- .and_call_original
+ it 'returns true for existing enabled feature in the database' do
+ described_class.enable(:enabled_feature_flag)
- 2.times do
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy
end
- end
- it 'returns the default value when the database does not exist' do
- fake_default = double('fake default')
- expect(ActiveRecord::Base).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" }
+ it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
+ it { expect(described_class.send(:l2_cache_backend)).to eq(Gitlab::Redis::FeatureFlag.cache_store) }
- expect(described_class.enabled?(:a_feature, default_enabled_if_undefined: fake_default)).to eq(fake_default)
- end
+ it 'caches the status in L1 and L2 caches',
+ :request_store, :use_clean_rails_memory_store_caching do
+ described_class.enable(:disabled_feature_flag)
+ flipper_key = "flipper/v1/feature/disabled_feature_flag"
- context 'logging is enabled', :request_store do
- before do
- allow(Feature).to receive(:log_feature_flag_states?).and_call_original
+ expect(described_class.send(:l2_cache_backend))
+ .to receive(:fetch)
+ .once
+ .with(flipper_key, { expires_in: 1.hour })
+ .and_call_original
- stub_feature_flag_definition(:enabled_feature_flag, log_state_changes: true)
+ expect(described_class.send(:l1_cache_backend))
+ .to receive(:fetch)
+ .once
+ .with(flipper_key, { expires_in: 1.minute })
+ .and_call_original
- described_class.enable(:feature_flag_state_logs)
- described_class.enable(:enabled_feature_flag)
- described_class.enabled?(:enabled_feature_flag)
+ 2.times do
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end
end
- it 'does not log feature_flag_state_logs' do
- expect(described_class.logged_states).not_to have_key("feature_flag_state_logs")
- end
+ it 'returns the default value when the database does not exist' do
+ fake_default = double('fake default')
- it 'logs other feature flags' do
- expect(described_class.logged_states).to have_key(:enabled_feature_flag)
- expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy
- end
- end
+ base_class = Feature::BypassLoadBalancer.enabled? ? Feature::BypassLoadBalancer::FlipperRecord : ActiveRecord::Base
+ expect(base_class).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" }
- context 'cached feature flag', :request_store do
- before do
- described_class.send(:flipper).memoize = false
- described_class.enabled?(:disabled_feature_flag)
+ expect(described_class.enabled?(:a_feature, default_enabled_if_undefined: fake_default)).to eq(fake_default)
end
- it 'caches the status in L1 cache for the first minute' do
- expect do
- expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch)
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
- end.not_to exceed_query_limit(0)
- end
+ context 'logging is enabled', :request_store do
+ before do
+ allow(Feature).to receive(:log_feature_flag_states?).and_call_original
- it 'caches the status in L2 cache after 2 minutes' do
- travel_to 2.minutes.from_now do
- expect do
- expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
- end.not_to exceed_query_limit(0)
+ stub_feature_flag_definition(:enabled_feature_flag, log_state_changes: true)
+
+ described_class.enable(:feature_flag_state_logs)
+ described_class.enable(:enabled_feature_flag)
+ described_class.enabled?(:enabled_feature_flag)
+ end
+
+ it 'does not log feature_flag_state_logs' do
+ expect(described_class.logged_states).not_to have_key("feature_flag_state_logs")
+ end
+
+ it 'logs other feature flags' do
+ expect(described_class.logged_states).to have_key(:enabled_feature_flag)
+ expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy
end
end
- it 'fetches the status after an hour' do
- travel_to 61.minutes.from_now do
+ context 'cached feature flag', :request_store do
+ before do
+ described_class.send(:flipper).memoize = false
+ described_class.enabled?(:disabled_feature_flag)
+ end
+
+ it 'caches the status in L1 cache for the first minute' do
expect do
expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch)
expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
- end.not_to exceed_query_limit(1)
+ end.not_to exceed_query_limit(0)
end
- end
- end
-
- context 'with current_request actor' do
- context 'when request store is inactive' do
- it 'returns the approximate percentage set' do
- number_of_times = 1_000
- percentage = 50
- described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage)
- gate_values = Array.new(number_of_times) do
- described_class.enabled?(:enabled_feature_flag, described_class.current_request)
+ it 'caches the status in L2 cache after 2 minutes' do
+ travel_to 2.minutes.from_now do
+ expect do
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end.not_to exceed_query_limit(0)
end
+ end
- margin_of_error = 0.05 * number_of_times
- expected_size = number_of_times * percentage / 100
- expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size)
+ it 'fetches the status after an hour' do
+ travel_to 61.minutes.from_now do
+ expect do
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end.not_to exceed_query_limit(1)
+ end
end
end
- context 'when request store is active', :request_store do
- it 'always returns the same gate value' do
- described_class.enable_percentage_of_actors(:enabled_feature_flag, 50)
+ [:current_request, :request, described_class.current_request].each do |thing|
+ context "with #{thing} actor" do
+ context 'when request store is inactive' do
+ it 'returns the approximate percentage set' do
+ number_of_times = 1_000
+ percentage = 50
+ described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage)
- previous_gate_value = described_class.enabled?(:enabled_feature_flag, described_class.current_request)
+ gate_values = Array.new(number_of_times) do
+ described_class.enabled?(:enabled_feature_flag, thing)
+ end
- 1_000.times do
- expect(described_class.enabled?(:enabled_feature_flag, described_class.current_request)).to eq(previous_gate_value)
+ margin_of_error = 0.05 * number_of_times
+ expected_size = number_of_times * percentage / 100
+ expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size)
+ end
end
- end
- end
- end
- context 'with a group member' do
- let(:key) { :awesome_feature }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'when request store is active', :request_store do
+ it 'always returns the same gate value' do
+ described_class.enable_percentage_of_actors(:enabled_feature_flag, 50)
- before do
- described_class.reset
- stub_feature_flag_definition(key)
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ previous_gate_value = described_class.enabled?(:enabled_feature_flag, thing)
+
+ 1_000.times do
+ expect(described_class.enabled?(:enabled_feature_flag, thing)).to eq(previous_gate_value)
+ end
+ end
+ end
end
- described_class.enable(key, described_class.group(:guinea_pigs))
end
- it 'is true for all group members' do
- expect(described_class.enabled?(key, guinea_pigs[0])).to be_truthy
- expect(described_class.enabled?(key, guinea_pigs[1])).to be_truthy
- expect(described_class.enabled?(key, guinea_pigs[2])).to be_truthy
- end
+ context 'with gitlab_instance actor' do
+ it 'always returns the same gate value' do
+ described_class.enable(:enabled_feature_flag, described_class.gitlab_instance)
- it 'is false for any other actor' do
- expect(described_class.enabled?(key, create(:user))).to be_falsey
+ expect(described_class.enabled?(:enabled_feature_flag, described_class.gitlab_instance)).to be_truthy
+ end
end
- end
-
- context 'with an individual actor' do
- let(:actor) { stub_feature_flag_gate('CustomActor:5') }
- let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- before do
- described_class.enable(:enabled_feature_flag, actor)
- end
+ context 'with :instance actor' do
+ it 'always returns the same gate value' do
+ described_class.enable(:enabled_feature_flag, :instance)
- it 'returns true when same actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be_truthy
+ expect(described_class.enabled?(:enabled_feature_flag, :instance)).to be_truthy
+ end
end
- it 'returns false when different actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be_falsey
- end
+ context 'with a group member' do
+ let(:key) { :awesome_feature }
+ let(:guinea_pigs) { create_list(:user, 3) }
- it 'returns false when no actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey
- end
- end
+ before do
+ described_class.reset
+ stub_feature_flag_definition(key)
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
+ described_class.enable(key, described_class.group(:guinea_pigs))
+ end
- context 'with invalid actor' do
- let(:actor) { double('invalid actor') }
+ it 'is true for all group members' do
+ expect(described_class.enabled?(key, guinea_pigs[0])).to be_truthy
+ expect(described_class.enabled?(key, guinea_pigs[1])).to be_truthy
+ expect(described_class.enabled?(key, guinea_pigs[2])).to be_truthy
+ end
- context 'when is dev_or_test_env' do
- it 'does raise exception' do
- expect { described_class.enabled?(:enabled_feature_flag, actor) }
- .to raise_error /needs to include `FeatureGate` or implement `flipper_id`/
+ it 'is false for any other actor' do
+ expect(described_class.enabled?(key, create(:user))).to be_falsey
end
end
- end
- context 'validates usage of feature flag with YAML definition' do
- let(:definition) do
- Feature::Definition.new('development/my_feature_flag.yml',
- name: 'my_feature_flag',
- type: 'development',
- default_enabled: default_enabled
- ).tap(&:validate!)
- end
+ context 'with an individual actor' do
+ let(:actor) { stub_feature_flag_gate('CustomActor:5') }
+ let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- let(:default_enabled) { false }
+ before do
+ described_class.enable(:enabled_feature_flag, actor)
+ end
- before do
- stub_env('LAZILY_CREATE_FEATURE_FLAG', '0')
+ it 'returns true when same actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be_truthy
+ end
- allow(Feature::Definition).to receive(:valid_usage!).and_call_original
- allow(Feature::Definition).to receive(:definitions) do
- { definition.key => definition }
+ it 'returns false when different actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be_falsey
end
- end
- it 'when usage is correct' do
- expect { described_class.enabled?(:my_feature_flag) }.not_to raise_error
+ it 'returns false when no actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey
+ end
end
- it 'when invalid type is used' do
- expect { described_class.enabled?(:my_feature_flag, type: :ops) }
- .to raise_error(/The `type:` of/)
- end
+ context 'with invalid actor' do
+ let(:actor) { double('invalid actor') }
- context 'when default_enabled: is false in the YAML definition' do
- it 'reads the default from the YAML definition' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(default_enabled)
+ context 'when is dev_or_test_env' do
+ it 'does raise exception' do
+ expect { described_class.enabled?(:enabled_feature_flag, actor) }
+ .to raise_error /needs to include `FeatureGate` or implement `flipper_id`/
+ end
end
end
- context 'when default_enabled: is true in the YAML definition' do
- let(:default_enabled) { true }
-
- it 'reads the default from the YAML definition' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(true)
+ context 'validates usage of feature flag with YAML definition' do
+ let(:definition) do
+ Feature::Definition.new('development/my_feature_flag.yml',
+ name: 'my_feature_flag',
+ type: 'development',
+ default_enabled: default_enabled
+ ).tap(&:validate!)
end
- context 'and feature has been disabled' do
- before do
- described_class.disable(:my_feature_flag)
- end
+ let(:default_enabled) { false }
- it 'is not enabled' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(false)
+ before do
+ stub_env('LAZILY_CREATE_FEATURE_FLAG', '0')
+ lb_ff_definition = Feature::Definition.get(load_balancer_test_feature_flag)
+ allow(Feature::Definition).to receive(:valid_usage!).and_call_original
+ allow(Feature::Definition).to receive(:definitions) do
+ { definition.key => definition, lb_ff_definition.key => lb_ff_definition }
end
end
- context 'with a cached value and the YAML definition is changed thereafter' do
- before do
- described_class.enabled?(:my_feature_flag)
+ it 'when usage is correct' do
+ expect { described_class.enabled?(:my_feature_flag) }.not_to raise_error
+ end
+
+ it 'when invalid type is used' do
+ expect { described_class.enabled?(:my_feature_flag, type: :ops) }
+ .to raise_error(/The `type:` of/)
+ end
+
+ context 'when default_enabled: is false in the YAML definition' do
+ it 'reads the default from the YAML definition' do
+ expect(described_class.enabled?(:my_feature_flag)).to eq(default_enabled)
end
+ end
- it 'reads new default value' do
- allow(definition).to receive(:default_enabled).and_return(true)
+ context 'when default_enabled: is true in the YAML definition' do
+ let(:default_enabled) { true }
+ it 'reads the default from the YAML definition' do
expect(described_class.enabled?(:my_feature_flag)).to eq(true)
end
- end
- context 'when YAML definition does not exist for an optional type' do
- let(:optional_type) { described_class::Shared::TYPES.find { |name, attrs| attrs[:optional] }.first }
+ context 'and feature has been disabled' do
+ before do
+ described_class.disable(:my_feature_flag)
+ end
- context 'when in dev or test environment' do
- it 'raises an error for dev' do
- expect { described_class.enabled?(:non_existent_flag, type: optional_type) }
- .to raise_error(
- Feature::InvalidFeatureFlagError,
- "The feature flag YAML definition for 'non_existent_flag' does not exist")
+ it 'is not enabled' do
+ expect(described_class.enabled?(:my_feature_flag)).to eq(false)
end
end
- context 'when in production' do
+ context 'with a cached value and the YAML definition is changed thereafter' do
before do
- allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false)
+ described_class.enabled?(:my_feature_flag)
end
- context 'when database exists' do
- before do
- allow(ApplicationRecord.database).to receive(:exists?).and_return(true)
- end
+ it 'reads new default value' do
+ allow(definition).to receive(:default_enabled).and_return(true)
- it 'checks the persisted status and returns false' do
- expect(described_class).to receive(:with_feature).with(:non_existent_flag).and_call_original
+ expect(described_class.enabled?(:my_feature_flag)).to eq(true)
+ end
+ end
+
+ context 'when YAML definition does not exist for an optional type' do
+ let(:optional_type) { described_class::Shared::TYPES.find { |name, attrs| attrs[:optional] }.first }
- expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ context 'when in dev or test environment' do
+ it 'raises an error for dev' do
+ expect { described_class.enabled?(:non_existent_flag, type: optional_type) }
+ .to raise_error(
+ Feature::InvalidFeatureFlagError,
+ "The feature flag YAML definition for 'non_existent_flag' does not exist")
end
end
- context 'when database does not exist' do
+ context 'when in production' do
before do
- allow(ApplicationRecord.database).to receive(:exists?).and_return(false)
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false)
+ end
+
+ context 'when database exists' do
+ before do
+ allow(ApplicationRecord.database).to receive(:exists?).and_return(true)
+ end
+
+ it 'checks the persisted status and returns false' do
+ expect(described_class).to receive(:with_feature).with(:non_existent_flag).and_call_original
+
+ expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ end
end
- it 'returns false without checking the status in the database' do
- expect(described_class).not_to receive(:get)
+ context 'when database does not exist' do
+ before do
+ allow(ApplicationRecord.database).to receive(:exists?).and_return(false)
+ end
+
+ it 'returns false without checking the status in the database' do
+ expect(described_class).not_to receive(:get)
- expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ end
end
end
end
end
end
end
- end
-
- describe '.disable?' do
- it 'returns true (and tracks / raises exception for dev) for undefined feature' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
-
- expect(described_class.disabled?(:some_random_feature_flag)).to be_truthy
- end
- it 'returns true for undefined feature with default_enabled_if_undefined: false' do
- expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_truthy
- end
-
- it 'returns false for undefined feature with default_enabled_if_undefined: true' do
- expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_falsey
- end
+ describe '.disable?' do
+ it 'returns true (and tracks / raises exception for dev) for undefined feature' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- it 'returns true for existing disabled feature in the database' do
- stub_feature_flag_definition(:disabled_feature_flag)
- described_class.disable(:disabled_feature_flag)
+ expect(described_class.disabled?(:some_random_feature_flag)).to be_truthy
+ end
- expect(described_class.disabled?(:disabled_feature_flag)).to be_truthy
- end
+ it 'returns true for undefined feature with default_enabled_if_undefined: false' do
+ expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_truthy
+ end
- it 'returns false for existing enabled feature in the database' do
- stub_feature_flag_definition(:enabled_feature_flag)
- described_class.enable(:enabled_feature_flag)
+ it 'returns false for undefined feature with default_enabled_if_undefined: true' do
+ expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_falsey
+ end
- expect(described_class.disabled?(:enabled_feature_flag)).to be_falsey
- end
- end
+ it 'returns true for existing disabled feature in the database' do
+ stub_feature_flag_definition(:disabled_feature_flag)
+ described_class.disable(:disabled_feature_flag)
- shared_examples_for 'logging' do
- let(:expected_action) {}
- let(:expected_extra) {}
+ expect(described_class.disabled?(:disabled_feature_flag)).to be_truthy
+ end
- it 'logs the event' do
- expect(Feature.logger).to receive(:info).at_least(:once).with(key: key, action: expected_action, **expected_extra)
+ it 'returns false for existing enabled feature in the database' do
+ stub_feature_flag_definition(:enabled_feature_flag)
+ described_class.enable(:enabled_feature_flag)
- subject
+ expect(described_class.disabled?(:enabled_feature_flag)).to be_falsey
+ end
end
- end
- describe '.enable' do
- subject { described_class.enable(key, thing) }
+ shared_examples_for 'logging' do
+ let(:expected_action) {}
+ let(:expected_extra) {}
- let(:key) { :awesome_feature }
- let(:thing) { true }
+ it 'logs the event' do
+ expect(Feature.logger).to receive(:info).at_least(:once).with(key: key, action: expected_action, **expected_extra)
- it_behaves_like 'logging' do
- let(:expected_action) { :enable }
- let(:expected_extra) { { "extra.thing" => "true" } }
+ subject
+ end
end
- # This is documented to return true, modify doc/administration/feature_flags.md if it changes
- it 'returns true' do
- expect(subject).to be true
- end
+ describe '.enable' do
+ subject { described_class.enable(key, thing) }
- context 'when thing is an actor' do
- let(:thing) { create(:user) }
+ let(:key) { :awesome_feature }
+ let(:thing) { true }
it_behaves_like 'logging' do
- let(:expected_action) { eq(:enable) | eq(:remove_opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ let(:expected_action) { :enable }
+ let(:expected_extra) { { "extra.thing" => "true" } }
end
- end
- end
- describe '.disable' do
- subject { described_class.disable(key, thing) }
+ # This is documented to return true, modify doc/administration/feature_flags.md if it changes
+ it 'returns true' do
+ expect(subject).to be true
+ end
- let(:key) { :awesome_feature }
- let(:thing) { false }
+ context 'when thing is an actor' do
+ let(:thing) { create(:user) }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable }
- let(:expected_extra) { { "extra.thing" => "false" } }
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:enable) | eq(:remove_opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
+ end
end
- # This is documented to return true, modify doc/administration/feature_flags.md if it changes
- it 'returns true' do
- expect(subject).to be true
- end
+ describe '.disable' do
+ subject { described_class.disable(key, thing) }
- context 'when thing is an actor' do
- let(:thing) { create(:user) }
- let(:flag_opts) { {} }
+ let(:key) { :awesome_feature }
+ let(:thing) { false }
it_behaves_like 'logging' do
let(:expected_action) { :disable }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ let(:expected_extra) { { "extra.thing" => "false" } }
end
- before do
- stub_feature_flag_definition(key, flag_opts)
+ # This is documented to return true, modify doc/administration/feature_flags.md if it changes
+ it 'returns true' do
+ expect(subject).to be true
end
- context 'when the feature flag was enabled for this actor' do
- before do
- described_class.enable(key, thing)
- end
+ context 'when thing is an actor' do
+ let(:thing) { create(:user) }
+ let(:flag_opts) { {} }
- it 'marks this thing as disabled' do
- expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(false)
+ before do
+ stub_feature_flag_definition(key, flag_opts)
end
- it 'is possible to re-enable the feature' do
- subject
+ context 'when the feature flag was enabled for this actor' do
+ before do
+ described_class.enable(key, thing)
+ end
- expect { described_class.enable(key, thing) }
- .to change { thing_enabled? }.from(false).to(true)
- end
- end
+ it 'marks this thing as disabled' do
+ expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ end
- context 'when the feature flag is enabled globally' do
- before do
- described_class.enable(key)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(false)
+ end
+
+ it 'is possible to re-enable the feature' do
+ subject
- it 'does not mark this thing as disabled' do
- expect { subject }.not_to change { thing_enabled? }.from(true)
+ expect { described_class.enable(key, thing) }
+ .to change { thing_enabled? }.from(false).to(true)
+ end
end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ context 'when the feature flag is enabled globally' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'does not mark this thing as disabled' do
+ expect { subject }.not_to change { thing_enabled? }.from(true)
+ end
+
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
end
end
end
- end
- describe 'opt_out' do
- subject { described_class.opt_out(key, thing) }
+ describe 'opt_out' do
+ subject { described_class.opt_out(key, thing) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- before do
- stub_feature_flag_definition(key)
- described_class.enable(key)
- end
+ before do
+ stub_feature_flag_definition(key)
+ described_class.enable(key)
+ end
- context 'when thing is an actor' do
- let_it_be(:thing) { create(:project) }
+ context 'when thing is an actor' do
+ let_it_be(:thing) { create(:project) }
- it 'marks this thing as disabled' do
- expect { subject }.to change { thing_enabled? }.from(true).to(false)
- end
+ it 'marks this thing as disabled' do
+ expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
- it_behaves_like 'logging' do
- let(:expected_action) { eq(:opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
- end
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
- it 'stores the opt-out information as a gate' do
- subject
+ it 'stores the opt-out information as a gate' do
+ subject
- flag = described_class.get(key)
+ flag = described_class.get(key)
- expect(flag.actors_value).to include(include(thing.flipper_id))
- expect(flag.actors_value).not_to include(thing.flipper_id)
+ expect(flag.actors_value).to include(include(thing.flipper_id))
+ expect(flag.actors_value).not_to include(thing.flipper_id)
+ end
end
- end
- context 'when thing is a group' do
- let(:thing) { Feature.group(:guinea_pigs) }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'when thing is a group' do
+ let(:thing) { Feature.group(:guinea_pigs) }
+ let(:guinea_pigs) { create_list(:user, 3) }
- before do
- Feature.reset
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ before do
+ Feature.reset
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
end
- end
- it 'has no effect' do
- expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ it 'has no effect' do
+ expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ end
end
end
- end
- describe 'remove_opt_out' do
- subject { described_class.remove_opt_out(key, thing) }
+ describe 'remove_opt_out' do
+ subject { described_class.remove_opt_out(key, thing) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- before do
- stub_feature_flag_definition(key)
- described_class.enable(key)
- described_class.opt_out(key, thing)
- end
+ before do
+ stub_feature_flag_definition(key)
+ described_class.enable(key)
+ described_class.opt_out(key, thing)
+ end
- context 'when thing is an actor' do
- let_it_be(:thing) { create(:project) }
+ context 'when thing is an actor' do
+ let_it_be(:thing) { create(:project) }
- it 're-enables this thing' do
- expect { subject }.to change { thing_enabled? }.from(false).to(true)
- end
+ it 're-enables this thing' do
+ expect { subject }.to change { thing_enabled? }.from(false).to(true)
+ end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
- it_behaves_like 'logging' do
- let(:expected_action) { eq(:remove_opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
- end
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:remove_opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
- it 'removes the opt-out information' do
- subject
+ it 'removes the opt-out information' do
+ subject
- flag = described_class.get(key)
+ flag = described_class.get(key)
- expect(flag.actors_value).to be_empty
+ expect(flag.actors_value).to be_empty
+ end
end
- end
- context 'when thing is a group' do
- let(:thing) { Feature.group(:guinea_pigs) }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'when thing is a group' do
+ let(:thing) { Feature.group(:guinea_pigs) }
+ let(:guinea_pigs) { create_list(:user, 3) }
- before do
- Feature.reset
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ before do
+ Feature.reset
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
end
- end
- it 'has no effect' do
- expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ it 'has no effect' do
+ expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ end
end
end
- end
- describe '.enable_percentage_of_time' do
- subject { described_class.enable_percentage_of_time(key, percentage) }
+ describe '.enable_percentage_of_time' do
+ subject { described_class.enable_percentage_of_time(key, percentage) }
- let(:key) { :awesome_feature }
- let(:percentage) { 50 }
-
- it_behaves_like 'logging' do
- let(:expected_action) { :enable_percentage_of_time }
- let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
- end
+ let(:key) { :awesome_feature }
+ let(:percentage) { 50 }
- context 'when the flag is on' do
- before do
- described_class.enable(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :enable_percentage_of_time }
+ let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
end
- it 'fails with InvalidOperation' do
- expect { subject }.to raise_error(described_class::InvalidOperation)
+ context 'when the flag is on' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'fails with InvalidOperation' do
+ expect { subject }.to raise_error(described_class::InvalidOperation)
+ end
end
end
- end
- describe '.disable_percentage_of_time' do
- subject { described_class.disable_percentage_of_time(key) }
+ describe '.disable_percentage_of_time' do
+ subject { described_class.disable_percentage_of_time(key) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable_percentage_of_time }
- let(:expected_extra) { {} }
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable_percentage_of_time }
+ let(:expected_extra) { {} }
+ end
end
- end
-
- describe '.enable_percentage_of_actors' do
- subject { described_class.enable_percentage_of_actors(key, percentage) }
- let(:key) { :awesome_feature }
- let(:percentage) { 50 }
+ describe '.enable_percentage_of_actors' do
+ subject { described_class.enable_percentage_of_actors(key, percentage) }
- it_behaves_like 'logging' do
- let(:expected_action) { :enable_percentage_of_actors }
- let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
- end
+ let(:key) { :awesome_feature }
+ let(:percentage) { 50 }
- context 'when the flag is on' do
- before do
- described_class.enable(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :enable_percentage_of_actors }
+ let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
end
- it 'fails with InvalidOperation' do
- expect { subject }.to raise_error(described_class::InvalidOperation)
+ context 'when the flag is on' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'fails with InvalidOperation' do
+ expect { subject }.to raise_error(described_class::InvalidOperation)
+ end
end
end
- end
- describe '.disable_percentage_of_actors' do
- subject { described_class.disable_percentage_of_actors(key) }
+ describe '.disable_percentage_of_actors' do
+ subject { described_class.disable_percentage_of_actors(key) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable_percentage_of_actors }
- let(:expected_extra) { {} }
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable_percentage_of_actors }
+ let(:expected_extra) { {} }
+ end
end
- end
- describe '.remove' do
- subject { described_class.remove(key) }
-
- let(:key) { :awesome_feature }
- let(:actor) { create(:user) }
-
- before do
- described_class.enable(key)
- end
+ describe '.remove' do
+ subject { described_class.remove(key) }
- it_behaves_like 'logging' do
- let(:expected_action) { :remove }
- let(:expected_extra) { {} }
- end
+ let(:key) { :awesome_feature }
+ let(:actor) { create(:user) }
- context 'for a non-persisted feature' do
- it 'returns nil' do
- expect(described_class.remove(:non_persisted_feature_flag)).to be_nil
+ before do
+ described_class.enable(key)
end
- it 'returns true, and cleans up' do
- expect(subject).to be_truthy
- expect(described_class.persisted_names).not_to include(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :remove }
+ let(:expected_extra) { {} }
end
- end
- end
-
- describe '.log_feature_flag_states?' do
- let(:log_state_changes) { false }
- let(:milestone) { "0.0" }
- let(:flag_name) { :some_flag }
- let(:flag_type) { 'development' }
- before do
- Feature.enable(:feature_flag_state_logs)
- Feature.enable(:some_flag)
-
- allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
- allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original
- allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original
+ context 'for a non-persisted feature' do
+ it 'returns nil' do
+ expect(described_class.remove(:non_persisted_feature_flag)).to be_nil
+ end
- stub_feature_flag_definition(flag_name,
- type: flag_type,
- milestone: milestone,
- log_state_changes: log_state_changes)
+ it 'returns true, and cleans up' do
+ expect(subject).to be_truthy
+ expect(described_class.persisted_names).not_to include(key)
+ end
+ end
end
- subject { described_class.log_feature_flag_states?(flag_name) }
+ describe '.log_feature_flag_states?' do
+ let(:log_state_changes) { false }
+ let(:milestone) { "0.0" }
+ let(:flag_name) { :some_flag }
+ let(:flag_type) { 'development' }
- context 'when flag is feature_flag_state_logs' do
- let(:milestone) { "14.6" }
- let(:flag_name) { :feature_flag_state_logs }
- let(:flag_type) { 'ops' }
- let(:log_state_changes) { true }
+ before do
+ Feature.enable(:feature_flag_state_logs)
+ Feature.enable(:some_flag)
- it { is_expected.to be_falsey }
- end
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+ allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original
+ allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original
- context 'when flag is old' do
- it { is_expected.to be_falsey }
- end
+ stub_feature_flag_definition(flag_name,
+ type: flag_type,
+ milestone: milestone,
+ log_state_changes: log_state_changes)
+ end
- context 'when flag is old while log_state_changes is not present ' do
- let(:log_state_changes) { nil }
+ subject { described_class.log_feature_flag_states?(flag_name) }
- it { is_expected.to be_falsey }
- end
+ context 'when flag is feature_flag_state_logs' do
+ let(:milestone) { "14.6" }
+ let(:flag_name) { :feature_flag_state_logs }
+ let(:flag_type) { 'ops' }
+ let(:log_state_changes) { true }
- context 'when flag is old but log_state_changes is true' do
- let(:log_state_changes) { true }
+ it { is_expected.to be_falsey }
+ end
- it { is_expected.to be_truthy }
- end
+ context 'when flag is old' do
+ it { is_expected.to be_falsey }
+ end
- context 'when flag is new and not feature_flag_state_logs' do
- let(:milestone) { "14.6" }
+ context 'when flag is old while log_state_changes is not present ' do
+ let(:log_state_changes) { nil }
- before do
- stub_version('14.5.123', 'deadbeef')
+ it { is_expected.to be_falsey }
end
- it { is_expected.to be_truthy }
- end
+ context 'when flag is old but log_state_changes is true' do
+ let(:log_state_changes) { true }
- context 'when milestone is nil' do
- let(:milestone) { nil }
+ it { is_expected.to be_truthy }
+ end
- it { is_expected.to be_falsey }
- end
- end
+ context 'when flag is new and not feature_flag_state_logs' do
+ let(:milestone) { "14.6" }
- context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do
- let(:actor) { stub_feature_flag_gate('CustomActor:5') }
- let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
+ before do
+ stub_version('14.5.123', 'deadbeef')
+ end
- # This is a bit unpleasant. For these tests we want to simulate stale reads
- # from the database (due to database load balancing). A simple way to do
- # that is to stub the response on the adapter Flipper uses for reading from
- # the database. However, there isn't a convenient API for this. We know that
- # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can
- # find it that way.
- let(:active_record_adapter) do
- adapter = described_class.flipper
+ it { is_expected.to be_truthy }
+ end
- loop do
- break adapter unless adapter.instance_variable_get(:@adapter)
+ context 'when milestone is nil' do
+ let(:milestone) { nil }
- adapter = adapter.instance_variable_get(:@adapter)
+ it { is_expected.to be_falsey }
end
end
- before do
- stub_feature_flag_definition(:enabled_feature_flag)
- end
+ context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do
+ let(:actor) { stub_feature_flag_gate('CustomActor:5') }
+ let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- it 'gives the correct value when enabling for an additional actor' do
- described_class.enable(:enabled_feature_flag, actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ # This is a bit unpleasant. For these tests we want to simulate stale reads
+ # from the database (due to database load balancing). A simple way to do
+ # that is to stub the response on the adapter Flipper uses for reading from
+ # the database. However, there isn't a convenient API for this. We know that
+ # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can
+ # find it that way.
+ let(:active_record_adapter) do
+ adapter = described_class.flipper
- # This should only be enabled for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ loop do
+ break adapter unless adapter.instance_variable_get(:@adapter)
- # Enable for `another_actor` and simulate a stale read
- described_class.enable(:enabled_feature_flag, another_actor)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ adapter = adapter.instance_variable_get(:@adapter)
+ end
+ end
- # Should read from the cache and be enabled for both of these actors
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
+ before do
+ stub_feature_flag_definition(:enabled_feature_flag)
+ end
- it 'gives the correct value when enabling for percentage of time' do
- described_class.enable_percentage_of_time(:enabled_feature_flag, 10)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when enabling for an additional actor' do
+ described_class.enable(:enabled_feature_flag, actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # Test against `gate_values` directly as otherwise it would be non-determistic
- expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10)
+ # This should only be enabled for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- # Enable 50% of time and simulate a stale read
- described_class.enable_percentage_of_time(:enabled_feature_flag, 50)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Enable for `another_actor` and simulate a stale read
+ described_class.enable(:enabled_feature_flag, another_actor)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be enabled 50% of the time
- expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50)
- end
+ # Should read from the cache and be enabled for both of these actors
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
- it 'gives the correct value when disabling the flag' do
- described_class.enable(:enabled_feature_flag, actor)
- described_class.enable(:enabled_feature_flag, another_actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when enabling for percentage of time' do
+ described_class.enable_percentage_of_time(:enabled_feature_flag, 10)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # This be enabled for `actor` and `another_actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ # Test against `gate_values` directly as otherwise it would be non-deterministic
+ expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10)
- # Disable for `another_actor` and simulate a stale read
- described_class.disable(:enabled_feature_flag, another_actor)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Enable 50% of time and simulate a stale read
+ described_class.enable_percentage_of_time(:enabled_feature_flag, 50)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be enabled only for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
+ # Should read from the cache and be enabled 50% of the time
+ expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50)
+ end
- it 'gives the correct value when deleting the flag' do
- described_class.enable(:enabled_feature_flag, actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when disabling the flag' do
+ described_class.enable(:enabled_feature_flag, actor)
+ described_class.enable(:enabled_feature_flag, another_actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # This should only be enabled for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ # This should be enabled for `actor` and `another_actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- # Remove and simulate a stale read
- described_class.remove(:enabled_feature_flag)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Disable for `another_actor` and simulate a stale read
+ described_class.disable(:enabled_feature_flag, another_actor)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be disabled everywhere
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
- end
+ # Should read from the cache and be enabled only for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
+
+ it 'gives the correct value when deleting the flag' do
+ described_class.enable(:enabled_feature_flag, actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- describe Feature::Target do
- describe '#targets' do
- let(:project) { create(:project) }
- let(:group) { create(:group) }
- let(:user_name) { project.first_owner.username }
+ # This should only be enabled for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- subject do
- described_class.new(
- user: user_name,
- project: project.full_path,
- group: group.full_path,
- repository: project.repository.full_path
- )
- end
+ # Remove and simulate a stale read
+ described_class.remove(:enabled_feature_flag)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- it 'returns all found targets' do
- expect(subject.targets).to be_an(Array)
- expect(subject.targets).to eq([project.first_owner, project, group, project.repository])
+ # Should read from the cache and be disabled everywhere
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
end
+ end
- context 'when repository target works with different types of repositories' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :wiki_repo, group: group) }
- let_it_be(:project_in_user_namespace) { create(:project, namespace: create(:user).namespace) }
- let(:personal_snippet) { create(:personal_snippet) }
- let(:project_snippet) { create(:project_snippet, project: project) }
-
- let(:targets) do
- [
- project,
- project.wiki,
- project_in_user_namespace,
- personal_snippet,
- project_snippet
- ]
- end
+ describe Feature::Target do
+ describe '#targets' do
+ let(:project) { create(:project) }
+ let(:group) { create(:group) }
+ let(:user_name) { project.first_owner.username }
subject do
described_class.new(
- repository: targets.map { |t| t.repository.full_path }.join(",")
+ user: user_name,
+ project: project.full_path,
+ group: group.full_path,
+ repository: project.repository.full_path
)
end
it 'returns all found targets' do
expect(subject.targets).to be_an(Array)
- expect(subject.targets).to eq(targets.map(&:repository))
+ expect(subject.targets).to eq([project.first_owner, project, group, project.repository])
+ end
+
+ context 'when repository target works with different types of repositories' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :wiki_repo, group: group) }
+ let_it_be(:project_in_user_namespace) { create(:project, namespace: create(:user).namespace) }
+ let(:personal_snippet) { create(:personal_snippet) }
+ let(:project_snippet) { create(:project_snippet, project: project) }
+
+ let(:targets) do
+ [
+ project,
+ project.wiki,
+ project_in_user_namespace,
+ personal_snippet,
+ project_snippet
+ ]
+ end
+
+ subject do
+ described_class.new(
+ repository: targets.map { |t| t.repository.full_path }.join(",")
+ )
+ end
+
+ it 'returns all found targets' do
+ expect(subject.targets).to be_an(Array)
+ expect(subject.targets).to eq(targets.map(&:repository))
+ end
end
end
end
diff --git a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
index 3b166bd4c4c..240472585bb 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
@@ -3,8 +3,8 @@ migration_job_name: MyBatchedMigration
description: # Please capture what MyBatchedMigration does
feature_category: database
introduced_by_url: # URL of the MR \(or issue/commit\) that introduced the migration
-milestone: [0-9\.]+
+milestone: '[0-9\.]+'
queued_migration_version: [0-9]+
# Replace with the approximate date you think it's best to ensure the completion of this BBM.
finalize_after: # yyyy-mm-dd
-finalized_by: # version of the migration that ensured this bbm
+finalized_by: # version of the migration that finalized this BBM
diff --git a/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt b/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
index 36f7885b591..d1fab7cf4bd 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
@@ -19,7 +19,6 @@ class QueueMyBatchedMigration < Gitlab::Database::Migration[2.2]
:projects,
:id,
job_interval: DELAY_INTERVAL,
- queued_migration_version: '<migration_version>',
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
diff --git a/spec/lib/generators/gitlab/analytics/group_fetcher_spec.rb b/spec/lib/generators/gitlab/analytics/group_fetcher_spec.rb
new file mode 100644
index 00000000000..77cc3904560
--- /dev/null
+++ b/spec/lib/generators/gitlab/analytics/group_fetcher_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Analytics::GroupFetcher, :silence_stdout, feature_category: :service_ping do
+ let(:stage_data) do
+ <<~YAML
+ stages:
+ analyze:
+ section: analytics
+ groups:
+ analytics_instrumentation:
+ secure:
+ section: security
+ groups:
+ static_analysis:
+ dynamic_analysis:
+ YAML
+ end
+
+ let(:response) { instance_double(HTTParty::Response, success?: true, body: stage_data) }
+
+ around do |example|
+ described_class.instance_variable_set(:@groups, nil)
+ example.run
+ described_class.instance_variable_set(:@groups, nil)
+ end
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get).and_return(response)
+ end
+
+ context 'when online' do
+ describe '.group_unknown?' do
+ it 'returns false for known groups' do
+ expect(described_class.group_unknown?('analytics_instrumentation')).to be_falsy
+ end
+
+ it 'returns true for unknown groups' do
+ expect(described_class.group_unknown?('unknown')).to be_truthy
+ end
+ end
+
+ describe '.stage_text' do
+ it 'returns the stage name for known groups' do
+ expect(described_class.stage_text('analytics_instrumentation')).to eq('analyze')
+ end
+
+ it 'returns empty string for unknown group' do
+ expect(described_class.stage_text('unknown')).to eq('')
+ end
+ end
+
+ describe '.section_text' do
+ it 'returns the section name for known groups' do
+ expect(described_class.section_text('analytics_instrumentation')).to eq('analytics')
+ end
+
+ it 'returns empty string for unknown group' do
+ expect(described_class.section_text('unknown')).to eq('')
+ end
+ end
+ end
+
+ context 'when offline' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).and_raise(Gitlab::HTTP_V2::BlockedUrlError)
+ end
+
+ describe '.group_unknown?' do
+ it 'returns false for known groups' do
+ expect(described_class.group_unknown?('analytics_instrumentation')).to be_falsy
+ end
+
+ it 'returns false for unknown group' do
+ expect(described_class.group_unknown?('unknown')).to be_falsy
+ end
+ end
+
+ describe '.stage_text' do
+ it 'returns empty string for known groups' do
+ expect(described_class.stage_text('analytics_instrumentation')).to eq('')
+ end
+
+ it 'returns empty string for unknown groups' do
+ expect(described_class.stage_text('unknown')).to eq('')
+ end
+ end
+
+ describe '.section_text' do
+ it 'returns empty string for known groups' do
+ expect(described_class.section_text('analytics_instrumentation')).to eq('')
+ end
+
+ it 'returns empty string for unknown groups' do
+ expect(described_class.section_text('unknown')).to eq('')
+ end
+ end
+ end
+end
diff --git a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
index c52d17d4a5b..2d9356ca96d 100644
--- a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
+++ b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
@@ -10,81 +10,27 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
let(:tmpfile) { Tempfile.new('test-metadata') }
let(:existing_key_paths) { {} }
let(:description) { "This metric counts unique users viewing analytics metrics dashboard section" }
- let(:group) { "group::analytics instrumentation" }
- let(:stage) { "analytics" }
+ let(:group) { "analytics_instrumentation" }
+ let(:stage) { "analyze" }
let(:section) { "analytics" }
let(:mr) { "https://gitlab.com/some-group/some-project/-/merge_requests/123" }
let(:event) { "view_analytics_dashboard" }
let(:unique) { "user.id" }
let(:time_frames) { %w[7d] }
+ let(:group_unknown) { false }
let(:include_default_identifiers) { 'yes' }
- let(:options) do
+ let(:base_options) do
{
time_frames: time_frames,
free: true,
mr: mr,
group: group,
- stage: stage,
- section: section,
event: event,
unique: unique
}.stringify_keys
end
- let(:key_path_without_time_frame) { "count_distinct_#{unique.sub('.', '_')}_from_#{event}" }
- let(:key_path_7d) { "#{key_path_without_time_frame}_7d" }
- let(:metric_definition_path_7d) { Dir.glob(File.join(temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first }
- let(:metric_definition_7d) do
- {
- "key_path" => key_path_7d,
- "description" => description,
- "product_section" => section,
- "product_stage" => stage,
- "product_group" => group,
- "performance_indicator_type" => [],
- "value_type" => "number",
- "status" => "active",
- "milestone" => "13.9",
- "introduced_by_url" => mr,
- "time_frame" => "7d",
- "data_source" => "internal_events",
- "data_category" => "optional",
- "instrumentation_class" => "RedisHLLMetric",
- "distribution" => %w[ce ee],
- "tier" => %w[free premium ultimate],
- "options" => {
- "events" => [event]
- },
- "events" => [{ "name" => event, "unique" => unique }]
- }
- end
-
- let(:key_path_all) { "count_total_#{event}" }
- let(:metric_definition_path_all) { Dir.glob(File.join(temp_dir, "metrics/counts_all/#{key_path_all}.yml")).first }
- let(:metric_definition_all) do
- {
- "key_path" => key_path_all,
- "description" => description,
- "product_section" => section,
- "product_stage" => stage,
- "product_group" => group,
- "performance_indicator_type" => [],
- "value_type" => "number",
- "status" => "active",
- "milestone" => "13.9",
- "introduced_by_url" => mr,
- "time_frame" => "all",
- "data_source" => "internal_events",
- "data_category" => "optional",
- "instrumentation_class" => "TotalCountMetric",
- "distribution" => %w[ce ee],
- "tier" => %w[free premium ultimate],
- "options" => {
- "events" => [event]
- },
- "events" => [{ "name" => event }]
- }
- end
+ let(:options) { base_options }
before do
stub_const("#{described_class}::TOP_LEVEL_DIR_EE", ee_temp_dir)
@@ -98,6 +44,10 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
.and_return(description)
end
+ allow(Gitlab::Analytics::GroupFetcher).to receive(:group_unknown?).and_return(group_unknown)
+ allow(Gitlab::Analytics::GroupFetcher).to receive(:stage_text).with(group).and_return(stage)
+ allow(Gitlab::Analytics::GroupFetcher).to receive(:section_text).with(group).and_return(section)
+
allow(Gitlab::TaskHelpers).to receive(:prompt).and_return(include_default_identifiers)
allow(Gitlab::Usage::MetricDefinition).to receive(:definitions).and_return(existing_key_paths)
end
@@ -189,35 +139,85 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
describe 'Creating metric definition file' do
- context 'for single time frame' do
- let(:time_frames) { %w[7d] }
+ let(:metric_dir) { temp_dir }
+ let(:base_key_path_unique) { "count_distinct_#{unique.sub('.', '_')}_from_#{event}" }
+ let(:base_key_path_total) { "count_total_#{event}" }
+ let(:base_metric_definition) do
+ {
+ "description" => description,
+ "product_section" => section,
+ "product_stage" => stage,
+ "product_group" => group,
+ "performance_indicator_type" => [],
+ "value_type" => "number",
+ "status" => "active",
+ "milestone" => "13.9",
+ "introduced_by_url" => mr,
+ "data_source" => "internal_events",
+ "data_category" => "optional",
+ "distribution" => %w[ce ee],
+ "tier" => %w[free premium ultimate],
+ "options" => {
+ "events" => [event]
+ }
+ }
+ end
+
+ let(:metric_definition_extra) { {} }
- it 'creates a metric definition file' do
+ shared_examples 'creates unique metric definitions' do |time_frames|
+ it 'creates a metric definiton for each of the time frames' do
described_class.new([], options).invoke_all
- expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
+ time_frames.each do |time_frame|
+ key_path = "#{base_key_path_unique}_#{time_frame}"
+ metric_definition_path = Dir.glob(File.join(metric_dir, "metrics/counts_#{time_frame}/#{key_path}.yml")).first
+ metric_definition = base_metric_definition.merge(
+ "key_path" => key_path,
+ "time_frame" => time_frame,
+ "events" => [{ "name" => event, "unique" => unique }]
+ ).merge(metric_definition_extra)
+ expect(YAML.safe_load(File.read(metric_definition_path))).to eq(metric_definition)
+ end
end
+ end
- context 'with time frame "all"' do
- let(:time_frames) { %w[all] }
+ shared_examples 'creates total metric definitions' do |time_frames|
+ it 'creates a metric definiton for each of the time frames' do
+ described_class.new([], options).invoke_all
- it 'creates a total count metric definition file' do
- described_class.new([], options).invoke_all
- expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
+ time_frames.each do |time_frame|
+ key_path = "#{base_key_path_total}_#{time_frame}"
+ metric_definition_path = Dir.glob(File.join(metric_dir, "metrics/counts_#{time_frame}/#{key_path}.yml")).first
+ metric_definition = base_metric_definition.merge(
+ "key_path" => key_path,
+ "time_frame" => time_frame,
+ "events" => [{ "name" => event }]
+ ).merge(metric_definition_extra)
+ expect(YAML.safe_load(File.read(metric_definition_path))).to eq(metric_definition)
end
end
+ end
- context 'for ultimate only feature' do
- let(:metric_definition_path_7d) do
- Dir.glob(File.join(ee_temp_dir, temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first
- end
+ context 'for single time frame' do
+ let(:time_frames) { %w[7d] }
- it 'creates a metric definition file' do
- described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all
+ it_behaves_like 'creates unique metric definitions', %w[7d]
- expect(YAML.safe_load(File.read(metric_definition_path_7d)))
- .to eq(metric_definition_7d.merge("tier" => ["ultimate"], "distribution" => ["ee"]))
- end
+ context 'with time frame "all" and no "unique"' do
+ let(:time_frames) { %w[all] }
+
+ let(:options) { base_options.except('unique') }
+
+ it_behaves_like 'creates total metric definitions', %w[all]
+ end
+
+ context 'for ultimate only feature' do
+ let(:metric_dir) { File.join(ee_temp_dir, temp_dir) }
+ let(:options) { base_options.merge(tiers: %w[ultimate]) }
+ let(:metric_definition_extra) { { "tier" => ["ultimate"], "distribution" => ["ee"] } }
+
+ it_behaves_like 'creates unique metric definitions', %w[7d]
end
context 'with invalid time frame' do
@@ -228,7 +228,16 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
end
+ context 'with invalid time frame for unique metrics' do
+ let(:time_frames) { %w[all] }
+
+ it 'raises error' do
+ expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
+ end
+ end
+
context 'with duplicated key path' do
+ let(:key_path_7d) { "#{base_key_path_unique}_7d" }
let(:existing_key_paths) { { key_path_7d => true } }
it 'raises error' do
@@ -252,14 +261,14 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
context 'without obligatory parameter' do
it 'raises error', :aggregate_failures do
- %w[unique event mr section stage group].each do |option|
+ %w[event mr group].each do |option|
expect { described_class.new([], options.without(option)).invoke_all }
.to raise_error(RuntimeError)
end
end
end
- context 'with to short description' do
+ context 'with too short description' do
it 'asks again for description' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:ask)
@@ -281,42 +290,28 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
context 'for multiple time frames' do
- let(:time_frames) { %w[7d 28d all] }
- let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
- let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
- let(:metric_definition_28d) do
- metric_definition_7d.merge(
- "key_path" => key_path_28d,
- "time_frame" => "28d"
- )
- end
+ let(:time_frames) { %w[7d 28d] }
- it 'creates metric definition files' do
- described_class.new([], options).invoke_all
-
- expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
- expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
- expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
- end
+ it_behaves_like 'creates unique metric definitions', %w[7d 28d]
end
context 'with default time frames' do
- let(:time_frames) { nil }
- let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
- let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
- let(:metric_definition_28d) do
- metric_definition_7d.merge(
- "key_path" => key_path_28d,
- "time_frame" => "28d"
- )
- end
+ let(:options) { base_options.without('time_frames', 'unique') }
- it 'creates metric definition files' do
- described_class.new([], options.without('time_frames')).invoke_all
+ it_behaves_like 'creates total metric definitions', %w[7d 28d all]
- expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
- expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
- expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
+ context 'with unique' do
+ let(:options) { base_options.without('time_frames') }
+
+ it_behaves_like 'creates unique metric definitions', %w[7d 28d]
+
+ it "doesn't create a total 'all' metric" do
+ described_class.new([], options).invoke_all
+
+ key_path = "#{base_key_path_total}_all"
+
+ expect(Dir.glob(File.join(metric_dir, "metrics/counts_all/#{key_path}.yml")).first).to be_nil
+ end
end
end
end
diff --git a/spec/lib/gitlab/access/branch_protection_spec.rb b/spec/lib/gitlab/access/branch_protection_spec.rb
index e54ff8807b5..1ecb1cdd759 100644
--- a/spec/lib/gitlab/access/branch_protection_spec.rb
+++ b/spec/lib/gitlab/access/branch_protection_spec.rb
@@ -90,9 +90,9 @@ RSpec.describe Gitlab::Access::BranchProtection do
where(:level, :result) do
Gitlab::Access::PROTECTION_NONE | true
Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
Gitlab::Access::PROTECTION_FULL | false
- Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | true
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | false
end
with_them { it { is_expected.to eq(result) } }
@@ -117,10 +117,10 @@ RSpec.describe Gitlab::Access::BranchProtection do
where(:level, :result) do
Gitlab::Access::PROTECTION_NONE | [{ 'access_level' => Gitlab::Access::DEVELOPER }]
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | [{ 'access_level' => Gitlab::Access::DEVELOPER }]
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | [{ 'access_level' => Gitlab::Access::MAINTAINER }]
Gitlab::Access::PROTECTION_DEV_CAN_MERGE | [{ 'access_level' => Gitlab::Access::DEVELOPER }]
Gitlab::Access::PROTECTION_FULL | [{ 'access_level' => Gitlab::Access::MAINTAINER }]
- Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | [{ 'access_level' => Gitlab::Access::DEVELOPER }]
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | [{ 'access_level' => Gitlab::Access::MAINTAINER }]
end
with_them { it { is_expected.to eq(result) } }
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
index aa0a1b66eef..14831f0e61d 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
@@ -2,19 +2,23 @@
require 'spec_helper'
-RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
- let_it_be(:project) { create(:project) }
+RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher, feature_category: :value_stream_management do
+ let_it_be(:project, refind: true) { create(:project, :public) }
let_it_be(:issue_1) { create(:issue, project: project) }
- let_it_be(:issue_2) { create(:issue, project: project) }
+ let_it_be(:issue_2) { create(:issue, :confidential, project: project) }
let_it_be(:issue_3) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, :unique_branches, source_project: project, target_project: project) }
+
+ let_it_be(:user) { create(:user).tap { |u| project.add_developer(u) } }
+
let_it_be(:stage) { create(:cycle_analytics_stage, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production, namespace: project.reload.project_namespace) }
let_it_be(:stage_event_1) { create(:cycle_analytics_issue_stage_event, stage_event_hash_id: stage.stage_event_hash_id, project_id: project.id, issue_id: issue_1.id, start_event_timestamp: 2.years.ago, end_event_timestamp: 1.year.ago) } # duration: 1 year
let_it_be(:stage_event_2) { create(:cycle_analytics_issue_stage_event, stage_event_hash_id: stage.stage_event_hash_id, project_id: project.id, issue_id: issue_2.id, start_event_timestamp: 5.years.ago, end_event_timestamp: 2.years.ago) } # duration: 3 years
let_it_be(:stage_event_3) { create(:cycle_analytics_issue_stage_event, stage_event_hash_id: stage.stage_event_hash_id, project_id: project.id, issue_id: issue_3.id, start_event_timestamp: 6.years.ago, end_event_timestamp: 3.months.ago) } # duration: 5+ years
- let(:params) { { from: 10.years.ago, to: Date.today } }
+ let(:params) { { from: 10.years.ago, to: Date.today, current_user: user } }
subject(:records_fetcher) do
query_builder = Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder.new(stage: stage, params: params)
@@ -25,7 +29,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
it 'returns issues in the correct order' do
returned_iids = records_fetcher.serialized_records.pluck(:iid).map(&:to_i)
- expect(returned_iids).to eq(expected_issue_ids)
+ expect(returned_iids).to eq(expected_iids)
end
it 'passes a hash with all expected attributes to the serializer' do
@@ -52,7 +56,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
describe '#serialized_records' do
describe 'sorting' do
context 'when sorting by end event DESC' do
- let(:expected_issue_ids) { [issue_3.iid, issue_1.iid, issue_2.iid] }
+ let(:expected_iids) { [issue_3.iid, issue_1.iid, issue_2.iid] }
before do
params[:sort] = :end_event
@@ -76,7 +80,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
context 'when sorting by end event ASC' do
- let(:expected_issue_ids) { [issue_2.iid, issue_1.iid, issue_3.iid] }
+ let(:expected_iids) { [issue_2.iid, issue_1.iid, issue_3.iid] }
before do
params[:sort] = :end_event
@@ -87,7 +91,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
context 'when sorting by duration DESC' do
- let(:expected_issue_ids) { [issue_3.iid, issue_2.iid, issue_1.iid] }
+ let(:expected_iids) { [issue_3.iid, issue_2.iid, issue_1.iid] }
before do
params[:sort] = :duration
@@ -98,7 +102,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
context 'when sorting by duration ASC' do
- let(:expected_issue_ids) { [issue_1.iid, issue_2.iid, issue_3.iid] }
+ let(:expected_iids) { [issue_1.iid, issue_2.iid, issue_3.iid] }
before do
params[:sort] = :duration
@@ -110,7 +114,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
describe 'pagination' do
- let(:expected_issue_ids) { [issue_3.iid] }
+ let(:expected_iids) { [issue_3.iid] }
before do
params[:sort] = :duration
@@ -163,4 +167,66 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
end
end
+
+ describe 'respecting visibility rules' do
+ let(:expected_iids) { [issue_3.iid, issue_1.iid] }
+
+ subject(:returned_iids) { records_fetcher.serialized_records.pluck(:iid).map(&:to_i) }
+
+ context 'when current user is guest' do
+ before do
+ params[:current_user] = nil
+ end
+
+ it { is_expected.to eq(expected_iids) }
+ end
+
+ context 'when current user is logged and has no access to the project' do
+ before do
+ params[:current_user] = create(:user)
+ end
+
+ it { is_expected.to eq(expected_iids) }
+ end
+ end
+
+ context 'when querying merge requests' do
+ let_it_be(:mr_stage) { create(:cycle_analytics_stage, start_event_identifier: :merge_request_last_build_started, end_event_identifier: :merge_request_last_build_finished, namespace: project.reload.project_namespace) }
+ let_it_be(:mr_stage_event) { create(:cycle_analytics_merge_request_stage_event, stage_event_hash_id: mr_stage.stage_event_hash_id, project_id: project.id, merge_request_id: merge_request.id, start_event_timestamp: 2.years.ago, end_event_timestamp: 1.year.ago) }
+
+ let(:stage) { mr_stage }
+ let(:expected_iids) { [merge_request.iid] }
+
+ subject(:returned_iids) { records_fetcher.serialized_records.pluck(:iid).map(&:to_i) }
+
+ it { is_expected.to eq(expected_iids) }
+
+ context 'when current user is guest' do
+ before do
+ params[:current_user] = nil
+ end
+
+ it { is_expected.to eq([merge_request.iid]) }
+ end
+
+ context 'when current user is logged and has no access to the project' do
+ before do
+ params[:current_user] = create(:user)
+ end
+
+ it { is_expected.to eq([merge_request.iid]) }
+
+ context 'when MR access level is elevated' do
+ before do
+ project.project_feature.update!(
+ builds_access_level: ProjectFeature::PRIVATE,
+ repository_access_level: ProjectFeature::PRIVATE,
+ merge_requests_access_level: ProjectFeature::PRIVATE
+ )
+ end
+
+ it { is_expected.to eq([]) }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 20c1536b9e6..99f932975d0 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -210,6 +210,14 @@ RSpec.describe Gitlab::ApplicationContext do
expect(result(context)).to include(job_id: job.id, project: project.full_path, pipeline_id: job.pipeline_id)
end
end
+
+ context 'when using bulk import context' do
+ it 'sets expected bulk_import_entity_id value' do
+ context = described_class.new(bulk_import_entity_id: 1)
+
+ expect(result(context)).to include(bulk_import_entity_id: 1)
+ end
+ end
end
describe '#use' do
diff --git a/spec/lib/gitlab/auth/saml/config_spec.rb b/spec/lib/gitlab/auth/saml/config_spec.rb
index 2ecc26f9b96..bb5446e8d6a 100644
--- a/spec/lib/gitlab/auth/saml/config_spec.rb
+++ b/spec/lib/gitlab/auth/saml/config_spec.rb
@@ -19,6 +19,41 @@ RSpec.describe Gitlab::Auth::Saml::Config do
end
end
+ describe '.default_attribute_statements' do
+ it 'includes upstream defaults, nickname and Microsoft values' do
+ expect(described_class.default_attribute_statements).to eq(
+ {
+ nickname: %w[username nickname],
+ name: [
+ 'name',
+ 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name',
+ 'http://schemas.microsoft.com/ws/2008/06/identity/claims/name'
+ ],
+ email: [
+ 'email',
+ 'mail',
+ 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress',
+ 'http://schemas.microsoft.com/ws/2008/06/identity/claims/emailaddress'
+ ],
+ first_name: [
+ 'first_name',
+ 'firstname',
+ 'firstName',
+ 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname',
+ 'http://schemas.microsoft.com/ws/2008/06/identity/claims/givenname'
+ ],
+ last_name: [
+ 'last_name',
+ 'lastname',
+ 'lastName',
+ 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname',
+ 'http://schemas.microsoft.com/ws/2008/06/identity/claims/surname'
+ ]
+ }
+ )
+ end
+ end
+
describe '#external_groups' do
let(:config_1) { described_class.new('saml1') }
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 020089b3880..9974e24ad50 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -45,26 +45,26 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features]
end
- it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes and ai_features' do
+ it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: false)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
- it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes and ai_features' do
+ it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
end
- it 'contains for project all resource bot scopes without ai_features' do
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
+ it 'contains for project all resource bot scopes' do
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for group all resource bot scopes' do
group = build_stubbed(:group).tap { |g| g.namespace_settings = build_stubbed(:namespace_settings, namespace: g) }
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for unsupported type no scopes' do
@@ -75,34 +75,6 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features]
end
- describe 'ai_features scope' do
- let(:resource) { nil }
-
- subject { described_class.available_scopes_for(resource) }
-
- context 'when resource is user', 'and user has a group with ai features' do
- let(:resource) { build_stubbed(:user) }
-
- it { is_expected.not_to include(:ai_features) }
- end
-
- context 'when resource is project' do
- let(:resource) { build_stubbed(:project) }
-
- it 'does not include ai_features scope' do
- is_expected.not_to include(:ai_features)
- end
- end
-
- context 'when resource is group' do
- let(:resource) { build_stubbed(:group) }
-
- it 'does not include ai_features scope' do
- is_expected.not_to include(:ai_features)
- end
- end
- end
-
context 'with observability_tracing feature flag' do
context 'when disabled' do
before do
@@ -114,7 +86,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
end
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes without observability scopes' do
@@ -123,7 +95,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
project = build_stubbed(:project, namespace: group)
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
end
@@ -140,17 +112,17 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'contains for group all resource bot scopes including observability scopes' do
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes including observability scopes' do
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for other group all resource bot scopes without observability scopes' do
@@ -159,7 +131,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
end
- expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
it 'contains for other project all resource bot scopes without observability scopes' do
@@ -169,7 +141,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
other_project = build_stubbed(:project, namespace: other_group)
- expect(subject.available_scopes_for(other_project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(other_project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_branch_protection_namespace_setting_spec.rb b/spec/lib/gitlab/background_migration/backfill_branch_protection_namespace_setting_spec.rb
new file mode 100644
index 00000000000..d985e7fae61
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_branch_protection_namespace_setting_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillBranchProtectionNamespaceSetting,
+ feature_category: :source_code_management do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:namespace_settings_table) { table(:namespace_settings) }
+ let(:group_namespace) do
+ namespaces_table.create!(name: 'group_namespace', path: 'path-1', type: 'Group', default_branch_protection: 0)
+ end
+
+ let(:user_namespace) do
+ namespaces_table.create!(name: 'user_namespace', path: 'path-2', type: 'User', default_branch_protection: 1)
+ end
+
+ let(:user_three_namespace) do
+ namespaces_table.create!(name: 'user_three_namespace', path: 'path-3', type: 'User', default_branch_protection: 2)
+ end
+
+ let(:group_four_namespace) do
+ namespaces_table.create!(name: 'group_four_namespace', path: 'path-4', type: 'Group', default_branch_protection: 3)
+ end
+
+ let(:group_five_namespace) do
+ namespaces_table.create!(name: 'group_five_namespace', path: 'path-5', type: 'Group', default_branch_protection: 4)
+ end
+
+ let(:start_id) { group_namespace.id }
+ let(:end_id) { group_five_namespace.id }
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: start_id,
+ end_id: end_id,
+ batch_table: :namespace_settings,
+ batch_column: :namespace_id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ before do
+ namespace_settings_table.create!(namespace_id: group_namespace.id, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: user_namespace.id, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: user_three_namespace.id, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: group_four_namespace.id, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: group_five_namespace.id, default_branch_protection_defaults: {})
+ end
+
+ it 'updates default_branch_protection_defaults to a correct value', :aggregate_failures do
+ expect(ActiveRecord::QueryRecorder.new { perform_migration }.count).to eq(16)
+
+ expect(migrated_attribute(group_namespace.id)).to eq({ "allow_force_push" => true,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 30 }] })
+ expect(migrated_attribute(user_namespace.id)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 40 }],
+ "allowed_to_push" => [{ "access_level" => 30 }] })
+ expect(migrated_attribute(user_three_namespace.id)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 40 }],
+ "allowed_to_push" => [{ "access_level" => 40 }] })
+ expect(migrated_attribute(group_four_namespace.id)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 40 }] })
+ expect(migrated_attribute(group_five_namespace.id)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 40 }],
+ "allowed_to_push" => [{ "access_level" => 40 }],
+ "developer_can_initial_push" => true })
+ end
+
+ def migrated_attribute(namespace_id)
+ namespace_settings_table.find(namespace_id).default_branch_protection_defaults
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb
index b3f04055e0a..edf972189b2 100644
--- a/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb
@@ -1,11 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
-require_migration!
RSpec.describe Gitlab::BackgroundMigration::BackfillImportedIssueSearchData,
:migration,
- schema: 20220707075300 do
+ schema: 20221111123146 do
let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let!(:issue_search_data_table) { table(:issue_search_data) }
diff --git a/spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb b/spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb
index 3c0b7766871..925fb0c9a20 100644
--- a/spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsEnableSslVerification, schema: 20220425121410 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsEnableSslVerification, schema: 20221111123146 do
let(:migration) { described_class.new }
let(:integrations) { described_class::Integration }
diff --git a/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb b/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb
index 40a4758ba5f..e948717d693 100644
--- a/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillInternalOnNotes, :migration, schema: 20220920124709 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillInternalOnNotes, :migration, schema: 20211202041233 do
let(:notes_table) { table(:notes) }
let!(:confidential_note) { notes_table.create!(id: 1, confidential: true, internal: false) }
diff --git a/spec/lib/gitlab/background_migration/backfill_merge_request_diffs_project_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_merge_request_diffs_project_id_spec.rb
new file mode 100644
index 00000000000..8679a8fab8a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_merge_request_diffs_project_id_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestDiffsProjectId,
+ feature_category: :code_review_workflow,
+ schema: 20231114034017 do # schema before we introduced the invalid not-null constraint
+ let!(:tags_without_project_id) do
+ 13.times do
+ namespace = table(:namespaces).create!(name: 'my namespace', path: 'my-namespace')
+ project = table(:projects).create!(name: 'my project', path: 'my-project', namespace_id: namespace.id,
+ project_namespace_id: namespace.id)
+ merge_request = table(:merge_requests).create!(target_project_id: project.id, target_branch: 'main',
+ source_branch: 'not-main')
+ table(:merge_request_diffs).create!(merge_request_id: merge_request.id, project_id: nil)
+ end
+ end
+
+ let!(:start_id) { table(:merge_request_diffs).minimum(:id) }
+ let!(:end_id) { table(:merge_request_diffs).maximum(:id) }
+
+ let!(:migration) do
+ described_class.new(
+ start_id: start_id,
+ end_id: end_id,
+ batch_table: :merge_request_diffs,
+ batch_column: :id,
+ sub_batch_size: 10,
+ pause_ms: 2,
+ connection: ::ApplicationRecord.connection
+ )
+ end
+
+ it 'backfills the missing project_id for the batch' do
+ backfilled_diffs = table(:merge_request_diffs)
+ .joins('INNER JOIN merge_requests ON merge_request_diffs.merge_request_id = merge_requests.id')
+ .where('merge_request_diffs.project_id = merge_requests.target_project_id')
+
+ expect do
+ migration.perform
+ end.to change { backfilled_diffs.count }.from(0).to(13)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
deleted file mode 100644
index 2949bc068c8..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-# this needs the schema to be before we introduce the not null constraint on routes#namespace_id
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForProjectRoute, schema: 20220606060825 do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:routes) { table(:routes) }
-
- let(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'space1') }
- let(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'space2') }
- let(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'space3') }
-
- let(:proj_namespace1) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id) }
- let(:proj_namespace2) { namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace2.id) }
- let(:proj_namespace3) { namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: namespace3.id) }
- let(:proj_namespace4) { namespaces.create!(name: 'proj4', path: 'proj4', type: 'Project', parent_id: namespace3.id) }
-
- # rubocop:disable Layout/LineLength
- let(:proj1) { projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id) }
- let(:proj2) { projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace2.id, project_namespace_id: proj_namespace2.id) }
- let(:proj3) { projects.create!(name: 'proj3', path: 'proj3', namespace_id: namespace3.id, project_namespace_id: proj_namespace3.id) }
- let(:proj4) { projects.create!(name: 'proj4', path: 'proj4', namespace_id: namespace3.id, project_namespace_id: proj_namespace4.id) }
- # rubocop:enable Layout/LineLength
-
- let!(:namespace_route1) { routes.create!(path: 'space1', source_id: namespace1.id, source_type: 'Namespace') }
- let!(:namespace_route2) { routes.create!(path: 'space1/space2', source_id: namespace2.id, source_type: 'Namespace') }
- let!(:namespace_route3) { routes.create!(path: 'space1/space3', source_id: namespace3.id, source_type: 'Namespace') }
-
- let!(:proj_route1) { routes.create!(path: 'space1/proj1', source_id: proj1.id, source_type: 'Project') }
- let!(:proj_route2) { routes.create!(path: 'space1/space2/proj2', source_id: proj2.id, source_type: 'Project') }
- let!(:proj_route3) { routes.create!(path: 'space1/space3/proj3', source_id: proj3.id, source_type: 'Project') }
- let!(:proj_route4) { routes.create!(path: 'space1/space3/proj4', source_id: proj4.id, source_type: 'Project') }
-
- subject(:perform_migration) { migration.perform(proj_route1.id, proj_route4.id, :routes, :id, 2, 0) }
-
- it 'backfills namespace_id for the selected records', :aggregate_failures do
- perform_migration
-
- expected_namespaces = [proj_namespace1.id, proj_namespace2.id, proj_namespace3.id, proj_namespace4.id]
-
- expected_projects = [proj_route1.id, proj_route2.id, proj_route3.id, proj_route4.id]
- expect(routes.where.not(namespace_id: nil).pluck(:id)).to match_array(expected_projects)
- expect(routes.where.not(namespace_id: nil).pluck(:namespace_id)).to match_array(expected_namespaces)
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb
index 6a55c6951d5..c2c5c3e9de0 100644
--- a/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdOfVulnerabilityReads, schema: 20220722145845 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdOfVulnerabilityReads, schema: 20221111123146 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
index ae296483166..73661a3da1f 100644
--- a/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
-require_migration!
# rubocop:disable Layout/HashAlignment
RSpec.describe Gitlab::BackgroundMigration::BackfillProjectImportLevel do
diff --git a/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb
deleted file mode 100644
index 96f49624d22..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb
+++ /dev/null
@@ -1,124 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectMemberNamespaceId, :migration, schema: 20220516054011 do
- let(:migration) do
- described_class.new(
- start_id: 1, end_id: 10,
- batch_table: table_name, batch_column: batch_column,
- sub_batch_size: sub_batch_size, pause_ms: pause_ms,
- connection: ApplicationRecord.connection
- )
- end
-
- let(:members_table) { table(:members) }
- let(:projects_table) { table(:projects) }
- let(:namespaces_table) { table(:namespaces) }
-
- let(:table_name) { 'members' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 100 }
- let(:pause_ms) { 0 }
-
- subject(:perform_migration) do
- migration.perform
- end
-
- before do
- namespaces_table.create!(id: 201, name: 'group1', path: 'group1', type: 'Group')
- namespaces_table.create!(id: 202, name: 'group2', path: 'group2', type: 'Group')
- namespaces_table.create!(id: 300, name: 'project-namespace-1', path: 'project-namespace-1-path', type: 'Project')
- namespaces_table.create!(id: 301, name: 'project-namespace-2', path: 'project-namespace-2-path', type: 'Project')
- namespaces_table.create!(id: 302, name: 'project-namespace-3', path: 'project-namespace-3-path', type: 'Project')
-
- projects_table.create!(id: 100, name: 'project1', path: 'project1', namespace_id: 202, project_namespace_id: 300)
- projects_table.create!(id: 101, name: 'project2', path: 'project2', namespace_id: 202, project_namespace_id: 301)
- projects_table.create!(id: 102, name: 'project3', path: 'project3', namespace_id: 202, project_namespace_id: 302)
-
- # project1, no member namespace (fill in)
- members_table.create!(
- id: 1, source_id: 100,
- source_type: 'Project', type: 'ProjectMember',
- member_namespace_id: nil, access_level: 10, notification_level: 3
- )
-
- # bogus source id, no member namespace id (do nothing)
- members_table.create!(
- id: 2, source_id: non_existing_record_id,
- source_type: 'Project', type: 'ProjectMember',
- member_namespace_id: nil, access_level: 10, notification_level: 3
- )
-
- # project3, existing member namespace id (do nothing)
- members_table.create!(
- id: 3, source_id: 102,
- source_type: 'Project', type: 'ProjectMember',
- member_namespace_id: 300, access_level: 10, notification_level: 3
- )
-
- # Group memberships (do not change)
- # group1, no member namespace (do nothing)
- members_table.create!(
- id: 4, source_id: 201,
- source_type: 'Namespace', type: 'GroupMember',
- member_namespace_id: nil, access_level: 10, notification_level: 3
- )
-
- # group2, existing member namespace (do nothing)
- members_table.create!(
- id: 5, source_id: 202,
- source_type: 'Namespace', type: 'GroupMember',
- member_namespace_id: 201, access_level: 10, notification_level: 3
- )
-
- # Project Namespace memberships (do not change)
- # project namespace, existing member namespace (do nothing)
- members_table.create!(
- id: 6, source_id: 300,
- source_type: 'Namespace', type: 'ProjectNamespaceMember',
- member_namespace_id: 201, access_level: 10, notification_level: 3
- )
-
- # project namespace, not member namespace (do nothing)
- members_table.create!(
- id: 7, source_id: 301,
- source_type: 'Namespace', type: 'ProjectNamespaceMember',
- member_namespace_id: 201, access_level: 10, notification_level: 3
- )
- end
-
- it 'backfills `member_namespace_id` for the selected records', :aggregate_failures do
- expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 2
- expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 1
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- # rubocop:disable Layout/LineLength
- expect(queries.count).to eq(3)
- expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1 # just the bogus one
- expect(members_table.where(type: 'ProjectMember').pluck(:member_namespace_id)).to match_array([nil, 300, 300])
- expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 1
- expect(members_table.where(type: 'GroupMember').pluck(:member_namespace_id)).to match_array([nil, 201])
- # rubocop:enable Layout/LineLength
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-
- context 'when given a negative pause_ms' do
- let(:pause_ms) { -9 }
- let(:sub_batch_size) { 2 }
-
- it 'uses 0 as a floor for pause_ms' do
- expect(migration).to receive(:sleep).with(0)
-
- perform_migration
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb b/spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb
index 3c46456eed0..3eb0000877d 100644
--- a/spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillTopicsTitle, schema: 20220331133802 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillTopicsTitle, schema: 20221111123146 do
it 'correctly backfills the title of the topics' do
topics = table(:topics)
diff --git a/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb b/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
index 04ada1703bc..1d1853b032c 100644
--- a/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillUserDetailsFields, :migration, schema: 20221018232820 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillUserDetailsFields, :migration, schema: 20221111123146 do
let(:users) { table(:users) }
let(:user_details) { table(:user_details) }
diff --git a/spec/lib/gitlab/background_migration/backfill_vs_code_settings_uuid_spec.rb b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_uuid_spec.rb
new file mode 100644
index 00000000000..bf1fce4094e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_uuid_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillVsCodeSettingsUuid, schema: 20231130140901, feature_category: :web_ide do
+ let!(:vs_code_settings) { table(:vs_code_settings) }
+ let!(:users) { table(:users) }
+
+ let!(:user) do
+ users.create!(
+ email: "test1@example.com",
+ username: "test1",
+ notification_email: "test@example.com",
+ name: "test",
+ state: "active",
+ projects_limit: 10)
+ end
+
+ subject(:migration) do
+ described_class.new(
+ start_id: vs_code_setting_one.id,
+ end_id: vs_code_setting_two.id,
+ batch_table: :vs_code_settings,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ end
+
+ describe "#perform" do
+ context 'when it finds vs_code_setting rows with empty uuid' do
+ let(:vs_code_setting_one) do
+ vs_code_settings.create!(user_id: user.id, setting_type: 'profiles', content: '{}')
+ end
+
+ let(:vs_code_setting_two) do
+ vs_code_settings.create!(user_id: user.id, setting_type: 'tasks', content: '{}')
+ end
+
+ it 'populates uuid column with a generated uuid' do
+ expect(vs_code_setting_one.uuid).to be_nil
+ expect(vs_code_setting_two.uuid).to be_nil
+
+ migration.perform
+
+ expect(vs_code_setting_one.reload.uuid).not_to be_nil
+ expect(vs_code_setting_two.reload.uuid).not_to be_nil
+ end
+ end
+
+ context 'when it finds vs_code_setting rows with non-empty uuid' do
+ let(:vs_code_setting_one) do
+ vs_code_settings.create!(user_id: user.id, setting_type: 'profiles', content: '{}', uuid: SecureRandom.uuid)
+ end
+
+ let(:vs_code_setting_two) do
+ vs_code_settings.create!(user_id: user.id, setting_type: 'tasks', content: '{}')
+ end
+
+ it 'populates uuid column with a generated uuid' do
+ expect(vs_code_setting_one.uuid).not_to be_nil
+ expect(vs_code_setting_two.uuid).to be_nil
+
+ previous_uuid = vs_code_setting_one.uuid
+
+ migration.perform
+
+ expect(vs_code_setting_one.reload.uuid).to eq(previous_uuid)
+ expect(vs_code_setting_two.reload.uuid).not_to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb b/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb
index 3f1a57434a7..63135971cd3 100644
--- a/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillVulnerabilityReadsClusterAgent, :migration, schema: 20220525221133 do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::BackgroundMigration::BackfillVulnerabilityReadsClusterAgent, :migration, schema: 20221111123146 do # rubocop:disable Layout/LineLength
let(:migration) do
described_class.new(
start_id: 1, end_id: 10,
diff --git a/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb
index c7e4095a488..4c1af279804 100644
--- a/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillWorkItemTypeIdForIssues,
:migration,
- schema: 20220825142324,
+ schema: 20221111123146,
feature_category: :team_planning do
let(:batch_column) { 'id' }
let(:sub_batch_size) { 2 }
diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb
deleted file mode 100644
index a09d5559d33..00000000000
--- a/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# this needs the schema to be before we introduce the not null constraint on routes#namespace_id
-RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedRoutes, schema: 20220606060825 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:routes) { table(:routes) }
-
- let!(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'space1') }
- let!(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'space2') }
- let!(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'space3') }
-
- let!(:proj_namespace1) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id) }
- let!(:proj_namespace2) { namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace2.id) }
- let!(:proj_namespace3) { namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: namespace3.id) }
-
- # rubocop:disable Layout/LineLength
- let!(:proj1) { projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id) }
- let!(:proj2) { projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace2.id, project_namespace_id: proj_namespace2.id) }
- let!(:proj3) { projects.create!(name: 'proj3', path: 'proj3', namespace_id: namespace3.id, project_namespace_id: proj_namespace3.id) }
-
- # valid namespace routes with not null namespace_id
- let!(:namespace_route1) { routes.create!(path: 'space1', source_id: namespace1.id, source_type: 'Namespace', namespace_id: namespace1.id) }
- # valid namespace routes with null namespace_id
- let!(:namespace_route2) { routes.create!(path: 'space1/space2', source_id: namespace2.id, source_type: 'Namespace') }
- let!(:namespace_route3) { routes.create!(path: 'space1/space3', source_id: namespace3.id, source_type: 'Namespace') }
- # invalid/orphaned namespace route
- let!(:orphaned_namespace_route_a) { routes.create!(path: 'space1/space4', source_id: non_existing_record_id, source_type: 'Namespace') }
- let!(:orphaned_namespace_route_b) { routes.create!(path: 'space1/space5', source_id: non_existing_record_id - 1, source_type: 'Namespace') }
-
- # valid project routes with not null namespace_id
- let!(:proj_route1) { routes.create!(path: 'space1/proj1', source_id: proj1.id, source_type: 'Project', namespace_id: proj_namespace1.id) }
- # valid project routes with null namespace_id
- let!(:proj_route2) { routes.create!(path: 'space1/space2/proj2', source_id: proj2.id, source_type: 'Project') }
- let!(:proj_route3) { routes.create!(path: 'space1/space3/proj3', source_id: proj3.id, source_type: 'Project') }
- # invalid/orphaned namespace route
- let!(:orphaned_project_route_a) { routes.create!(path: 'space1/space3/proj5', source_id: non_existing_record_id, source_type: 'Project') }
- let!(:orphaned_project_route_b) { routes.create!(path: 'space1/space3/proj6', source_id: non_existing_record_id - 1, source_type: 'Project') }
- # rubocop:enable Layout/LineLength
-
- let!(:migration_attrs) do
- {
- start_id: Route.minimum(:id),
- end_id: Route.maximum(:id),
- batch_table: :routes,
- batch_column: :id,
- sub_batch_size: 100,
- pause_ms: 0,
- connection: ApplicationRecord.connection
- }
- end
-
- let!(:migration) { described_class.new(**migration_attrs) }
-
- subject(:perform_migration) { migration.perform }
-
- it 'cleans orphaned routes', :aggregate_failures do
- all_route_ids = Route.pluck(:id)
-
- orphaned_route_ids = [
- orphaned_namespace_route_a, orphaned_namespace_route_b, orphaned_project_route_a, orphaned_project_route_b
- ].pluck(:id)
- remaining_routes = (all_route_ids - orphaned_route_ids).sort
-
- expect { perform_migration }.to change { Route.pluck(:id) }.to contain_exactly(*remaining_routes)
- expect(Route.all).to all(have_attributes(namespace_id: be_present))
-
- # expect that routes that had namespace_id set did not change namespace_id
- expect(namespace_route1.reload.namespace_id).to eq(namespace1.id)
- expect(proj_route1.reload.namespace_id).to eq(proj_namespace1.id)
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb b/spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb
deleted file mode 100644
index 4e136808a36..00000000000
--- a/spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb
+++ /dev/null
@@ -1,105 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidGroupMembers, :migration, schema: 20220809002011 do
- # rubocop: disable Layout/LineLength
- # rubocop: disable RSpec/ScatteredLet
- let!(:migration_attrs) do
- {
- start_id: 1,
- end_id: 1000,
- batch_table: :members,
- batch_column: :id,
- sub_batch_size: 100,
- pause_ms: 0,
- connection: ApplicationRecord.connection
- }
- end
-
- let!(:migration) { described_class.new(**migration_attrs) }
-
- subject(:perform_migration) { migration.perform }
-
- let(:users_table) { table(:users) }
- let(:namespaces_table) { table(:namespaces) }
- let(:members_table) { table(:members) }
- let(:projects_table) { table(:projects) }
-
- let(:user1) { users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 5) }
- let(:user2) { users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 5) }
- let(:user3) { users_table.create!(name: 'user3', email: 'user3@example.com', projects_limit: 5) }
- let(:user4) { users_table.create!(name: 'user4', email: 'user4@example.com', projects_limit: 5) }
- let(:user5) { users_table.create!(name: 'user5', email: 'user5@example.com', projects_limit: 5) }
- let(:user6) { users_table.create!(name: 'user6', email: 'user6@example.com', projects_limit: 5) }
-
- let!(:group1) { namespaces_table.create!(name: 'marvellous group 1', path: 'group-path-1', type: 'Group') }
-
- let!(:group2) { namespaces_table.create!(name: 'outstanding group 2', path: 'group-path-2', type: 'Group') }
-
- # create group member records, a mix of both valid and invalid
- # project members will have already been filtered out.
- let!(:group_member1) { create_invalid_group_member(id: 1, user_id: user1.id) }
-
- let!(:group_member4) { create_valid_group_member(id: 4, user_id: user2.id, group_id: group1.id) }
-
- let!(:group_member5) { create_valid_group_member(id: 5, user_id: user3.id, group_id: group2.id) }
-
- let!(:group_member6) { create_invalid_group_member(id: 6, user_id: user4.id) }
-
- let!(:group_member7) { create_valid_group_member(id: 7, user_id: user5.id, group_id: group1.id) }
-
- let!(:group_member8) { create_invalid_group_member(id: 8, user_id: user6.id) }
-
- it 'removes invalid memberships but keeps valid ones', :aggregate_failures do
- expect(members_table.where(type: 'GroupMember').count).to eq 6
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(4)
- expect(members_table.where(type: 'GroupMember').pluck(:id)).to match_array([group_member4, group_member5, group_member7].map(&:id))
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-
- it 'logs IDs of deleted records' do
- expect(Gitlab::AppLogger).to receive(:info).with({ message: 'Removing invalid group member records',
- deleted_count: 3, ids: [group_member1, group_member6, group_member8].map(&:id) })
-
- perform_migration
- end
-
- def create_invalid_group_member(id:, user_id:)
- members_table.create!(
- id: id,
- user_id: user_id,
- source_id: non_existing_record_id,
- access_level: Gitlab::Access::MAINTAINER,
- type: "GroupMember",
- source_type: "Namespace",
- notification_level: 3,
- member_namespace_id: nil
- )
- end
-
- def create_valid_group_member(id:, user_id:, group_id:)
- members_table.create!(
- id: id,
- user_id: user_id,
- source_id: group_id,
- access_level: Gitlab::Access::MAINTAINER,
- type: "GroupMember",
- source_type: "Namespace",
- member_namespace_id: group_id,
- notification_level: 3
- )
- end
- # rubocop: enable Layout/LineLength
- # rubocop: enable RSpec/ScatteredLet
-end
diff --git a/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb b/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb
index e5965d4a1d8..95c5a64bc7b 100644
--- a/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb
+++ b/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
# rubocop: disable RSpec/MultipleMemoizedHelpers
-RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidMembers, :migration, schema: 20221004094814 do
+RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidMembers, :migration, schema: 20221111123146 do
let!(:migration_attrs) do
{
start_id: 1,
diff --git a/spec/lib/gitlab/background_migration/destroy_invalid_project_members_spec.rb b/spec/lib/gitlab/background_migration/destroy_invalid_project_members_spec.rb
deleted file mode 100644
index 090c31049b4..00000000000
--- a/spec/lib/gitlab/background_migration/destroy_invalid_project_members_spec.rb
+++ /dev/null
@@ -1,124 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidProjectMembers, :migration, schema: 20220901035725 do
- # rubocop: disable RSpec/ScatteredLet
- let!(:migration_attrs) do
- {
- start_id: 1,
- end_id: 1000,
- batch_table: :members,
- batch_column: :id,
- sub_batch_size: 100,
- pause_ms: 0,
- connection: ApplicationRecord.connection
- }
- end
-
- let!(:migration) { described_class.new(**migration_attrs) }
-
- subject(:perform_migration) { migration.perform }
-
- let(:users_table) { table(:users) }
- let(:namespaces_table) { table(:namespaces) }
- let(:members_table) { table(:members) }
- let(:projects_table) { table(:projects) }
-
- let(:user1) { users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 5) }
- let(:user2) { users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 5) }
- let(:user3) { users_table.create!(name: 'user3', email: 'user3@example.com', projects_limit: 5) }
- let(:user4) { users_table.create!(name: 'user4', email: 'user4@example.com', projects_limit: 5) }
- let(:user5) { users_table.create!(name: 'user5', email: 'user5@example.com', projects_limit: 5) }
- let(:user6) { users_table.create!(name: 'user6', email: 'user6@example.com', projects_limit: 5) }
-
- let!(:group1) { namespaces_table.create!(name: 'marvellous group 1', path: 'group-path-1', type: 'Group') }
-
- let!(:project_namespace1) do
- namespaces_table.create!(
- name: 'fabulous project', path: 'project-path-1', type: 'ProjectNamespace', parent_id: group1.id
- )
- end
-
- let!(:project1) do
- projects_table.create!(
- name: 'fabulous project',
- path: 'project-path-1',
- project_namespace_id: project_namespace1.id,
- namespace_id: group1.id
- )
- end
-
- let!(:project_namespace2) do
- namespaces_table.create!(
- name: 'splendiferous project', path: 'project-path-2', type: 'ProjectNamespace', parent_id: group1.id
- )
- end
-
- let!(:project2) do
- projects_table.create!(
- name: 'splendiferous project',
- path: 'project-path-2',
- project_namespace_id: project_namespace2.id,
- namespace_id: group1.id
- )
- end
-
- # create project member records, a mix of both valid and invalid
- # group members will have already been filtered out.
- let!(:project_member1) { create_invalid_project_member(id: 1, user_id: user1.id) }
- let!(:project_member2) { create_valid_project_member(id: 4, user_id: user2.id, project: project1) }
- let!(:project_member3) { create_valid_project_member(id: 5, user_id: user3.id, project: project2) }
- let!(:project_member4) { create_invalid_project_member(id: 6, user_id: user4.id) }
- let!(:project_member5) { create_valid_project_member(id: 7, user_id: user5.id, project: project2) }
- let!(:project_member6) { create_invalid_project_member(id: 8, user_id: user6.id) }
-
- it 'removes invalid memberships but keeps valid ones', :aggregate_failures do
- expect(members_table.where(type: 'ProjectMember').count).to eq 6
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(4)
- expect(members_table.where(type: 'ProjectMember'))
- .to match_array([project_member2, project_member3, project_member5])
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-
- it 'logs IDs of deleted records' do
- expect(Gitlab::AppLogger).to receive(:info).with({
- message: 'Removing invalid project member records',
- deleted_count: 3,
- ids: [project_member1, project_member4, project_member6].map(&:id)
- })
-
- perform_migration
- end
-
- def create_invalid_project_member(id:, user_id:)
- members_table.create!(
- id: id, user_id: user_id, source_id: non_existing_record_id, access_level: Gitlab::Access::MAINTAINER,
- type: "ProjectMember", source_type: "Project", notification_level: 3, member_namespace_id: nil
- )
- end
-
- def create_valid_project_member(id:, user_id:, project:)
- members_table.create!(
- id: id,
- user_id: user_id,
- source_id: project.id,
- access_level: Gitlab::Access::MAINTAINER,
- type: "ProjectMember",
- source_type: "Project",
- member_namespace_id: project.project_namespace_id,
- notification_level: 3
- )
- end
- # rubocop: enable RSpec/ScatteredLet
-end
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
index 93913a2742b..8afbd7403d3 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForNoIssuesNoRepoProjects,
:migration,
- schema: 20220722084543 do
+ schema: 20221111123146 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
index 285e5ebbee2..185e2da6f1d 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForOneMemberNoRepoProjects,
:migration,
- schema: 20220721031446 do
+ schema: 20221111123146 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
@@ -13,15 +13,14 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForOne
let(:project_authorizations_table) { table(:project_authorizations) }
subject(:perform_migration) do
- described_class.new(
- start_id: projects_table.minimum(:id),
+ described_class.new(start_id: projects_table.minimum(:id),
end_id: projects_table.maximum(:id),
batch_table: :projects,
batch_column: :id,
sub_batch_size: 2,
pause_ms: 0,
- connection: ActiveRecord::Base.connection
- ).perform
+ connection: ActiveRecord::Base.connection)
+ .perform
end
it 'sets `legacy_open_source_license_available` to false only for public projects with 1 member and no repo',
@@ -43,13 +42,13 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForOne
def create_legacy_license_public_project(path, repo_size: 0, members: 1)
namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
- project_namespace = namespaces_table.create!(
- name: "-project-namespace-#{path}", path: "project-namespace-#{path}", type: 'Project'
- )
- project = projects_table.create!(
- name: path, path: path, namespace_id: namespace.id,
- project_namespace_id: project_namespace.id, visibility_level: 20
- )
+ project_namespace =
+ namespaces_table.create!(name: "-project-namespace-#{path}", path: "project-namespace-#{path}", type: 'Project')
+ project = projects_table
+ .create!(
+ name: path, path: path, namespace_id: namespace.id,
+ project_namespace_id: project_namespace.id, visibility_level: 20
+ )
members.times do |member_id|
user = users_table.create!(email: "user#{member_id}-project-#{project.id}@gitlab.com", projects_limit: 100)
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
index cf544c87b31..f1ec09840ab 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForProjectsLessThanOneMb,
:migration,
- schema: 20220906074449 do
+ schema: 20221111123146 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
diff --git a/spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb b/spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb
index ba3aab03f2a..4997ae3763e 100644
--- a/spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb
+++ b/spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::ExpireOAuthTokens, :migration, schema: 20220428133724 do
+RSpec.describe Gitlab::BackgroundMigration::ExpireOAuthTokens, :migration, schema: 20221111123146 do
let(:migration) { described_class.new }
let(:oauth_access_tokens_table) { table(:oauth_access_tokens) }
diff --git a/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb b/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb
index 5f5dcb35836..cd99557e1d9 100644
--- a/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::FixAllowDescendantsOverrideDisabledSharedRunners, schema: 20230802085923, feature_category: :runner_fleet do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::BackgroundMigration::FixAllowDescendantsOverrideDisabledSharedRunners, schema: 20230802085923,
+ feature_category: :fleet_visibility do
let(:namespaces) { table(:namespaces) }
let!(:valid_enabled) do
diff --git a/spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb b/spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb
index 7edeaed5794..83d19ef7ce3 100644
--- a/spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MigrateHumanUserType, schema: 20230327103401, feature_category: :user_management do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::BackgroundMigration::MigrateHumanUserType, feature_category: :user_management do
let!(:valid_users) do
# 13 is the max value we have at the moment.
(0..13).map do |type|
diff --git a/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb b/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb
index 74f674e052d..35f93c9982b 100644
--- a/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::PopulateProjectsStarCount, schema: 20221019105041 do
+RSpec.describe Gitlab::BackgroundMigration::PopulateProjectsStarCount, schema: 20221111123146 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb b/spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb
index 59d5d56ebe8..74fe2c63139 100644
--- a/spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::RemoveSelfManagedWikiNotes, :migration, schema: 20220601110011 do
+RSpec.describe Gitlab::BackgroundMigration::RemoveSelfManagedWikiNotes, :migration, schema: 20221111123146 do
let(:notes) { table(:notes) }
subject(:perform_migration) do
diff --git a/spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb b/spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb
index afdd855c5a8..86c2ab35136 100644
--- a/spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb
+++ b/spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ResetTooManyTagsSkippedRegistryImports, :migration,
:aggregate_failures,
- schema: 20220502173045 do
+ schema: 20221111123146 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:container_repositories) { table(:container_repositories) }
diff --git a/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
index 5109c3ec0c2..5f7a699ac0b 100644
--- a/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::SetLegacyOpenSourceLicenseAvailableForNonPublicProjects,
:migration,
- schema: 20220722110026 do
+ schema: 20221111123146 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
deleted file mode 100644
index d468483661a..00000000000
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ /dev/null
@@ -1,559 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, feature_category: :importers do
- include ImportSpecHelper
-
- before do
- stub_omniauth_provider('bitbucket')
- end
-
- let(:statuses) do
- [
- "open",
- "resolved",
- "on hold",
- "invalid",
- "duplicate",
- "wontfix",
- "closed" # undocumented status
- ]
- end
-
- let(:reporters) do
- [
- nil,
- { "nickname" => "reporter1" },
- nil,
- { "nickname" => "reporter2" },
- { "nickname" => "reporter1" },
- nil,
- { "nickname" => "reporter3" }
- ]
- end
-
- let(:sample_issues_statuses) do
- issues = []
-
- statuses.map.with_index do |status, index|
- issues << {
- id: index,
- state: status,
- title: "Issue #{index}",
- kind: 'bug',
- content: {
- raw: "Some content to issue #{index}",
- markup: "markdown",
- html: "Some content to issue #{index}"
- }
- }
- end
-
- reporters.map.with_index do |reporter, index|
- issues[index]['reporter'] = reporter
- end
-
- issues
- end
-
- let_it_be(:project_identifier) { 'namespace/repo' }
-
- let_it_be_with_reload(:project) do
- create(
- :project,
- :repository,
- import_source: project_identifier,
- import_url: "https://bitbucket.org/#{project_identifier}.git",
- import_data_attributes: { credentials: { 'token' => 'token' } }
- )
- end
-
- let(:importer) { described_class.new(project) }
- let(:sample) { RepoHelpers.sample_compare }
- let(:issues_statuses_sample_data) do
- {
- count: sample_issues_statuses.count,
- values: sample_issues_statuses
- }
- end
-
- let(:last_issue_data) do
- {
- page: 1,
- pagelen: 1,
- values: [sample_issues_statuses.last]
- }
- end
-
- let(:counter) { double('counter', increment: true) }
-
- subject { described_class.new(project) }
-
- describe '#import_pull_requests' do
- let(:source_branch_sha) { sample.commits.last }
- let(:merge_commit_sha) { sample.commits.second }
- let(:target_branch_sha) { sample.commits.first }
- let(:pull_request) do
- instance_double(
- Bitbucket::Representation::PullRequest,
- iid: 10,
- source_branch_sha: source_branch_sha,
- source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
- target_branch_sha: target_branch_sha,
- target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
- merge_commit_sha: merge_commit_sha,
- title: 'This is a title',
- description: 'This is a test pull request',
- state: 'merged',
- author: pull_request_author,
- created_at: Time.now,
- updated_at: Time.now)
- end
-
- let(:pull_request_author) { 'other' }
- let(:comments) { [@inline_note, @reply] }
-
- let(:author_line) { "*Created by: someuser*\n\n" }
-
- before do
- allow(subject).to receive(:import_wiki)
- allow(subject).to receive(:import_issues)
-
- # https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
- @inline_note = instance_double(
- Bitbucket::Representation::PullRequestComment,
- iid: 2,
- file_path: '.gitmodules',
- old_pos: nil,
- new_pos: 4,
- note: 'Hello world',
- author: 'someuser',
- created_at: Time.now,
- updated_at: Time.now,
- inline?: true,
- has_parent?: false)
-
- @reply = instance_double(
- Bitbucket::Representation::PullRequestComment,
- iid: 3,
- file_path: '.gitmodules',
- note: 'Hello world',
- author: 'someuser',
- created_at: Time.now,
- updated_at: Time.now,
- inline?: true,
- has_parent?: true,
- parent_id: 2)
-
- allow(subject.client).to receive(:repo)
- allow(subject.client).to receive(:pull_requests).and_return([pull_request])
- allow(subject.client).to receive(:pull_request_comments).with(anything, pull_request.iid).and_return(comments)
- end
-
- it 'imports threaded discussions' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.state).to eq('merged')
- expect(merge_request.notes.count).to eq(2)
- expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
-
- notes = merge_request.notes.order(:id).to_a
- start_note = notes.first
- expect(start_note).to be_a(DiffNote)
- expect(start_note.note).to include(@inline_note.note)
- expect(start_note.note).to include(author_line)
-
- reply_note = notes.last
- expect(reply_note).to be_a(DiffNote)
- expect(reply_note.note).to include(@reply.note)
- expect(reply_note.note).to include(author_line)
- end
-
- context 'when author is blank' do
- let(:pull_request_author) { nil }
-
- it 'adds created by anonymous in the description', :aggregate_failures do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- expect(MergeRequest.first.description).to include('Created by: Anonymous')
- end
- end
-
- context 'when user exists in GitLab' do
- let!(:existing_user) { create(:user, username: 'someuser') }
- let!(:identity) { create(:identity, provider: 'bitbucket', extern_uid: existing_user.username, user: existing_user) }
-
- it 'does not add author line to comments' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
-
- notes = merge_request.notes.order(:id).to_a
- start_note = notes.first
- expect(start_note.note).to eq(@inline_note.note)
- expect(start_note.note).not_to include(author_line)
-
- reply_note = notes.last
- expect(reply_note.note).to eq(@reply.note)
- expect(reply_note.note).not_to include(author_line)
- end
- end
-
- it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
- expect(subject.instance_values['ref_converter']).to receive(:convert_note).twice
-
- subject.execute
- end
-
- context 'when importing a pull request throws an exception' do
- before do
- allow(pull_request).to receive(:raw).and_return({ error: "broken" })
- allow(subject.client).to receive(:pull_request_comments).and_raise(Gitlab::HTTP::Error)
- end
-
- it 'logs an error without the backtrace' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception)
- .with(instance_of(Gitlab::HTTP::Error), hash_including(raw_response: '{"error":"broken"}'))
-
- subject.execute
-
- expect(subject.errors.count).to eq(1)
- expect(subject.errors.first.keys).to match_array(%i[type iid errors])
- end
- end
-
- context 'when source SHA is not found in the repository' do
- let(:source_branch_sha) { 'a' * Commit::MIN_SHA_LENGTH }
- let(:target_branch_sha) { 'c' * Commit::MIN_SHA_LENGTH }
-
- it 'uses merge commit SHA for source' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request_diff = MergeRequest.first.merge_request_diff
- expect(merge_request_diff.head_commit_sha).to eq merge_commit_sha
- expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
- end
-
- context 'when the merge commit SHA is also not found' do
- let(:merge_commit_sha) { 'b' * Commit::MIN_SHA_LENGTH }
-
- it 'uses the pull request sha references' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request_diff = MergeRequest.first.merge_request_diff
- expect(merge_request_diff.head_commit_sha).to eq source_branch_sha
- expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
- end
- end
- end
-
- context "when target_branch_sha is blank" do
- let(:target_branch_sha) { nil }
-
- it 'creates the merge request with no target branch', :aggregate_failures do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.target_branch_sha).to eq(nil)
- end
- end
-
- context 'metrics' do
- before do
- allow(Gitlab::Metrics).to receive(:counter) { counter }
- allow(pull_request).to receive(:raw).and_return('hello world')
- end
-
- it 'counts imported pull requests' do
- expect(Gitlab::Metrics).to receive(:counter).with(
- :bitbucket_importer_imported_merge_requests_total,
- 'The number of imported merge (pull) requests'
- )
-
- expect(counter).to receive(:increment)
-
- subject.execute
- end
- end
-
- context 'when pull request was already imported' do
- let(:pull_request_already_imported) do
- instance_double(
- BitbucketServer::Representation::PullRequest,
- iid: 11)
- end
-
- let(:cache_key) do
- format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :pull_requests)
- end
-
- before do
- allow(subject.client).to receive(:pull_requests).and_return([pull_request, pull_request_already_imported])
- Gitlab::Cache::Import::Caching.set_add(cache_key, pull_request_already_imported.iid)
- end
-
- it 'does not import the previously imported pull requests', :aggregate_failures do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, pull_request.iid)).to eq(true)
- end
- end
- end
-
- context 'issues statuses' do
- before do
- # HACK: Bitbucket::Representation.const_get('Issue') seems to return ::Issue without this
- Bitbucket::Representation::Issue.new({})
-
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}"
- ).to_return(status: 200,
- headers: { "Content-Type" => "application/json" },
- body: { has_issues: true, full_name: project_identifier }.to_json)
-
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues?pagelen=1&sort=-created_on&state=ALL"
- ).to_return(status: 200,
- headers: { "Content-Type" => "application/json" },
- body: last_issue_data.to_json)
-
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues?pagelen=50&sort=created_on"
- ).to_return(status: 200,
- headers: { "Content-Type" => "application/json" },
- body: issues_statuses_sample_data.to_json)
-
- stub_request(:get, "https://api.bitbucket.org/2.0/repositories/namespace/repo?pagelen=50&sort=created_on")
- .with(headers: { 'Accept' => '*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'Authorization' => 'Bearer', 'User-Agent' => 'Faraday v0.9.2' })
- .to_return(status: 200, body: "", headers: {})
-
- sample_issues_statuses.each_with_index do |issue, index|
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues/#{issue[:id]}/comments?pagelen=50&sort=created_on"
- ).to_return(
- status: 200,
- headers: { "Content-Type" => "application/json" },
- body: { author_info: { username: "username" }, utc_created_on: index }.to_json
- )
- end
-
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/pullrequests?pagelen=50&sort=created_on&state=ALL"
- ).to_return(status: 200,
- headers: { "Content-Type" => "application/json" },
- body: {}.to_json)
- end
-
- context 'creating labels on project' do
- before do
- allow(importer).to receive(:import_wiki)
- end
-
- it 'creates labels as expected' do
- expect { importer.execute }.to change { Label.count }.from(0).to(Gitlab::BitbucketImport::Importer::LABELS.size)
- end
-
- it 'does not fail if label is already existing' do
- label = Gitlab::BitbucketImport::Importer::LABELS.first
- ::Labels::CreateService.new(label).execute(project: project)
-
- expect { importer.execute }.not_to raise_error
- end
-
- it 'does not create new labels' do
- Gitlab::BitbucketImport::Importer::LABELS.each do |label|
- create(:label, project: project, title: label[:title])
- end
-
- expect { importer.execute }.not_to change { Label.count }
- end
-
- it 'does not update existing ones' do
- label_title = Gitlab::BitbucketImport::Importer::LABELS.first[:title]
- existing_label = create(:label, project: project, title: label_title)
- # Reload label from database so we avoid timestamp comparison issues related to time precision when comparing
- # attributes later.
- existing_label.reload
-
- travel_to(Time.now + 1.minute) do
- importer.execute
-
- label_after_import = project.labels.find(existing_label.id)
- expect(label_after_import.attributes).to eq(existing_label.attributes)
- end
- end
-
- it 'raises an error if a label is not valid' do
- stub_const("#{described_class}::LABELS", [{ title: nil, color: nil }])
-
- expect { importer.create_labels }.to raise_error(StandardError, /Failed to create label/)
- end
- end
-
- it 'maps statuses to open or closed' do
- allow(importer).to receive(:import_wiki)
-
- importer.execute
-
- expect(project.issues.where(state_id: Issue.available_states[:closed]).size).to eq(5)
- expect(project.issues.where(state_id: Issue.available_states[:opened]).size).to eq(2)
- expect(project.issues.map(&:namespace_id).uniq).to match_array([project.project_namespace_id])
- end
-
- describe 'wiki import' do
- it 'is skipped when the wiki exists' do
- expect(project.wiki).to receive(:repository_exists?) { true }
- expect(project.wiki.repository).not_to receive(:import_repository)
-
- importer.execute
-
- expect(importer.errors).to be_empty
- end
-
- it 'imports to the project disk_path' do
- expect(project.wiki).to receive(:repository_exists?) { false }
- expect(project.wiki.repository).to receive(:import_repository)
-
- importer.execute
-
- expect(importer.errors).to be_empty
- end
- end
-
- describe 'issue import' do
- it 'allocates internal ids' do
- expect(Issue).to receive(:track_namespace_iid!).with(project.project_namespace, 6)
-
- importer.execute
- end
-
- it 'maps reporters to anonymous if bitbucket reporter is nil' do
- allow(importer).to receive(:import_wiki)
- importer.execute
-
- expect(project.issues.size).to eq(7)
- expect(project.issues.where("description LIKE ?", '%Anonymous%').size).to eq(3)
- expect(project.issues.where("description LIKE ?", '%reporter1%').size).to eq(2)
- expect(project.issues.where("description LIKE ?", '%reporter2%').size).to eq(1)
- expect(project.issues.where("description LIKE ?", '%reporter3%').size).to eq(1)
- expect(importer.errors).to be_empty
- end
-
- it 'sets work item type on new issues' do
- allow(importer).to receive(:import_wiki)
-
- importer.execute
-
- expect(project.issues.map(&:work_item_type_id).uniq).to contain_exactly(WorkItems::Type.default_issue_type.id)
- end
-
- context 'with issue comments' do
- let(:note) { 'Hello world' }
- let(:inline_note) do
- instance_double(Bitbucket::Representation::Comment, note: note, author: 'someuser', created_at: Time.now, updated_at: Time.now)
- end
-
- before do
- allow_next_instance_of(Bitbucket::Client) do |instance|
- allow(instance).to receive(:issue_comments).and_return([inline_note])
- end
- allow(importer).to receive(:import_wiki)
- end
-
- it 'imports issue comments' do
- importer.execute
-
- comment = project.notes.first
- expect(project.notes.size).to eq(7)
- expect(comment.note).to include(note)
- expect(comment.note).to include(inline_note.author)
- expect(importer.errors).to be_empty
- end
-
- it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
- expect(importer.instance_values['ref_converter']).to receive(:convert_note).exactly(7).times
-
- importer.execute
- end
- end
-
- context 'when issue was already imported' do
- let(:cache_key) do
- format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :issues)
- end
-
- before do
- Gitlab::Cache::Import::Caching.set_add(cache_key, sample_issues_statuses.first[:id])
- end
-
- it 'does not import previously imported issues', :aggregate_failures do
- expect { subject.execute }.to change { Issue.count }.by(sample_issues_statuses.size - 1)
-
- sample_issues_statuses.each do |sample_issues_status|
- expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, sample_issues_status[:id])).to eq(true)
- end
- end
- end
- end
-
- context 'metrics' do
- before do
- allow(Gitlab::Metrics).to receive(:counter) { counter }
- end
-
- it 'counts imported issues' do
- expect(Gitlab::Metrics).to receive(:counter).with(
- :bitbucket_importer_imported_issues_total,
- 'The number of imported issues'
- )
-
- expect(counter).to receive(:increment)
-
- subject.execute
- end
- end
- end
-
- describe '#execute' do
- context 'metrics' do
- let(:histogram) { double(:histogram) }
-
- before do
- allow(subject).to receive(:import_wiki)
- allow(subject).to receive(:import_issues)
- allow(subject).to receive(:import_pull_requests)
-
- allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
- allow(Gitlab::Metrics).to receive(:histogram).and_return(histogram)
- allow(histogram).to receive(:observe)
- allow(counter).to receive(:increment)
- end
-
- it 'counts and measures duration of imported projects' do
- expect(Gitlab::Metrics).to receive(:counter).with(
- :bitbucket_importer_imported_projects_total,
- 'The number of imported projects'
- )
-
- expect(Gitlab::Metrics).to receive(:histogram).with(
- :bitbucket_importer_total_duration_seconds,
- 'Total time spent importing projects, in seconds',
- {},
- Gitlab::Import::Metrics::IMPORT_DURATION_BUCKETS
- )
-
- expect(counter).to receive(:increment)
- expect(histogram).to receive(:observe).with({ importer: :bitbucket_importer }, anything)
-
- subject.execute
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb
index af5a929683e..90987f6d3d4 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe Gitlab::BitbucketImport::Importers::IssuesImporter, feature_categ
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
- importer.execute
+ expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb
index a04543b0511..84dea203478 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::BitbucketImport::Importers::IssuesNotesImporter, feature_
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
- importer.execute
+ expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
index 1f36a353724..e5a8a1c906f 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
@@ -77,6 +77,18 @@ RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestImporter, :clean_g
end
end
+ context 'when the source and target projects are different' do
+ let(:importer) { described_class.new(project, hash.merge(source_and_target_project_different: true)) }
+
+ it 'skips the import' do
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info)
+ .with(include(message: 'skipping because source and target projects are different', iid: anything))
+
+ expect { importer.execute }.not_to change { project.merge_requests.count }
+ end
+ end
+
context 'when the author does not have a bitbucket identity' do
before do
identity.update!(provider: :github)
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
index eba7ec92aba..4d72c47d61a 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsImporter, feature
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
- importer.execute
+ expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb
index 78a08accf82..b4c26ff7add 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsNotesImporter, fe
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
- importer.execute
+ expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb b/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb
index 578b661d86b..c458214e794 100644
--- a/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb
@@ -19,7 +19,14 @@ RSpec.describe Gitlab::BitbucketImport::RefConverter, feature_category: :importe
context 'when the note has an issue ref' do
let(:note) { "[https://bitbucket.org/namespace/repo/issues/1/first-issue](https://bitbucket.org/namespace/repo/issues/1/first-issue){: data-inline-card='' } " }
- let(:expected) { "[http://localhost/#{path}/-/issues/1/](http://localhost/#{path}/-/issues/1/)" }
+ let(:expected) { "[http://localhost/#{path}/-/issues/1](http://localhost/#{path}/-/issues/1)" }
+
+ it_behaves_like 'converts the ref correctly'
+ end
+
+ context 'when the note references issues without an issue name' do
+ let(:note) { "[https://bitbucket.org/namespace/repo/issues](https://bitbucket.org/namespace/repo/issues){: data-inline-card='' } " }
+ let(:expected) { "[http://localhost/#{path}/-/issues](http://localhost/#{path}/-/issues)" }
it_behaves_like 'converts the ref correctly'
end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
index c7e91c340b0..914ebefdb8f 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporter, feature_category: :importers do
include AfterNextHelpers
- let_it_be(:project) do
+ let_it_be_with_reload(:project) do
create(:project, :repository, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
@@ -18,21 +18,36 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
let_it_be(:pull_request) { BitbucketServer::Representation::PullRequest.new(pull_request_data) }
let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
- let_it_be(:pull_request_author) do
+ let!(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
end
let(:merge_event) do
instance_double(
BitbucketServer::Representation::Activity,
+ id: 3,
comment?: false,
merge_event?: true,
+ approved_event?: false,
committer_email: pull_request_author.email,
merge_timestamp: now,
merge_commit: '12345678'
)
end
+ let(:approved_event) do
+ instance_double(
+ BitbucketServer::Representation::Activity,
+ id: 4,
+ comment?: false,
+ merge_event?: false,
+ approved_event?: true,
+ approver_username: pull_request_author.username,
+ approver_email: pull_request_author.email,
+ created_at: now
+ )
+ end
+
let(:pr_note) do
instance_double(
BitbucketServer::Representation::Comment,
@@ -48,6 +63,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
let(:pr_comment) do
instance_double(
BitbucketServer::Representation::Activity,
+ id: 5,
comment?: true,
inline_comment?: false,
merge_event?: false,
@@ -63,9 +79,9 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
.to receive(:info).with(include(import_stage: stage, message: message))
end
- subject(:importer) { described_class.new(project, pull_request.to_hash) }
+ subject(:importer) { described_class.new(project.reload, pull_request.to_hash) }
- describe '#execute', :clean_gitlab_redis_cache do
+ describe '#execute' do
context 'when a matching merge request is not found' do
it 'does nothing' do
expect { importer.execute }.not_to change { Note.count }
@@ -79,7 +95,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
end
- context 'when a matching merge request is found' do
+ context 'when a matching merge request is found', :clean_gitlab_redis_cache do
let_it_be(:merge_request) { create(:merge_request, iid: pull_request.iid, source_project: project) }
it 'logs its progress' do
@@ -211,6 +227,112 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
expect(merge_request.merge_commit_sha).to eq(merge_event.merge_commit)
end
end
+
+ context 'when PR has an approved event' do
+ before do
+ allow_next(BitbucketServer::Client).to receive(:activities).and_return([approved_event])
+ end
+
+ it 'creates the approval, reviewer and approval note' do
+ expect { importer.execute }
+ .to change { merge_request.approvals.count }.from(0).to(1)
+ .and change { merge_request.notes.count }.from(0).to(1)
+ .and change { merge_request.reviewers.count }.from(0).to(1)
+
+ approval = merge_request.approvals.first
+
+ expect(approval.user).to eq(pull_request_author)
+ expect(approval.created_at).to eq(now)
+
+ note = merge_request.notes.first
+
+ expect(note.note).to eq('approved this merge request')
+ expect(note.author).to eq(pull_request_author)
+ expect(note.system).to be_truthy
+ expect(note.created_at).to eq(now)
+
+ reviewer = merge_request.reviewers.first
+
+ expect(reviewer.id).to eq(pull_request_author.id)
+ end
+
+ context 'when a user with a matching username does not exist' do
+ before do
+ pull_request_author.update!(username: 'another_username')
+ end
+
+ it 'finds the user based on email' do
+ importer.execute
+
+ approval = merge_request.approvals.first
+
+ expect(approval.user).to eq(pull_request_author)
+ end
+
+ context 'when no users match email or username' do
+ let_it_be(:another_author) { create(:user) }
+
+ before do
+ pull_request_author.destroy!
+ end
+
+ it 'does not set an approver' do
+ expect { importer.execute }
+ .to not_change { merge_request.approvals.count }
+ .and not_change { merge_request.notes.count }
+ .and not_change { merge_request.reviewers.count }
+
+ expect(merge_request.approvals).to be_empty
+ end
+ end
+ end
+
+ context 'if the reviewer already existed' do
+ before do
+ merge_request.reviewers = [pull_request_author]
+ merge_request.save!
+ end
+
+ it 'does not create the reviewer record' do
+ expect { importer.execute }.not_to change { merge_request.reviewers.count }
+ end
+ end
+ end
+ end
+
+ shared_examples 'import is skipped' do
+ it 'does not log and does not import notes' do
+ expect(Gitlab::BitbucketServerImport::Logger)
+ .not_to receive(:info).with(include(import_stage: 'import_pull_request_notes', message: 'starting'))
+
+ expect { importer.execute }.not_to change { Note.count }
+ end
+ end
+
+ context 'when the project has been marked as failed' do
+ before do
+ project.import_state.mark_as_failed('error')
+ end
+
+ include_examples 'import is skipped'
+ end
+
+ context 'when the import data does not have credentials' do
+ before do
+ project.import_data.credentials = nil
+ project.import_data.save!
+ end
+
+ include_examples 'import is skipped'
+ end
+
+ context 'when the import data does not have data' do
+ before do
+ project.import_data.data = nil
+ project.import_data.save!
+ end
+
+ include_examples 'import is skipped'
end
end
end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
index af8a0202083..df6dfa3219c 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, feature_category: :importers do
+ include RepoHelpers
+
let_it_be(:project) do
create(:project, :with_import_url, :import_started, :empty_repo,
import_data_attributes: {
@@ -15,15 +17,17 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, f
subject(:importer) { described_class.new(project) }
describe '#execute', :clean_gitlab_redis_cache do
+ let(:commit_sha) { 'aaaa1' }
+
before do
allow_next_instance_of(BitbucketServer::Client) do |client|
- allow(client).to receive(:pull_requests).and_return(
+ allow(client).to receive(:pull_requests).with('key', 'slug', a_hash_including(limit: 50)).and_return(
[
BitbucketServer::Representation::PullRequest.new(
{
'id' => 1,
'state' => 'MERGED',
- 'fromRef' => { 'latestCommit' => 'aaaa1' },
+ 'fromRef' => { 'latestCommit' => commit_sha },
'toRef' => { 'latestCommit' => 'aaaa2' }
}
),
@@ -77,15 +81,42 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, f
context 'when pull requests are in merged or declined status' do
it 'fetches latest commits from the remote repository' do
+ expected_refmap = [
+ "#{commit_sha}:refs/merge-requests/1/head",
+ 'aaaa2:refs/keep-around/aaaa2',
+ 'bbbb1:refs/merge-requests/2/head',
+ 'bbbb2:refs/keep-around/bbbb2'
+ ]
+
expect(project.repository).to receive(:fetch_remote).with(
project.import_url,
- refmap: %w[aaaa1 aaaa2 bbbb1 bbbb2],
+ refmap: expected_refmap,
prune: false
)
importer.execute
end
+ context 'when a commit already exists' do
+ let_it_be(:commit_sha) { create_file_in_repo(project, 'master', 'master', 'test.txt', 'testing')[:result] }
+
+ it 'does not fetch the commit' do
+ expected_refmap = [
+ 'aaaa2:refs/keep-around/aaaa2',
+ 'bbbb1:refs/merge-requests/2/head',
+ 'bbbb2:refs/keep-around/bbbb2'
+ ]
+
+ expect(project.repository).to receive(:fetch_remote).with(
+ project.import_url,
+ refmap: expected_refmap,
+ prune: false
+ )
+
+ importer.execute
+ end
+ end
+
context 'when feature flag "fetch_commits_for_bitbucket_server" is disabled' do
before do
stub_feature_flags(fetch_commits_for_bitbucket_server: false)
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
new file mode 100644
index 00000000000..33d6ab94513
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::Importers::UsersImporter, feature_category: :importers do
+ let(:logger) { Gitlab::BitbucketServerImport::Logger }
+
+ let_it_be(:project) do
+ create(:project, :with_import_url, :import_started, :empty_repo,
+ import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
+ }
+ )
+ end
+
+ let(:user_1) do
+ BitbucketServer::Representation::User.new(
+ { 'user' => { 'emailAddress' => 'email1', 'slug' => 'username1' } }
+ )
+ end
+
+ let(:user_2) do
+ BitbucketServer::Representation::User.new(
+ { 'user' => { 'emailAddress' => 'email2', 'slug' => 'username2' } }
+ )
+ end
+
+ let(:user_3) do
+ BitbucketServer::Representation::User.new(
+ { 'user' => { 'emailAddress' => 'email3', 'slug' => 'username3' } }
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ allow_next_instance_of(BitbucketServer::Client) do |client|
+ allow(client).to receive(:users).with('key', limit: 2, page_offset: 1).and_return([user_1, user_2])
+ allow(client).to receive(:users).with('key', limit: 2, page_offset: 2).and_return([user_3])
+ allow(client).to receive(:users).with('key', limit: 2, page_offset: 3).and_return([])
+ end
+ end
+
+ subject(:importer) { described_class.new(project) }
+
+ describe '#execute' do
+ it 'writes the username and email to cache for every user in batches' do
+ expect(logger).to receive(:info).with(hash_including(message: 'starting'))
+ expect(logger).to receive(:info).with(hash_including(message: 'importing page 1 using batch size 2'))
+ expect(logger).to receive(:info).with(hash_including(message: 'importing page 2 using batch size 2'))
+ expect(logger).to receive(:info).with(hash_including(message: 'importing page 3 using batch size 2'))
+ expect(logger).to receive(:info).with(hash_including(message: 'finished'))
+
+ expect(Gitlab::Cache::Import::Caching).to receive(:write_multiple).and_call_original.twice
+
+ importer.execute
+
+ cache_key_prefix = "bitbucket_server/project/#{project.id}/source/username"
+ expect(Gitlab::Cache::Import::Caching.read("#{cache_key_prefix}/username1")).to eq('email1')
+ expect(Gitlab::Cache::Import::Caching.read("#{cache_key_prefix}/username2")).to eq('email2')
+ expect(Gitlab::Cache::Import::Caching.read("#{cache_key_prefix}/username3")).to eq('email3')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
index 6f9879da281..8f1c552e0b7 100644
--- a/spec/lib/gitlab/cache/import/caching_spec.rb
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state, feature_category: :importers do
shared_examples 'validated redis value' do
let(:value) { double('value', to_s: Object.new) }
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
expect(redis).to receive(:get).with(/foo/).and_return('bar')
expect(redis).to receive(:expire).with(/foo/, described_class::TIMEOUT)
- expect(Gitlab::Redis::Cache).to receive(:with).twice.and_yield(redis)
+ expect(Gitlab::Redis::Cache).to receive(:with).exactly(4).times.and_yield(redis)
described_class.read('foo')
end
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
expect(redis).to receive(:get).with(/foo/).and_return('')
expect(redis).not_to receive(:expire)
- expect(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
+ expect(Gitlab::Redis::Cache).to receive(:with).twice.and_yield(redis)
described_class.read('foo')
end
@@ -80,6 +80,10 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
end
describe '.increment' do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'increment a key and returns the current value' do
expect(described_class.increment('foo')).to eq(1)
diff --git a/spec/lib/gitlab/checks/container_moved_spec.rb b/spec/lib/gitlab/checks/container_moved_spec.rb
index 00ef5604e1d..de7ec330e0c 100644
--- a/spec/lib/gitlab/checks/container_moved_spec.rb
+++ b/spec/lib/gitlab/checks/container_moved_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::ContainerMoved, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::Checks::ContainerMoved, :clean_gitlab_redis_shared_state, feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index 8cdee727d3d..a84d9194cd2 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::ForcePush do
+RSpec.describe Gitlab::Checks::ForcePush, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
describe '.force_push?' do
diff --git a/spec/lib/gitlab/checks/global_file_size_check_spec.rb b/spec/lib/gitlab/checks/global_file_size_check_spec.rb
index db615053356..9e3be2fd2ec 100644
--- a/spec/lib/gitlab/checks/global_file_size_check_spec.rb
+++ b/spec/lib/gitlab/checks/global_file_size_check_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::Checks::GlobalFileSizeCheck, feature_category: :source_co
context 'when there are oversized blobs' do
let(:mock_blob_id) { "88acbfafb1b8fdb7c51db870babce21bd861ac4f" }
let(:mock_blob_size) { 300 * 1024 * 1024 } # 300 MiB
- let(:size_msg) { "300.0" } # it is (mock_blob_size / 1024.0 / 1024.0).round(2).to_s
+ let(:size_msg) { "300" }
let(:blob_double) { instance_double(Gitlab::Git::Blob, size: mock_blob_size, id: mock_blob_id) }
before do
@@ -53,8 +53,7 @@ RSpec.describe Gitlab::Checks::GlobalFileSizeCheck, feature_category: :source_co
expect(Gitlab::AppJsonLogger).to receive(:info).with('Checking for blobs over the file size limit')
expect(Gitlab::AppJsonLogger).to receive(:info).with(
message: 'Found blob over global limit',
- blob_sizes: [mock_blob_size],
- blob_details: { mock_blob_id => { "size" => mock_blob_size } }
+ blob_details: [{ "id" => mock_blob_id, "size" => mock_blob_size }]
)
expect do
subject.validate!
diff --git a/spec/lib/gitlab/checks/lfs_check_spec.rb b/spec/lib/gitlab/checks/lfs_check_spec.rb
index 331a43b814f..9f001dd1941 100644
--- a/spec/lib/gitlab/checks/lfs_check_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::LfsCheck do
+RSpec.describe Gitlab::Checks::LfsCheck, feature_category: :source_code_management do
include_context 'changes access checks context'
let(:blob_object) { project.repository.blob_at_branch('lfs', 'files/lfs/lfs_object.iso') }
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
index abad2bfa905..0aecf26f42f 100644
--- a/spec/lib/gitlab/checks/lfs_integrity_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::LfsIntegrity do
+RSpec.describe Gitlab::Checks::LfsIntegrity, feature_category: :source_code_management do
include ProjectForksHelper
let!(:time_left) { 50 }
diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
index 5397aea90a9..85fe669b8cf 100644
--- a/spec/lib/gitlab/checks/matching_merge_request_spec.rb
+++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::MatchingMergeRequest do
+RSpec.describe Gitlab::Checks::MatchingMergeRequest, feature_category: :source_code_management do
describe '#match?' do
let_it_be(:newrev) { '012345678' }
let_it_be(:target_branch) { 'feature' }
diff --git a/spec/lib/gitlab/checks/project_created_spec.rb b/spec/lib/gitlab/checks/project_created_spec.rb
index 6a2e4201030..74961a61892 100644
--- a/spec/lib/gitlab/checks/project_created_spec.rb
+++ b/spec/lib/gitlab/checks/project_created_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state, feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
diff --git a/spec/lib/gitlab/checks/push_check_spec.rb b/spec/lib/gitlab/checks/push_check_spec.rb
index 262438256b4..f61e4c39715 100644
--- a/spec/lib/gitlab/checks/push_check_spec.rb
+++ b/spec/lib/gitlab/checks/push_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::PushCheck do
+RSpec.describe Gitlab::Checks::PushCheck, feature_category: :source_code_management do
include_context 'change access checks context'
describe '#validate!' do
diff --git a/spec/lib/gitlab/checks/push_file_count_check_spec.rb b/spec/lib/gitlab/checks/push_file_count_check_spec.rb
index e05070e8f35..fef45588979 100644
--- a/spec/lib/gitlab/checks/push_file_count_check_spec.rb
+++ b/spec/lib/gitlab/checks/push_file_count_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::PushFileCountCheck do
+RSpec.describe Gitlab::Checks::PushFileCountCheck, feature_category: :source_code_management do
let(:snippet) { create(:personal_snippet, :repository) }
let(:changes) { { oldrev: oldrev, newrev: newrev, ref: ref } }
let(:timeout) { Gitlab::GitAccess::INTERNAL_TIMEOUT }
diff --git a/spec/lib/gitlab/checks/single_change_access_spec.rb b/spec/lib/gitlab/checks/single_change_access_spec.rb
index 8d9f96dd2b4..9ae3e4b246d 100644
--- a/spec/lib/gitlab/checks/single_change_access_spec.rb
+++ b/spec/lib/gitlab/checks/single_change_access_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::SingleChangeAccess do
+RSpec.describe Gitlab::Checks::SingleChangeAccess, feature_category: :source_code_management do
describe '#validate!' do
include_context 'change access checks context'
diff --git a/spec/lib/gitlab/checks/snippet_check_spec.rb b/spec/lib/gitlab/checks/snippet_check_spec.rb
index c43b65d09c5..7763cc82681 100644
--- a/spec/lib/gitlab/checks/snippet_check_spec.rb
+++ b/spec/lib/gitlab/checks/snippet_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::SnippetCheck do
+RSpec.describe Gitlab::Checks::SnippetCheck, feature_category: :source_code_management do
include_context 'change access checks context'
let_it_be(:snippet) { create(:personal_snippet, :repository) }
diff --git a/spec/lib/gitlab/checks/tag_check_spec.rb b/spec/lib/gitlab/checks/tag_check_spec.rb
index 2b1fbc7e797..15c6b906689 100644
--- a/spec/lib/gitlab/checks/tag_check_spec.rb
+++ b/spec/lib/gitlab/checks/tag_check_spec.rb
@@ -11,126 +11,138 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
it 'raises an error when user does not have access' do
allow(user_access).to receive(:can_do_action?).with(:admin_tag).and_return(false)
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to change existing tags on this project.')
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to change existing tags on this project.'
+ )
end
- context "prohibited tags check" do
+ describe "prohibited tags check" do
it 'prohibits tags name that include refs/heads at the head' do
- allow(subject).to receive(:tag_name).and_return("refs/heads/foo")
+ allow(change_check).to receive(:tag_name).and_return("refs/heads/foo")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a tag with a prohibited pattern.")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "You cannot create a tag with a prohibited pattern."
+ )
end
it "prohibits tag names that include refs/tags/ at the head" do
- allow(subject).to receive(:tag_name).and_return("refs/tags/foo")
+ allow(change_check).to receive(:tag_name).and_return("refs/tags/foo")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a tag with a prohibited pattern.")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "You cannot create a tag with a prohibited pattern."
+ )
end
it "doesn't prohibit a nested refs/tags/ string in a tag name" do
- allow(subject).to receive(:tag_name).and_return("fix-for-refs/tags/foo")
-
- expect { subject.validate! }.not_to raise_error
- end
+ allow(change_check).to receive(:tag_name).and_return("fix-for-refs/tags/foo")
- context "deleting a refs/tags headed tag" do
- let(:newrev) { "0000000000000000000000000000000000000000" }
- let(:ref) { "refs/tags/refs/tags/267208abfe40e546f5e847444276f7d43a39503e" }
-
- it "doesn't prohibit the deletion of a refs/tags/ tag name" do
- expect { subject.validate! }.not_to raise_error
- end
+ expect { change_check.validate! }.not_to raise_error
end
it "prohibits tag names that include characters incompatible with UTF-8" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "Tag names must be valid when converted to UTF-8 encoding")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "Tag names must be valid when converted to UTF-8 encoding"
+ )
end
it "doesn't prohibit UTF-8 compatible characters" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
context "when prohibited_tag_name_encoding_check feature flag is disabled" do
before do
stub_feature_flags(prohibited_tag_name_encoding_check: false)
- allow(subject).to receive(:validate_tag_name_not_sha_like!)
+ allow(change_check).to receive(:validate_tag_name_not_sha_like!)
end
it "doesn't prohibit tag names that include characters incompatible with UTF-8" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
it "doesn't prohibit UTF-8 compatible characters" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+
+ expect { change_check.validate! }.not_to raise_error
+ end
+ end
- expect { subject.validate! }.not_to raise_error
+ describe "deleting a refs/tags headed tag" do
+ let(:newrev) { "0000000000000000000000000000000000000000" }
+ let(:ref) { "refs/tags/refs/tags/267208abfe40e546f5e847444276f7d43a39503e" }
+
+ it "doesn't prohibit the deletion of a refs/tags/ tag name" do
+ expect { change_check.validate! }.not_to raise_error
end
end
it "forbids SHA-1 values" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("267208abfe40e546f5e847444276f7d43a39503e")
- expect { subject.validate! }.to raise_error(
+ expect { change_check.validate! }.to raise_error(
Gitlab::GitAccess::ForbiddenError,
"You cannot create a tag with a SHA-1 or SHA-256 tag name."
)
end
it "forbids SHA-256 values" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("09b9fd3ea68e9b95a51b693a29568c898e27d1476bbd83c825664f18467fc175")
- expect { subject.validate! }.to raise_error(
+ expect { change_check.validate! }.to raise_error(
Gitlab::GitAccess::ForbiddenError,
"You cannot create a tag with a SHA-1 or SHA-256 tag name."
)
end
it "forbids '{SHA-1}{+anything}' values" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("267208abfe40e546f5e847444276f7d43a39503e-")
- expect { subject.validate! }.to raise_error(
+ expect { change_check.validate! }.to raise_error(
Gitlab::GitAccess::ForbiddenError,
"You cannot create a tag with a SHA-1 or SHA-256 tag name."
)
end
it "forbids '{SHA-256}{+anything} values" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("09b9fd3ea68e9b95a51b693a29568c898e27d1476bbd83c825664f18467fc175-")
- expect { subject.validate! }.to raise_error(
+ expect { change_check.validate! }.to raise_error(
Gitlab::GitAccess::ForbiddenError,
"You cannot create a tag with a SHA-1 or SHA-256 tag name."
)
end
it "allows SHA-1 values to be appended to the tag name" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("fix-267208abfe40e546f5e847444276f7d43a39503e")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
it "allows SHA-256 values to be appended to the tag name" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("fix-09b9fd3ea68e9b95a51b693a29568c898e27d1476bbd83c825664f18467fc175")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
end
@@ -142,31 +154,36 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
project.add_maintainer(user)
end
- context 'deletion' do
+ describe 'deleting a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
- context 'via web interface' do
+ context 'when deleting via web interface' do
let(:protocol) { 'web' }
it 'is allowed' do
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
end
- context 'via SSH' do
+ context 'when deleting via SSH' do
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /only delete.*web interface/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You can only delete protected tags using the web interface.'
+ )
end
end
end
- context 'update' do
+ describe 'updating a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot be updated/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError, 'Protected tags cannot be updated.'
+ )
end
end
end
@@ -176,37 +193,47 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
project.add_developer(user)
end
- context 'deletion' do
+ describe 'deleting a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /not allowed to delete/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to delete protected tags from this project. ' \
+ 'Only a project maintainer or owner can delete a protected tag.'
+ )
end
end
end
- context 'creation' do
+ describe 'creating a tag' do
let(:oldrev) { '0000000000000000000000000000000000000000' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:ref) { 'refs/tags/v9.1.0' }
it 'prevents creation below access level' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /allowed to create this tag as it is protected/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to create this tag as it is protected.'
+ )
end
context 'when user has access' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, project: project, name: 'v*') }
it 'allows tag creation' do
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
context 'when tag name is the same as default branch' do
let(:ref) { "refs/tags/#{project.default_branch}" }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot use default branch name to create a tag/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You cannot use default branch name to create a tag'
+ )
end
end
end
diff --git a/spec/lib/gitlab/checks/timed_logger_spec.rb b/spec/lib/gitlab/checks/timed_logger_spec.rb
index 261fdd6c002..e5c76afff3c 100644
--- a/spec/lib/gitlab/checks/timed_logger_spec.rb
+++ b/spec/lib/gitlab/checks/timed_logger_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::TimedLogger do
+RSpec.describe Gitlab::Checks::TimedLogger, feature_category: :source_code_management do
let!(:timeout) { 50.seconds }
let!(:start) { Time.now }
let!(:ref) { "bar" }
diff --git a/spec/lib/gitlab/ci/build/image_spec.rb b/spec/lib/gitlab/ci/build/image_spec.rb
index 4895077a731..f8c0d69be2e 100644
--- a/spec/lib/gitlab/ci/build/image_spec.rb
+++ b/spec/lib/gitlab/ci/build/image_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
context 'when image is defined in job' do
let(:image_name) { 'image:1.0' }
- let(:job) { create(:ci_build, options: { image: image_name } ) }
+ let(:job) { create(:ci_build, options: { image: image_name }) }
context 'when image is defined as string' do
it 'fabricates an object of the proper class' do
@@ -29,12 +29,14 @@ RSpec.describe Gitlab::Ci::Build::Image do
context 'when image is defined as hash' do
let(:entrypoint) { '/bin/sh' }
let(:pull_policy) { %w[always if-not-present] }
+ let(:executor_opts) { { docker: { platform: 'arm64' } } }
let(:job) do
create(:ci_build, options: { image: { name: image_name,
entrypoint: entrypoint,
ports: [80],
- pull_policy: pull_policy } } )
+ executor_opts: executor_opts,
+ pull_policy: pull_policy } })
end
it 'fabricates an object of the proper class' do
@@ -44,6 +46,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
it 'populates fabricated object with the proper attributes' do
expect(subject.name).to eq(image_name)
expect(subject.entrypoint).to eq(entrypoint)
+ expect(subject.executor_opts).to eq(executor_opts)
expect(subject.pull_policy).to eq(pull_policy)
end
@@ -98,11 +101,12 @@ RSpec.describe Gitlab::Ci::Build::Image do
let(:service_entrypoint) { '/bin/sh' }
let(:service_alias) { 'db' }
let(:service_command) { 'sleep 30' }
+ let(:executor_opts) { { docker: { platform: 'amd64' } } }
let(:pull_policy) { %w[always if-not-present] }
let(:job) do
create(:ci_build, options: { services: [{ name: service_image_name, entrypoint: service_entrypoint,
alias: service_alias, command: service_command, ports: [80],
- pull_policy: pull_policy }] })
+ executor_opts: executor_opts, pull_policy: pull_policy }] })
end
it 'fabricates an non-empty array of objects' do
@@ -116,6 +120,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
expect(subject.first.entrypoint).to eq(service_entrypoint)
expect(subject.first.alias).to eq(service_alias)
expect(subject.first.command).to eq(service_command)
+ expect(subject.first.executor_opts).to eq(executor_opts)
expect(subject.first.pull_policy).to eq(pull_policy)
port = subject.first.ports.first
diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb
index 4ba963b54b5..b9b4c3f7c69 100644
--- a/spec/lib/gitlab/ci/components/instance_path_spec.rb
+++ b/spec/lib/gitlab/ci/components/instance_path_spec.rb
@@ -42,48 +42,86 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
end
end
- context 'when the component is simple (single file template)' do
- it 'fetches the component content', :aggregate_failures do
+ shared_examples 'does not find the component' do
+ it 'returns nil' do
result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine_1')
- expect(result.path).to eq('templates/secret-detection.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
+ expect(result).to be_nil
+ end
+ end
+
+ shared_examples 'finds the component' do
+ shared_examples 'fetches the component content' do
+ it 'fetches the component content', :aggregate_failures do
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq(file_content)
+ expect(result.path).to eq(file_path)
+ expect(path.host).to eq(current_host)
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(project.commit('master').id)
+ end
+ end
+
+ it_behaves_like 'fetches the component content'
+
+ context 'when feature flag ci_redirect_component_project is disabled' do
+ before do
+ stub_feature_flags(ci_redirect_component_project: false)
+ end
+
+ it_behaves_like 'fetches the component content'
+ end
+
+ context 'when there is a redirect set for the project' do
+ let!(:redirect_route) { project.redirect_routes.create!(path: 'another-group/new-project') }
+ let(:project_path) { redirect_route.path }
+
+ it_behaves_like 'fetches the component content'
+
+ context 'when feature flag ci_redirect_component_project is disabled' do
+ before do
+ stub_feature_flags(ci_redirect_component_project: false)
+ end
+
+ it_behaves_like 'does not find the component'
+ end
+ end
+ end
+
+ context 'when the component is simple (single file template)' do
+ it_behaves_like 'finds the component' do
+ let(:file_path) { 'templates/secret-detection.yml' }
+ let(:file_content) { 'image: alpine_1' }
end
end
context 'when the component is complex (directory-based template)' do
let(:address) { "acme.com/#{project_path}/dast@#{version}" }
- it 'fetches the component content', :aggregate_failures do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine_2')
- expect(result.path).to eq('templates/dast/template.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
+ it_behaves_like 'finds the component' do
+ let(:file_path) { 'templates/dast/template.yml' }
+ let(:file_content) { 'image: alpine_2' }
end
context 'when there is an invalid nested component folder' do
let(:address) { "acme.com/#{project_path}/dast/another-folder@#{version}" }
- it 'returns nil' do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to be_nil
- end
+ it_behaves_like 'does not find the component'
end
context 'when there is an invalid nested component path' do
let(:address) { "acme.com/#{project_path}/dast/another-template@#{version}" }
- it 'returns nil' do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to be_nil
- end
+ it_behaves_like 'does not find the component'
end
end
+ context "when the project path starts with '/'" do
+ let(:project_path) { "/#{project.full_path}" }
+
+ it_behaves_like 'does not find the component'
+ end
+
+ # TODO: remove when deleting the feature flag `ci_redirect_component_project`
shared_examples 'prevents infinite loop' do |prefix|
context "when the project path starts with '#{prefix}'" do
let(:project_path) { "#{prefix}#{project.full_path}" }
@@ -127,7 +165,7 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
released_at: Time.zone.now)
end
- it 'fetches the component content', :aggregate_failures do
+ it 'returns the component content of the latest project release', :aggregate_failures do
result = path.fetch_content!(current_user: user)
expect(result.content).to eq('image: alpine_2')
expect(result.path).to eq('templates/secret-detection.yml')
@@ -135,6 +173,25 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
expect(path.project).to eq(project)
expect(path.sha).to eq(latest_sha)
end
+
+ context 'when the project is a catalog resource' do
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+
+ before do
+ project.releases.each do |release|
+ create(:ci_catalog_resource_version, catalog_resource: resource, release: release)
+ end
+ end
+
+ it 'returns the component content of the latest catalog resource version', :aggregate_failures do
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine_2')
+ expect(result.path).to eq('templates/secret-detection.yml')
+ expect(path.host).to eq(current_host)
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(latest_sha)
+ end
+ end
end
context 'when version does not exist' do
@@ -162,88 +219,5 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
end
end
end
-
- # All the following tests are for deprecated code and will be removed
- # in https://gitlab.com/gitlab-org/gitlab/-/issues/415855
- context 'when the project does not contain a templates directory' do
- let(:project_path) { project.full_path }
- let(:address) { "acme.com/#{project_path}/component@#{version}" }
-
- let_it_be(:project) do
- create(
- :project, :custom_repo,
- files: {
- 'component/template.yml' => 'image: alpine'
- }
- )
- end
-
- before do
- project.add_developer(user)
- end
-
- it 'fetches the component content', :aggregate_failures do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine')
- expect(result.path).to eq('component/template.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
- end
-
- context 'when project path is nested under a subgroup' do
- let_it_be(:group) { create(:group, :nested) }
- let_it_be(:project) do
- create(
- :project, :custom_repo,
- files: {
- 'component/template.yml' => 'image: alpine'
- },
- group: group
- )
- end
-
- it 'fetches the component content', :aggregate_failures do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine')
- expect(result.path).to eq('component/template.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
- end
- end
-
- context 'when current GitLab instance is installed on a relative URL' do
- let(:address) { "acme.com/gitlab/#{project_path}/component@#{version}" }
- let(:current_host) { 'acme.com/gitlab/' }
-
- it 'fetches the component content', :aggregate_failures do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine')
- expect(result.path).to eq('component/template.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
- end
- end
-
- context 'when version does not exist' do
- let(:version) { 'non-existent' }
-
- it 'returns nil', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to be_nil
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to be_nil
- end
- end
-
- context 'when user does not have permissions' do
- it 'raises an error when fetching the content' do
- expect { path.fetch_content!(current_user: build(:user)) }
- .to raise_error(Gitlab::Access::AccessDeniedError)
- end
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
new file mode 100644
index 00000000000..bdd66cc00a1
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::AutoCancel, feature_category: :pipeline_composition do
+ subject(:config) { described_class.new(config_hash) }
+
+ context 'with on_new_commit' do
+ let(:config_hash) do
+ { on_new_commit: 'interruptible' }
+ end
+
+ it { is_expected.to be_valid }
+
+ it 'returns value correctly' do
+ expect(config.value).to eq(config_hash)
+ end
+
+ context 'when on_new_commit is invalid' do
+ let(:config_hash) do
+ { on_new_commit: 'invalid' }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(config.errors)
+ .to include('auto cancel on new commit must be one of: conservative, interruptible, disabled')
+ end
+ end
+ end
+
+ context 'with on_job_failure' do
+ ['all', 'none', nil].each do |value|
+ context 'when the `on_job_failure` value is valid' do
+ let(:config_hash) { { on_job_failure: value } }
+
+ it { is_expected.to be_valid }
+
+ it 'returns value correctly' do
+ expect(config.value).to eq(on_job_failure: value)
+ end
+ end
+ end
+
+ context 'when on_job_failure is invalid' do
+ let(:config_hash) do
+ { on_job_failure: 'invalid' }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(config.errors)
+ .to include('auto cancel on job failure must be one of: none, all')
+ end
+ end
+ end
+
+ context 'with invalid key' do
+ let(:config_hash) do
+ { invalid: 'interruptible' }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(config.errors)
+ .to include('auto cancel config contains unknown keys: invalid')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 6e6b9d949c5..35f2a99ee87 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
+RSpec.describe Gitlab::Ci::Config::Entry::Bridge, feature_category: :continuous_integration do
subject(:entry) { described_class.new(config, name: :my_bridge) }
it_behaves_like 'with inheritable CI config' do
+ let(:config) { { trigger: 'some/project' } }
let(:inheritable_key) { 'default' }
let(:inheritable_class) { Gitlab::Ci::Config::Entry::Default }
@@ -13,9 +14,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
# that we know that we don't want to inherit
# as they do not have sense in context of Bridge
let(:ignored_inheritable_columns) do
- %i[before_script after_script hooks image services cache interruptible timeout
+ %i[before_script after_script hooks image services cache timeout
retry tags artifacts id_tokens]
end
+
+ before do
+ allow(entry).to receive_message_chain(:inherit_entry, :default_entry, :inherit?).and_return(true)
+ end
end
describe '.matching?' do
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index 17c45ec4c2c..99a6e25b313 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -42,6 +42,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
end
end
+ describe '#executor_opts' do
+ it "returns nil" do
+ expect(entry.executor_opts).to be_nil
+ end
+ end
+
describe '#ports' do
it "returns image's ports" do
expect(entry.ports).to be_nil
@@ -88,6 +94,54 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
end
end
+ context 'when configuration specifies docker' do
+ let(:config) { { name: 'image:1.0', docker: {} } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ executor_opts: {
+ docker: {}
+ }
+ )
+ end
+ end
+
+ context "when docker specifies an option" do
+ let(:config) { { name: 'image:1.0', docker: { platform: 'amd64' } } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ executor_opts: {
+ docker: { platform: 'amd64' }
+ }
+ )
+ end
+ end
+ end
+
+ context "when docker specifies an invalid option" do
+ let(:config) { { name: 'image:1.0', docker: { platform: 1 } } }
+
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{image executor opts '/docker/platform' must be a valid 'string'}
+ end
+ end
+ end
+
context 'when configuration has ports' do
let(:ports) { [{ number: 80, protocol: 'http', name: 'foobar' }] }
let(:config) { { name: 'image:1.0', entrypoint: %w[/bin/sh run], ports: ports } }
@@ -146,7 +200,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
describe '#errors' do
it 'saves errors' do
expect(entry.errors.first)
- .to match /config should be a hash or a string/
+ .to match(/config should be a hash or a string/)
end
end
@@ -163,7 +217,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
describe '#errors' do
it 'saves errors' do
expect(entry.errors.first)
- .to match /config contains unknown keys: non_existing/
+ .to match(/config contains unknown keys: non_existing/)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/includes_spec.rb b/spec/lib/gitlab/ci/config/entry/includes_spec.rb
index f1f28c24e70..54c02868010 100644
--- a/spec/lib/gitlab/ci/config/entry/includes_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/includes_spec.rb
@@ -13,4 +13,18 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Includes, feature_category: :pipelin
2.times { expect { described_class.new(config) }.not_to change { described_class.aspects.count } }
end
end
+
+ describe 'validations' do
+ let(:config) { [1, 2] }
+
+ let(:includes_entry) { described_class.new(config, max_size: 1) }
+
+ it 'returns invalid' do
+ expect(includes_entry).not_to be_valid
+ end
+
+ it 'returns the appropriate error' do
+ expect(includes_entry.errors).to include('includes config is too long (maximum is 1)')
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb b/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb
index 7cd9b0acb99..c0d21385ce6 100644
--- a/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Inherit::Default do
false | false
%w[image] | true
%w[before_script] | false
+ '123' | false
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 24d3cac6616..073d8feaadd 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -789,7 +789,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
hooks: { pre_get_sources_script: 'echo hello' } }
end
- it 'returns correct value' do
+ it 'returns correct values' do
expect(entry.value).to eq(
name: :rspec,
before_script: %w[ls pwd],
@@ -806,6 +806,93 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
)
end
end
+
+ context 'with retry present in the config' do
+ let(:config) do
+ {
+ script: 'rspec',
+ retry: { max: 1, when: "always" }
+ }
+ end
+
+ it 'returns correct values' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ script: %w[rspec],
+ stage: 'test',
+ ignore: false,
+ retry: { max: 1, when: %w[always] },
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ end
+
+ context 'when ci_retry_on_exit_codes feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_retry_on_exit_codes: false)
+ end
+
+ it 'returns correct values' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ script: %w[rspec],
+ stage: 'test',
+ ignore: false,
+ retry: { max: 1, when: %w[always] },
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ end
+ end
+
+ context 'with exit_codes present' do
+ let(:config) do
+ {
+ script: 'rspec',
+ retry: { max: 1, when: "always", exit_codes: 255 }
+ }
+ end
+
+ it 'returns correct values' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ script: %w[rspec],
+ stage: 'test',
+ ignore: false,
+ retry: { max: 1, when: %w[always], exit_codes: [255] },
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ end
+
+ context 'when ci_retry_on_exit_codes feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_retry_on_exit_codes: false)
+ end
+
+ it 'returns correct values' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ script: %w[rspec],
+ stage: 'test',
+ ignore: false,
+ # Shouldn't include exit_codes
+ retry: { max: 1, when: %w[always] },
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ end
+ end
+ end
+ end
end
context 'when job is using tags' do
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index 44e2fdbac37..84a8fd827cb 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -217,6 +217,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable, feature_category: :pipeli
end
end
end
+
+ context 'when interruptible is not a boolean' do
+ let(:config) { { interruptible: 123 } }
+
+ it 'returns error about wrong value type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include "interruptible config should be a boolean value"
+ end
+ end
end
describe '#relevant?' do
@@ -462,6 +471,28 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable, feature_category: :pipeli
end
end
end
+
+ context 'with interruptible' do
+ context 'when interruptible is not defined' do
+ let(:config) { { script: 'ls' } }
+
+ it 'sets interruptible to nil' do
+ entry.compose!(deps)
+
+ expect(entry.value[:interruptible]).to be_nil
+ end
+ end
+
+ context 'when interruptible is defined' do
+ let(:config) { { script: 'ls', interruptible: true } }
+
+ it 'sets interruptible to the value' do
+ entry.compose!(deps)
+
+ expect(entry.value[:interruptible]).to eq(true)
+ end
+ end
+ end
end
context 'when composed' do
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index d610c3ce2f6..a6675229c62 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -49,6 +49,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports, feature_category: :pipeline_c
:accessibility | 'gl-accessibility.json'
:cyclonedx | 'gl-sbom.cdx.zip'
:annotations | 'gl-annotations.json'
+ :repository_xray | 'gl-repository-xray.json'
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/entry/retry_spec.rb b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
index 84ef5344a8b..e01b50c5fbd 100644
--- a/spec/lib/gitlab/ci/config/entry/retry_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
@@ -11,8 +11,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
end
shared_context 'when retry value is a hash', :hash do
- let(:config) { { max: max, when: public_send(:when) }.compact }
+ let(:config) { { max: max, when: public_send(:when), exit_codes: public_send(:exit_codes) }.compact }
let(:when) {}
+ let(:exit_codes) {}
let(:max) {}
end
@@ -43,6 +44,44 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
expect(value).to eq(when: %w[unknown_failure runner_system_failure])
end
end
+
+ context 'and `exit_codes` is an integer' do
+ let(:exit_codes) { 255 }
+
+ it 'returns an array of exit_codes' do
+ expect(value).to eq(exit_codes: [255])
+ end
+ end
+
+ context 'and `exit_codes` is an array' do
+ let(:exit_codes) { [255, 142] }
+
+ it 'returns an array of exit_codes' do
+ expect(value).to eq(exit_codes: [255, 142])
+ end
+ end
+ end
+
+ context 'when ci_retry_on_exit_codes feature flag is disabled', :hash do
+ before do
+ stub_feature_flags(ci_retry_on_exit_codes: false)
+ end
+
+ context 'when `exit_codes` is an integer' do
+ let(:exit_codes) { 255 }
+
+ it 'deletes the attribute exit_codes' do
+ expect(value).to eq({})
+ end
+ end
+
+ context 'when `exit_codes` is an array' do
+ let(:exit_codes) { [255, 137] }
+
+ it 'deletes the attribute exit_codes' do
+ expect(value).to eq({})
+ end
+ end
end
end
@@ -65,6 +104,22 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
end
end
+ context 'with numeric exit_codes' do
+ let(:exit_codes) { 255 }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'with an array of exit_codes' do
+ let(:exit_codes) { [255, 142] }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
context 'with string when' do
let(:when) { 'unknown_failure' }
@@ -202,7 +257,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
end
end
- context 'iwth max too high' do
+ context 'with max too high' do
let(:max) { 10 }
it 'returns error about value too high' do
@@ -211,6 +266,33 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
end
end
+ context 'with exit_codes in wrong format' do
+ let(:exit_codes) { true }
+
+ it 'raises an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'retry exit codes should be an array of integers or an integer'
+ end
+ end
+
+ context 'with exit_codes in wrong array format' do
+ let(:exit_codes) { ['string 1', 'string 2'] }
+
+ it 'raises an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'retry exit codes should be an array of integers or an integer'
+ end
+ end
+
+ context 'with exit_codes in wrong mixed array format' do
+ let(:exit_codes) { [255, '155'] }
+
+ it 'raises an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'retry exit codes should be an array of integers or an integer'
+ end
+ end
+
context 'with when in wrong format' do
let(:when) { true }
diff --git a/spec/lib/gitlab/ci/config/entry/service_spec.rb b/spec/lib/gitlab/ci/config/entry/service_spec.rb
index 1f935bebed5..82747e7b521 100644
--- a/spec/lib/gitlab/ci/config/entry/service_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/service_spec.rb
@@ -47,11 +47,23 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
expect(entry.ports).to be_nil
end
end
+
+ describe '#executor_opts' do
+ it "returns service's executor_opts configuration" do
+ expect(entry.executor_opts).to be_nil
+ end
+ end
end
context 'when configuration is a hash' do
let(:config) do
- { name: 'postgresql:9.5', alias: 'db', command: %w[cmd run], entrypoint: %w[/bin/sh run] }
+ {
+ name: 'postgresql:9.5',
+ alias: 'db',
+ command: %w[cmd run],
+ entrypoint: %w[/bin/sh run],
+ variables: { 'MY_VAR' => 'variable' }
+ }
end
describe '#valid?' do
@@ -141,6 +153,51 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
end
end
+ context 'when configuration has docker options' do
+ let(:config) { { name: 'postgresql:9.5', docker: { platform: 'amd64' } } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'postgresql:9.5',
+ executor_opts: {
+ docker: { platform: 'amd64' }
+ }
+ )
+ end
+ end
+ end
+
+ context 'when docker options have an invalid property' do
+ let(:config) { { name: 'postgresql:9.5', docker: { invalid: 'option' } } }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{service executor opts '/docker/invalid' must be a valid 'schema'}
+ end
+ end
+ end
+
+ context 'when docker options platform is not string' do
+ let(:config) { { name: 'postgresql:9.5', docker: { platform: 123 } } }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{service executor opts '/docker/platform' must be a valid 'string'}
+ end
+ end
+ end
+
context 'when configuration has pull_policy' do
let(:config) { { name: 'postgresql:9.5', pull_policy: 'if-not-present' } }
diff --git a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
index 97ac199f47d..d3ce3ffe641 100644
--- a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
@@ -2,13 +2,12 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
- let(:factory) { Gitlab::Config::Entry::Factory.new(described_class).value(rules_hash) }
- let(:config) { factory.create! }
+RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_composition do
+ subject(:config) { described_class.new(workflow_hash) }
describe 'validations' do
context 'when work config value is a string' do
- let(:rules_hash) { 'build' }
+ let(:workflow_hash) { 'build' }
describe '#valid?' do
it 'is invalid' do
@@ -22,13 +21,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
describe '#value' do
it 'returns the invalid configuration' do
- expect(config.value).to eq(rules_hash)
+ expect(config.value).to eq(workflow_hash)
end
end
end
context 'when work config value is a hash' do
- let(:rules_hash) { { rules: [{ if: '$VAR' }] } }
+ let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
describe '#valid?' do
it 'is valid' do
@@ -42,12 +41,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
describe '#value' do
it 'returns the config' do
- expect(config.value).to eq(rules_hash)
+ expect(config.value).to eq(workflow_hash)
end
end
context 'with an invalid key' do
- let(:rules_hash) { { trash: [{ if: '$VAR' }] } }
+ let(:workflow_hash) { { trash: [{ if: '$VAR' }] } }
describe '#valid?' do
it 'is invalid' do
@@ -61,64 +60,79 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
describe '#value' do
it 'returns the invalid configuration' do
- expect(config.value).to eq(rules_hash)
+ expect(config.value).to eq(workflow_hash)
end
end
end
+ end
+ end
- context 'with workflow name' do
- let(:factory) { Gitlab::Config::Entry::Factory.new(described_class).value(workflow_hash) }
+ describe '.default' do
+ it 'is nil' do
+ expect(described_class.default).to be_nil
+ end
+ end
- context 'with a blank name' do
- let(:workflow_hash) do
- { name: '' }
- end
+ context 'with workflow name' do
+ context 'with a blank name' do
+ let(:workflow_hash) do
+ { name: '' }
+ end
- it 'is invalid' do
- expect(config).not_to be_valid
- end
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
- it 'returns error about invalid name' do
- expect(config.errors).to include('workflow name is too short (minimum is 1 character)')
- end
- end
+ it 'returns error about invalid name' do
+ expect(config.errors).to include('workflow name is too short (minimum is 1 character)')
+ end
+ end
- context 'with too long name' do
- let(:workflow_hash) do
- { name: 'a' * 256 }
- end
+ context 'with too long name' do
+ let(:workflow_hash) do
+ { name: 'a' * 256 }
+ end
- it 'is invalid' do
- expect(config).not_to be_valid
- end
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
- it 'returns error about invalid name' do
- expect(config.errors).to include('workflow name is too long (maximum is 255 characters)')
- end
- end
+ it 'returns error about invalid name' do
+ expect(config.errors).to include('workflow name is too long (maximum is 255 characters)')
+ end
+ end
- context 'when name is nil' do
- let(:workflow_hash) { { name: nil } }
+ context 'when name is nil' do
+ let(:workflow_hash) { { name: nil } }
- it 'is valid' do
- expect(config).to be_valid
- end
- end
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+ end
- context 'when name is not provided' do
- let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
+ context 'when name is not provided' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
- it 'is valid' do
- expect(config).to be_valid
- end
- end
+ it 'is valid' do
+ expect(config).to be_valid
end
end
end
- describe '.default' do
- it 'is nil' do
- expect(described_class.default).to be_nil
+ context 'with auto_cancel' do
+ let(:workflow_hash) do
+ {
+ auto_cancel: {
+ on_new_commit: 'interruptible',
+ on_job_failure: 'none'
+ }
+ }
+ end
+
+ it { is_expected.to be_valid }
+
+ it 'returns value correctly' do
+ expect(config.value).to eq(workflow_hash)
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 0643bf0c046..b961ee0d190 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -269,8 +269,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip
context_sha: sha,
type: :local,
location: 'lib/gitlab/ci/templates/existent-file.yml',
- blob: "http://localhost/#{project.full_path}/-/blob/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
- raw: "http://localhost/#{project.full_path}/-/raw/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
+ blob: "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/blob/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
+ raw: "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/raw/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
extra: {}
)
}
diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
index f8d3d1019f5..7293e640112 100644
--- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
@@ -75,7 +75,9 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
context 'with a timeout' do
before do
- allow(Gitlab::HTTP).to receive(:get).and_raise(Timeout::Error)
+ allow_next_instance_of(HTTParty::Request) do |instance|
+ allow(instance).to receive(:perform).and_raise(Timeout::Error)
+ end
end
it { is_expected.to be_falsy }
@@ -94,24 +96,33 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
end
end
- describe "#content" do
+ # When the FF ci_parallel_remote_includes is removed,
+ # convert this `shared_context` to `describe` and remove `rubocop:disable`.
+ shared_context "#content" do # rubocop:disable RSpec/ContextWording -- This is temporary until the FF is removed.
+ subject(:content) do
+ remote_file.preload_content
+ remote_file.content
+ end
+
context 'with a valid remote file' do
before do
stub_full_request(location).to_return(body: remote_file_content)
end
it 'returns the content of the file' do
- expect(remote_file.content).to eql(remote_file_content)
+ expect(content).to eql(remote_file_content)
end
end
context 'with a timeout' do
before do
- allow(Gitlab::HTTP).to receive(:get).and_raise(Timeout::Error)
+ allow_next_instance_of(HTTParty::Request) do |instance|
+ allow(instance).to receive(:perform).and_raise(Timeout::Error)
+ end
end
it 'is falsy' do
- expect(remote_file.content).to be_falsy
+ expect(content).to be_falsy
end
end
@@ -123,7 +134,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
end
it 'is nil' do
- expect(remote_file.content).to be_nil
+ expect(content).to be_nil
end
end
@@ -131,11 +142,21 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
let(:location) { 'http://localhost:8080' }
it 'is nil' do
- expect(remote_file.content).to be_nil
+ expect(content).to be_nil
end
end
end
+ it_behaves_like "#content"
+
+ context 'when the FF ci_parallel_remote_includes is disabled' do
+ before do
+ stub_feature_flags(ci_parallel_remote_includes: false)
+ end
+
+ it_behaves_like "#content"
+ end
+
describe "#error_message" do
subject(:error_message) do
Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([remote_file])
@@ -234,13 +255,18 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
end
describe '#to_hash' do
+ subject(:to_hash) do
+ remote_file.preload_content
+ remote_file.to_hash
+ end
+
before do
stub_full_request(location).to_return(body: remote_file_content)
end
context 'with a valid remote file' do
it 'returns the content as a hash' do
- expect(remote_file.to_hash).to eql(
+ expect(to_hash).to eql(
before_script: ["apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs",
"ruby -v",
"which ruby",
@@ -260,7 +286,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
end
it 'returns the content as a hash' do
- expect(remote_file.to_hash).to eql(
+ expect(to_hash).to eql(
include: [
{ local: 'another-file.yml',
rules: [{ exists: ['Dockerfile'] }] }
@@ -293,7 +319,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
it 'returns the content as a hash' do
expect(remote_file).to be_valid
- expect(remote_file.to_hash).to eql(
+ expect(to_hash).to eql(
include: [
{ local: 'some-file.yml',
rules: [{ exists: ['Dockerfile'] }] }
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 5f28b45496f..d67b0ff8895 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -85,7 +85,13 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
an_instance_of(Gitlab::Ci::Config::External::File::Remote))
end
- it_behaves_like 'logging config file fetch', 'config_file_fetch_remote_content_duration_s', 1
+ context 'when the FF ci_parallel_remote_includes is disabled' do
+ before do
+ stub_feature_flags(ci_parallel_remote_includes: false)
+ end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_remote_content_duration_s', 1
+ end
end
context 'when the key is a remote file hash' do
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 68cdf56f198..4684495fa26 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -410,7 +410,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel
let(:other_project_files) do
{
- '/component-x/template.yml' => <<~YAML
+ '/templates/component-x/template.yml' => <<~YAML
component_x_job:
script: echo Component X
YAML
diff --git a/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb b/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb
index 30036ee68ed..b0a514cb1e2 100644
--- a/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb
@@ -4,8 +4,34 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Interpolation::Inputs::BaseInput, feature_category: :pipeline_composition do
describe '.matches?' do
- it 'is not implemented' do
- expect { described_class.matches?(double) }.to raise_error(NotImplementedError)
+ context 'when given is a hash' do
+ before do
+ stub_const('TestInput', Class.new(described_class))
+
+ TestInput.class_eval do
+ def self.type_name
+ 'test'
+ end
+ end
+ end
+
+ context 'when the spec type matches the input type' do
+ it 'returns true' do
+ expect(TestInput.matches?({ type: 'test' })).to be_truthy
+ end
+ end
+
+ context 'when the spec type does not match the input type' do
+ it 'returns false' do
+ expect(TestInput.matches?({ type: 'string' })).to be_falsey
+ end
+ end
+ end
+
+ context 'when not given a hash' do
+ it 'returns false' do
+ expect(described_class.matches?([])).to be_falsey
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb b/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb
new file mode 100644
index 00000000000..70858c0fff8
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb
@@ -0,0 +1,221 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::TextInterpolator, feature_category: :pipeline_composition do
+ let(:result) { ::Gitlab::Ci::Config::Yaml::Result.new(config: [header, content]) }
+
+ subject(:interpolator) { described_class.new(result, arguments, []) }
+
+ context 'when input data is valid' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'correctly interpolates the config' do
+ interpolator.interpolate!
+
+ expect(interpolator).to be_interpolated
+ expect(interpolator).to be_valid
+ expect(interpolator.to_result).to eq("test: 'deploy gitlab.com'")
+ end
+ end
+
+ context 'when config has a syntax error' do
+ let(:result) { ::Gitlab::Ci::Config::Yaml::Result.new(error: 'Invalid configuration format') }
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'surfaces an error about invalid config' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('Invalid configuration format')
+ end
+ end
+
+ context 'when spec header is missing but inputs are specified' do
+ let(:header) { nil }
+ let(:content) { "test: 'echo'" }
+ let(:arguments) { { foo: 'bar' } }
+
+ it 'surfaces an error about invalid inputs' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq(
+ 'Given inputs not defined in the `spec` section of the included configuration file'
+ )
+ end
+ end
+
+ context 'when spec header is invalid' do
+ let(:header) do
+ { spec: { arguments: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'surfaces an error about invalid header' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('header:spec config contains unknown keys: arguments')
+ end
+ end
+
+ context 'when provided interpolation argument is invalid' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]]'"
+ end
+
+ let(:arguments) do
+ { website: ['gitlab.com'] }
+ end
+
+ it 'returns an error about the invalid argument' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('`website` input: provided value is not a string')
+ end
+ end
+
+ context 'when interpolation block is invalid' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.abc ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'returns an error about the invalid block' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('unknown interpolation key: `abc`')
+ end
+ end
+
+ context 'when multiple interpolation blocks are invalid' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.something.abc ]] $[[ inputs.cde ]] $[[ efg ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'stops execution after the first invalid block' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('unknown interpolation key: `something`')
+ end
+ end
+
+ context 'when there are many invalid arguments' do
+ let(:header) do
+ { spec: { inputs: {
+ allow_failure: { type: 'boolean' },
+ image: nil,
+ parallel: { type: 'number' },
+ website: nil
+ } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]] $[[ inputs.parallel ]] $[[ inputs.allow_failure ]] $[[ inputs.image ]]'"
+ end
+
+ let(:arguments) do
+ { allow_failure: 'no', parallel: 'yes', website: 8 }
+ end
+
+ it 'reports a maximum of 3 errors in the error message' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq(
+ '`allow_failure` input: provided value is not a boolean, ' \
+ '`image` input: required value has not been provided, ' \
+ '`parallel` input: provided value is not a number'
+ )
+ expect(interpolator.errors).to contain_exactly(
+ '`allow_failure` input: provided value is not a boolean',
+ '`image` input: required value has not been provided',
+ '`parallel` input: provided value is not a number',
+ '`website` input: provided value is not a string'
+ )
+ end
+ end
+
+ describe '#to_result' do
+ context 'when interpolation is not used' do
+ let(:result) do
+ ::Gitlab::Ci::Config::Yaml::Result.new(config: content)
+ end
+
+ let(:content) do
+ "test: 'deploy production'"
+ end
+
+ let(:arguments) { nil }
+
+ it 'returns original content' do
+ interpolator.interpolate!
+
+ expect(interpolator.to_result).to eq(content)
+ end
+ end
+
+ context 'when interpolation is available' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'correctly interpolates content' do
+ interpolator.interpolate!
+
+ expect(interpolator.to_result).to eq("test: 'deploy gitlab.com'")
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/interpolation/text_template_spec.rb b/spec/lib/gitlab/ci/config/interpolation/text_template_spec.rb
new file mode 100644
index 00000000000..a2f98fc0d5d
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/text_template_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::TextTemplate, feature_category: :pipeline_composition do
+ subject(:template) { described_class.new(config, ctx) }
+
+ let(:config) do
+ <<~CFG
+ test:
+ spec:
+ env: $[[ inputs.env ]]
+
+ $[[ inputs.key ]]:
+ name: $[[ inputs.key ]]
+ script: my-value
+ parallel: $[[ inputs.parallel ]]
+ CFG
+ end
+
+ let(:ctx) do
+ { inputs: { env: 'dev', key: 'abc', parallel: 6 } }
+ end
+
+ it 'interpolates the values properly' do
+ expect(template.interpolated).to eq <<~RESULT
+ test:
+ spec:
+ env: dev
+
+ abc:
+ name: abc
+ script: my-value
+ parallel: 6
+ RESULT
+ end
+
+ context 'when the config has an unknown interpolation key' do
+ let(:config) { '$[[ xxx.yyy ]]: abc' }
+
+ it 'does not interpolate the config' do
+ expect(template).not_to be_valid
+ expect(template.interpolated).to be_nil
+ expect(template.errors).to contain_exactly('unknown interpolation key: `xxx`')
+ end
+ end
+
+ context 'when template consists of nested arrays with hashes and values' do
+ let(:config) do
+ <<~CFG
+ test:
+ - a-$[[ inputs.key ]]-b
+ - c-$[[ inputs.key ]]-d:
+ d-$[[ inputs.key ]]-e
+ val: 1
+ CFG
+ end
+
+ it 'performs a valid interpolation' do
+ result = <<~RESULT
+ test:
+ - a-abc-b
+ - c-abc-d:
+ d-abc-e
+ val: 1
+ RESULT
+
+ expect(template).to be_valid
+ expect(template.interpolated).to eq result
+ end
+ end
+
+ context 'when template contains symbols that need interpolation' do
+ subject(:template) do
+ described_class.new("'$[[ inputs.key ]]': 'cde'", ctx)
+ end
+
+ it 'performs a valid interpolation' do
+ expect(template).to be_valid
+ expect(template.interpolated).to eq("'abc': 'cde'")
+ end
+ end
+
+ context 'when template is too large' do
+ before do
+ stub_application_setting(ci_max_total_yaml_size_bytes: 1)
+ end
+
+ it 'returns an error' do
+ expect(template.interpolated).to be_nil
+ expect(template.errors).to contain_exactly('config too large')
+ end
+ end
+
+ context 'when there are too many interpolation blocks' do
+ before do
+ stub_const("#{described_class}::MAX_BLOCKS", 1)
+ end
+
+ it 'returns an error' do
+ expect(template.interpolated).to be_nil
+ expect(template.errors).to contain_exactly('too many interpolation blocks')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index fdf152b3584..76be65d91c4 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -43,6 +43,34 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_composition do
expect(config.to_hash).to eq hash
end
+ context 'when yml has stages' do
+ let(:yml) do
+ <<-EOS
+ image: image:1.0
+ stages:
+ - custom_stage
+ rspec:
+ script:
+ - gem install rspec
+ - rspec
+ EOS
+ end
+
+ specify do
+ expect(config.to_hash[:stages]).to eq(['.pre', 'custom_stage', '.post'])
+ end
+
+ context 'with inject_edge_stages option disabled' do
+ let(:config) do
+ described_class.new(yml, project: nil, pipeline: nil, sha: nil, user: nil, inject_edge_stages: false)
+ end
+
+ specify do
+ expect(config.to_hash[:stages]).to contain_exactly('custom_stage')
+ end
+ end
+ end
+
describe '#valid?' do
it 'is valid' do
expect(config).to be_valid
diff --git a/spec/lib/gitlab/ci/jwt_v2_spec.rb b/spec/lib/gitlab/ci/jwt_v2_spec.rb
index c2ced10620b..1093e6331cd 100644
--- a/spec/lib/gitlab/ci/jwt_v2_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_v2_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::JwtV2, feature_category: :continuous_integration do
+RSpec.describe Gitlab::Ci::JwtV2, feature_category: :secrets_management do
let(:namespace) { build_stubbed(:namespace) }
let(:project) { build_stubbed(:project, namespace: namespace) }
let(:user) do
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
index a331af9a9ac..9c8402faf77 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
@@ -33,35 +33,27 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
allow(SecureRandom).to receive(:uuid).and_return(uuid)
end
- context 'when report JSON is invalid' do
- let(:raw_report_data) { '{ ' }
+ context 'when report is invalid' do
+ context 'when report JSON is invalid' do
+ let(:raw_report_data) { '{ ' }
- it 'handles errors and adds them to the report' do
- expect(report).to receive(:add_error).with(a_string_including("Report JSON is invalid:"))
+ it 'handles errors and adds them to the report' do
+ expect(report).to receive(:add_error).with(a_string_including("Report JSON is invalid:"))
- expect { parse! }.not_to raise_error
+ expect { parse! }.not_to raise_error
+ end
end
- end
-
- context 'when report uses an unsupported spec version' do
- let(:report_data) { base_report_data.merge({ 'specVersion' => '1.3' }) }
-
- it 'reports unsupported version as an error' do
- expect(report).to receive(:add_error).with("Unsupported CycloneDX spec version. Must be one of: 1.4")
- parse!
- end
- end
+ context 'when report does not conform to the CycloneDX schema' do
+ let(:report_valid?) { false }
+ let(:validator_errors) { %w[error1 error2] }
- context 'when report does not conform to the CycloneDX schema' do
- let(:report_valid?) { false }
- let(:validator_errors) { %w[error1 error2] }
+ it 'reports all errors returned by the validator' do
+ expect(report).to receive(:add_error).with("error1")
+ expect(report).to receive(:add_error).with("error2")
- it 'reports all errors returned by the validator' do
- expect(report).to receive(:add_error).with("error1")
- expect(report).to receive(:add_error).with("error2")
-
- parse!
+ parse!
+ end
end
end
@@ -109,25 +101,26 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
it 'adds each component, ignoring unused attributes' do
expect(report).to receive(:add_component)
- .with(
- an_object_having_attributes(
- name: "activesupport",
- version: "5.1.4",
- component_type: "library",
- purl: an_object_having_attributes(type: "gem")
- )
- )
+ .with(
+ an_object_having_attributes(
+ name: "activesupport",
+ version: "5.1.4",
+ component_type: "library",
+ purl: an_object_having_attributes(type: "gem")
+ )
+ )
expect(report).to receive(:add_component)
- .with(
- an_object_having_attributes(
- name: "byebug",
- version: "10.0.0",
- component_type: "library",
- purl: an_object_having_attributes(type: "gem")
- )
- )
+ .with(
+ an_object_having_attributes(
+ name: "byebug",
+ version: "10.0.0",
+ component_type: "library",
+ purl: an_object_having_attributes(type: "gem")
+ )
+ )
expect(report).to receive(:add_component)
- .with(an_object_having_attributes(name: "minimal-component", version: nil, component_type: "library"))
+ .with(an_object_having_attributes(name: "minimal-component", version: nil,
+ component_type: "library"))
parse!
end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
index acb7c122bcd..9422290761d 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
@@ -4,160 +4,116 @@ require "spec_helper"
RSpec.describe Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator,
feature_category: :dependency_management do
- # Reports should be valid or invalid according to the specification at
- # https://cyclonedx.org/docs/1.4/json/
-
- subject(:validator) { described_class.new(report_data) }
-
- let_it_be(:required_attributes) do
+ let(:report_data) do
{
"bomFormat" => "CycloneDX",
- "specVersion" => "1.4",
+ "specVersion" => spec_version,
"version" => 1
}
end
- context "with minimally valid report" do
- let_it_be(:report_data) { required_attributes }
-
- it { is_expected.to be_valid }
- end
-
- context "when report has components" do
- let(:report_data) { required_attributes.merge({ "components" => components }) }
-
- context "with minimally valid components" do
- let(:components) do
- [
- {
- "type" => "library",
- "name" => "activesupport"
- },
- {
- "type" => "library",
- "name" => "byebug"
- }
- ]
- end
+ subject(:validator) { described_class.new(report_data) }
- it { is_expected.to be_valid }
+ shared_examples 'a validator that performs the expected validations' do
+ let(:required_attributes) do
+ {
+ "bomFormat" => "CycloneDX",
+ "specVersion" => spec_version,
+ "version" => 1
+ }
end
- context "when components have versions" do
- let(:components) do
- [
- {
- "type" => "library",
- "name" => "activesupport",
- "version" => "5.1.4"
- },
- {
- "type" => "library",
- "name" => "byebug",
- "version" => "10.0.0"
- }
- ]
- end
+ context "with minimally valid report" do
+ let(:report_data) { required_attributes }
it { is_expected.to be_valid }
end
- context 'when components have licenses' do
- let(:components) do
- [
- {
- "type" => "library",
- "name" => "activesupport",
- "version" => "5.1.4",
- "licenses" => [
- { "license" => { "id" => "MIT" } }
- ]
- }
- ]
- end
+ context "when report has components" do
+ let(:report_data) { required_attributes.merge({ "components" => components }) }
- it { is_expected.to be_valid }
- end
-
- context 'when components have a signature' do
- let(:components) do
- [
- {
- "type" => "library",
- "name" => "activesupport",
- "version" => "5.1.4",
- "signature" => {
- "algorithm" => "ES256",
- "publicKey" => {
- "kty" => "EC",
- "crv" => "P-256",
- "x" => "6BKxpty8cI-exDzCkh-goU6dXq3MbcY0cd1LaAxiNrU",
- "y" => "mCbcvUzm44j3Lt2b5BPyQloQ91tf2D2V-gzeUxWaUdg"
- },
- "value" => "ybT1qz5zHNi4Ndc6y7Zhamuf51IqXkPkZwjH1XcC-KSuBiaQplTw6Jasf2MbCLg3CF7PAdnMO__WSLwvI5r2jA"
+ context "with minimally valid components" do
+ let(:components) do
+ [
+ {
+ "type" => "library",
+ "name" => "activesupport"
+ },
+ {
+ "type" => "library",
+ "name" => "byebug"
}
- }
- ]
- end
-
- it { is_expected.to be_valid }
- end
+ ]
+ end
- context "when components are not valid" do
- let(:components) do
- [
- { "type" => "foo" },
- { "name" => "activesupport" }
- ]
+ it { is_expected.to be_valid }
end
- it { is_expected.not_to be_valid }
-
- it "outputs errors for each validation failure" do
- expect(validator.errors).to match_array(
+ context "when components have versions" do
+ let(:components) do
[
- "property '/components/0' is missing required keys: name",
- "property '/components/0/type' is not one of: [\"application\", \"framework\"," \
- " \"library\", \"container\", \"operating-system\", \"device\", \"firmware\", \"file\"]",
- "property '/components/1' is missing required keys: type"
- ])
- end
- end
- end
-
- context "when report has metadata" do
- let(:metadata) do
- {
- "timestamp" => "2022-02-23T08:02:39Z",
- "tools" => [{ "vendor" => "GitLab", "name" => "Gemnasium", "version" => "2.34.0" }],
- "authors" => [{ "name" => "GitLab", "email" => "support@gitlab.com" }]
- }
- end
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4"
+ },
+ {
+ "type" => "library",
+ "name" => "byebug",
+ "version" => "10.0.0"
+ }
+ ]
+ end
- let(:report_data) { required_attributes.merge({ "metadata" => metadata }) }
+ it { is_expected.to be_valid }
+ end
- it { is_expected.to be_valid }
+ context 'when components have licenses' do
+ let(:components) do
+ [
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4",
+ "licenses" => [
+ { "license" => { "id" => "MIT" } }
+ ]
+ }
+ ]
+ end
- context "when metadata has properties" do
- before do
- metadata.merge!({ "properties" => properties })
+ it { is_expected.to be_valid }
end
- context "when properties are valid" do
- let(:properties) do
+ context 'when components have a signature' do
+ let(:components) do
[
- { "name" => "gitlab:dependency_scanning:input_file", "value" => "Gemfile.lock" },
- { "name" => "gitlab:dependency_scanning:package_manager", "value" => "bundler" }
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4",
+ "signature" => {
+ "algorithm" => "ES256",
+ "publicKey" => {
+ "kty" => "EC",
+ "crv" => "P-256",
+ "x" => "6BKxpty8cI-exDzCkh-goU6dXq3MbcY0cd1LaAxiNrU",
+ "y" => "mCbcvUzm44j3Lt2b5BPyQloQ91tf2D2V-gzeUxWaUdg"
+ },
+ "value" => "ybT1qz5zHNi4Ndc6y7Zhamuf51IqXkPkZwjH1XcC-KSuBiaQplTw6Jasf2MbCLg3CF7PAdnMO__WSLwvI5r2jA"
+ }
+ }
]
end
it { is_expected.to be_valid }
end
- context "when properties are invalid" do
- let(:properties) do
+ context "when components are not valid" do
+ let(:components) do
[
- { "name" => ["gitlab:meta:schema_version"], "value" => 1 }
+ { "type" => "foo" },
+ { "name" => "activesupport" }
]
end
@@ -166,11 +122,75 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator,
it "outputs errors for each validation failure" do
expect(validator.errors).to match_array(
[
- "property '/metadata/properties/0/name' is not of type: string",
- "property '/metadata/properties/0/value' is not of type: string"
+ "property '/components/0' is missing required keys: name",
+ a_string_starting_with("property '/components/0/type' is not one of:"),
+ "property '/components/1' is missing required keys: type"
])
end
end
end
+
+ context "when report has metadata" do
+ let(:metadata) do
+ {
+ "timestamp" => "2022-02-23T08:02:39Z",
+ "tools" => [{ "vendor" => "GitLab", "name" => "Gemnasium", "version" => "2.34.0" }],
+ "authors" => [{ "name" => "GitLab", "email" => "support@gitlab.com" }]
+ }
+ end
+
+ let(:report_data) { required_attributes.merge({ "metadata" => metadata }) }
+
+ it { is_expected.to be_valid }
+
+ context "when metadata has properties" do
+ before do
+ metadata.merge!({ "properties" => properties })
+ end
+
+ context "when properties are valid" do
+ let(:properties) do
+ [
+ { "name" => "gitlab:dependency_scanning:input_file", "value" => "Gemfile.lock" },
+ { "name" => "gitlab:dependency_scanning:package_manager", "value" => "bundler" }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context "when properties are invalid" do
+ let(:properties) do
+ [
+ { "name" => ["gitlab:meta:schema_version"], "value" => 1 }
+ ]
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it "outputs errors for each validation failure" do
+ expect(validator.errors).to match_array(
+ [
+ "property '/metadata/properties/0/name' is not of type: string",
+ "property '/metadata/properties/0/value' is not of type: string"
+ ])
+ end
+ end
+ end
+ end
+ end
+
+ context 'when spec version is supported' do
+ where(:spec_version) { %w[1.4 1.5] }
+
+ with_them do
+ it_behaves_like 'a validator that performs the expected validations'
+ end
+ end
+
+ context 'when spec version is not supported' do
+ let(:spec_version) { '1.3' }
+
+ it { is_expected.not_to be_valid }
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index 31bffcbeb2a..00f834fcf80 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines, feature_category: :continuous_integration do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:command) { Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user) }
let_it_be(:step) { described_class.new(pipeline, command) }
@@ -17,5 +17,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines, feature_cate
subject
end
+
+ context 'with scheduled pipelines' do
+ before do
+ pipeline.source = :schedule
+ end
+
+ it 'enqueues LowUrgencyCancelRedundantPipelinesWorker' do
+ expect(Ci::LowUrgencyCancelRedundantPipelinesWorker)
+ .to receive(:perform_async).with(pipeline.id)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
index eb5a37f19f4..44ccb1eeae1 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
@@ -12,10 +12,13 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
end
let(:step) { described_class.new(pipeline, command) }
+ let(:ff_always_set_pipeline_failure_reason) { true }
describe '#perform!' do
context 'when pipeline has been skipped by workflow configuration' do
before do
+ stub_feature_flags(always_set_pipeline_failure_reason: ff_always_set_pipeline_failure_reason)
+
allow(step).to receive(:workflow_rules_result)
.and_return(
double(pass?: false, variables: {})
@@ -39,6 +42,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
it 'saves workflow_rules_result' do
expect(command.workflow_rules_result.variables).to eq({})
end
+
+ it 'sets the failure reason', :aggregate_failures do
+ expect(pipeline).to be_failed
+ expect(pipeline).to be_filtered_by_workflow_rules
+ end
+
+ context 'when always_set_pipeline_failure_reason is disabled' do
+ let(:ff_always_set_pipeline_failure_reason) { false }
+
+ it 'does not set the failure reason', :aggregate_failures do
+ expect(pipeline).not_to be_failed
+ expect(pipeline.failure_reason).to be_blank
+ end
+ end
end
context 'when pipeline has not been skipped by workflow configuration' do
@@ -67,6 +84,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
it 'saves workflow_rules_result' do
expect(command.workflow_rules_result.variables).to eq({ 'VAR1' => 'val2', 'VAR2' => 3 })
end
+
+ it 'does not set a failure reason' do
+ expect(pipeline).not_to be_filtered_by_workflow_rules
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
index 96ada90b4e1..84c2fb6525e 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers do
+RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuous_integration do
let(:helper_class) do
Class.new do
include Gitlab::Ci::Pipeline::Chain::Helpers
@@ -38,14 +38,35 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers do
describe '.error' do
shared_examples 'error function' do
specify do
- expect(pipeline).to receive(:drop!).with(drop_reason).and_call_original
expect(pipeline).to receive(:add_error_message).with(message).and_call_original
- expect(pipeline).to receive(:ensure_project_iid!).twice.and_call_original
+
+ if command.save_incompleted
+ expect(pipeline).to receive(:ensure_project_iid!).twice.and_call_original
+ expect(pipeline).to receive(:drop!).with(drop_reason).and_call_original
+ end
subject.error(message, config_error: config_error, drop_reason: drop_reason)
expect(pipeline.yaml_errors).to eq(yaml_error)
expect(pipeline.errors[:base]).to include(message)
+ expect(pipeline.status).to eq 'failed'
+ expect(pipeline.failure_reason).to eq drop_reason.to_s
+ end
+
+ context 'when feature flag always_set_pipeline_failure_reason is false' do
+ before do
+ stub_feature_flags(always_set_pipeline_failure_reason: false)
+ end
+
+ specify do
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ if command.save_incompleted
+ expect(pipeline.failure_reason).to eq drop_reason.to_s
+ else
+ expect(pipeline.failure_reason).not_to be_present
+ end
+ end
end
end
@@ -79,6 +100,43 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers do
let(:yaml_error) { nil }
it_behaves_like "error function"
+
+ specify do
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ expect(pipeline).to be_persisted
+ end
+
+ context 'when the drop reason is not persistable' do
+ let(:drop_reason) { :filtered_by_rules }
+ let(:command) { double(project: nil) }
+
+ specify do
+ expect(command).to receive(:increment_pipeline_failure_reason_counter)
+
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ expect(pipeline).to be_failed
+ expect(pipeline.failure_reason).to eq drop_reason.to_s
+ expect(pipeline).not_to be_persisted
+ end
+ end
+
+ context 'when save_incompleted is false' do
+ let(:command) { double(save_incompleted: false, project: nil) }
+
+ before do
+ allow(command).to receive(:increment_pipeline_failure_reason_counter)
+ end
+
+ it_behaves_like "error function"
+
+ specify do
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ expect(pipeline).not_to be_persisted
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
index 00200b57b1e..732748d8c8b 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
+RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category: :pipeline_composition do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -43,16 +43,28 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
- context 'with pipeline name' do
- let(:config) do
- { workflow: { name: ' Pipeline name ' }, rspec: { script: 'rspec' } }
- end
-
+ shared_examples 'not breaking the chain' do
it 'does not break the chain' do
run_chain
expect(step.break?).to be false
end
+ end
+
+ shared_examples 'not saving pipeline metadata' do
+ it 'does not save pipeline metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata).to be_nil
+ end
+ end
+
+ context 'with pipeline name' do
+ let(:config) do
+ { workflow: { name: ' Pipeline name ' }, rspec: { script: 'rspec' } }
+ end
+
+ it_behaves_like 'not breaking the chain'
it 'builds pipeline_metadata' do
run_chain
@@ -67,22 +79,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
{ workflow: { name: ' ' }, rspec: { script: 'rspec' } }
end
- it 'strips whitespace from name' do
- run_chain
-
- expect(pipeline.pipeline_metadata).to be_nil
- end
+ it_behaves_like 'not saving pipeline metadata'
context 'with empty name after variable substitution' do
let(:config) do
{ workflow: { name: '$VAR1' }, rspec: { script: 'rspec' } }
end
- it 'does not save empty name' do
- run_chain
-
- expect(pipeline.pipeline_metadata).to be_nil
- end
+ it_behaves_like 'not saving pipeline metadata'
end
end
@@ -127,4 +131,140 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
end
end
end
+
+ context 'with auto_cancel' do
+ let(:on_new_commit) { 'interruptible' }
+ let(:on_job_failure) { 'all' }
+ let(:auto_cancel) { { on_new_commit: on_new_commit, on_job_failure: on_job_failure } }
+ let(:config) { { workflow: { auto_cancel: auto_cancel }, rspec: { script: 'rspec' } } }
+
+ it_behaves_like 'not breaking the chain'
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+
+ context 'with no auto_cancel' do
+ let(:config) do
+ { rspec: { script: 'rspec' } }
+ end
+
+ it_behaves_like 'not saving pipeline metadata'
+ end
+
+ context 'with auto_cancel: nil' do
+ let(:auto_cancel) { nil }
+
+ it_behaves_like 'not saving pipeline metadata'
+ end
+
+ context 'with auto_cancel_on_new_commit and no auto_cancel_on_job_failure' do
+ let(:auto_cancel) { { on_new_commit: on_new_commit } }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'with auto_cancel_on_job_failure and no auto_cancel_on_new_commit' do
+ let(:auto_cancel) { { on_job_failure: on_job_failure } }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('conservative')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'with auto_cancel_on_new_commit: nil and auto_cancel_on_job_failure: nil' do
+ let(:on_new_commit) { nil }
+ let(:on_job_failure) { nil }
+
+ it_behaves_like 'not saving pipeline metadata'
+ end
+
+ context 'with auto_cancel_on_new_commit valid and auto_cancel_on_job_failure: nil' do
+ let(:on_job_failure) { nil }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'with auto_cancel_on_new_commit: nil and auto_cancel_on_job_failure valid' do
+ let(:on_new_commit) { nil }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('conservative')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'when auto_cancel_on_job_failure: none' do
+ let(:on_job_failure) { 'none' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'when auto_cancel_pipeline_on_job_failure feature is disabled' do
+ before do
+ stub_feature_flags(auto_cancel_pipeline_on_job_failure: false)
+ end
+
+ it 'ignores the auto_cancel_on_job_failure value' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+ end
+
+ context 'with both pipeline name and auto_cancel' do
+ let(:config) do
+ {
+ workflow: {
+ name: 'Pipeline name',
+ auto_cancel: {
+ on_new_commit: 'interruptible',
+ on_job_failure: 'none'
+ }
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ it_behaves_like 'not breaking the chain'
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.name).to eq('Pipeline name')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 91bb94bbb11..476b1be35a9 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -34,12 +34,15 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continu
{ rspec: { script: 'rspec' } }
end
+ let(:ff_always_set_pipeline_failure_reason) { true }
+
def run_chain
dependencies.map(&:perform!)
step.perform!
end
before do
+ stub_feature_flags(always_set_pipeline_failure_reason: ff_always_set_pipeline_failure_reason)
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@@ -100,7 +103,27 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continu
it 'increments the error metric' do
counter = Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc')
- expect { run_chain }.to change { counter.get(reason: 'unknown_failure') }.by(1)
+ expect { run_chain }.to change { counter.get(reason: 'filtered_by_rules') }.by(1)
+ end
+
+ it 'sets the failure reason without persisting the pipeline', :aggregate_failures do
+ run_chain
+
+ expect(pipeline).not_to be_persisted
+ expect(pipeline).to be_failed
+ expect(pipeline).to be_filtered_by_rules
+ end
+
+ context 'when ff always_set_pipeline_failure_reason is disabled' do
+ let(:ff_always_set_pipeline_failure_reason) { false }
+
+ it 'does not set the failure reason and does not persist the pipeline', :aggregate_failures do
+ run_chain
+
+ expect(pipeline).not_to be_persisted
+ expect(pipeline).not_to be_failed
+ expect(pipeline).not_to be_filtered_by_rules
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index 52a00e0d501..4017076d29f 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
+RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External, feature_category: :continuous_integration do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, :with_sign_ins) }
@@ -328,11 +328,12 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
context 'when save_incompleted is false' do
let(:save_incompleted) { false }
- it 'adds errors to the pipeline without dropping it' do
+ it 'adds errors to the pipeline without persisting it', :aggregate_failures do
perform!
- expect(pipeline.status).to eq('pending')
expect(pipeline).not_to be_persisted
+ expect(pipeline.status).to eq('failed')
+ expect(pipeline).to be_external_validation_failure
expect(pipeline.errors.to_a).to include('External validation failed')
end
diff --git a/spec/lib/gitlab/ci/reports/sbom/source_spec.rb b/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
index c1eaea511b7..09a601833ad 100644
--- a/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
+++ b/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
@@ -5,47 +5,93 @@ require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Reports::Sbom::Source, feature_category: :dependency_management do
let(:attributes) do
{
- type: :dependency_scanning,
- data: {
- 'category' => 'development',
- 'input_file' => { 'path' => 'package-lock.json' },
- 'source_file' => { 'path' => 'package.json' },
- 'package_manager' => { 'name' => 'npm' },
- 'language' => { 'name' => 'JavaScript' }
- }
+ type: type,
+ data: { 'category' => 'development',
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' } }.merge(extra_attributes)
}
end
- subject { described_class.new(**attributes) }
+ subject(:source) { described_class.new(**attributes) }
- it 'has correct attributes' do
- expect(subject).to have_attributes(
- source_type: attributes[:type],
- data: attributes[:data]
- )
- end
+ shared_examples_for 'it has correct common attributes' do
+ it 'has correct type and data' do
+ expect(subject).to have_attributes(
+ source_type: type,
+ data: attributes[:data]
+ )
+ end
- describe '#source_file_path' do
- it 'returns the correct source_file_path' do
- expect(subject.source_file_path).to eq('package.json')
+ describe '#packager' do
+ it 'returns the correct package manager name' do
+ expect(subject.packager).to eq("npm")
+ end
end
- end
- describe '#input_file_path' do
- it 'returns the correct input_file_path' do
- expect(subject.input_file_path).to eq("package-lock.json")
+ describe '#language' do
+ it 'returns the correct language' do
+ expect(subject.language).to eq("JavaScript")
+ end
end
end
- describe '#packager' do
- it 'returns the correct package manager name' do
- expect(subject.packager).to eq("npm")
+ context 'when dependency scanning' do
+ let(:type) { :dependency_scanning }
+ let(:extra_attributes) do
+ {
+ 'input_file' => { 'path' => 'package-lock.json' },
+ 'source_file' => { 'path' => 'package.json' }
+ }
+ end
+
+ it_behaves_like 'it has correct common attributes'
+
+ describe '#source_file_path' do
+ it 'returns the correct source_file_path' do
+ expect(subject.source_file_path).to eq('package.json')
+ end
+ end
+
+ describe '#input_file_path' do
+ it 'returns the correct input_file_path' do
+ expect(subject.input_file_path).to eq("package-lock.json")
+ end
end
end
- describe '#language' do
- it 'returns the correct langauge' do
- expect(subject.language).to eq("JavaScript")
+ context 'when container scanning' do
+ let(:type) { :container_scanning }
+ let(:extra_attributes) do
+ {
+ "image" => { "name" => "rhel", "tag" => "7.1" },
+ "operating_system" => { "name" => "Red Hat Enterprise Linux", "version" => "7" }
+ }
+ end
+
+ it_behaves_like 'it has correct common attributes'
+
+ describe "#image_name" do
+ subject { source.image_name }
+
+ it { is_expected.to eq("rhel") }
+ end
+
+ describe "#image_tag" do
+ subject { source.image_tag }
+
+ it { is_expected.to eq("7.1") }
+ end
+
+ describe "#operating_system_name" do
+ subject { source.operating_system_name }
+
+ it { is_expected.to eq("Red Hat Enterprise Linux") }
+ end
+
+ describe "#operating_system_version" do
+ subject { source.operating_system_version }
+
+ it { is_expected.to eq("7") }
end
end
end
diff --git a/spec/lib/gitlab/ci/runner_instructions_spec.rb b/spec/lib/gitlab/ci/runner_instructions_spec.rb
index 31c53d4a030..6da649393f3 100644
--- a/spec/lib/gitlab/ci/runner_instructions_spec.rb
+++ b/spec/lib/gitlab/ci/runner_instructions_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::RunnerInstructions, feature_category: :runner_fleet do
+RSpec.describe Gitlab::Ci::RunnerInstructions, feature_category: :fleet_visibility do
using RSpec::Parameterized::TableSyntax
let(:params) { {} }
diff --git a/spec/lib/gitlab/ci/runner_releases_spec.rb b/spec/lib/gitlab/ci/runner_releases_spec.rb
index 9e211327dee..126a5b85471 100644
--- a/spec/lib/gitlab/ci/runner_releases_spec.rb
+++ b/spec/lib/gitlab/ci/runner_releases_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::RunnerReleases, feature_category: :runner_fleet do
+RSpec.describe Gitlab::Ci::RunnerReleases, feature_category: :fleet_visibility do
subject { described_class.instance }
let(:runner_releases_url) { 'http://testurl.com/runner_public_releases' }
diff --git a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
index 526d6cba657..778c0aa69de 100644
--- a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
+++ b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::RunnerUpgradeCheck, feature_category: :runner_fleet do
+RSpec.describe Gitlab::Ci::RunnerUpgradeCheck, feature_category: :fleet_visibility do
using RSpec::Parameterized::TableSyntax
subject(:instance) { described_class.new(gitlab_version, runner_releases) }
diff --git a/spec/lib/gitlab/ci/templates/Diffblue_Cover_spec.rb b/spec/lib/gitlab/ci/templates/Diffblue_Cover_spec.rb
new file mode 100644
index 00000000000..c16356bfda7
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/Diffblue_Cover_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Diffblue-Cover.gitlab-ci.yml', feature_category: :continuous_integration do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Diffblue-Cover') }
+
+ describe 'the created pipeline' do
+ let(:pipeline_branch) { 'patch-1' }
+ let_it_be(:project) { create(:project, :repository, create_branch: 'patch-1') }
+ let(:user) { project.first_owner }
+
+ let(:mr_service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
+ let(:merge_request) { create(:merge_request, :simple, source_project: project, source_branch: pipeline_branch) }
+ let(:mr_pipeline) { mr_service.execute(merge_request).payload }
+ let(:mr_build_names) { mr_pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ end
+
+ it 'creates diffblue-cover jobs' do
+ expect(mr_build_names).to include('diffblue-cover')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb
index 36c6e805bdf..98f0d32960b 100644
--- a/spec/lib/gitlab/ci/templates/templates_spec.rb
+++ b/spec/lib/gitlab/ci/templates/templates_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe 'CI YML Templates' do
context 'that support autodevops' do
exceptions = [
+ 'Diffblue-Cover.gitlab-ci.yml', # no auto-devops
'Security/DAST.gitlab-ci.yml', # DAST stage is defined inside AutoDevops yml
'Security/DAST-API.gitlab-ci.yml', # no auto-devops
'Security/API-Fuzzing.gitlab-ci.yml', # no auto-devops
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
index 860a1fd30bd..f8d67a6f0b4 100644
--- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -66,6 +66,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
let_it_be(:assignees) { create_list(:user, 2) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:labels) { create_list(:label, 2) }
+ let(:merge_request_description) { nil }
let(:merge_request) do
create(:merge_request, :simple,
@@ -73,6 +74,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
target_project: project,
assignees: assignees,
milestone: milestone,
+ description: merge_request_description,
labels: labels)
end
@@ -113,6 +115,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
merge_request.source_branch
).to_s,
'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description,
+ 'CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED' => 'false',
'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
@@ -121,6 +125,78 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
'CI_MERGE_REQUEST_SQUASH_ON_MERGE' => merge_request.squash_on_merge?.to_s
end
+ context 'when merge request description hits the limit' do
+ let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH + 1) }
+
+ it 'truncates the exposed description' do
+ truncated_description = merge_request.description.truncate(
+ MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH
+ )
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => truncated_description,
+ 'CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED' => 'true'
+ )
+ end
+ end
+
+ context 'when merge request description fits the length limit' do
+ let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH - 1) }
+
+ it 'does not truncate the exposed description' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description,
+ 'CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED' => 'false'
+ )
+ end
+ end
+
+ context 'when truncate_ci_merge_request_description feature flag is disabled' do
+ before do
+ stub_feature_flags(truncate_ci_merge_request_description: false)
+ end
+
+ context 'when merge request description hits the limit' do
+ let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH + 1) }
+
+ it 'does not truncate the exposed description' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
+ )
+ expect(subject.to_hash)
+ .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
+ end
+ end
+
+ context 'when merge request description fits the length limit' do
+ let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH - 1) }
+
+ it 'does not truncate the exposed description' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
+ )
+ expect(subject.to_hash)
+ .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
+ end
+ end
+
+ context 'when merge request description does not exist' do
+ let(:merge_request_description) { nil }
+
+ it 'does not truncate the exposed description' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
+ )
+ expect(subject.to_hash)
+ .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
+ end
+ end
+ end
+
it 'exposes diff variables' do
expect(subject.to_hash)
.to include(
@@ -214,6 +290,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => merge_request.source_branch_sha,
'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description,
'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
diff --git a/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb b/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
index cd68b0cdf2b..f5845e492bc 100644
--- a/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
+++ b/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
@@ -39,6 +39,15 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
]
end
+ let(:pipeline_dotenv_variables) do
+ [
+ { key: 'PIPELINE_DOTENV_VAR1', value: 'variable 1' },
+ { key: 'PIPELINE_DOTENV_VAR2', value: 'variable 2' },
+ { key: 'PIPELINE_DOTENV_RAW_VAR3', value: '$REF1', raw: true },
+ { key: 'PIPELINE_DOTENV_INTERPOLATION_VAR4', value: 'interpolate $REF1 $REF2' }
+ ]
+ end
+
let(:bridge) do
instance_double(
'Ci::Bridge',
@@ -48,7 +57,8 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
expand_file_refs?: false,
yaml_variables: yaml_variables,
pipeline_variables: pipeline_variables,
- pipeline_schedule_variables: pipeline_schedule_variables
+ pipeline_schedule_variables: pipeline_schedule_variables,
+ dependency_variables: pipeline_dotenv_variables
)
end
@@ -69,7 +79,12 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
{ key: 'PIPELINE_SCHEDULE_VAR1', value: 'variable 1' },
{ key: 'PIPELINE_SCHEDULE_VAR2', value: 'variable 2' },
{ key: 'PIPELINE_SCHEDULE_RAW_VAR3', value: '$REF1', raw: true },
- { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR4', value: 'interpolate ref 1 ref 2' }
+ { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR4', value: 'interpolate ref 1 ref 2' },
+ { key: 'PIPELINE_DOTENV_VAR1', value: 'variable 1' },
+ { key: 'PIPELINE_DOTENV_VAR2', value: 'variable 2' },
+ { key: 'PIPELINE_DOTENV_RAW_VAR3', value: '$REF1', raw: true },
+ { key: 'PIPELINE_DOTENV_INTERPOLATION_VAR4', value: 'interpolate ref 1 ref 2' }
+
]
expect(generator.calculate).to contain_exactly(*expected)
@@ -79,6 +94,7 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
allow(bridge).to receive(:yaml_variables).and_return([])
allow(bridge).to receive(:pipeline_variables).and_return([])
allow(bridge).to receive(:pipeline_schedule_variables).and_return([])
+ allow(bridge).to receive(:dependency_variables).and_return([])
expect(generator.calculate).to be_empty
end
@@ -105,6 +121,10 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
[{ key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate $REF1 $REF2 $FILE_REF3 $FILE_REF4' }]
end
+ let(:pipeline_dotenv_variables) do
+ [{ key: 'PIPELINE_DOTENV_INTERPOLATION_VAR', value: 'interpolate $REF1 $REF2 $FILE_REF3 $FILE_REF4' }]
+ end
+
context 'when expand_file_refs is true' do
before do
allow(bridge).to receive(:expand_file_refs?).and_return(true)
@@ -114,7 +134,8 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
expected = [
{ key: 'INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' },
{ key: 'PIPELINE_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' },
- { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' }
+ { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' },
+ { key: 'PIPELINE_DOTENV_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' }
]
expect(generator.calculate).to contain_exactly(*expected)
@@ -131,6 +152,7 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
{ key: 'INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
{ key: 'PIPELINE_INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
{ key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
+ { key: 'PIPELINE_DOTENV_INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
{ key: 'FILE_REF3', value: 'ref 3', variable_type: :file }
]
diff --git a/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb b/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
new file mode 100644
index 00000000000..03ff7077969
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module Gitlab
+ module Ci
+ RSpec.describe YamlProcessor, feature_category: :pipeline_composition do
+ subject(:processor) { described_class.new(config, user: nil).execute }
+
+ let(:builds) { processor.builds }
+
+ context 'with interruptible' do
+ let(:default_config) { nil }
+
+ let(:config) do
+ <<~YAML
+ #{default_config}
+
+ build1:
+ script: rspec
+ interruptible: true
+
+ build2:
+ script: rspec
+ interruptible: false
+
+ build3:
+ script: rspec
+
+ bridge1:
+ trigger: some/project
+ interruptible: true
+
+ bridge2:
+ trigger: some/project
+ interruptible: false
+
+ bridge3:
+ trigger: some/project
+ YAML
+ end
+
+ it 'returns jobs with their interruptible value' do
+ expect(builds).to contain_exactly(
+ a_hash_including(name: 'build1', interruptible: true),
+ a_hash_including(name: 'build2', interruptible: false),
+ a_hash_including(name: 'build3').and(exclude(:interruptible)),
+ a_hash_including(name: 'bridge1', interruptible: true),
+ a_hash_including(name: 'bridge2', interruptible: false),
+ a_hash_including(name: 'bridge3').and(exclude(:interruptible))
+ )
+ end
+
+ context 'when default:interruptible is true' do
+ let(:default_config) do
+ <<~YAML
+ default:
+ interruptible: true
+ YAML
+ end
+
+ it 'returns jobs with their interruptible value' do
+ expect(builds).to contain_exactly(
+ a_hash_including(name: 'build1', interruptible: true),
+ a_hash_including(name: 'build2', interruptible: false),
+ a_hash_including(name: 'build3', interruptible: true),
+ a_hash_including(name: 'bridge1', interruptible: true),
+ a_hash_including(name: 'bridge2', interruptible: false),
+ a_hash_including(name: 'bridge3', interruptible: true)
+ )
+ end
+ end
+
+ context 'when default:interruptible is false' do
+ let(:default_config) do
+ <<~YAML
+ default:
+ interruptible: false
+ YAML
+ end
+
+ it 'returns jobs with their interruptible value' do
+ expect(builds).to contain_exactly(
+ a_hash_including(name: 'build1', interruptible: true),
+ a_hash_including(name: 'build2', interruptible: false),
+ a_hash_including(name: 'build3', interruptible: false),
+ a_hash_including(name: 'bridge1', interruptible: true),
+ a_hash_including(name: 'bridge2', interruptible: false),
+ a_hash_including(name: 'bridge3', interruptible: false)
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index f01c1c7d053..844a6849c8f 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -123,55 +123,6 @@ module Gitlab
end
end
- describe 'interruptible entry' do
- describe 'interruptible job' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec', interruptible: true })
- end
-
- it { expect(rspec_build[:interruptible]).to be_truthy }
- end
-
- describe 'interruptible job with default value' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec' })
- end
-
- it { expect(rspec_build).not_to have_key(:interruptible) }
- end
-
- describe 'uninterruptible job' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec', interruptible: false })
- end
-
- it { expect(rspec_build[:interruptible]).to be_falsy }
- end
-
- it "returns interruptible when overridden for job" do
- config = YAML.dump({ default: { interruptible: true },
- rspec: { script: "rspec" } })
-
- config_processor = described_class.new(config).execute
- builds = config_processor.builds.select { |b| b[:stage] == "test" }
-
- expect(builds.size).to eq(1)
- expect(builds.first).to eq({
- stage: "test",
- stage_idx: 2,
- name: "rspec",
- only: { refs: %w[branches tags] },
- options: { script: ["rspec"] },
- interruptible: true,
- allow_failure: false,
- when: "on_success",
- job_variables: [],
- root_variables_inheritance: true,
- scheduling_type: :stage
- })
- end
- end
-
describe 'retry entry' do
context 'when retry count is specified' do
let(:config) do
@@ -544,6 +495,27 @@ module Gitlab
expect(subject.workflow_name).to be_nil
end
end
+
+ context 'with auto_cancel' do
+ let(:config) do
+ <<-YML
+ workflow:
+ auto_cancel:
+ on_new_commit: interruptible
+ on_job_failure: all
+
+ hello:
+ script: echo world
+ YML
+ end
+
+ it 'parses the workflow:auto_cancel as workflow_auto_cancel' do
+ expect(subject.workflow_auto_cancel).to eq({
+ on_new_commit: 'interruptible',
+ on_job_failure: 'all'
+ })
+ end
+ end
end
describe '#warnings' do
@@ -1313,6 +1285,46 @@ module Gitlab
})
end
end
+
+ context 'when image and service have docker options' do
+ let(:config) do
+ <<~YAML
+ test:
+ script: exit 0
+ image:
+ name: ruby:2.7
+ docker:
+ platform: linux/amd64
+ services:
+ - name: postgres:11.9
+ docker:
+ platform: linux/amd64
+ YAML
+ end
+
+ it { is_expected.to be_valid }
+
+ it "returns with image" do
+ expect(processor.builds).to contain_exactly({
+ stage: "test",
+ stage_idx: 2,
+ name: "test",
+ only: { refs: %w[branches tags] },
+ options: {
+ script: ["exit 0"],
+ image: { name: "ruby:2.7",
+ executor_opts: { docker: { platform: 'linux/amd64' } } },
+ services: [{ name: "postgres:11.9",
+ executor_opts: { docker: { platform: 'linux/amd64' } } }]
+ },
+ allow_failure: false,
+ when: "on_success",
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ })
+ end
+ end
end
describe 'Variables' do
diff --git a/spec/lib/gitlab/circuit_breaker/notifier_spec.rb b/spec/lib/gitlab/circuit_breaker/notifier_spec.rb
new file mode 100644
index 00000000000..1640ebb99f9
--- /dev/null
+++ b/spec/lib/gitlab/circuit_breaker/notifier_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::CircuitBreaker::Notifier, feature_category: :shared do
+ subject(:instance) { described_class.new }
+
+ describe '#notify' do
+ context 'when event is failure' do
+ it 'sends an exception to Gitlab::ErrorTracking' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ instance.notify('test_service', 'failure')
+ end
+ end
+
+ context 'when event is not failure' do
+ it 'does not send an exception to Gitlab::ErrorTracking' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ instance.notify('test_service', 'test_event')
+ end
+ end
+ end
+
+ describe '#notify_warning' do
+ it do
+ expect { instance.notify_warning('test_service', 'test_message') }.not_to raise_error
+ end
+ end
+
+ describe '#notify_run' do
+ it do
+ expect { instance.notify_run('test_service') { puts 'test block' } }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/lib/gitlab/circuit_breaker/store_spec.rb b/spec/lib/gitlab/circuit_breaker/store_spec.rb
new file mode 100644
index 00000000000..1b1983d4b52
--- /dev/null
+++ b/spec/lib/gitlab/circuit_breaker/store_spec.rb
@@ -0,0 +1,201 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::CircuitBreaker::Store, :clean_gitlab_redis_rate_limiting, feature_category: :ai_abstraction_layer do
+ let(:key) { 'key-1' }
+ let(:value) { 'value' }
+ let(:circuit_store) { described_class.new }
+
+ shared_examples 'reliable circuit breaker store method' do
+ it 'does not raise an error when Redis::BaseConnectionError is encountered' do
+ allow(Gitlab::Redis::RateLimiting)
+ .to receive(:with)
+ .and_raise(Redis::BaseConnectionError)
+
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ describe '#key?' do
+ subject(:key?) { circuit_store.key?(key) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ context 'when key exists' do
+ before do
+ circuit_store.store(key, value)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when key does not exist' do
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#store' do
+ let(:options) { {} }
+
+ subject(:store) { circuit_store.store(key, value, options) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ it 'stores value for specified key without expiry by default' do
+ expect(store).to eq(value)
+
+ with_redis do |redis|
+ expect(redis.get(key)).to eq(value)
+ expect(redis.ttl(key)).to eq(-1)
+ end
+ end
+
+ context 'when expires option is set' do
+ let(:options) { { expires: 10 } }
+
+ it 'stores value for specified key with expiry' do
+ expect(store).to eq(value)
+
+ with_redis do |redis|
+ expect(redis.get(key)).to eq(value)
+ expect(redis.ttl(key)).to eq(10)
+ end
+ end
+ end
+ end
+
+ describe '#increment' do
+ let(:options) { {} }
+
+ subject(:increment) { circuit_store.increment(key, 1, options) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ context 'when key does not exist' do
+ it 'sets key and increments value' do
+ increment
+
+ with_redis do |redis|
+ expect(redis.get(key).to_i).to eq(1)
+ expect(redis.ttl(key)).to eq(-1)
+ end
+ end
+
+ context 'with expiry' do
+ let(:options) { { expires: 10 } }
+
+ it 'sets key and increments value with expiration' do
+ increment
+
+ with_redis do |redis|
+ expect(redis.get(key).to_i).to eq(1)
+ expect(redis.ttl(key)).to eq(10)
+ end
+ end
+ end
+ end
+
+ context 'when key exists' do
+ before do
+ circuit_store.store(key, 1)
+ end
+
+ it 'increments value' do
+ increment
+
+ with_redis do |redis|
+ expect(redis.get(key).to_i).to eq(2)
+ expect(redis.ttl(key)).to eq(-1)
+ end
+ end
+
+ context 'with expiry' do
+ let(:options) { { expires: 10 } }
+
+ it 'increments value with expiration' do
+ increment
+
+ with_redis do |redis|
+ expect(redis.get(key).to_i).to eq(2)
+ expect(redis.ttl(key)).to eq(10)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#load' do
+ subject(:load) { circuit_store.load(key) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ context 'when key exists' do
+ before do
+ circuit_store.store(key, value)
+ end
+
+ it 'returns the value of the key' do
+ expect(load).to eq(value)
+ end
+ end
+
+ context 'when key does not exist' do
+ it 'returns nil' do
+ expect(load).to be_nil
+ end
+ end
+ end
+
+ describe '#values_at' do
+ let(:other_key) { 'key-2' }
+ let(:other_value) { 'value-2' }
+
+ subject(:values_at) { circuit_store.values_at(key, other_key) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ context 'when keys exist' do
+ before do
+ circuit_store.store(key, value)
+ circuit_store.store(other_key, other_value)
+ end
+
+ it 'returns values of keys' do
+ expect(values_at).to match_array([value, other_value])
+ end
+ end
+
+ context 'when some keys do not exist' do
+ before do
+ circuit_store.store(key, value)
+ end
+
+ it 'returns values of keys with nil for non-existing ones' do
+ expect(values_at).to match_array([value, nil])
+ end
+ end
+ end
+
+ describe '#delete' do
+ subject(:delete) { circuit_store.delete(key) }
+
+ before do
+ circuit_store.store(key, value)
+ end
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ it 'deletes key' do
+ delete
+
+ with_redis do |redis|
+ expect(redis.exists?(key)).to eq(false)
+ end
+ end
+ end
+
+ def with_redis(&block)
+ Gitlab::Redis::RateLimiting.with(&block)
+ end
+end
diff --git a/spec/lib/gitlab/circuit_breaker_spec.rb b/spec/lib/gitlab/circuit_breaker_spec.rb
new file mode 100644
index 00000000000..4cd2f41869e
--- /dev/null
+++ b/spec/lib/gitlab/circuit_breaker_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::CircuitBreaker, :clean_gitlab_redis_rate_limiting, feature_category: :shared do
+ let(:service_name) { 'DummyService' }
+ let(:volume_threshold) { 5 }
+ let(:circuit) do
+ Circuitbox.circuit(service_name,
+ { volume_threshold: volume_threshold, exceptions: [Gitlab::CircuitBreaker::InternalServerError] })
+ end
+
+ let(:dummy_class) do
+ Class.new do
+ def dummy_method
+ Gitlab::CircuitBreaker.run_with_circuit('DummyService') do
+ raise Gitlab::CircuitBreaker::InternalServerError
+ end
+ end
+
+ def another_dummy_method
+ Gitlab::CircuitBreaker.run_with_circuit('DummyService') do
+ # Do nothing but successful.
+ end
+ end
+ end
+ end
+
+ subject(:instance) { dummy_class.new }
+
+ before do
+ stub_const(service_name, dummy_class)
+ allow(Circuitbox).to receive(:circuit).and_return(circuit)
+ end
+
+ # rubocop: disable RSpec/AnyInstanceOf -- the instance is defined by an initializer
+ describe '#circuit' do
+ it 'returns nil value' do
+ expect(instance.dummy_method).to be_nil
+ end
+
+ it 'does not raise an error' do
+ expect { instance.dummy_method }.not_to raise_error
+ end
+
+ context 'when failed multiple times below volume threshold' do
+ it 'does not open the circuit' do
+ expect_any_instance_of(Gitlab::CircuitBreaker::Notifier).to receive(:notify)
+ .with(anything, 'failure')
+ .exactly(4).times
+
+ 4.times do
+ instance.dummy_method
+ end
+
+ expect(circuit).not_to be_open
+ end
+ end
+
+ context 'when failed multiple times over volume threshold' do
+ it 'allows the call 5 times, then opens the circuit and skips subsequent calls' do
+ expect_any_instance_of(Gitlab::CircuitBreaker::Notifier).to receive(:notify)
+ .with(anything, 'failure')
+ .exactly(5).times
+
+ expect_any_instance_of(Gitlab::CircuitBreaker::Notifier).to receive(:notify)
+ .with(anything, 'open')
+ .once
+
+ expect_any_instance_of(Gitlab::CircuitBreaker::Notifier).to receive(:notify)
+ .with(anything, 'skipped')
+ .once
+
+ 6.times do
+ instance.dummy_method
+ end
+
+ expect(circuit).to be_open
+ end
+ end
+
+ context 'when circuit is previously open' do
+ before do
+ # Opens the circuit
+ 6.times do
+ instance.dummy_method
+ end
+
+ # Deletes the open key
+ circuit.try_close_next_time
+ end
+
+ context 'when does not fail again' do
+ it 'closes the circuit' do
+ instance.another_dummy_method
+
+ expect(circuit).not_to be_open
+ end
+ end
+
+ context 'when fails again' do
+ it 'opens the circuit' do
+ instance.dummy_method
+
+ expect(circuit).to be_open
+ end
+ end
+ end
+ end
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ describe '#run_with_circuit' do
+ let(:block) { proc {} }
+
+ it 'runs the code block within the Circuitbox circuit' do
+ expect(circuit).to receive(:run).with(exception: false, &block)
+ described_class.run_with_circuit('service', &block)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb
index 326e27fa716..d1dbd167d48 100644
--- a/spec/lib/gitlab/contributions_calendar_spec.rb
+++ b/spec/lib/gitlab/contributions_calendar_spec.rb
@@ -19,9 +19,9 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
end
end
- let_it_be(:feature_project) do
+ let_it_be(:public_project_with_private_issues) do
create(:project, :public, :issues_private) do |project|
- create(:project_member, user: contributor, project: project).project
+ create(:project_member, user: contributor, project: project)
end
end
@@ -45,7 +45,12 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
end
def create_event(project, day, hour = 0, action = :created, target_symbol = :issue)
- targets[project] ||= create(target_symbol, project: project, author: contributor)
+ targets[project] ||=
+ if target_symbol == :merge_request
+ create(:merge_request, source_project: project, author: contributor)
+ else
+ create(target_symbol, project: project, author: contributor)
+ end
Event.create!(
project: project,
@@ -58,7 +63,7 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
end
describe '#activity_dates', :aggregate_failures do
- it "returns a hash of date => count" do
+ it 'returns a hash of date => count' do
create_event(public_project, last_week)
create_event(public_project, last_week)
create_event(public_project, today)
@@ -114,6 +119,15 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
expect(calendar(contributor).activity_dates[today]).to eq(2)
end
+ it "counts merge request events" do
+ create_event(public_project, today, 0, :created, :merge_request)
+ create_event(public_project, today, 1, :closed, :merge_request)
+ create_event(public_project, today, 2, :approved, :merge_request)
+ create_event(public_project, today, 3, :merged, :merge_request)
+
+ expect(calendar(contributor).activity_dates[today]).to eq(4)
+ end
+
context "when events fall under different dates depending on the system time zone" do
before do
create_event(public_project, today, 1)
@@ -189,10 +203,10 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
it "only shows private events to authorized users" do
e1 = create_event(public_project, today)
e2 = create_event(private_project, today)
- e3 = create_event(feature_project, today)
+ e3 = create_event(public_project_with_private_issues, today, 0, :created, :issue)
create_event(public_project, last_week)
- expect(calendar.events_by_date(today)).to contain_exactly(e1, e3)
+ expect(calendar.events_by_date(today)).to contain_exactly(e1)
expect(calendar(contributor).events_by_date(today)).to contain_exactly(e1, e2, e3)
end
@@ -202,6 +216,17 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
expect(calendar.events_by_date(today)).to contain_exactly(e1)
end
+ it 'includes merge request events' do
+ mr_created_event = create_event(public_project, today, 0, :created, :merge_request)
+ mr_closed_event = create_event(public_project, today, 1, :closed, :merge_request)
+ mr_approved_event = create_event(public_project, today, 2, :approved, :merge_request)
+ mr_merged_event = create_event(public_project, today, 3, :merged, :merge_request)
+
+ expect(calendar.events_by_date(today)).to contain_exactly(
+ mr_created_event, mr_closed_event, mr_approved_event, mr_merged_event
+ )
+ end
+
context 'when the user cannot read cross project' do
before do
allow(Ability).to receive(:allowed?).and_call_original
@@ -215,40 +240,4 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
end
end
end
-
- describe '#starting_year' do
- let(:travel_time) { Time.find_zone('UTC').local(2020, 12, 31, 19, 0, 0) }
-
- context "when the contributor's timezone is not set" do
- it "is the start of last year in the system timezone" do
- expect(calendar.starting_year).to eq(2019)
- end
- end
-
- context "when the contributor's timezone is set to Sydney" do
- let(:contributor) { create(:user, { timezone: 'Sydney' }) }
-
- it "is the start of last year in Sydney" do
- expect(calendar.starting_year).to eq(2020)
- end
- end
- end
-
- describe '#starting_month' do
- let(:travel_time) { Time.find_zone('UTC').local(2020, 12, 31, 19, 0, 0) }
-
- context "when the contributor's timezone is not set" do
- it "is the start of this month in the system timezone" do
- expect(calendar.starting_month).to eq(12)
- end
- end
-
- context "when the contributor's timezone is set to Sydney" do
- let(:contributor) { create(:user, { timezone: 'Sydney' }) }
-
- it "is the start of this month in Sydney" do
- expect(calendar.starting_month).to eq(1)
- end
- end
- end
end
diff --git a/spec/lib/gitlab/counters/buffered_counter_spec.rb b/spec/lib/gitlab/counters/buffered_counter_spec.rb
index 4fd152eb805..e9b3eb3ae62 100644
--- a/spec/lib/gitlab/counters/buffered_counter_spec.rb
+++ b/spec/lib/gitlab/counters/buffered_counter_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
describe '#get' do
it 'returns the value when there is an existing value stored in the counter' do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.set(counter.key, 456)
end
@@ -393,7 +393,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
it 'removes the key from Redis' do
counter.initiate_refresh!
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
expect(redis.exists?(counter.key)).to eq(false)
end
end
@@ -488,7 +488,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
it 'removes all tracking keys' do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
expect { counter.cleanup_refresh }
.to change { redis.scan_each(match: "#{counter.refresh_key}*").to_a.count }.from(4).to(0)
end
@@ -533,7 +533,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
let(:flushed_amount) { 10 }
before do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.incrby(counter.flushed_key, flushed_amount)
end
end
@@ -546,7 +546,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
it 'deletes the relative :flushed key' do
counter.commit_increment!
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
key_exists = redis.exists?(counter.flushed_key)
expect(key_exists).to be_falsey
end
@@ -555,7 +555,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
context 'when deleting :flushed key fails' do
before do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.incrby(counter.flushed_key, 10)
allow(redis).to receive(:del).and_raise('could not delete key')
@@ -614,7 +614,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
with_them do
before do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.set(increment_key, increment) if increment
redis.set(flushed_key, flushed) if flushed
end
@@ -635,19 +635,19 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
def redis_get_key(key)
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.get(key)
end
end
def redis_exists_key(key)
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.exists?(key)
end
end
def redis_key_ttl(key)
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.ttl(key)
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_background_migration_dictionary_spec.rb b/spec/lib/gitlab/database/background_migration/batched_background_migration_dictionary_spec.rb
new file mode 100644
index 00000000000..b3aa0c194d2
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batched_background_migration_dictionary_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Database::BackgroundMigration::BatchedBackgroundMigrationDictionary, feature_category: :database do
+ describe '.entry' do
+ it 'returns a single dictionary entry for the given migration job' do
+ entry = described_class.entry('MigrateHumanUserType')
+ expect(entry.migration_job_name).to eq('MigrateHumanUserType')
+ expect(entry.finalized_by).to eq(20230523101514)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index f70b38377d8..ffede2b6759 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -911,4 +911,18 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(actual).to contain_exactly(migration)
end
end
+
+ describe '#finalize_command' do
+ let_it_be(:migration) do
+ create(
+ :batched_background_migration,
+ gitlab_schema: :gitlab_main,
+ job_arguments: [['column_1'], ['column_1_convert_to_bigint']]
+ )
+ end
+
+ it 'generates the correct finalize command' do
+ expect(migration.finalize_command).to eq("sudo gitlab-rake gitlab:background_migrations:finalize[CopyColumnUsingBackgroundMigrationJob,events,id,'[[\"column_1\"]\\,[\"column_1_convert_to_bigint\"]]']")
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/decomposition/migrate_spec.rb b/spec/lib/gitlab/database/decomposition/migrate_spec.rb
new file mode 100644
index 00000000000..fa2248e8d84
--- /dev/null
+++ b/spec/lib/gitlab/database/decomposition/migrate_spec.rb
@@ -0,0 +1,180 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Decomposition::Migrate, :delete, query_analyzers: false, feature_category: :cell do
+ let(:ci_database_name) do
+ config = ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash
+
+ "#{config[:database]}_ci"
+ end
+
+ let(:ci_connection) do
+ database_model = self.class.const_set(:TestCiApplicationRecord, Class.new(ApplicationRecord))
+
+ database_model.establish_connection(
+ ActiveRecord::DatabaseConfigurations::HashConfig.new(
+ ActiveRecord::Base.connection_db_config.env_name,
+ 'ci',
+ ActiveRecord::Base.connection_db_config.configuration_hash.dup.merge(database: ci_database_name)
+ )
+ )
+
+ Gitlab::Database::LoadBalancing::Setup.new(database_model).setup
+
+ database_model.connection
+ end
+
+ let(:backup_location_postfix) { SecureRandom.alphanumeric(10) }
+
+ before do
+ skip_if_database_exists(:ci)
+
+ allow(SecureRandom).to receive(:alphanumeric).with(10).and_return(backup_location_postfix)
+ end
+
+ after do
+ Milestone.delete_all
+ Ci::Pipeline.delete_all
+ end
+
+ describe '#new' do
+ context 'when backup_location is not specified' do
+ subject(:instance) { described_class.new }
+
+ it 'defaults to subdirectory of configured backup location' do
+ expect(instance.instance_variable_get(:@backup_location)).to eq(
+ File.join(Gitlab.config.backup.path, "migration_#{backup_location_postfix}")
+ )
+ end
+ end
+
+ context 'when backup_location is specified' do
+ let(:backup_base_location) { Rails.root.join('tmp') }
+
+ subject(:instance) { described_class.new(backup_base_location: backup_base_location) }
+
+ it 'uses subdirectory of specified backup_location' do
+ expect(instance.instance_variable_get(:@backup_location)).to eq(
+ File.join(backup_base_location, "migration_#{backup_location_postfix}")
+ )
+ end
+
+ context 'when specified_backup_location does not exist' do
+ let(:backup_base_location) { Rails.root.join('tmp', SecureRandom.alphanumeric(10)) }
+
+ context 'and creation of the directory succeeds' do
+ it 'uses subdirectory of specified backup_location' do
+ expect(instance.instance_variable_get(:@backup_location)).to eq(
+ File.join(backup_base_location, "migration_#{backup_location_postfix}")
+ )
+ end
+ end
+
+ context 'and creation of the directory fails' do
+ before do
+ allow(FileUtils).to receive(:mkdir_p).with(backup_base_location).and_raise(Errno::EROFS.new)
+ end
+
+ it 'raises error' do
+ expect { instance.process! }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "Failed to create directory #{backup_base_location}: Read-only file system"
+ )
+ end
+ end
+ end
+ end
+ end
+
+ describe '#process!' do
+ subject(:process) { described_class.new.process! }
+
+ before do
+ # Database `ci` is not configured. But it can still exist. So drop and create it
+ ActiveRecord::Base.connection.execute("DROP DATABASE IF EXISTS #{ci_database_name} WITH (FORCE)")
+ ActiveRecord::Base.connection.execute("CREATE DATABASE #{ci_database_name}")
+ end
+
+ context 'when the checks pass' do
+ let!(:milestone) { create(:milestone) }
+ let!(:ci_pipeline) { create(:ci_pipeline) }
+
+ it 'copies main database to ci database' do
+ process
+
+ ci_milestones = ci_connection.execute("SELECT COUNT(*) FROM milestones").getvalue(0, 0)
+ ci_pipelines = ci_connection.execute("SELECT COUNT(*) FROM ci_pipelines").getvalue(0, 0)
+
+ expect(ci_milestones).to be(Milestone.count)
+ expect(ci_pipelines).to be(Ci::Pipeline.count)
+ end
+ end
+
+ context 'when local diskspace is not enough' do
+ let(:backup_location) { described_class.new.backup_location }
+ let(:fake_stats) { instance_double(Sys::Filesystem::Stat, bytes_free: 1000) }
+
+ before do
+ allow(Sys::Filesystem).to receive(:stat).with(File.expand_path("#{backup_location}/../")).and_return(fake_stats)
+ end
+
+ it 'raises error' do
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ /Not enough diskspace available on #{backup_location}: Available: (.+?), Needed: (.+?)/
+ )
+ end
+ end
+
+ context 'when connection to ci database fails' do
+ before do
+ ActiveRecord::Base.connection.execute("DROP DATABASE IF EXISTS #{ci_database_name} WITH (FORCE)")
+ end
+
+ it 'raises error' do
+ host = ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash[:host]
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "Can't connect to database '#{ci_database_name} on host '#{host}'. Ensure the database has been created.")
+ end
+ end
+
+ context 'when ci database is not empty' do
+ before do
+ ci_connection.execute("CREATE TABLE IF NOT EXISTS _test_table (id integer, primary key (id))")
+ end
+
+ it 'raises error' do
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "Database '#{ci_database_name}' is not empty"
+ )
+ end
+ end
+
+ context 'when already on decomposed setup' do
+ before do
+ allow(Gitlab::Database).to receive(:database_mode).and_return(Gitlab::Database::MODE_MULTIPLE_DATABASES)
+ end
+
+ it 'raises error' do
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "GitLab is already configured to run on multiple databases"
+ )
+ end
+ end
+
+ context 'when not all background migrations are finished' do
+ let!(:batched_migration) { create(:batched_background_migration, :active) }
+
+ it 'raises error' do
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "Found 1 unfinished Background Migration(s). Please wait until they are finished."
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/dictionary_spec.rb b/spec/lib/gitlab/database/dictionary_spec.rb
index 6d2de41468b..261cf27ed69 100644
--- a/spec/lib/gitlab/database/dictionary_spec.rb
+++ b/spec/lib/gitlab/database/dictionary_spec.rb
@@ -3,81 +3,104 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Dictionary, feature_category: :database do
- subject(:database_dictionary) { described_class.new(file_path) }
+ describe '.entries' do
+ it 'all tables and views are unique' do
+ table_and_view_names = described_class.entries('')
+ table_and_view_names += described_class.entries('views')
+
+ # ignore gitlab_internal due to `ar_internal_metadata`, `schema_migrations`
+ table_and_view_names = table_and_view_names
+ .reject { |database_dictionary| database_dictionary.schema?('gitlab_internal') }
+
+ duplicated_tables = table_and_view_names
+ .group_by(&:key_name)
+ .select { |_, schemas| schemas.count > 1 }
+ .keys
+
+ expect(duplicated_tables).to be_empty, \
+ "Duplicated table(s) #{duplicated_tables.to_a} found in #{described_class}.views_and_tables_to_schema. " \
+ "Any duplicated table must be removed from db/docs/ or ee/db/docs/. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ end
+ end
- context 'for a table' do
- let(:file_path) { 'db/docs/application_settings.yml' }
+ describe '::Entry' do
+ subject(:database_dictionary) { described_class::Entry.new(file_path) }
- describe '#name_and_schema' do
- it 'returns the name of the table and its gitlab schema' do
- expect(database_dictionary.name_and_schema).to match_array(['application_settings', :gitlab_main_clusterwide])
+ context 'for a table' do
+ let(:file_path) { 'db/docs/application_settings.yml' }
+
+ describe '#name_and_schema' do
+ it 'returns the name of the table and its gitlab schema' do
+ expect(database_dictionary.name_and_schema).to match_array(['application_settings', :gitlab_main_clusterwide])
+ end
end
- end
- describe '#table_name' do
- it 'returns the name of the table' do
- expect(database_dictionary.table_name).to eq('application_settings')
+ describe '#table_name' do
+ it 'returns the name of the table' do
+ expect(database_dictionary.table_name).to eq('application_settings')
+ end
end
- end
- describe '#view_name' do
- it 'returns nil' do
- expect(database_dictionary.view_name).to be_nil
+ describe '#view_name' do
+ it 'returns nil' do
+ expect(database_dictionary.view_name).to be_nil
+ end
end
- end
- describe '#milestone' do
- it 'returns the milestone in which the table was introduced' do
- expect(database_dictionary.milestone).to eq('7.7')
+ describe '#milestone' do
+ it 'returns the milestone in which the table was introduced' do
+ expect(database_dictionary.milestone).to eq('7.7')
+ end
end
- end
- describe '#gitlab_schema' do
- it 'returns the gitlab_schema of the table' do
- expect(database_dictionary.table_name).to eq('application_settings')
+ describe '#gitlab_schema' do
+ it 'returns the gitlab_schema of the table' do
+ expect(database_dictionary.table_name).to eq('application_settings')
+ end
end
- end
- describe '#schema?' do
- it 'checks if the given schema matches the schema of the table' do
- expect(database_dictionary.schema?('gitlab_main')).to eq(false)
- expect(database_dictionary.schema?('gitlab_main_clusterwide')).to eq(true)
+ describe '#schema?' do
+ it 'checks if the given schema matches the schema of the table' do
+ expect(database_dictionary.schema?('gitlab_main')).to eq(false)
+ expect(database_dictionary.schema?('gitlab_main_clusterwide')).to eq(true)
+ end
end
- end
- describe '#key_name' do
- it 'returns the value of the name of the table' do
- expect(database_dictionary.key_name).to eq('application_settings')
+ describe '#key_name' do
+ it 'returns the value of the name of the table' do
+ expect(database_dictionary.key_name).to eq('application_settings')
+ end
end
- end
- describe '#validate!' do
- it 'raises an error if the gitlab_schema is empty' do
- allow(database_dictionary).to receive(:gitlab_schema).and_return(nil)
+ describe '#validate!' do
+ it 'raises an error if the gitlab_schema is empty' do
+ allow(database_dictionary).to receive(:gitlab_schema).and_return(nil)
- expect { database_dictionary.validate! }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
+ expect { database_dictionary.validate! }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
+ end
end
end
- end
- context 'for a view' do
- let(:file_path) { 'db/docs/views/postgres_constraints.yml' }
+ context 'for a view' do
+ let(:file_path) { 'db/docs/views/postgres_constraints.yml' }
- describe '#table_name' do
- it 'returns nil' do
- expect(database_dictionary.table_name).to be_nil
+ describe '#table_name' do
+ it 'returns nil' do
+ expect(database_dictionary.table_name).to be_nil
+ end
end
- end
- describe '#view_name' do
- it 'returns the name of the view' do
- expect(database_dictionary.view_name).to eq('postgres_constraints')
+ describe '#view_name' do
+ it 'returns the name of the view' do
+ expect(database_dictionary.view_name).to eq('postgres_constraints')
+ end
end
- end
- describe '#key_name' do
- it 'returns the value of the name of the view' do
- expect(database_dictionary.key_name).to eq('postgres_constraints')
+ describe '#key_name' do
+ it 'returns the value of the name of the view' do
+ expect(database_dictionary.key_name).to eq('postgres_constraints')
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index a47e53c18a5..7fca47c707c 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -1,13 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.shared_examples 'validate path globs' do |path_globs|
- it 'returns an array of path globs' do
- expect(path_globs).to be_an(Array)
- expect(path_globs).to all(be_an(Pathname))
- end
-end
-
RSpec.shared_examples 'validate schema data' do |tables_and_views|
it 'all tables and views have assigned a known gitlab_schema' do
expect(tables_and_views).to all(
@@ -88,32 +81,6 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
end
end
-
- it 'all tables and views are unique' do
- table_and_view_names = described_class.build_dictionary('')
- table_and_view_names += described_class.build_dictionary('views')
-
- # ignore gitlab_internal due to `ar_internal_metadata`, `schema_migrations`
- table_and_view_names = table_and_view_names
- .reject { |database_dictionary| database_dictionary.schema?('gitlab_internal') }
-
- duplicated_tables = table_and_view_names
- .group_by(&:key_name)
- .select { |_, schemas| schemas.count > 1 }
- .keys
-
- expect(duplicated_tables).to be_empty, \
- "Duplicated table(s) #{duplicated_tables.to_a} found in #{described_class}.views_and_tables_to_schema. " \
- "Any duplicated table must be removed from db/docs/ or ee/db/docs/. " \
- "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
- end
- end
-
- describe '.dictionary_path_globs' do
- include_examples 'validate path globs', described_class.dictionary_path_globs('')
- include_examples 'validate path globs', described_class.dictionary_path_globs('views')
- include_examples 'validate path globs', described_class.dictionary_path_globs('deleted_views')
- include_examples 'validate path globs', described_class.dictionary_path_globs('deleted_tables')
end
describe '.tables_to_schema' do
@@ -306,4 +273,16 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
end
end
+
+ describe '.cell_local?' do
+ it 'is true for cell local tables and false otherwise' do
+ expect(described_class.cell_local?('gitlab_ci')).to eq(true)
+ expect(described_class.cell_local?('gitlab_pm')).to eq(true)
+ expect(described_class.cell_local?('gitlab_main_cell')).to eq(true)
+ expect(described_class.cell_local?('gitlab_main')).to eq(false)
+ expect(described_class.cell_local?('gitlab_main_clusterwide')).to eq(false)
+ expect(described_class.cell_local?('gitlab_shared')).to eq(false)
+ expect(described_class.cell_local?('gitlab_internal')).to eq(false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb b/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
index cd145bd5c0f..328cdede794 100644
--- a/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Gitlab::Database::HealthStatus::Indicators::AutovacuumActiveOnTab
before do
swapout_view_for_table(:postgres_autovacuum_activity, connection: connection)
+ stub_feature_flags(skip_autovacuum_health_check_for_ci_builds: false)
end
let(:tables) { [table] }
@@ -59,10 +60,34 @@ RSpec.describe Gitlab::Database::HealthStatus::Indicators::AutovacuumActiveOnTab
expect(subject.indicator_class).to eq(described_class)
end
- it 'returns NoSignal signal in case the feature flag is disabled' do
- stub_feature_flags(batched_migrations_health_status_autovacuum: false)
+ context 'with specific feature flags' do
+ it 'returns NotAvailable on batched_migrations_health_status_autovacuum FF being disabled' do
+ stub_feature_flags(batched_migrations_health_status_autovacuum: false)
- expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
+ end
+
+ context 'with skip_autovacuum_health_check_for_ci_builds FF being enabled' do
+ before do
+ stub_feature_flags(skip_autovacuum_health_check_for_ci_builds: true)
+ end
+
+ context 'for ci_builds table' do
+ let(:table) { 'ci_builds' }
+
+ it 'returns NotAvailable' do
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
+ end
+ end
+
+ context 'for users table' do
+ let(:table) { 'users' }
+
+ it 'returns Stop signal' do
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
+ end
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index c975f5b5ee4..3c14dc23a80 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -92,8 +92,20 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store, fe
end
end
+ shared_examples 'restrict within concurrent ruby' do |lb_method|
+ it 'raises an exception when running within a concurrent Ruby thread' do
+ Thread.current[:restrict_within_concurrent_ruby] = true
+
+ expect { |b| lb.public_send(lb_method, &b) }.to raise_error(Gitlab::Utils::ConcurrentRubyThreadIsUsedError,
+ "Cannot run 'db' if running from `Concurrent::Promise`.")
+
+ Thread.current[:restrict_within_concurrent_ruby] = nil
+ end
+ end
+
describe '#read' do
it_behaves_like 'logs service discovery thread interruption', :read
+ it_behaves_like 'restrict within concurrent ruby', :read
it 'yields a connection for a read' do
connection = double(:connection)
@@ -227,6 +239,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store, fe
describe '#read_write' do
it_behaves_like 'logs service discovery thread interruption', :read_write
+ it_behaves_like 'restrict within concurrent ruby', :read_write
it 'yields a connection for a write' do
connection = ActiveRecord::Base.connection_pool.connection
diff --git a/spec/lib/gitlab/database/migration_spec.rb b/spec/lib/gitlab/database/migration_spec.rb
index 18bbc6c1dd3..8390a5ff19e 100644
--- a/spec/lib/gitlab/database/migration_spec.rb
+++ b/spec/lib/gitlab/database/migration_spec.rb
@@ -34,6 +34,12 @@ RSpec.describe Gitlab::Database::Migration do
# untouched.
expect(described_class[described_class.current_version]).to be < ActiveRecord::Migration::Current
end
+
+ it 'matches the version used by Rubocop' do
+ require 'rubocop'
+ load 'rubocop/cop/migration/versioned_migration_class.rb'
+ expect(described_class.current_version).to eq(RuboCop::Cop::Migration::VersionedMigrationClass::CURRENT_MIGRATION_VERSION)
+ end
end
describe Gitlab::Database::Migration::LockRetriesConcern do
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index a81ccf9583a..5c98379d852 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -71,8 +71,11 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
end
context "when the migration doesn't exist already" do
+ let(:version) { '20231204101122' }
+
before do
allow(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info)
+ allow(migration).to receive(:version).and_return(version)
end
subject(:enqueue_batched_background_migration) do
@@ -81,7 +84,6 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
:projects,
:id,
job_interval: 5.minutes,
- queued_migration_version: format("%.14d", 123),
batch_min_value: 5,
batch_max_value: 1000,
batch_class_name: 'MyBatchClass',
@@ -115,7 +117,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
status_name: :active,
total_tuple_count: pgclass_info.cardinality_estimate,
gitlab_schema: 'gitlab_ci',
- queued_migration_version: format("%.14d", 123)
+ queued_migration_version: version
)
end
end
diff --git a/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb b/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb
index 33e83ea2575..a9ef28a4b51 100644
--- a/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb
+++ b/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb
@@ -18,6 +18,17 @@ RSpec.describe Gitlab::Database::Migrations::PgBackendPid, feature_category: :da
expect { |b| patched_instance.with_advisory_lock_connection(&b) }.to yield_with_args(:conn)
end
+
+ it 're-yields with same arguments and wraps it with calls to .say even when error is raised' do
+ patched_instance = klass.prepend(described_class).new
+ expect(Gitlab::Database::Migrations::PgBackendPid).to receive(:say).twice
+
+ expect do
+ patched_instance.with_advisory_lock_connection do
+ raise ActiveRecord::ConcurrentMigrationError, 'test'
+ end
+ end.to raise_error ActiveRecord::ConcurrentMigrationError
+ end
end
describe '.patch!' do
diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
index c57b8bb5992..60934eb06a5 100644
--- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe 'cross-database foreign keys' do
# should be added as a comment along with the name of the column.
let!(:allowed_cross_database_foreign_keys) do
[
- 'events.author_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429803
'gitlab_subscriptions.hosted_plan_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422012
'group_import_states.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421210
'identities.saml_provider_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422010
@@ -27,10 +26,8 @@ RSpec.describe 'cross-database foreign keys' do
'namespace_commit_emails.email_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429804
'namespace_commit_emails.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429804
'path_locks.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429380
- 'project_authorizations.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422044
'protected_branch_push_access_levels.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/431054
'protected_branch_merge_access_levels.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/431055
- 'security_orchestration_policy_configurations.bot_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429438
'user_group_callouts.user_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/421287
]
end
@@ -59,4 +56,17 @@ RSpec.describe 'cross-database foreign keys' do
end
end
end
+
+ it 'only allows existing foreign keys to be present in the exempted list', :aggregate_failures do
+ allowed_cross_database_foreign_keys.each do |entry|
+ table, _ = entry.split('.')
+
+ all_foreign_keys_for_table = foreign_keys_for(table)
+ fk_entry = all_foreign_keys_for_table.find { |fk| "#{fk.from_table}.#{fk.column}" == entry }
+
+ expect(fk_entry).to be_present,
+ "`#{entry}` is no longer a foreign key. " \
+ "You must remove this entry from the `allowed_cross_database_foreign_keys` list."
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
index 338475fa9c4..d1d7aa12c46 100644
--- a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
+++ b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
@@ -11,7 +11,9 @@ RSpec.describe 'new tables with gitlab_main schema', feature_category: :cell do
# Specific tables can be exempted from this requirement, and such tables must be added to the `exempted_tables` list.
let!(:exempted_tables) do
- []
+ [
+ "audit_events_instance_amazon_s3_configurations" # https://gitlab.com/gitlab-org/gitlab/-/issues/431327
+ ]
end
let!(:starting_from_milestone) { 16.7 }
@@ -48,16 +50,16 @@ RSpec.describe 'new tables with gitlab_main schema', feature_category: :cell do
end
def tables_having_gitlab_main_schema(starting_from_milestone:)
- selected_data = gitlab_main_schema_tables.select do |database_dictionary|
- database_dictionary.milestone.to_f >= starting_from_milestone
+ selected_data = gitlab_main_schema_tables.select do |entry|
+ entry.milestone.to_f >= starting_from_milestone
end
selected_data.map(&:table_name)
end
def gitlab_main_schema_tables
- ::Gitlab::Database::GitlabSchema.build_dictionary('').select do |database_dictionary|
- database_dictionary.schema?('gitlab_main')
+ ::Gitlab::Database::Dictionary.entries.select do |entry|
+ entry.schema?('gitlab_main')
end
end
end
diff --git a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
index fa7645d581c..56899924b60 100644
--- a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
+++ b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
@@ -53,11 +53,11 @@ RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns, feature_category: :data
expect(subject.execute).to eq(
[
['Testing::A', {
- 'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0'),
- 'also_unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-02-01'), '12.1')
+ 'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0', false),
+ 'also_unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-02-01'), '12.1', false)
}],
['Testing::B', {
- 'other' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0')
+ 'other' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0', false)
}]
])
end
diff --git a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
index 79c2c9e32d2..337749446ed 100644
--- a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
let(:connection) { ActiveRecord::Base.connection }
let(:table_name) { :_test_gitlab_ci_partitioned_test }
let(:model) { class_double(ApplicationRecord, table_name: table_name, connection: connection) }
- let(:next_partition_if) { nil }
- let(:detach_partition_if) { nil }
+ let(:next_partition_if) { ->(_) { false } }
+ let(:detach_partition_if) { ->(_) { false } }
subject(:strategy) do
described_class.new(model, :partition,
@@ -62,6 +62,16 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
it 'is the partition with the largest value' do
expect(strategy.active_partition.value).to eq(101)
end
+
+ context 'when there are no partitions' do
+ before do
+ drop_partitions
+ end
+
+ it 'is the initial partition' do
+ expect(strategy.active_partition.value).to eq(100)
+ end
+ end
end
describe '#missing_partitions' do
@@ -74,6 +84,17 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
expect(extra.length).to eq(1)
expect(extra.first.value).to eq(102)
end
+
+ context 'when there are no partitions for the table' do
+ it 'returns partitions for value 100 and 101' do
+ drop_partitions
+
+ missing_partitions = strategy.missing_partitions
+
+ expect(missing_partitions.size).to eq(2)
+ expect(missing_partitions.map(&:value)).to match_array([100, 101])
+ end
+ end
end
context 'when next_partition_if returns false' do
@@ -85,8 +106,8 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
end
context 'when there are no partitions for the table' do
- it 'returns a partition for value 1' do
- connection.execute("drop table #{table_name}_100; drop table #{table_name}_101;")
+ it 'returns a partition for value 100' do
+ drop_partitions
missing_partitions = strategy.missing_partitions
@@ -201,4 +222,8 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
})
end
end
+
+ def drop_partitions
+ connection.execute("drop table #{table_name}_100; drop table #{table_name}_101;")
+ end
end
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
index 2e654a33a58..fd2455e25c0 100644
--- a/spec/lib/gitlab/database/postgres_index_spec.rb
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::PostgresIndex do
+RSpec.describe Gitlab::Database::PostgresIndex, feature_category: :database do
let(:schema) { 'public' }
let(:name) { 'foo_idx' }
let(:identifier) { "#{schema}.#{name}" }
@@ -13,6 +13,9 @@ RSpec.describe Gitlab::Database::PostgresIndex do
CREATE UNIQUE INDEX bar_key ON public.users (id);
CREATE TABLE _test_gitlab_main_example_table (id serial primary key);
+
+ CREATE TABLE _test_partitioned (id bigserial primary key not null) PARTITION BY LIST (id);
+ CREATE TABLE _test_partitioned_1 PARTITION OF _test_partitioned FOR VALUES IN (1);
SQL
end
@@ -25,8 +28,8 @@ RSpec.describe Gitlab::Database::PostgresIndex do
it { is_expected.to be_a Gitlab::Database::SharedModel }
describe '.reindexing_support' do
- it 'only non partitioned indexes' do
- expect(described_class.reindexing_support).to all(have_attributes(partitioned: false))
+ it 'includes partitioned indexes' do
+ expect(described_class.reindexing_support.where("name = '_test_partitioned_1_pkey'")).not_to be_empty
end
it 'only indexes that dont serve an exclusion constraint' do
diff --git a/spec/lib/gitlab/database/postgres_sequences_spec.rb b/spec/lib/gitlab/database/postgres_sequences_spec.rb
new file mode 100644
index 00000000000..2373edaea18
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_sequences_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresSequence, type: :model, feature_category: :database do
+ # PostgresSequence does not `behaves_like 'a postgres model'` because it does not correspond 1-1 with a single entry
+ # in pg_class
+ let(:schema) { ActiveRecord::Base.connection.current_schema }
+ let(:table_name) { '_test_table' }
+ let(:table_name_without_sequence) { '_test_table_without_sequence' }
+
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id bigserial PRIMARY KEY NOT NULL
+ );
+
+ CREATE TABLE #{table_name_without_sequence} (
+ id bigint PRIMARY KEY NOT NULL
+ );
+ SQL
+ end
+
+ describe '#by_table_name' do
+ context 'when table does not have a sequence' do
+ it 'returns an empty collection' do
+ expect(described_class.by_table_name(table_name_without_sequence)).to be_empty
+ end
+ end
+
+ it 'returns the sequence for a given table' do
+ expect(described_class.by_table_name(table_name).first[:table_name]).to eq(table_name)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
index 3650ca1d904..9570a25238e 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -15,7 +15,9 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin, :d
end
let(:config) { ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash.merge(pool: 1) }
- let(:pool) { model.establish_connection(config) }
+ let(:pool) do
+ model.establish_connection(ActiveRecord::DatabaseConfigurations::HashConfig.new(Rails.env, 'main', config))
+ end
it 'calls the force disconnect callback on checkin' do
connection = pool.connection
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
index 28c155c1eb1..7fcdc59b691 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
@@ -42,12 +42,21 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventSetOperatorMismatch, que
end
context 'when SQL does not include a set operator' do
- let(:sql) { 'SELECT 1' }
+ where(:sql) do
+ [
+ 'SELECT 1',
+ 'SELECT union_station',
+ 'SELECT intersection',
+ 'SELECT deny_all_requests_except_allowed from application_settings'
+ ]
+ end
- it 'does not parse SQL' do
- expect(described_class::SelectStmt).not_to receive(:new)
+ with_them do
+ it 'does not parse SQL' do
+ expect(described_class::SelectStmt).not_to receive(:new)
- process_sql sql
+ process_sql sql
+ end
end
end
diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
index 6a614e2488f..ed83ed9e744 100644
--- a/spec/lib/gitlab/database/schema_migrations/context_spec.rb
+++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
@@ -25,12 +25,15 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
context 'multiple databases', :reestablished_active_record_base do
before do
- connection_class.establish_connection(
+ db_config =
ActiveRecord::Base
.connection_pool
.db_config
.configuration_hash
.merge(configuration_overrides)
+
+ connection_class.establish_connection(
+ ActiveRecord::DatabaseConfigurations::HashConfig.new(Rails.env, 'main', db_config)
)
end
diff --git a/spec/lib/gitlab/database/sharding_key_spec.rb b/spec/lib/gitlab/database/sharding_key_spec.rb
new file mode 100644
index 00000000000..b47f5ea5df0
--- /dev/null
+++ b/spec/lib/gitlab/database/sharding_key_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
+ # Specific tables can be temporarily exempt from this requirement. You must add an issue link in a comment next to
+ # the table name to remove this once a decision has been made.
+ let(:allowed_to_be_missing_sharding_key) do
+ [
+ 'abuse_report_assignees', # https://gitlab.com/gitlab-org/gitlab/-/issues/432365
+ 'sbom_occurrences_vulnerabilities' # https://gitlab.com/gitlab-org/gitlab/-/issues/432900
+ ]
+ end
+
+ # Specific tables can be temporarily exempt from this requirement. You must add an issue link in a comment next to
+ # the table name to remove this once a decision has been made.
+ let(:allowed_to_be_missing_not_null) do
+ [
+ 'labels.project_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/434356
+ 'labels.group_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/434356
+ ]
+ end
+
+ let(:starting_from_milestone) { 16.6 }
+
+ let(:allowed_sharding_key_referenced_tables) { %w[projects namespaces organizations] }
+
+ it 'requires a sharding_key for all cell-local tables, after milestone 16.6', :aggregate_failures do
+ tables_missing_sharding_key(starting_from_milestone: starting_from_milestone).each do |table_name|
+ expect(allowed_to_be_missing_sharding_key).to include(table_name), error_message(table_name)
+ end
+ end
+
+ it 'ensures all sharding_key columns exist and reference projects, namespaces or organizations',
+ :aggregate_failures do
+ all_tables_to_sharding_key.each do |table_name, sharding_key|
+ sharding_key.each do |column_name, referenced_table_name|
+ expect(column_exists?(table_name, column_name)).to eq(true),
+ "Could not find sharding key column #{table_name}.#{column_name}"
+ expect(referenced_table_name).to be_in(allowed_sharding_key_referenced_tables)
+ end
+ end
+ end
+
+ it 'ensures all sharding_key columns are not nullable or have a not null check constraint',
+ :aggregate_failures do
+ all_tables_to_sharding_key.each do |table_name, sharding_key|
+ sharding_key.each do |column_name, _|
+ not_nullable = not_nullable?(table_name, column_name)
+ has_null_check_constraint = has_null_check_constraint?(table_name, column_name)
+
+ if allowed_to_be_missing_not_null.include?("#{table_name}.#{column_name}")
+ expect(not_nullable || has_null_check_constraint).to eq(false),
+ "You must remove `#{table_name}.#{column_name}` from allowed_to_be_missing_not_null " \
+ "since it now has a valid constraint."
+ else
+ expect(not_nullable || has_null_check_constraint).to eq(true),
+ "Missing a not null constraint for `#{table_name}.#{column_name}`. " \
+ "All sharding keys must be not nullable or have a NOT NULL check constraint"
+ end
+ end
+ end
+ end
+
+ it 'only allows `allowed_to_be_missing_sharding_key` to include tables that are missing a sharding_key',
+ :aggregate_failures do
+ allowed_to_be_missing_sharding_key.each do |exempted_table|
+ expect(tables_missing_sharding_key(starting_from_milestone: starting_from_milestone)).to include(exempted_table),
+ "`#{exempted_table}` is not missing a `sharding_key`. " \
+ "You must remove this table from the `allowed_to_be_missing_sharding_key` list."
+ end
+ end
+
+ private
+
+ def error_message(table_name)
+ <<~HEREDOC
+ The table `#{table_name}` is missing a `sharding_key` in the `db/docs` YML file.
+ Starting from GitLab #{starting_from_milestone}, we expect all new tables to define a `sharding_key`.
+
+ To choose an appropriate sharding_key for this table please refer
+ to our guidelines at https://docs.gitlab.com/ee/development/database/multiple_databases.html#defining-a-sharding-key-for-all-cell-local-tables, or consult with the Tenant Scale group.
+ HEREDOC
+ end
+
+ def tables_missing_sharding_key(starting_from_milestone:)
+ ::Gitlab::Database::Dictionary.entries.select do |entry|
+ entry.sharding_key.blank? &&
+ entry.milestone.to_f >= starting_from_milestone &&
+ ::Gitlab::Database::GitlabSchema.cell_local?(entry.gitlab_schema)
+ end.map(&:table_name)
+ end
+
+ def all_tables_to_sharding_key
+ entries_with_sharding_key = ::Gitlab::Database::Dictionary.entries.select do |entry|
+ entry.sharding_key.present?
+ end
+
+ entries_with_sharding_key.to_h do |entry|
+ [entry.table_name, entry.sharding_key]
+ end
+ end
+
+ def not_nullable?(table_name, column_name)
+ sql = <<~SQL
+ SELECT 1
+ FROM information_schema.columns
+ WHERE table_schema = 'public' AND
+ table_name = '#{table_name}' AND
+ column_name = '#{column_name}' AND
+ is_nullable = 'NO'
+ SQL
+
+ result = ApplicationRecord.connection.execute(sql)
+
+ result.count > 0
+ end
+
+ def has_null_check_constraint?(table_name, column_name)
+ # This is a heuristic query to look for all check constraints on the table and see if any of them contain a clause
+ # column IS NOT NULL. This is to match tables that will have multiple sharding keys where either of them can be not
+ # null. Such cases may look like:
+ # (project_id IS NOT NULL) OR (group_id IS NOT NULL)
+ # It's possible that this will sometimes incorrectly find a check constraint that isn't exactly as strict as we want
+ # but it should be pretty unlikely.
+ sql = <<~SQL
+ SELECT 1
+ FROM pg_constraint
+ INNER JOIN pg_class ON pg_constraint.conrelid = pg_class.oid
+ WHERE pg_class.relname = '#{table_name}'
+ AND contype = 'c'
+ AND pg_get_constraintdef(pg_constraint.oid) ILIKE '%#{column_name} IS NOT NULL%'
+ SQL
+
+ result = ApplicationRecord.connection.execute(sql)
+
+ result.count > 0
+ end
+
+ def column_exists?(table_name, column_name)
+ sql = <<~SQL
+ SELECT 1
+ FROM information_schema.columns
+ WHERE table_schema = 'public' AND
+ table_name = '#{table_name}' AND
+ column_name = '#{column_name}';
+ SQL
+
+ result = ApplicationRecord.connection.execute(sql)
+
+ result.count > 0
+ end
+end
diff --git a/spec/lib/gitlab/database/transaction/observer_spec.rb b/spec/lib/gitlab/database/transaction/observer_spec.rb
index 778212add66..2d5a59a2d5d 100644
--- a/spec/lib/gitlab/database/transaction/observer_spec.rb
+++ b/spec/lib/gitlab/database/transaction/observer_spec.rb
@@ -21,6 +21,8 @@ RSpec.describe Gitlab::Database::Transaction::Observer, feature_category: :datab
it 'tracks transaction data', :aggregate_failures do
ActiveRecord::Base.transaction do
+ User.first
+
ActiveRecord::Base.transaction(requires_new: true) do
User.first
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 7e0435c815b..624e2b5c144 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -244,9 +244,9 @@ RSpec.describe Gitlab::Database::WithLockRetries, feature_category: :database do
it 'executes `SET LOCAL lock_timeout` using the configured timeout value in milliseconds' do
expect(connection).to receive(:execute).with("RESET idle_in_transaction_session_timeout; RESET lock_timeout").and_call_original
- expect(connection).to receive(:execute).with("SAVEPOINT active_record_1", "TRANSACTION").and_call_original
+ expect(connection).to receive(:create_savepoint).with('active_record_1')
expect(connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original
- expect(connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1", "TRANSACTION").and_call_original
+ expect(connection).to receive(:release_savepoint).with('active_record_1')
subject.run {}
end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index bc4fc49b1b7..2cd27472440 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::File do
include RepoHelpers
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:commit) { project.commit(sample_commit.id) }
let(:diff) { commit.raw_diffs.first }
let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
@@ -51,6 +51,31 @@ RSpec.describe Gitlab::Diff::File do
project.commit(branch_name).diffs.diff_files.first
end
+ describe 'delegated methods' do
+ subject { diff_file }
+
+ %i[
+ new_file?
+ deleted_file?
+ renamed_file?
+ unidiff
+ old_path
+ new_path
+ a_mode
+ b_mode
+ mode_changed?
+ submodule?
+ expanded?
+ too_large?
+ collapsed?
+ line_count
+ has_binary_notice?
+ generated?
+ ].each do |method|
+ it { is_expected.to delegate_method(method).to(:diff) }
+ end
+ end
+
describe '#initialize' do
let(:commit) { project.commit("532c837") }
diff --git a/spec/lib/gitlab/doctor/reset_tokens_spec.rb b/spec/lib/gitlab/doctor/reset_tokens_spec.rb
index 0cc947efdb4..b2155ee83ad 100644
--- a/spec/lib/gitlab/doctor/reset_tokens_spec.rb
+++ b/spec/lib/gitlab/doctor/reset_tokens_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Doctor::ResetTokens, feature_category: :runner_fleet do
+RSpec.describe Gitlab::Doctor::ResetTokens, feature_category: :fleet_visibility do
let(:logger) { instance_double('Logger') }
let(:model_names) { %w[Project Group] }
let(:token_names) { %w[runners_token] }
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index c7b69f39951..30514b531dc 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
+RSpec.describe Gitlab::Email::Handler::CreateNoteHandler, feature_category: :shared do
include_context 'email shared context'
- let_it_be(:user) { create(:user, email: 'jake@adventuretime.ooo') }
- let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be_with_reload(:user) { create(:user, email: 'jake@adventuretime.ooo') }
+ let_it_be(:project) { create(:project, :public, :repository) }
let(:noteable) { note.noteable }
let(:note) { create(:diff_note_on_merge_request, project: project) }
@@ -133,14 +133,16 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
- context 'mail key is in the References header' do
+ context 'when mail key is in the References header' do
let(:email_raw) { fixture_file('emails/reply_without_subaddressing_and_key_inside_references.eml') }
it_behaves_like 'an email that contains a mail key', 'References'
end
- context 'mail key is in the References header with a comma' do
- let(:email_raw) { fixture_file('emails/reply_without_subaddressing_and_key_inside_references_with_a_comma.eml') }
+ context 'when mail key is in the References header with a comma' do
+ let(:email_raw) do
+ fixture_file('emails/reply_without_subaddressing_and_key_inside_references_with_a_comma.eml')
+ end
it_behaves_like 'an email that contains a mail key', 'References'
end
@@ -228,4 +230,110 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
end
+
+ context 'when issue is closed' do
+ let_it_be(:noteable) { create(:issue, :closed, :confidential, project: project) }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project) }
+
+ let!(:sent_notification) do
+ allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(true)
+ SentNotification.record_note(note, Users::Internal.support_bot.id)
+ end
+
+ let(:reply_address) { "support+#{sent_notification.reply_key}@example.com" }
+ let(:reopen_note) { noteable.notes.last }
+ let(:email_raw) do
+ <<~EMAIL
+ From: from@example.com
+ To: #{reply_address}
+ Subject: Issue title
+
+ Issue description
+ EMAIL
+ end
+
+ before do
+ stub_incoming_email_setting(enabled: true, address: 'support+%{key}@example.com')
+ end
+
+ it 'does not reopen issue but adds external participants comment' do
+ # Only 1 from received email
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ expect(noteable).to be_closed
+ end
+
+ context 'when noteable is a commit' do
+ let!(:note) { create(:note_on_commit, project: project) }
+ let!(:noteable) { note.noteable }
+
+ let!(:sent_notification) do
+ allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(true)
+ SentNotification.record_note(note, Users::Internal.support_bot.id)
+ end
+
+ it 'does not reopen issue but adds external participants comment' do
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ end
+ end
+
+ context 'when reopen_issue_on_external_participant_note is true' do
+ shared_examples 'an automatically reopened issue' do
+ it 'reopens issue, adds external participants comment and reopen comment' do
+ # 1 from received email and 1 reopen comment
+ expect { receiver.execute }.to change { noteable.notes.count }.by(2)
+ expect(noteable.reset).to be_open
+
+ expect(reopen_note).to be_confidential
+ expect(reopen_note.author).to eq(Users::Internal.support_bot)
+ expect(reopen_note.note).to include(reopen_comment_body)
+ end
+ end
+
+ let!(:settings) do
+ create(:service_desk_setting, project: project, reopen_issue_on_external_participant_note: true)
+ end
+
+ let(:reopen_comment_body) do
+ s_(
+ "ServiceDesk|This issue has been reopened because it received a new comment from an external participant."
+ )
+ end
+
+ it_behaves_like 'an automatically reopened issue'
+
+ it 'does not contain an assignee mention' do
+ receiver.execute
+ expect(reopen_note.note).not_to include("@")
+ end
+
+ context 'when issue is assigned to a user' do
+ before do
+ noteable.update!(assignees: [user])
+ end
+
+ it_behaves_like 'an automatically reopened issue'
+
+ it 'contains an assignee mention' do
+ receiver.execute
+ expect(reopen_note.note).to include(user.to_reference)
+ end
+ end
+
+ context 'when issue is assigned to multiple users' do
+ let_it_be(:another_user) { create(:user) }
+
+ before do
+ noteable.update!(assignees: [user, another_user])
+ end
+
+ it_behaves_like 'an automatically reopened issue'
+
+ it 'contains two assignee mentions' do
+ receiver.execute
+ expect(reopen_note.note).to include(user.to_reference)
+ expect(reopen_note.note).to include(another_user.to_reference)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index f13fd0be4cd..9d484198cc0 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
let_it_be(:group) { create(:group, :private, :crm_enabled, name: "email") }
+ let(:expected_subject) { "The message subject! @all" }
let(:expected_description) do
"Service desk stuff!\n\n```\na = b\n```\n\n`/label ~label1`\n`/assign @user1`\n`/close`\n![image](uploads/image.png)"
end
@@ -43,7 +44,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
expect(new_issue.author).to eql(Users::Internal.support_bot)
expect(new_issue.confidential?).to be true
expect(new_issue.all_references.all).to be_empty
- expect(new_issue.title).to eq("The message subject! @all")
+ expect(new_issue.title).to eq(expected_subject)
expect(new_issue.description).to eq(expected_description.strip)
expect(new_issue.email&.email_message_id).to eq(message_id)
end
@@ -115,6 +116,40 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
it_behaves_like 'a new issue request'
+ context 'when more than the defined limit of participants are in Cc header' do
+ before do
+ stub_const("IssueEmailParticipants::CreateService::MAX_NUMBER_OF_RECORDS", 6)
+ end
+
+ let(:cc_addresses) { Array.new(6) { |i| "user#{i}@example.com" }.join(', ') }
+ let(:author_email) { 'from@example.com' }
+ let(:expected_subject) { "Issue title" }
+ let(:expected_description) do
+ <<~DESC
+ Issue description
+
+ ![image](uploads/image.png)
+ DESC
+ end
+
+ let(:email_raw) do
+ <<~EMAIL
+ From: #{author_email}
+ To: #{to_address}
+ Cc: #{cc_addresses}
+ Message-ID: <#{message_id}>
+ Subject: #{expected_subject}
+
+ Issue description
+ EMAIL
+ end
+
+ # Author email plus 5 from Cc
+ let(:issue_email_participants_count) { 6 }
+
+ it_behaves_like 'a new issue request'
+ end
+
context 'when no CC header is present' do
let(:email_raw) do
<<~EMAIL
@@ -462,7 +497,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
end
end
- shared_examples 'a handler that does not verify the custom email' do |error_identifier|
+ shared_examples 'a handler that does not verify the custom email' do
it 'does not verify the custom email address' do
# project has no owner, so only notify verification triggerer
expect(Notify).to receive(:service_desk_verification_result_email).once
@@ -477,20 +512,32 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
end
end
- shared_examples 'a handler that verifies Service Desk custom email verification emails' do
+ context 'when using incoming_email address' do
+ before do
+ stub_incoming_email_setting(enabled: true, address: 'support+%{key}@example.com')
+ end
+
it_behaves_like 'an early exiting handler'
context 'with valid service desk settings' do
let_it_be(:user) { create(:user) }
+ let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
- let!(:settings) { create(:service_desk_setting, project: project, custom_email: 'custom-support-email@example.com') }
- let!(:verification) { create(:service_desk_custom_email_verification, project: project, token: 'ZROT4ZZXA-Y6', triggerer: user) }
+ let_it_be_with_reload(:settings) do
+ create(:service_desk_setting, project: project, custom_email: 'custom-support-email@example.com')
+ end
+
+ let_it_be_with_reload(:verification) do
+ create(:service_desk_custom_email_verification, project: project, token: 'ZROT4ZZXA-Y6', triggerer: user)
+ end
let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
- before do
+ before_all do
project.add_maintainer(user)
+ end
+ before do
allow(message_delivery).to receive(:deliver_later)
allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
end
@@ -521,7 +568,9 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
verification.update!(token: 'XXXXXXXXXXXX')
end
- it_behaves_like 'a handler that does not verify the custom email', 'incorrect_token'
+ it_behaves_like 'a handler that does not verify the custom email' do
+ let(:error_identifier) { 'incorrect_token' }
+ end
end
context 'and verification email ingested too late' do
@@ -529,7 +578,9 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
verification.update!(triggered_at: ServiceDesk::CustomEmailVerification::TIMEFRAME.ago)
end
- it_behaves_like 'a handler that does not verify the custom email', 'mail_not_received_within_timeframe'
+ it_behaves_like 'a handler that does not verify the custom email' do
+ let(:error_identifier) { 'mail_not_received_within_timeframe' }
+ end
end
context 'and from header differs from custom email address' do
@@ -537,29 +588,13 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
settings.update!(custom_email: 'different-from@example.com')
end
- it_behaves_like 'a handler that does not verify the custom email', 'incorrect_from'
- end
- end
-
- context 'when service_desk_custom_email feature flag is disabled' do
- before do
- stub_feature_flags(service_desk_custom_email: false)
- end
-
- it 'does not trigger the verification process and adds an issue instead' do
- expect { receiver.execute }.to change { Issue.count }.by(1)
+ it_behaves_like 'a handler that does not verify the custom email' do
+ let(:error_identifier) { 'incorrect_from' }
+ end
end
end
end
- context 'when using incoming_email address' do
- before do
- stub_incoming_email_setting(enabled: true, address: 'support+%{key}@example.com')
- end
-
- it_behaves_like 'a handler that verifies Service Desk custom email verification emails'
- end
-
context 'when using service_desk_email address' do
let(:receiver) { Gitlab::Email::ServiceDeskReceiver.new(email_raw) }
@@ -567,7 +602,35 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com')
end
- it_behaves_like 'a handler that verifies Service Desk custom email verification emails'
+ it_behaves_like 'an early exiting handler'
+
+ context 'with valid service desk settings' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
+
+ let_it_be_with_reload(:settings) do
+ create(:service_desk_setting, project: project, custom_email: 'custom-support-email@example.com')
+ end
+
+ let_it_be_with_reload(:verification) do
+ create(:service_desk_custom_email_verification, project: project, token: 'ZROT4ZZXA-Y6', triggerer: user)
+ end
+
+ let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
+
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ before do
+ allow(message_delivery).to receive(:deliver_later)
+ allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
+ end
+
+ it_behaves_like 'a handler that does not verify the custom email' do
+ let(:error_identifier) { 'incorrect_forwarding_target' }
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index c86a83092a4..aff5928c3da 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe Gitlab::Email::Receiver do
+RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
include_context 'email shared context'
- let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:project) { create(:project) }
let(:metric_transaction) { instance_double(Gitlab::Metrics::WebTransaction) }
shared_examples 'successful receive' do
@@ -130,6 +130,63 @@ RSpec.describe Gitlab::Email::Receiver do
it_behaves_like 'successful receive'
end
+
+ context 'when Service Desk custom email reply address in To header and no References header exists' do
+ let_it_be_with_refind(:setting) { create(:service_desk_setting, project: project, add_external_participants_from_cc: true) }
+
+ let!(:credential) { create(:service_desk_custom_email_credential, project: project) }
+ let!(:verification) { create(:service_desk_custom_email_verification, :finished, project: project) }
+ let(:incoming_email) { "incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com" }
+ let(:reply_key) { "5de1a83a6fc3c9fe34d756c7f484159e" }
+ let(:custom_email_reply) { "support+#{reply_key}@example.com" }
+
+ context 'when custom email is enabled' do
+ let(:email_raw) do
+ <<~EMAIL
+ Delivered-To: #{incoming_email}
+ From: jake@example.com
+ To: #{custom_email_reply}
+ Subject: Reply title
+
+ Reply body
+ EMAIL
+ end
+
+ let(:meta_key) { :to_address }
+ let(:meta_value) { [custom_email_reply] }
+
+ before do
+ project.reset
+ setting.update!(custom_email: 'support@example.com', custom_email_enabled: true)
+ end
+
+ it_behaves_like 'successful receive' do
+ let(:mail_key) { reply_key }
+ end
+
+ # Email forwarding using a transport rule in Microsoft 365 adds the forwarding
+ # target to the `To` header. We have to select the custom email reply address
+ # before the incoming address (forwarding target)
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/426269#note_1629170865 for email structure
+ context 'when also Service Desk incoming address in To header' do
+ let(:email_raw) do
+ <<~EMAIL
+ From: jake@example.com
+ To: #{custom_email_reply}, #{incoming_email}
+ Subject: Reply title
+
+ Reply body
+ EMAIL
+ end
+
+ let(:meta_value) { [custom_email_reply, incoming_email] }
+
+ it_behaves_like 'successful receive' do
+ let(:mail_key) { reply_key }
+ end
+ end
+ end
+ end
end
context 'when we cannot find a capable handler' do
diff --git a/spec/lib/gitlab/email/service_desk/custom_email_spec.rb b/spec/lib/gitlab/email/service_desk/custom_email_spec.rb
index bba1ca1c8be..bdf31567251 100644
--- a/spec/lib/gitlab/email/service_desk/custom_email_spec.rb
+++ b/spec/lib/gitlab/email/service_desk/custom_email_spec.rb
@@ -6,10 +6,9 @@ RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :servi
let(:reply_key) { 'b7721fc7e8419911a8bea145236a0519' }
let(:custom_email) { 'support@example.com' }
let(:email_with_reply_key) { 'support+b7721fc7e8419911a8bea145236a0519@example.com' }
+ let_it_be(:project) { create(:project) }
describe '.reply_address' do
- let_it_be(:project) { create(:project) }
-
subject(:reply_address) { described_class.reply_address(nil, nil) }
it { is_expected.to be nil }
@@ -34,4 +33,38 @@ RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :servi
end
end
end
+
+ describe '.key_from_reply_address' do
+ let(:email) { email_with_reply_key }
+
+ subject(:reply_address) { described_class.key_from_reply_address(email) }
+
+ it { is_expected.to be nil }
+
+ context 'with service_desk_setting' do
+ let_it_be_with_refind(:setting) do
+ create(:service_desk_setting, project: project, add_external_participants_from_cc: true)
+ end
+
+ it { is_expected.to be nil }
+
+ context 'with custom email' do
+ let!(:credential) { create(:service_desk_custom_email_credential, project: project) }
+ let!(:verification) { create(:service_desk_custom_email_verification, :finished, project: project) }
+
+ before do
+ project.reset
+ setting.update!(custom_email: 'support@example.com', custom_email_enabled: true)
+ end
+
+ it { is_expected.to eq reply_key }
+ end
+ end
+
+ context 'without reply key' do
+ let(:email) { custom_email }
+
+ it { is_expected.to be nil }
+ end
+ end
end
diff --git a/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb b/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb
index 38745fe0cde..932c1b2fb4c 100644
--- a/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb
+++ b/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb
@@ -64,8 +64,11 @@ RSpec.describe Gitlab::ErrorTracking::ContextPayloadGenerator do
end
context 'when the GITLAB_SENTRY_EXTRA_TAGS env is a JSON hash' do
- it 'includes those tags in all events' do
+ before do
stub_env('GITLAB_SENTRY_EXTRA_TAGS', { foo: 'bar', baz: 'quux' }.to_json)
+ end
+
+ it 'includes those tags in all events' do
payload = {}
Gitlab::ApplicationContext.with_context(feature_category: 'feature_a') do
@@ -87,6 +90,26 @@ RSpec.describe Gitlab::ErrorTracking::ContextPayloadGenerator do
generator.generate(exception, extra)
end
+
+ context 'with generated tags' do
+ it 'includes all tags' do
+ payload = {}
+
+ Gitlab::ApplicationContext.with_context(feature_category: 'feature_a') do
+ payload = generator.generate(exception, extra, { 'mytag' => '123' })
+ end
+
+ expect(payload[:tags]).to eql(
+ correlation_id: 'cid',
+ locale: 'en',
+ program: 'test',
+ feature_category: 'feature_a',
+ 'foo' => 'bar',
+ 'baz' => 'quux',
+ 'mytag' => '123'
+ )
+ end
+ end
end
context 'when the GITLAB_SENTRY_EXTRA_TAGS env is not a JSON hash' do
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 79016335a40..c9b2e21d934 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -97,6 +97,27 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
)
end.to raise_error(RuntimeError, /boom/)
end
+
+ context 'with tags' do
+ let(:tags) { { 'mytag' => 2 } }
+
+ before do
+ sentry_payload[:tags].merge!(tags)
+ end
+
+ it 'includes additional tags' do
+ expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+ expect(Sentry).to receive(:capture_exception).with(exception, sentry_payload)
+
+ expect do
+ described_class.track_and_raise_for_dev_exception(
+ exception,
+ { issue_url: issue_url, some_other_info: 'info' },
+ tags
+ )
+ end.to raise_error(RuntimeError, /boom/)
+ end
+ end
end
context 'when exceptions for dev should not be raised' do
@@ -181,8 +202,10 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
end
describe '.track_exception' do
+ let(:tags) { {} }
+
subject(:track_exception) do
- described_class.track_exception(exception, extra)
+ described_class.track_exception(exception, extra, tags)
end
before do
@@ -207,6 +230,18 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(logger_payload)
end
+ context 'with tags' do
+ let(:tags) { { 'mytag' => 2 } }
+
+ it 'includes the tags' do
+ track_exception
+
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
+ hash_including({ 'tags.mytag' => 2 })
+ )
+ end
+ end
+
context 'with filterable parameters' do
let(:extra) { { test: 1, my_token: 'test' } }
diff --git a/spec/lib/gitlab/event_store/event_spec.rb b/spec/lib/gitlab/event_store/event_spec.rb
index 97f6870a5ec..edcb0e5dd1a 100644
--- a/spec/lib/gitlab/event_store/event_spec.rb
+++ b/spec/lib/gitlab/event_store/event_spec.rb
@@ -1,8 +1,10 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'json_schemer'
+require 'oj'
-RSpec.describe Gitlab::EventStore::Event do
+RSpec.describe Gitlab::EventStore::Event, feature_category: :shared do
let(:event_class) { stub_const('TestEvent', Class.new(described_class)) }
let(:event) { event_class.new(data: data) }
let(:data) { { project_id: 123, project_path: 'org/the-project' } }
@@ -42,6 +44,14 @@ RSpec.describe Gitlab::EventStore::Event do
it 'initializes the event correctly' do
expect(event.data).to eq(data)
end
+
+ it 'validates schema' do
+ expect(event_class.json_schema_valid).to eq(nil)
+
+ event
+
+ expect(event_class.json_schema_valid).to eq(true)
+ end
end
context 'when some properties are missing' do
@@ -59,6 +69,31 @@ RSpec.describe Gitlab::EventStore::Event do
expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent, 'Event data must be a Hash')
end
end
+
+ context 'when schema is invalid' do
+ before do
+ event_class.class_eval do
+ def schema
+ {
+ 'required' => ['project_id'],
+ 'type' => 'object',
+ 'properties' => {
+ 'project_id' => { 'type' => 'int' },
+ 'project_path' => { 'type' => 'string ' }
+ }
+ }
+ end
+ end
+ end
+
+ it 'raises an error' do
+ expect(event_class.json_schema_valid).to eq(nil)
+
+ expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent, 'Schema for event TestEvent is invalid')
+
+ expect(event_class.json_schema_valid).to eq(false)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/event_store/store_spec.rb b/spec/lib/gitlab/event_store/store_spec.rb
index 04d0706c130..e747027db98 100644
--- a/spec/lib/gitlab/event_store/store_spec.rb
+++ b/spec/lib/gitlab/event_store/store_spec.rb
@@ -263,12 +263,59 @@ RSpec.describe Gitlab::EventStore::Store, feature_category: :shared do
end
end
+ describe '#publish_group' do
+ let(:event1) { event_klass.new(data: { name: 'Bob', id: 123 }) }
+ let(:event2) { event_klass.new(data: { name: 'Alice', id: 456 }) }
+ let(:event3) { event_klass.new(data: { name: 'Eva', id: 789 }) }
+
+ let(:group_size) { 3 }
+ let(:events) { [event1, event2, event3] }
+ let(:serialized_data) { events.map(&:data).map(&:deep_stringify_keys) }
+
+ let(:store) do
+ described_class.new do |s|
+ s.subscribe worker, to: event_klass, group_size: group_size
+ end
+ end
+
+ subject { store.publish_group(events) }
+
+ context 'with valid events' do
+ it 'calls consume_events of subscription' do
+ expect(store.subscriptions[event_klass].first).to receive(:consume_events).with(events)
+
+ subject
+ end
+ end
+
+ context 'when there is invalid event' do
+ let(:events) { [event1, invalid_event] }
+
+ context 'when event is invalid' do
+ let(:invalid_event) { stub_const('TestEvent', {}) }
+
+ it 'raises InvalidEvent error' do
+ expect { subject }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+
+ context 'when one of the events is a different event' do
+ let(:invalid_event) { stub_const('DifferentEvent', Class.new(Gitlab::EventStore::Event)) }
+
+ it 'raises InvalidEvent error' do
+ expect { subject }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+ end
+ end
+
describe 'subscriber' do
let(:data) { { name: 'Bob', id: 123 } }
+ let(:event_data) { data }
let(:event_name) { event.class.name }
let(:worker_instance) { worker.new }
- subject { worker_instance.perform(event_name, data) }
+ subject { worker_instance.perform(event_name, event_data) }
it 'is a Sidekiq worker' do
expect(worker_instance).to be_a(ApplicationWorker)
@@ -278,7 +325,7 @@ RSpec.describe Gitlab::EventStore::Store, feature_category: :shared do
expect(worker_instance).to receive(:handle_event).with(instance_of(event.class))
expect_any_instance_of(event.class) do |event|
- expect(event).to receive(:data).and_return(data)
+ expect(event).to receive(:data).and_return(event_data)
end
subject
@@ -299,5 +346,24 @@ RSpec.describe Gitlab::EventStore::Store, feature_category: :shared do
expect { subject }.to raise_error(NotImplementedError)
end
end
+
+ context 'when there are multiple events' do
+ let(:event_data) { [{ name: 'Bob', id: 123 }, { name: 'Alice', id: 456 }] }
+
+ let(:first_event) { event_klass.new(data: event_data.first) }
+ let(:second_event) { event_klass.new(data: event_data.last) }
+
+ before do
+ allow(worker_instance).to receive(:construct_event).with(event_klass, event_data.first).and_return(first_event)
+ allow(worker_instance).to receive(:construct_event).with(event_klass, event_data.last).and_return(second_event)
+ end
+
+ it 'calls handle_event multiple times' do
+ expect(worker_instance).to receive(:handle_event).once.with(first_event)
+ expect(worker_instance).to receive(:handle_event).once.with(second_event)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/event_store/subscription_spec.rb b/spec/lib/gitlab/event_store/subscription_spec.rb
new file mode 100644
index 00000000000..2a87f48be10
--- /dev/null
+++ b/spec/lib/gitlab/event_store/subscription_spec.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::EventStore::Subscription, feature_category: :shared do
+ let(:worker) do
+ stub_const('EventSubscriber', Class.new).tap do |klass|
+ klass.class_eval do
+ include Gitlab::EventStore::Subscriber
+
+ def handle_event(event)
+ event.data
+ end
+ end
+ end
+ end
+
+ let(:event_klass) { stub_const('TestEvent', Class.new(Gitlab::EventStore::Event)) }
+ let(:event) { event_klass.new(data: data) }
+
+ let(:delay) { nil }
+ let(:condition) { nil }
+ let(:group_size) { nil }
+
+ subject(:subscription) { described_class.new(worker, condition, delay, group_size) }
+
+ before do
+ event_klass.class_eval do
+ def schema
+ {
+ 'required' => %w[name id],
+ 'type' => 'object',
+ 'properties' => {
+ 'name' => { 'type' => 'string' },
+ 'id' => { 'type' => 'integer' }
+ }
+ }
+ end
+ end
+ end
+
+ describe '#consume_events' do
+ let(:event1) { event_klass.new(data: { name: 'Bob', id: 123 }) }
+ let(:event2) { event_klass.new(data: { name: 'Alice', id: 456 }) }
+ let(:event3) { event_klass.new(data: { name: 'Eva', id: 789 }) }
+
+ let(:group_size) { 3 }
+ let(:events) { [event1, event2, event3] }
+ let(:serialized_data) { events.map(&:data).map(&:deep_stringify_keys) }
+
+ subject(:consume_events) { subscription.consume_events(events) }
+
+ context 'with invalid events' do
+ let(:events) { [event1, invalid_event] }
+
+ context 'when event is invalid' do
+ let(:invalid_event) { stub_const('TestEvent', Class.new { attr_reader :data }).new }
+
+ it 'raises InvalidEvent error' do
+ expect { consume_events }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+
+ context 'when one of the events is a different event' do
+ let(:invalid_event_klass) { stub_const('DifferentEvent', Class.new(Gitlab::EventStore::Event)) }
+ let(:invalid_event) { invalid_event_klass.new(data: {}) }
+
+ before do
+ invalid_event_klass.class_eval do
+ def schema
+ {
+ 'type' => 'object',
+ 'properties' => {}
+ }
+ end
+ end
+ end
+
+ it 'raises InvalidEvent error' do
+ expect { consume_events }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+ end
+
+ context 'when grouped events size is more than batch scheduling size' do
+ let(:group_size) { 2 }
+
+ before do
+ stub_const("#{described_class}::SCHEDULING_BATCH_SIZE", 1)
+ end
+
+ it 'dispatches the events to the worker with batch parameters' do
+ expect(worker).to receive(:bulk_perform_in).with(
+ 1.second,
+ [['TestEvent', serialized_data.take(2)], ['TestEvent', serialized_data.drop(2)]],
+ batch_size: 1,
+ batch_delay: 10.seconds
+ )
+
+ consume_events
+ end
+
+ context 'with delayed dispatching of event' do
+ let(:delay) { 1.minute }
+
+ it 'dispatches the events to the worker with batch parameters and delay' do
+ expect(worker).to receive(:bulk_perform_in).with(
+ 1.minute,
+ [['TestEvent', serialized_data.take(2)], ['TestEvent', serialized_data.drop(2)]],
+ batch_size: 1,
+ batch_delay: 10.seconds
+ )
+
+ consume_events
+ end
+ end
+ end
+
+ context 'when subscription has grouped dispatching of events' do
+ let(:group_size) { 2 }
+
+ it 'dispatches the events to the worker in group' do
+ expect(worker).to receive(:bulk_perform_async).once.with([
+ ['TestEvent', serialized_data.take(2)],
+ ['TestEvent', serialized_data.drop(2)]
+ ])
+
+ consume_events
+ end
+ end
+
+ context 'when subscription has delayed dispatching of event' do
+ let(:delay) { 1.minute }
+
+ it 'dispatches the events to the worker after some time' do
+ expect(worker).to receive(:bulk_perform_in).with(1.minute, [['TestEvent', serialized_data]])
+
+ consume_events
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/exclusive_lease_spec.rb b/spec/lib/gitlab/exclusive_lease_spec.rb
index 80154c729e3..a02e2625c5e 100644
--- a/spec/lib/gitlab/exclusive_lease_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::ExclusiveLease, :request_store,
- :clean_gitlab_redis_cluster_shared_state, feature_category: :shared do
+ :clean_gitlab_redis_shared_state, feature_category: :shared do
let(:unique_key) { SecureRandom.hex(10) }
describe '#try_obtain' do
diff --git a/spec/lib/gitlab/experiment/rollout/feature_spec.rb b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
index 6d01b7a175f..9d602083ad6 100644
--- a/spec/lib/gitlab/experiment/rollout/feature_spec.rb
+++ b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
@@ -3,50 +3,25 @@
require 'spec_helper'
RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_category: :acquisition do
- subject { described_class.new(subject_experiment) }
+ subject(:experiment_instance) { described_class.new(subject_experiment) }
let(:subject_experiment) { experiment('namespaced/stub') }
- describe "#enabled?", :saas do
+ describe "#enabled?" do
before do
stub_feature_flags(gitlab_experiment: true)
- allow(subject).to receive(:feature_flag_defined?).and_return(true)
- allow(subject).to receive(:feature_flag_instance).and_return(double(state: :on))
+ allow(experiment_instance).to receive(:feature_flag_defined?).and_return(true)
+ allow(experiment_instance)
+ .to receive(:feature_flag_instance).and_return(instance_double('Flipper::Feature', state: :on))
end
- it "is enabled when all criteria are met" do
- expect(subject).to be_enabled
- end
-
- it "isn't enabled if the feature definition doesn't exist" do
- expect(subject).to receive(:feature_flag_defined?).and_return(false)
-
- expect(subject).not_to be_enabled
- end
-
- it "isn't enabled if we're not in dev or dotcom environments" do
- expect(Gitlab).to receive(:com?).and_return(false)
-
- expect(subject).not_to be_enabled
- end
-
- it "isn't enabled if the feature flag state is :off" do
- expect(subject).to receive(:feature_flag_instance).and_return(double(state: :off))
-
- expect(subject).not_to be_enabled
- end
-
- it "isn't enabled if the gitlab_experiment feature flag is false" do
- stub_feature_flags(gitlab_experiment: false)
-
- expect(subject).not_to be_enabled
- end
+ it { is_expected.not_to be_enabled }
end
describe "#execute_assignment" do
let(:variants) do
->(e) do
- # rubocop:disable Lint/EmptyBlock
+ # rubocop:disable Lint/EmptyBlock -- Specific for test
e.control {}
e.variant(:red) {}
e.variant(:blue) {}
@@ -63,26 +38,26 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
it "uses the default value as specified in the yaml" do
expect(Feature).to receive(:enabled?).with(
'namespaced_stub',
- subject,
+ experiment_instance,
type: :experiment
).and_return(false)
- expect(subject.execute_assignment).to be_nil
+ expect(experiment_instance.execute_assignment).to be_nil
end
it "returns an assigned name" do
- expect(subject.execute_assignment).to eq(:blue)
+ expect(experiment_instance.execute_assignment).to eq(:blue)
end
context "when there are no behaviors" do
- let(:variants) { ->(e) { e.control {} } } # rubocop:disable Lint/EmptyBlock
+ let(:variants) { ->(e) { e.control {} } } # rubocop:disable Lint/EmptyBlock -- Specific for test
it "does not raise an error" do
- expect { subject.execute_assignment }.not_to raise_error
+ expect { experiment_instance.execute_assignment }.not_to raise_error
end
end
- context "for even rollout to non-control", :saas do
+ context "for even rollout to non-control" do
let(:counts) { Hash.new(0) }
let(:subject_experiment) { experiment('namespaced/stub') }
@@ -91,8 +66,8 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
allow(instance).to receive(:enabled?).and_return(true)
end
- subject_experiment.variant(:variant1) {} # rubocop:disable Lint/EmptyBlock
- subject_experiment.variant(:variant2) {} # rubocop:disable Lint/EmptyBlock
+ subject_experiment.variant(:variant1) {} # rubocop:disable Lint/EmptyBlock -- Specific for test
+ subject_experiment.variant(:variant2) {} # rubocop:disable Lint/EmptyBlock -- Specific for test
end
it "rolls out relatively evenly to 2 behaviors" do
@@ -102,7 +77,7 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
end
it "rolls out relatively evenly to 3 behaviors" do
- subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock
+ subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock -- Specific for test
100.times { |i| run_cycle(subject_experiment, value: i) }
@@ -115,7 +90,7 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
end
it "rolls out with the expected distribution" do
- subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock
+ subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock -- Specific for test
100.times { |i| run_cycle(subject_experiment, value: i) }
@@ -152,14 +127,14 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
describe "#flipper_id" do
it "returns the expected flipper id if the experiment doesn't provide one" do
- subject.instance_variable_set(:@experiment, double(id: '__id__'))
- expect(subject.flipper_id).to eq('Experiment;__id__')
+ experiment_instance.instance_variable_set(:@experiment, instance_double('Gitlab::Experiment', id: '__id__'))
+ expect(experiment_instance.flipper_id).to eq('Experiment;__id__')
end
it "lets the experiment provide a flipper id so it can override the default" do
allow(subject_experiment).to receive(:flipper_id).and_return('_my_overridden_id_')
- expect(subject.flipper_id).to eq('_my_overridden_id_')
+ expect(experiment_instance.flipper_id).to eq('_my_overridden_id_')
end
end
end
diff --git a/spec/lib/gitlab/file_detector_spec.rb b/spec/lib/gitlab/file_detector_spec.rb
index 55bb1804d86..eabc92b794a 100644
--- a/spec/lib/gitlab/file_detector_spec.rb
+++ b/spec/lib/gitlab/file_detector_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::FileDetector do
+RSpec.describe Gitlab::FileDetector, feature_category: :global_search do
describe '.types_in_paths' do
it 'returns the file types for the given paths' do
expect(described_class.types_in_paths(%w[README.md CHANGELOG VERSION VERSION]))
@@ -116,5 +116,9 @@ RSpec.describe Gitlab::FileDetector do
expect(described_class.type_of(type_name)).to be_nil
end
end
+
+ it 'returns the type of a Jenkins config file' do
+ expect(described_class.type_of('jenkinsfile')).to eq(:jenkinsfile)
+ end
end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index d8d62ac9670..6c8634281ae 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
describe "Commit info from gitaly commit" do
let(:subject) { (+"My commit").force_encoding('ASCII-8BIT') }
- let(:body) { subject + (+"My body").force_encoding('ASCII-8BIT') }
let(:body_size) { body.length }
let(:gitaly_commit) { build(:gitaly_commit, subject: subject, body: body, body_size: body_size, tree_id: tree_id) }
let(:id) { gitaly_commit.id }
@@ -17,6 +16,17 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
let(:author) { gitaly_commit.author }
let(:commit) { described_class.new(repository, gitaly_commit) }
+ let(:body) do
+ body = +<<~BODY
+ Bleep bloop.
+
+ Cc: John Doe <johndoe@gitlab.com>
+ Cc: Jane Doe <janedoe@gitlab.com>
+ BODY
+
+ [subject, "\n", body].join.force_encoding("ASCII-8BIT")
+ end
+
it { expect(commit.short_id).to eq(id[0..10]) }
it { expect(commit.id).to eq(id) }
it { expect(commit.sha).to eq(id) }
@@ -29,6 +39,18 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
it { expect(commit.parent_ids).to eq(gitaly_commit.parent_ids) }
it { expect(commit.tree_id).to eq(tree_id) }
+ it "parses the commit trailers" do
+ expect(commit.trailers).to eq(
+ { "Cc" => "Jane Doe <janedoe@gitlab.com>" }
+ )
+ end
+
+ it "parses the extended commit trailers" do
+ expect(commit.extended_trailers).to eq(
+ { "Cc" => ["John Doe <johndoe@gitlab.com>", "Jane Doe <janedoe@gitlab.com>"] }
+ )
+ end
+
context 'non-UTC dates' do
let(:seconds) { Time.now.to_i }
@@ -773,6 +795,7 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
message: "tree css fixes",
parent_ids: ["874797c3a73b60d2187ed6e2fcabd289ff75171e"],
trailers: {},
+ extended_trailers: {},
referenced_by: []
}
end
diff --git a/spec/lib/gitlab/git/compare_spec.rb b/spec/lib/gitlab/git/compare_spec.rb
index 81b5aa94656..5ee5e18d5af 100644
--- a/spec/lib/gitlab/git/compare_spec.rb
+++ b/spec/lib/gitlab/git/compare_spec.rb
@@ -2,11 +2,14 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Compare do
- let_it_be(:repository) { create(:project, :repository).repository.raw }
+RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
- let(:compare) { described_class.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: false) }
- let(:compare_straight) { described_class.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: true) }
+ let(:compare) { described_class.new(repository, base, head, straight: false) }
+ let(:compare_straight) { described_class.new(repository, base, head, straight: true) }
+ let(:base) { SeedRepo::BigCommit::ID }
+ let(:head) { SeedRepo::Commit::ID }
describe '#commits' do
subject do
@@ -109,4 +112,103 @@ RSpec.describe Gitlab::Git::Compare do
it { is_expected.to include('files/ruby/popen.rb') }
it { is_expected.not_to include('LICENSE') }
end
+
+ describe '#generated_files' do
+ subject(:generated_files) { compare.generated_files }
+
+ context 'with a detected generated file' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
+ let_it_be(:branch) { 'generated-file-test' }
+ let_it_be(:base) do
+ project
+ .repository
+ .create_file(
+ project.creator,
+ '.gitattributes',
+ "*.txt gitlab-generated\n",
+ branch_name: branch,
+ message: 'Add .gitattributes file')
+ end
+
+ let_it_be(:head) do
+ project
+ .repository
+ .create_file(
+ project.creator,
+ 'file1.rb',
+ "some content\n",
+ branch_name: branch,
+ message: 'Add file1')
+ project
+ .repository
+ .create_file(
+ project.creator,
+ 'file1.txt',
+ "some content\n",
+ branch_name: branch,
+ message: 'Add file2')
+ end
+
+ it 'sets the diff as generated' do
+ expect(generated_files).to eq Set.new(['file1.txt'])
+ end
+
+ context 'when base is nil' do
+ let(:base) { nil }
+
+ it 'does not try to detect generated files' do
+ expect(repository).not_to receive(:detect_generated_files)
+ expect(repository).not_to receive(:find_changed_paths)
+ expect(generated_files).to eq Set.new
+ end
+ end
+
+ context 'when head is nil' do
+ let(:head) { nil }
+
+ it 'does not try to detect generated files' do
+ expect(repository).not_to receive(:detect_generated_files)
+ expect(repository).not_to receive(:find_changed_paths)
+ expect(generated_files).to eq Set.new
+ end
+ end
+ end
+
+ context 'with updated .gitattributes in the HEAD' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
+ let_it_be(:branch) { 'generated-file-test' }
+ let_it_be(:head) do
+ project
+ .repository
+ .create_file(
+ project.creator,
+ '.gitattributes',
+ "*.txt gitlab-generated\n",
+ branch_name: branch,
+ message: 'Add .gitattributes file')
+ project
+ .repository
+ .create_file(
+ project.creator,
+ 'file1.rb',
+ "some content\n",
+ branch_name: branch,
+ message: 'Add file1')
+ project
+ .repository
+ .create_file(
+ project.creator,
+ 'file1.txt',
+ "some content\n",
+ branch_name: branch,
+ message: 'Add file2')
+ end
+
+ it 'does not set any files as generated' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index 72ddd0759ec..dc60d486f49 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Git::DiffCollection do
+RSpec.describe Gitlab::Git::DiffCollection, feature_category: :source_code_management do
before do
stub_const('MutatingConstantIterator', Class.new)
@@ -531,6 +531,99 @@ RSpec.describe Gitlab::Git::DiffCollection do
end
describe '#each' do
+ context 'with Gitlab::GitalyClient::DiffStitcher' do
+ let(:collection) do
+ described_class.new(
+ iterator,
+ max_files: max_files,
+ max_lines: max_lines,
+ limits: limits,
+ expanded: expanded,
+ generated_files: generated_files
+ )
+ end
+
+ let(:iterator) { Gitlab::GitalyClient::DiffStitcher.new(diff_params) }
+ let(:diff_params) { [diff_1, diff_2] }
+ let(:diff_1) do
+ OpenStruct.new(
+ to_path: ".gitmodules",
+ from_path: ".gitmodules",
+ old_mode: 0100644,
+ new_mode: 0100644,
+ from_id: '357406f3075a57708d0163752905cc1576fceacc',
+ to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0',
+ patch: 'a' * 10,
+ raw_patch_data: 'a' * 10,
+ end_of_patch: true
+ )
+ end
+
+ let(:diff_2) do
+ OpenStruct.new(
+ to_path: ".gitignore",
+ from_path: ".gitignore",
+ old_mode: 0100644,
+ new_mode: 0100644,
+ from_id: '357406f3075a57708d0163752905cc1576fceacc',
+ to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0',
+ patch: 'a' * 20,
+ raw_patch_data: 'a' * 20,
+ end_of_patch: true
+ )
+ end
+
+ context 'with generated_files' do
+ let(:generated_files) { [diff_1.from_path] }
+
+ it 'sets generated files as generated' do
+ collection.each do |d|
+ if d.old_path == diff_1.from_path
+ expect(d.generated).to be true
+ else
+ expect(d.generated).to be false
+ end
+ end
+ end
+ end
+
+ context 'without generated_files' do
+ let(:generated_files) { nil }
+
+ it 'set generated as nil' do
+ collection.each do |d|
+ expect(d.generated).to be_nil
+ end
+ end
+ end
+ end
+
+ context 'with existing generated value in the hash' do
+ let(:collection) do
+ described_class.new([{ diff: 'some content', generated: true }], options)
+ end
+
+ context 'when collapse_generated on' do
+ let(:options) { { collapse_generated: true } }
+
+ it 'sets the diff as generated' do
+ collection.each do |diff|
+ expect(diff.generated).to eq true
+ end
+ end
+ end
+
+ context 'when collapse_generated off' do
+ let(:options) { { collapse_generated: false } }
+
+ it 'does not set the diff as generated' do
+ collection.each do |diff|
+ expect(diff.generated).to be_nil
+ end
+ end
+ end
+ end
+
context 'when diff are too large' do
let(:collection) do
described_class.new([{ diff: 'a' * 204800 }])
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 6b3630d7a1f..c40445433c0 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -50,7 +50,7 @@ EOT
let(:diff) { described_class.new(@raw_diff_hash) }
it 'initializes the diff' do
- expect(diff.to_hash).to eq(@raw_diff_hash)
+ expect(diff.to_hash).to eq(@raw_diff_hash.merge(generated: nil))
end
it 'does not prune the diff' do
@@ -87,7 +87,7 @@ EOT
let(:raw_patch) { @raw_diff_hash[:diff] }
it 'initializes the diff' do
- expect(diff.to_hash).to eq(@raw_diff_hash)
+ expect(diff.to_hash).to eq(@raw_diff_hash.merge(generated: nil))
end
it 'does not prune the diff' do
@@ -156,6 +156,31 @@ EOT
expect(diff).to be_collapsed
end
end
+
+ context 'when the file is set as generated' do
+ let(:diff) { described_class.new(gitaly_diff, generated: true, expanded: expanded) }
+ let(:raw_patch) { 'some text' }
+
+ context 'when expanded is set to false' do
+ let(:expanded) { false }
+
+ it 'will be marked as generated and collapsed' do
+ expect(diff).to be_generated
+ expect(diff).to be_collapsed
+ expect(diff.diff).to be_empty
+ end
+ end
+
+ context 'when expanded is set to true' do
+ let(:expanded) { true }
+
+ it 'will still be marked as generated, but not as collapsed' do
+ expect(diff).to be_generated
+ expect(diff).not_to be_collapsed
+ expect(diff.diff).not_to be_empty
+ end
+ end
+ end
end
context 'using a Gitaly::CommitDelta' do
@@ -173,7 +198,7 @@ EOT
let(:diff) { described_class.new(commit_delta) }
it 'initializes the diff' do
- expect(diff.to_hash).to eq(@raw_diff_hash.merge(diff: ''))
+ expect(diff.to_hash).to eq(@raw_diff_hash.merge(diff: '', generated: nil))
end
it 'is not too large' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 5791d9c524f..cc07a16d362 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2670,10 +2670,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
subject { new_repository.replicate(repository) }
before do
- stub_storage_settings('test_second_storage' => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- })
+ stub_storage_settings('test_second_storage' => {})
end
after do
@@ -2781,6 +2778,31 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
+ describe '#object_format' do
+ subject { repository.object_format }
+
+ context 'for SHA1 repository' do
+ it { is_expected.to eq :OBJECT_FORMAT_SHA1 }
+ end
+
+ context 'for SHA256 repository' do
+ let(:project) { create(:project, :empty_repo, object_format: Repository::FORMAT_SHA256) }
+ let(:repository) { project.repository.raw }
+
+ it { is_expected.to eq :OBJECT_FORMAT_SHA256 }
+ end
+
+ context 'for removed repository' do
+ let(:repository) { mutable_repository }
+
+ before do
+ repository.remove
+ end
+
+ it { expect { subject }.to raise_error(Gitlab::Git::Repository::NoRepository) }
+ end
+ end
+
describe '#get_file_attributes' do
let(:rev) { 'master' }
let(:paths) { ['file.txt'] }
@@ -2790,4 +2812,69 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
subject { repository.get_file_attributes(rev, paths, attrs) }
end
end
+
+ describe '#detect_generated_files' do
+ let(:project) do
+ create(:project, :custom_repo, files: {
+ '.gitattributes' => gitattr_content,
+ 'file1.txt' => 'first file',
+ 'file2.txt' => 'second file'
+ })
+ end
+
+ let(:repository) { project.repository.raw }
+ let(:rev) { 'master' }
+ let(:paths) { ['file1.txt', 'file2.txt'] }
+
+ subject(:generated_files) { repository.detect_generated_files(rev, paths) }
+
+ context 'when the linguist-generated attribute is used' do
+ let(:gitattr_content) { "*.txt text\nfile1.txt linguist-generated\n" }
+
+ it 'returns generated files only' do
+ expect(generated_files).to contain_exactly('file1.txt')
+ end
+ end
+
+ context 'when the gitlab-generated attribute is used' do
+ let(:gitattr_content) { "*.txt text\nfile1.txt gitlab-generated\n" }
+
+ it 'returns generated files only' do
+ expect(generated_files).to contain_exactly('file1.txt')
+ end
+ end
+
+ context 'when both linguist-generated and gitlab-generated attribute are used' do
+ let(:gitattr_content) { "*.txt text\nfile1.txt linguist-generated gitlab-generated\n" }
+
+ it 'returns generated files only' do
+ expect(generated_files).to contain_exactly('file1.txt')
+ end
+ end
+
+ context 'when the all files are generated' do
+ let(:gitattr_content) { "*.txt gitlab-generated\n" }
+
+ it 'returns all generated files' do
+ expect(generated_files).to eq paths.to_set
+ end
+ end
+
+ context 'when empty paths are given' do
+ let(:paths) { [] }
+ let(:gitattr_content) { "*.txt gitlab-generated\n" }
+
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+
+ context 'when no generated overrides are used' do
+ let(:gitattr_content) { "*.txt text\n" }
+
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
index bdc16f16e66..ddd63159a03 100644
--- a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
@@ -17,26 +17,11 @@ RSpec.describe Gitlab::GitalyClient::ConflictsService do
describe '#conflicts' do
subject(:conflicts) { client.conflicts? }
- context "with the `skip_conflict_files_in_gitaly` feature flag on" do
- it 'calls list_conflict_files with `skip_content: true`' do
- expect_any_instance_of(described_class).to receive(:list_conflict_files)
- .with(skip_content: true).and_return(["let's pretend i'm a conflicted file"])
+ it 'calls list_conflict_files with no parameters' do
+ expect_any_instance_of(described_class).to receive(:list_conflict_files)
+ .and_return(["let's pretend i'm a conflicted file"])
- conflicts
- end
- end
-
- context "with the `skip_conflict_files_in_gitaly` feature flag off" do
- before do
- stub_feature_flags(skip_conflict_files_in_gitaly: false)
- end
-
- it 'calls list_conflict_files with no parameters' do
- expect_any_instance_of(described_class).to receive(:list_conflict_files)
- .with(skip_content: false).and_return(["let's pretend i'm a conflicted file"])
-
- conflicts
- end
+ conflicts
end
end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 727bf494ee6..26b96ecf36b 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -355,6 +355,40 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital
client.create_repository('feature/新機能')
end
+
+ context 'when object format is provided' do
+ before do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:create_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path)
+ .and(gitaly_request_with_params(default_branch: '', object_format: expected_format)), kind_of(Hash))
+ .and_return(double)
+ end
+
+ context 'with SHA1 format' do
+ let(:expected_format) { :OBJECT_FORMAT_SHA1 }
+
+ it 'sends a create_repository message with object format' do
+ client.create_repository(object_format: Repository::FORMAT_SHA1)
+ end
+ end
+
+ context 'with SHA256 format' do
+ let(:expected_format) { :OBJECT_FORMAT_SHA256 }
+
+ it 'sends a create_repository message with object format' do
+ client.create_repository(object_format: Repository::FORMAT_SHA256)
+ end
+ end
+
+ context 'with unknown format' do
+ let(:expected_format) { :OBJECT_FORMAT_UNSPECIFIED }
+
+ it 'sends a create_repository message with object format' do
+ client.create_repository(object_format: 'unknown')
+ end
+ end
+ end
end
describe '#raw_changes_between' do
@@ -479,6 +513,16 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital
end
end
+ describe '#object_format' do
+ it 'sends a object_format message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:object_format)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+
+ client.object_format
+ end
+ end
+
describe '#get_file_attributes' do
let(:rev) { 'master' }
let(:paths) { ['file.txt'] }
diff --git a/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb b/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
index 7252f7d6afb..6ea9dfde09d 100644
--- a/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
@@ -4,11 +4,11 @@ require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::StorageSettings, feature_category: :gitaly do
describe "#initialize" do
- context 'when the storage contains no path' do
+ context 'when the storage contains no gitaly_address' do
it 'raises an error' do
expect do
described_class.new("foo" => {})
- end.to raise_error(described_class::InvalidConfigurationError)
+ end.to raise_error(described_class::InvalidConfigurationError, described_class::INVALID_STORAGE_MESSAGE)
end
end
@@ -23,21 +23,13 @@ RSpec.describe Gitlab::GitalyClient::StorageSettings, feature_category: :gitaly
context 'when the storage is valid' do
it 'raises no error' do
expect do
- described_class.new("path" => Rails.root)
+ described_class.new("gitaly_address" => "unix:tmp/tests/gitaly/gitaly.socket")
end.not_to raise_error
end
end
end
describe '.gitaly_address' do
- context 'when the storage settings have no gitaly address but one is requested' do
- it 'raises an error' do
- expect do
- described_class.new("path" => Rails.root).gitaly_address
- end.to raise_error("key not found: \"gitaly_address\"")
- end
- end
-
context 'when the storage settings have a gitaly address and one is requested' do
it 'returns the setting value' do
expect(described_class.new("path" => Rails.root, "gitaly_address" => "test").gitaly_address).to eq("test")
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index 00639d9574b..796fe75521a 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -40,16 +40,6 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do
end
end
- describe '.filesystem_id_from_disk' do
- it 'catches errors' do
- [Errno::ENOENT, Errno::EACCES, JSON::ParserError].each do |error|
- stub_file_read(described_class.storage_metadata_file_path('default'), error: error)
-
- expect(described_class.filesystem_id_from_disk('default')).to be_nil
- end
- end
- end
-
describe '.filesystem_id' do
it 'returns an empty string when the relevant storage status is not found in the response' do
response = double("response")
@@ -361,19 +351,6 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do
end
end
- describe '.can_use_disk?' do
- it 'properly caches a false result' do
- # spec_helper stubs this globally
- allow(described_class).to receive(:can_use_disk?).and_call_original
- expect(described_class).to receive(:filesystem_id).once
- expect(described_class).to receive(:filesystem_id_from_disk).once
-
- 2.times do
- described_class.can_use_disk?('unknown')
- end
- end
- end
-
describe '.connection_data' do
it 'returns connection data' do
address = 'tcp://localhost:9876'
@@ -919,4 +896,20 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do
it_behaves_like 'with_feature_flag_actor'
end
end
+
+ describe '.execute' do
+ subject(:execute) do
+ described_class.execute('default', :ref_service, :find_local_branches, Gitaly::FindLocalBranchesRequest.new,
+ remote_storage: nil, timeout: 10.seconds)
+ end
+
+ it 'raises an exception when running within a concurrent Ruby thread' do
+ Thread.current[:restrict_within_concurrent_ruby] = true
+
+ expect { execute }.to raise_error(Gitlab::Utils::ConcurrentRubyThreadIsUsedError,
+ "Cannot run 'gitaly' if running from `Concurrent::Promise`.")
+
+ Thread.current[:restrict_within_concurrent_ruby] = nil
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/client_pool_spec.rb b/spec/lib/gitlab/github_import/client_pool_spec.rb
deleted file mode 100644
index aabb47c2cf1..00000000000
--- a/spec/lib/gitlab/github_import/client_pool_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GithubImport::ClientPool, feature_category: :importers do
- subject(:pool) { described_class.new(token_pool: %w[foo bar], per_page: 1, parallel: true) }
-
- describe '#best_client' do
- it 'returns the client with the most remaining requests' do
- allow(Gitlab::GithubImport::Client).to receive(:new).and_return(
- instance_double(
- Gitlab::GithubImport::Client,
- requests_remaining?: true, remaining_requests: 10, rate_limit_resets_in: 1
- ),
- instance_double(
- Gitlab::GithubImport::Client,
- requests_remaining?: true, remaining_requests: 20, rate_limit_resets_in: 2
- )
- )
-
- expect(pool.best_client.remaining_requests).to eq(20)
- end
-
- context 'when all clients are rate limited' do
- it 'returns the client with the closest rate limit reset time' do
- allow(Gitlab::GithubImport::Client).to receive(:new).and_return(
- instance_double(
- Gitlab::GithubImport::Client,
- requests_remaining?: false, remaining_requests: 10, rate_limit_resets_in: 10
- ),
- instance_double(
- Gitlab::GithubImport::Client,
- requests_remaining?: false, remaining_requests: 20, rate_limit_resets_in: 20
- )
- )
-
- expect(pool.best_client.rate_limit_resets_in).to eq(10)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb b/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
index 6f602531d23..c1e9bed5681 100644
--- a/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::GithubImport::Importer::CollaboratorsImporter, feature_ca
subject(:importer) { described_class.new(project, client, parallel: parallel) }
let(:parallel) { true }
- let(:project) { instance_double(Project, id: 4, import_source: 'foo/bar', import_state: nil) }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar', import_state: nil) }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:github_collaborator) do
@@ -74,6 +74,7 @@ RSpec.describe Gitlab::GithubImport::Importer::CollaboratorsImporter, feature_ca
describe '#parallel_import', :clean_gitlab_redis_cache do
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(client).to receive(:collaborators).with(project.import_source, affiliation: 'direct')
.and_return([github_collaborator])
allow(client).to receive(:collaborators).with(project.import_source, affiliation: 'outside')
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 7668451ad4e..bcd38e1e236 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- let(:client) { double(:client) }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:discussion_id) { 'b0fa404393eeebb4e82becb8104f238812bb1fe6' }
let(:created_at) { Time.new(2017, 1, 1, 12, 00).utc }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15).utc }
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 4e8066ecb69..1eb146ea958 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter, feature_category: :importers do
- let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
- let(:client) { double(:client) }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:github_comment) do
{
@@ -90,6 +90,10 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter, feature_catego
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each diff note in parallel' do
importer = described_class.new(project, client)
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
index bc14b81bd91..371e76efb75 100644
--- a/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedMilestone do
'actor' => { 'id' => user.id, 'login' => user.username },
'event' => event_type,
'commit_id' => nil,
- 'milestone_title' => milestone.title,
+ 'milestone_title' => milestone_title,
'issue_db_id' => issuable.id,
'created_at' => '2022-04-26 18:30:53 UTC',
'issue' => { 'number' => issuable.iid, pull_request: issuable.is_a?(MergeRequest) }
@@ -35,11 +35,23 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedMilestone do
end
shared_examples 'new event' do
- it 'creates a new milestone event' do
- expect { importer.execute(issue_event) }.to change { issuable.resource_milestone_events.count }
- .from(0).to(1)
- expect(issuable.resource_milestone_events.last)
- .to have_attributes(expected_event_attrs)
+ context 'when a matching milestone exists in GitLab' do
+ let(:milestone_title) { milestone.title }
+
+ it 'creates a new milestone event' do
+ expect { importer.execute(issue_event) }.to change { issuable.resource_milestone_events.count }
+ .from(0).to(1)
+ expect(issuable.resource_milestone_events.last)
+ .to have_attributes(expected_event_attrs)
+ end
+ end
+
+ context 'when a matching milestone does not exist in GitLab' do
+ let(:milestone_title) { 'A deleted milestone title' }
+
+ it 'does not create a new milestone event without a milestone' do
+ expect { importer.execute(issue_event) }.not_to change { issuable.resource_milestone_events.count }
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/events/merged_spec.rb b/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
new file mode 100644
index 00000000000..4ea62557dd6
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category: :importers do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:commit_id) { nil }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ 'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ 'actor' => { 'id' => user.id, 'login' => user.username },
+ 'event' => 'merged',
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'commit_id' => commit_id,
+ 'issue' => { 'number' => merge_request.iid, pull_request: true }
+ )
+ end
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(merge_request.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ end
+
+ it 'creates expected event and state event' do
+ importer.execute(issue_event)
+
+ expect(merge_request.events.count).to eq 1
+ expect(merge_request.events.first).to have_attributes(
+ project_id: project.id,
+ author_id: user.id,
+ target_id: merge_request.id,
+ target_type: merge_request.class.name,
+ action: 'merged',
+ created_at: issue_event.created_at,
+ updated_at: issue_event.created_at
+ )
+
+ expect(merge_request.resource_state_events.count).to eq 1
+ expect(merge_request.resource_state_events.first).to have_attributes(
+ user_id: user.id,
+ merge_request_id: merge_request.id,
+ state: 'merged',
+ created_at: issue_event.created_at,
+ close_after_error_tracking_resolve: false,
+ close_auto_resolve_prometheus_alert: false
+ )
+ end
+
+ context 'when commit ID is present' do
+ let!(:commit) { create(:commit, project: project) }
+ let(:commit_id) { commit.id }
+
+ it 'creates expected event and state event' do
+ importer.execute(issue_event)
+
+ expect(merge_request.events.count).to eq 1
+ state_event = merge_request.resource_state_events.last
+ expect(state_event.source_commit).to eq commit_id[0..40]
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
index 91121f3c3fc..2389489e867 100644
--- a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
@@ -2,20 +2,19 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab_redis_cache,
+ feature_category: :importers do
let(:importer) { described_class.new(issue_event, project, client) }
- let(:project) { create(:project) }
- let(:client) { instance_double('Gitlab::GithubImport::Client') }
- let(:user) { create(:user) }
- let(:issue) { create(:issue, project: project) }
+ let(:project) { build(:project) }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
'id' => 6501124486,
'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
- 'actor' => { 'id' => actor_id, 'login' => 'alice' },
+ 'actor' => { 'id' => 1, 'login' => 'alice' },
'event' => event_name,
'commit_id' => '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
'commit_url' =>
@@ -25,17 +24,13 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
)
end
- let(:actor_id) { user.id }
let(:event_name) { 'closed' }
shared_examples 'triggers specific event importer' do |importer_class|
it importer_class.name do
- specific_importer = double(importer_class.name) # rubocop:disable RSpec/VerifiedDoubles
-
- expect(importer_class)
- .to receive(:new).with(project, client)
- .and_return(specific_importer)
- expect(specific_importer).to receive(:execute).with(issue_event)
+ expect_next_instance_of(importer_class, project, client) do |importer|
+ expect(importer).to receive(:execute).with(issue_event)
+ end
importer.execute
end
@@ -45,85 +40,79 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
context "when it's closed issue event" do
let(:event_name) { 'closed' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::Closed
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Closed
end
context "when it's reopened issue event" do
let(:event_name) { 'reopened' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::Reopened
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Reopened
end
context "when it's labeled issue event" do
let(:event_name) { 'labeled' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedLabel
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedLabel
end
context "when it's unlabeled issue event" do
let(:event_name) { 'unlabeled' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedLabel
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedLabel
end
context "when it's renamed issue event" do
let(:event_name) { 'renamed' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::Renamed
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Renamed
end
context "when it's milestoned issue event" do
let(:event_name) { 'milestoned' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedMilestone
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedMilestone
end
context "when it's demilestoned issue event" do
let(:event_name) { 'demilestoned' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedMilestone
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedMilestone
end
context "when it's cross-referenced issue event" do
let(:event_name) { 'cross-referenced' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::CrossReferenced
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::CrossReferenced
end
context "when it's assigned issue event" do
let(:event_name) { 'assigned' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedAssignee
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedAssignee
end
context "when it's unassigned issue event" do
let(:event_name) { 'unassigned' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedAssignee
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedAssignee
end
context "when it's review_requested issue event" do
let(:event_name) { 'review_requested' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedReviewer
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedReviewer
end
context "when it's review_request_removed issue event" do
let(:event_name) { 'review_request_removed' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedReviewer
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedReviewer
+ end
+
+ context "when it's merged issue event" do
+ let(:event_name) { 'merged' }
+
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Merged
end
context "when it's unknown issue event" do
diff --git a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
index 9aba6a2b02c..f7ee6fee6dc 100644
--- a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_category: :importers do
subject(:importer) { described_class.new(project, client, parallel: parallel) }
- let(:project) { instance_double(Project, id: 4, import_source: 'foo/bar') }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:parallel) { true }
@@ -74,6 +74,10 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_cate
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each note in parallel' do
allow(importer).to receive(:each_object_to_import).and_yield(issue_event)
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
index 1bfdce04187..9451d1dfc37 100644
--- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter, feature_category: :importers do
- let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
- let(:client) { double(:client) }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
@@ -83,6 +83,10 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter, feature_category:
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each issue in parallel' do
importer = described_class.new(project, client)
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index 3f5ee68d264..a5ec39b4177 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -111,6 +111,10 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter, feature_categ
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each lfs object in parallel' do
importer = described_class.new(project, client)
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
index 8c93963f325..92d3071c826 100644
--- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::NotesImporter, feature_category: :importers do
- let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
- let(:client) { double(:client) }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:github_comment) do
{
@@ -76,6 +76,10 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter, feature_category:
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each note in parallel' do
importer = described_class.new(project, client)
diff --git a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
index 8e99585109b..b0892767fb3 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, feature_category: :importers do
subject(:importer) { described_class.new(project, client, parallel: parallel) }
- let(:project) { instance_double('Project', id: 4, import_source: 'foo/bar') }
- let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:parallel) { true }
let(:branches) do
@@ -112,7 +112,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, featur
end
it 'imports each protected branch in sequence' do
- protected_branch_importer = instance_double('Gitlab::GithubImport::Importer::ProtectedBranchImporter')
+ protected_branch_importer = instance_double(Gitlab::GithubImport::Importer::ProtectedBranchImporter)
expect(Gitlab::GithubImport::Importer::ProtectedBranchImporter)
.to receive(:new)
@@ -133,6 +133,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, featur
describe '#parallel_import', :clean_gitlab_redis_cache do
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(client).to receive(:branches).and_return(branches)
allow(client)
.to receive(:branch_protection)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
index 4321997815a..f5779f300b8 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
@@ -46,6 +46,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewsImporter, fe
let(:review) { { id: 1 } }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'fetches the pull requests reviews data' do
page = Struct.new(:objects, :number).new([review], 1)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index 10e413fdfe5..1a0adbbe3a3 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -93,6 +93,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each note in parallel' do
importer = described_class.new(project, client)
@@ -112,9 +116,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
end
describe '#each_object_to_import', :clean_gitlab_redis_cache do
- let(:importer) { described_class.new(project, client) }
-
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
page = double(:page, objects: [pull_request], number: 1)
expect(client)
@@ -127,6 +130,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
.and_yield(page)
end
+ let(:importer) { described_class.new(project, client) }
+
it 'yields every pull request to the supplied block' do
expect { |b| importer.each_object_to_import(&b) }
.to yield_with_args(pull_request)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
index 081d08edfb3..6fe0494d7cd 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
@@ -29,6 +29,10 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter d
let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'fetches data' do
expect(client)
.to receive(:each_page)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
index dde730d46d2..91f89f0779c 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter do
+RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter, feature_category: :importers do
let(:client) { double }
let_it_be(:project) { create(:project, :import_started, import_source: 'http://somegithub.com') }
@@ -101,14 +101,10 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
before do
- allow(client).to receive(:each_page)
- .once
- .with(
- :issue_timeline,
- project.import_source,
- issuable.iid,
- { state: 'all', sort: 'created', direction: 'asc', page: 1 }
- ).and_yield(page)
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ allow(client).to receive(:each_page).once.with(:issue_timeline,
+ project.import_source, issuable.iid, { state: 'all', sort: 'created', direction: 'asc', page: 1 }
+ ).and_yield(page)
end
context 'with issues' do
@@ -192,5 +188,18 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
expect(counter).to eq 0
end
end
+
+ context 'when event is not supported' do
+ let(:issue_event) do
+ struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
+ struct.new(id: 1, event: 'not_supported_event', created_at: '2022-04-26 18:30:53 UTC')
+ end
+
+ it "doesn't process this event" do
+ counter = 0
+ subject.each_object_to_import { counter += 1 }
+ expect(counter).to eq 0
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
index e1f65546e1d..88613244c8b 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
@@ -28,6 +28,10 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter
let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'fetches data' do
expect(client)
.to receive(:each_page)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
index 5523b97acc3..601cd7a8f15 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
@@ -29,6 +29,10 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesIm
let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'fetches data' do
expect(client)
.to receive(:each_page)
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
index 977fef95d64..3fe07923a50 100644
--- a/spec/lib/gitlab/github_import/issuable_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -48,34 +48,6 @@ RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache,
expect { finder.database_id }.to raise_error(TypeError)
end
- context 'with FF import_fallback_to_db_empty_cache disabled' do
- before do
- stub_feature_flags(import_fallback_to_db_empty_cache: false)
- end
-
- it 'returns nil if object does not exist' do
- missing_issue = double(:issue, issuable_type: 'MergeRequest', issuable_id: 999)
-
- expect(described_class.new(project, missing_issue).database_id).to be_nil
- end
-
- it 'does not fetch object id from database if not in cache' do
- expect(finder.database_id).to eq(nil)
- end
-
- it 'fetches object id from cache if present' do
- finder.cache_database_id(10)
-
- expect(finder.database_id).to eq(10)
- end
-
- it 'returns -1 if cache is -1' do
- finder.cache_database_id(-1)
-
- expect(finder.database_id).to eq(-1)
- end
- end
-
context 'when group is present' do
context 'when settings single_endpoint_notes_import is enabled' do
let(:single_endpoint_optional_stage) { true }
diff --git a/spec/lib/gitlab/github_import/job_delay_calculator_spec.rb b/spec/lib/gitlab/github_import/job_delay_calculator_spec.rb
new file mode 100644
index 00000000000..3ddf8136dcf
--- /dev/null
+++ b/spec/lib/gitlab/github_import/job_delay_calculator_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::JobDelayCalculator, feature_category: :importers do
+ let(:project) { build(:project) }
+
+ let(:importer_class) do
+ Class.new do
+ attr_reader :project
+
+ def initialize(project)
+ @project = project
+ end
+
+ include Gitlab::GithubImport::JobDelayCalculator
+ end
+ end
+
+ describe "#parallel_import_batch" do
+ subject { importer_class.new(project).parallel_import_batch }
+
+ it { is_expected.to eq({ size: 5000, delay: 1.minute }) }
+
+ context 'when `github_import_increased_concurrent_workers` feature flag is disabled' do
+ before do
+ stub_feature_flags(github_import_increased_concurrent_workers: false)
+ end
+
+ it { is_expected.to eq({ size: 1000, delay: 1.minute }) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/label_finder_spec.rb b/spec/lib/gitlab/github_import/label_finder_spec.rb
index e46595974d1..4c01e2b65da 100644
--- a/spec/lib/gitlab/github_import/label_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/label_finder_spec.rb
@@ -49,34 +49,6 @@ RSpec.describe Gitlab::GithubImport::LabelFinder, :clean_gitlab_redis_cache, fea
expect(finder.id_for(feature.name)).to eq(feature.id)
end
end
-
- context 'with FF import_fallback_to_db_empty_cache disabled' do
- before do
- stub_feature_flags(import_fallback_to_db_empty_cache: false)
- end
-
- it 'returns nil for a non existing label name' do
- expect(finder.id_for('kittens')).to be_nil
- end
-
- it 'does not fetch object id from database if not in cache' do
- expect(finder.id_for(feature.name)).to be_nil
- end
-
- it 'fetches object id from cache if present' do
- finder.build_cache
-
- expect(finder.id_for(feature.name)).to eq(feature.id)
- end
-
- it 'returns -1 if cache is -1' do
- key = finder.cache_key_for(bug.name)
-
- Gitlab::Cache::Import::Caching.write(key, -1)
-
- expect(finder.id_for(bug.name)).to eq(-1)
- end
- end
end
describe '#build_cache' do
diff --git a/spec/lib/gitlab/github_import/milestone_finder_spec.rb b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
index 62886981de1..91f1c3b8cb9 100644
--- a/spec/lib/gitlab/github_import/milestone_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
@@ -57,36 +57,6 @@ RSpec.describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache,
expect(finder.id_for(issuable)).to eq(milestone.id)
end
end
-
- context 'with FF import_fallback_to_db_empty_cache disabled' do
- before do
- stub_feature_flags(import_fallback_to_db_empty_cache: false)
- end
-
- it 'returns nil if object does not exist' do
- missing_issuable = double(:issuable, milestone_number: 999)
-
- expect(finder.id_for(missing_issuable)).to be_nil
- end
-
- it 'does not fetch object id from database if not in cache' do
- expect(finder.id_for(issuable)).to be_nil
- end
-
- it 'fetches object id from cache if present' do
- finder.build_cache
-
- expect(finder.id_for(issuable)).to eq(milestone.id)
- end
-
- it 'returns -1 if cache is -1' do
- key = finder.cache_key_for(milestone.iid)
-
- Gitlab::Cache::Import::Caching.write(key, -1)
-
- expect(finder.id_for(issuable)).to eq(-1)
- end
- end
end
describe '#build_cache' do
diff --git a/spec/lib/gitlab/github_import/object_counter_spec.rb b/spec/lib/gitlab/github_import/object_counter_spec.rb
index 964bdd6aad1..aa551195a35 100644
--- a/spec/lib/gitlab/github_import/object_counter_spec.rb
+++ b/spec/lib/gitlab/github_import/object_counter_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache, feature_category: :importers do
let_it_be(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://github.com/vim/vim.git') }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'validates the operation being incremented' do
expect { described_class.increment(project, :issue, :unknown) }
.to raise_error(ArgumentError, 'operation must be fetched or imported')
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index e0b1ff1bc33..3188206de5b 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -275,17 +275,10 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling, feature_category: :impo
let(:batch_delay) { 1.minute }
before do
- allow(importer)
- .to receive(:representation_class)
- .and_return(repr_class)
-
- allow(importer)
- .to receive(:sidekiq_worker_class)
- .and_return(worker_class)
-
- allow(repr_class)
- .to receive(:from_api_response)
- .with(object, {})
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ allow(importer).to receive(:representation_class).and_return(repr_class)
+ allow(importer).to receive(:sidekiq_worker_class).and_return(worker_class)
+ allow(repr_class).to receive(:from_api_response).with(object, {})
.and_return({ title: 'One' }, { title: 'Two' }, { title: 'Three' })
end
diff --git a/spec/lib/gitlab/github_import/representation/representable_spec.rb b/spec/lib/gitlab/github_import/representation/representable_spec.rb
new file mode 100644
index 00000000000..4bc495c40f5
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/representable_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::Representable, feature_category: :importers do
+ let(:representation_class) do
+ subject_module = described_class
+
+ Class.new do
+ include subject_module
+ end
+ end
+
+ let(:representable) { representation_class.new }
+
+ describe '#github_identifiers' do
+ subject(:github_identifiers) { representable.github_identifiers }
+
+ context 'when class does not define `#github_identifiers`' do
+ it 'tracks the error' do
+ error = NotImplementedError.new('Subclasses must implement #github_identifiers')
+
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
+ is_expected.to eq({})
+ end
+ end
+
+ context 'when class defines `#github_identifiers`' do
+ let(:representation_class) do
+ Class.new(super()) do
+ def github_identifiers
+ { id: 1 }
+ end
+ end
+ end
+
+ it 'does not track an exception and returns the identifiers' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ is_expected.to eq({ id: 1 })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/settings_spec.rb b/spec/lib/gitlab/github_import/settings_spec.rb
index de497bc6689..ea1526ca25f 100644
--- a/spec/lib/gitlab/github_import/settings_spec.rb
+++ b/spec/lib/gitlab/github_import/settings_spec.rb
@@ -62,12 +62,11 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
collaborators_import: false,
foo: :bar
},
- timeout_strategy: "optimistic",
- additional_access_tokens: %w[foo bar]
+ timeout_strategy: "optimistic"
}.stringify_keys
end
- it 'puts optional steps, timeout strategy & access tokens into projects import_data' do
+ it 'puts optional steps and timeout strategy into projects import_data' do
project.build_or_assign_import_data(credentials: { user: 'token' })
settings.write(data_input)
@@ -76,8 +75,6 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
.to eq optional_stages.stringify_keys
expect(project.import_data.data['timeout_strategy'])
.to eq("optimistic")
- expect(project.import_data.credentials.fetch(:additional_access_tokens))
- .to eq(data_input['additional_access_tokens'])
end
end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index 8453f002bc0..1721f470b33 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -11,8 +11,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1, group: nil) }
it 'returns a new Client with a custom token' do
- allow(project).to receive(:import_data)
-
expect(described_class::Client)
.to receive(:new)
.with('123', host: nil, parallel: true, per_page: 100)
@@ -26,7 +24,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
expect(project)
.to receive(:import_data)
.and_return(import_data)
- .twice
expect(described_class::Client)
.to receive(:new)
@@ -49,31 +46,12 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
described_class.ghost_user_id
end
end
-
- context 'when there are additional access tokens' do
- it 'returns a new ClientPool containing all tokens' do
- import_data = double(:import_data, credentials: { user: '123', additional_access_tokens: %w[foo bar] })
-
- expect(project)
- .to receive(:import_data)
- .and_return(import_data)
- .twice
-
- expect(described_class::ClientPool)
- .to receive(:new)
- .with(token_pool: %w[foo bar 123], host: nil, parallel: true, per_page: 100)
-
- described_class.new_client_for(project)
- end
- end
end
context 'GitHub Enterprise' do
let(:project) { double(:project, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git', group: nil) }
it 'returns a new Client with a custom token' do
- allow(project).to receive(:import_data)
-
expect(described_class::Client)
.to receive(:new)
.with('123', host: 'http://github.another-domain.com/api/v3', parallel: true, per_page: 100)
@@ -87,7 +65,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
expect(project)
.to receive(:import_data)
.and_return(import_data)
- .twice
expect(described_class::Client)
.to receive(:new)
diff --git a/spec/lib/gitlab/hook_data/project_builder_spec.rb b/spec/lib/gitlab/hook_data/project_builder_spec.rb
index f80faac563d..9d5eaf0608c 100644
--- a/spec/lib/gitlab/hook_data/project_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/project_builder_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::HookData::ProjectBuilder do
let_it_be(:user) { create(:user, name: 'John', email: 'john@example.com') }
- let_it_be(:namespace) { create(:namespace, owner: user) }
- let_it_be(:project) { create(:project, :internal, name: 'my_project', namespace: namespace) }
+ let_it_be(:user2) { create(:user, name: 'Peter') }
+ let_it_be(:user3_non_owner) { create(:user, name: 'Not_Owner') }
describe '#build' do
let(:data) { described_class.new(project).build(event) }
@@ -24,13 +24,13 @@ RSpec.describe Gitlab::HookData::ProjectBuilder do
expect(data[:created_at]).to eq(project.created_at.xmlschema)
expect(data[:updated_at]).to eq(project.updated_at.xmlschema)
- expect(data[:name]).to eq('my_project')
+ expect(data[:name]).to eq(project.name)
expect(data[:path]).to eq(project.path)
expect(data[:path_with_namespace]).to eq(project.full_path)
expect(data[:project_id]).to eq(project.id)
- expect(data[:owner_name]).to eq('John')
- expect(data[:owner_email]).to eq(_('[REDACTED]'))
- expect(data[:owners]).to contain_exactly({ name: 'John', email: _('[REDACTED]') })
+ expect(data[:owner_name]).to eq(owner_name)
+ expect(data[:owner_email]).to eq(owner_email)
+ expect(data[:owners]).to match_array(owners_data)
expect(data[:project_visibility]).to eq('internal')
end
end
@@ -48,40 +48,104 @@ RSpec.describe Gitlab::HookData::ProjectBuilder do
end
end
- context 'on create' do
- let(:event) { :create }
+ context 'the project is created in a personal namespace' do
+ let(:owner_name) { user.name }
+ let(:owner_email) { _('[REDACTED]') }
+ let(:owners_data) { [{ name: 'John', email: _('[REDACTED]') }, { name: 'Peter', email: _('[REDACTED]') }] }
+ let_it_be(:namespace) { create(:namespace, owner: user) }
+ let_it_be(:project) { create(:project, :internal, name: 'personal project', namespace: namespace) }
- it { expect(event_name).to eq('project_create') }
+ before_all do
+ project.add_owner(user2)
+ project.add_maintainer(user3_non_owner)
+ end
- it_behaves_like 'includes the required attributes'
- it_behaves_like 'does not include `old_path_with_namespace` attribute'
- end
+ context 'on create' do
+ let(:event) { :create }
- context 'on destroy' do
- let(:event) { :destroy }
+ it { expect(event_name).to eq('project_create') }
- it { expect(event_name).to eq('project_destroy') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
- it_behaves_like 'includes the required attributes'
- it_behaves_like 'does not include `old_path_with_namespace` attribute'
- end
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('project_destroy') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
- context 'on rename' do
- let(:event) { :rename }
+ context 'on rename' do
+ let(:event) { :rename }
- it { expect(event_name).to eq('project_rename') }
+ it { expect(event_name).to eq('project_rename') }
- it_behaves_like 'includes the required attributes'
- it_behaves_like 'includes `old_path_with_namespace` attribute'
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
+
+ context 'on transfer' do
+ let(:event) { :transfer }
+
+ it { expect(event_name).to eq('project_transfer') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
end
- context 'on transfer' do
- let(:event) { :transfer }
+ context 'the project is created in a group' do
+ let(:owner_name) { group.name }
+ let(:owner_email) { "" }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :internal, name: 'group project', namespace: group) }
+ let(:owners_data) { [{ name: 'John', email: _('[REDACTED]') }, { email: "[REDACTED]", name: "Peter" }] }
+
+ before_all do
+ group.add_owner(user)
+ group.add_owner(user2)
+ group.add_maintainer(user3_non_owner)
+ end
+
+ # Repeat the tests in the previous context
+ context 'on create' do
+ let(:event) { :create }
- it { expect(event_name).to eq('project_transfer') }
+ it { expect(event_name).to eq('project_create') }
- it_behaves_like 'includes the required attributes'
- it_behaves_like 'includes `old_path_with_namespace` attribute'
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('project_destroy') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
+
+ context 'on rename' do
+ let(:event) { :rename }
+
+ it { expect(event_name).to eq('project_rename') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
+
+ context 'on transfer' do
+ let(:event) { :transfer }
+
+ it { expect(event_name).to eq('project_transfer') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
end
end
end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index a9e0c6a3b92..3fc486a8984 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -32,6 +32,45 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
described_class.get('/path', allow_object_storage: true)
end
end
+
+ context 'when passing async:true' do
+ it 'calls Gitlab::HTTP_V2.get with default options and async:true' do
+ expect(Gitlab::HTTP_V2).to receive(:get)
+ .with('/path', default_options.merge(async: true))
+
+ described_class.get('/path', async: true)
+ end
+
+ it 'returns a Gitlab::HTTP_V2::LazyResponse object' do
+ stub_request(:get, 'http://example.org').to_return(status: 200, body: 'hello world')
+ result = described_class.get('http://example.org', async: true)
+
+ expect(result).to be_a(Gitlab::HTTP_V2::LazyResponse)
+
+ result.execute
+ result.wait
+
+ expect(result.value).to be_a(HTTParty::Response)
+ expect(result.value.body).to eq('hello world')
+ end
+
+ context 'when there is a DB call in the concurrent thread' do
+ it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/432145' do
+ stub_request(:get, 'http://example.org').to_return(status: 200, body: 'hello world')
+
+ result = described_class.get('http://example.org', async: true) do |_fragment|
+ User.first
+ end
+
+ result.execute
+ result.wait
+
+ expect { result.value }.to raise_error(Gitlab::Utils::ConcurrentRubyThreadIsUsedError,
+ "Cannot run 'db' if running from `Concurrent::Promise`.")
+ end
+ end
+ end
end
describe '.try_get' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 722b47ac9b8..688487df778 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -76,6 +76,8 @@ work_item_type:
- work_items
- widget_definitions
- enabled_widget_definitions
+- child_restrictions
+- allowed_child_types_by_name
events:
- author
- project
@@ -420,6 +422,7 @@ builds:
- job_artifacts_cluster_image_scanning
- job_artifacts_cyclonedx
- job_artifacts_requirements_v2
+- job_artifacts_repository_xray
- runner_manager
- runner_manager_build
- runner_session
@@ -521,6 +524,7 @@ container_repositories:
- name
project:
- catalog_resource
+- catalog_resource_sync_events
- catalog_resource_versions
- ci_components
- external_status_checks
@@ -570,7 +574,6 @@ project:
- google_play_integration
- pipelines_email_integration
- mattermost_slash_commands_integration
-- shimo_integration
- slack_slash_commands_integration
- harbor_integration
- irker_integration
@@ -831,6 +834,7 @@ project:
- target_branch_rules
- organization
- dora_performance_scores
+- xray_reports
award_emoji:
- awardable
- user
@@ -1053,6 +1057,7 @@ catalog_resource:
- project
- catalog_resource_components
- catalog_resource_versions
+ - catalog_resource_sync_events
catalog_resource_versions:
- project
- release
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index b6328994c5b..3efa33d8879 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -706,6 +706,7 @@ ProjectFeature:
- monitor_access_level
- infrastructure_access_level
- model_experiments_access_level
+- model_registry_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -820,7 +821,6 @@ ProjectSetting:
- allow_merge_on_skipped_pipeline
- only_allow_merge_if_all_status_checks_passed
- has_confluence
-- has_shimo
- has_vulnerabilities
ProtectedEnvironment:
- id
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index 19f17c9079d..48cdeee3d2f 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -74,38 +74,6 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
end
end
- describe '.import_table' do
- subject { described_class.import_table }
-
- describe 'Bitbucket cloud' do
- it 'returns the ParallelImporter' do
- is_expected.to include(
- described_class::ImportSource.new(
- 'bitbucket',
- 'Bitbucket Cloud',
- Gitlab::BitbucketImport::ParallelImporter
- )
- )
- end
-
- context 'when flag is disabled' do
- before do
- stub_feature_flags(bitbucket_parallel_importer: false)
- end
-
- it 'returns the legacy Importer' do
- is_expected.to include(
- described_class::ImportSource.new(
- 'bitbucket',
- 'Bitbucket Cloud',
- Gitlab::BitbucketImport::Importer
- )
- )
- end
- end
- end
- end
-
describe '.title' do
import_sources = {
'github' => 'GitHub',
diff --git a/spec/lib/gitlab/instrumentation/connection_pool_spec.rb b/spec/lib/gitlab/instrumentation/connection_pool_spec.rb
new file mode 100644
index 00000000000..b7cab2e9900
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/connection_pool_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'support/helpers/rails_helpers'
+
+RSpec.describe Gitlab::Instrumentation::ConnectionPool, feature_category: :redis do
+ let(:option) { { name: 'test', size: 5 } }
+ let(:pool) { ConnectionPool.new(option) { 'nothing' } }
+
+ let_it_be(:size_gauge_args) { [:gitlab_connection_pool_size, 'Size of connection pool', {}, :all] }
+ let_it_be(:available_gauge_args) do
+ [:gitlab_connection_pool_available_count,
+ 'Number of available connections in the pool', {}, :all]
+ end
+
+ subject(:checkout_pool) { pool.checkout }
+
+ describe '.checkout' do
+ let(:size_gauge_double) { instance_double(::Prometheus::Client::Gauge) }
+
+ context 'when tracking for the first time' do
+ it 'initialises gauges' do
+ expect(::Gitlab::Metrics).to receive(:gauge).with(*size_gauge_args).and_call_original
+ expect(::Gitlab::Metrics).to receive(:gauge).with(*available_gauge_args).and_call_original
+
+ checkout_pool
+ end
+ end
+
+ it 'sets the size gauge only once' do
+ expect(::Gitlab::Metrics.gauge(*size_gauge_args)).to receive(:set).with(
+ { pool_name: 'test', pool_key: anything, connection_class: "String" }, 5).once
+
+ checkout_pool
+ checkout_pool
+ end
+
+ context 'when tracking on subsequent calls' do
+ before do
+ pool.checkout # initialise instance variables
+ end
+
+ it 'uses memoized gauges' do
+ expect(::Gitlab::Metrics).not_to receive(:gauge).with(*size_gauge_args)
+ expect(::Gitlab::Metrics).not_to receive(:gauge).with(*available_gauge_args)
+
+ expect(pool.instance_variable_get(:@size_gauge)).not_to receive(:set)
+ .with({ pool_name: 'test', pool_key: anything, connection_class: "String" }, 5)
+ expect(pool.instance_variable_get(:@available_gauge)).to receive(:set)
+ .with({ pool_name: 'test', pool_key: anything, connection_class: "String" }, 4)
+
+ checkout_pool
+ end
+
+ context 'when pool name is omitted' do
+ let(:option) { {} }
+
+ it 'uses unknown name' do
+ expect(pool.instance_variable_get(:@size_gauge)).not_to receive(:set)
+ .with({ pool_name: 'unknown', pool_key: anything, connection_class: "String" }, 5)
+ expect(pool.instance_variable_get(:@available_gauge)).to receive(:set)
+ .with({ pool_name: 'unknown', pool_key: anything, connection_class: "String" }, 4)
+
+ checkout_pool
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/instrumentation/redis_base_spec.rb b/spec/lib/gitlab/instrumentation/redis_base_spec.rb
index 426997f6e86..f0854b38353 100644
--- a/spec/lib/gitlab/instrumentation/redis_base_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_base_spec.rb
@@ -222,4 +222,18 @@ RSpec.describe Gitlab::Instrumentation::RedisBase, :request_store do
instrumentation_class_a.log_exception(StandardError.new)
end
end
+
+ describe '.instance_count_connection_exception' do
+ before do
+ # initialise connection_exception_counter
+ instrumentation_class_a.instance_count_connection_exception(StandardError.new)
+ end
+
+ it 'counts connection exception' do
+ expect(instrumentation_class_a.instance_variable_get(:@connection_exception_counter)).to receive(:increment)
+ .with({ storage: instrumentation_class_a.storage_key, exception: 'Redis::ConnectionError' })
+
+ instrumentation_class_a.instance_count_connection_exception(Redis::ConnectionError.new)
+ end
+ end
end
diff --git a/spec/lib/gitlab/instrumentation/redis_helper_spec.rb b/spec/lib/gitlab/instrumentation/redis_helper_spec.rb
new file mode 100644
index 00000000000..54659ca2c02
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_helper_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Instrumentation::RedisHelper, :request_store, feature_category: :scalability do
+ include RedisHelpers
+
+ let(:minimal_test_class) do
+ Class.new do
+ include Gitlab::Instrumentation::RedisHelper
+ def initialize
+ @instrumentation_class = Gitlab::Instrumentation::Redis::Cache
+ end
+
+ def check_command(commands, pipelined)
+ instrument_call(commands, @instrumentation_class, pipelined) { 'empty block' }
+ end
+
+ def test_read(result)
+ measure_read_size(result, @instrumentation_class)
+ end
+
+ def test_write(command)
+ measure_write_size(command, @instrumentation_class)
+ end
+
+ def test_exclusion(commands)
+ exclude_from_apdex?(commands)
+ end
+ end
+ end
+
+ before do
+ stub_const("MinimalTestClass", minimal_test_class)
+ end
+
+ subject(:minimal_test_class_instance) { MinimalTestClass.new }
+
+ describe '.instrument_call' do
+ it 'instruments request count' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:instance_count_request).with(1)
+ expect(Gitlab::Instrumentation::Redis::Cache).not_to receive(:instance_count_pipelined_request)
+
+ minimal_test_class_instance.check_command([[:set, 'foo', 'bar']], false)
+ end
+
+ it 'performs cluster validation' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:redis_cluster_validate!).once
+
+ minimal_test_class_instance.check_command([[:set, 'foo', 'bar']], false)
+ end
+
+ context 'when command is not valid for Redis Cluster' do
+ before do
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:redis_cluster_validate!).and_return(false)
+ end
+
+ it 'reports cross slot request' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_cross_slot_request_count).once
+
+ minimal_test_class_instance.check_command([[:mget, 'foo', 'bar']], false)
+ end
+ end
+
+ context 'when an error is raised' do
+ # specific error behaviours are tested in spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
+ # this spec tests for the generic behaviour to verify that `ensure` works for any general error types
+ before do
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:instance_count_request)
+ .and_raise(StandardError)
+ end
+
+ it 'ensures duration is tracked' do
+ commands = [[:set, 'foo', 'bar']]
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:instance_observe_duration).once
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_request_count).with(1).once
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:add_duration).once
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:add_call_details).with(anything, commands).once
+
+ expect { minimal_test_class_instance.check_command(commands, false) }.to raise_error(StandardError)
+ end
+ end
+
+ context 'when pipelined' do
+ it 'instruments pipelined request count' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:instance_count_pipelined_request)
+
+ minimal_test_class_instance.check_command([[:get, '{user1}:bar'], [:get, '{user1}:foo']], true)
+ end
+ end
+ end
+
+ describe '.measure_read_size' do
+ it 'reads array' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_read_bytes).with(3).exactly(3).times
+
+ minimal_test_class_instance.test_read(%w[bar foo buz])
+ end
+
+ it 'reads Integer' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_read_bytes).with(4)
+
+ minimal_test_class_instance.test_read(1234)
+ end
+
+ it 'reads String' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_read_bytes).with(3)
+
+ minimal_test_class_instance.test_read('bar')
+ end
+ end
+
+ describe '.measure_write_size' do
+ it 'measures command size' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_write_bytes).with(9)
+
+ minimal_test_class_instance.test_write([:set, 'foo', 'bar'])
+ end
+
+ it 'accept array input' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_write_bytes).with((9 + 12))
+
+ minimal_test_class_instance.test_write([[:set, 'foo', 'bar'], [:lpush, 'que', 'item']])
+ end
+ end
+
+ describe '.exclude_from_apdex?' do
+ it 'returns false if all commands are allowed' do
+ expect(minimal_test_class_instance.test_exclusion([[:set, 'foo', 'bar'], [:lpush, 'que', 'item']])).to eq(false)
+ end
+
+ it 'returns true if any commands are banned' do
+ expect(minimal_test_class_instance.test_exclusion([[:brpop, 'foo', 2], [:lpush, 'que', 'item']])).to eq(true)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index 4168fdf5425..e9bd0056e5f 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -72,6 +72,25 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :request_store, featur
end
end
+ context 'when encountering connection exceptions within process' do
+ before do
+ redis_store_class.with do |redis|
+ allow(redis._client).to receive(:write).and_call_original
+ end
+ end
+
+ it 'counts connection exceptions' do
+ redis_store_class.with do |redis|
+ expect(redis._client).to receive(:write).with([:get, 'foobar']).and_raise(::Redis::ConnectionError)
+ end
+
+ expect(instrumentation_class).to receive(:instance_count_connection_exception)
+ .with(instance_of(Redis::ConnectionError)).and_call_original
+
+ redis_store_class.with { |redis| redis.call(:get, 'foobar') }
+ end
+ end
+
context 'when encountering exceptions' do
where(:case_name, :exception, :exception_counter) do
'generic exception' | Redis::CommandError | :instance_count_exception
diff --git a/spec/lib/gitlab/internal_events_spec.rb b/spec/lib/gitlab/internal_events_spec.rb
index 20625add292..7ac583b24ce 100644
--- a/spec/lib/gitlab/internal_events_spec.rb
+++ b/spec/lib/gitlab/internal_events_spec.rb
@@ -7,27 +7,42 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
include SnowplowHelpers
before do
+ allow(Gitlab::AppJsonLogger).to receive(:warn)
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
allow(redis).to receive(:incr)
allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_snowplow)
- allow(Gitlab::InternalEvents::EventDefinitions).to receive(:unique_property).and_return(:user)
+ allow(Gitlab::InternalEvents::EventDefinitions).to receive(:unique_property).and_return(unique_property)
allow(fake_snowplow).to receive(:event)
end
- def expect_redis_hll_tracking(event_name)
+ shared_examples 'an event that logs an error' do
+ it 'logs an error' do
+ described_class.track_event(event_name, **event_kwargs)
+
+ expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
+ .with(described_class::InvalidPropertyTypeError,
+ event_name: event_name,
+ kwargs: event_kwargs
+ )
+ end
+ end
+
+ def expect_redis_hll_tracking
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
.with(event_name, values: unique_value)
end
- def expect_redis_tracking(event_name)
- expect(redis).to have_received(:incr) do |redis_key|
- expect(redis_key).to end_with(event_name)
+ def expect_redis_tracking
+ call_index = 0
+ expect(redis).to have_received(:incr).twice do |redis_key|
+ expect(redis_key).to end_with(redis_arguments[call_index])
+ call_index += 1
end
end
- def expect_snowplow_tracking(event_name)
+ def expect_snowplow_tracking(expected_namespace = nil)
service_ping_context = Gitlab::Tracking::ServicePingContext
.new(data_source: :redis_hll, event: event_name)
.to_context
@@ -36,33 +51,125 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
expect(SnowplowTracker::SelfDescribingJson).to have_received(:new)
.with(service_ping_context[:schema], service_ping_context[:data]).at_least(:once)
- # Add test for creation of both contexts
- contexts = [instance_of(SnowplowTracker::SelfDescribingJson), instance_of(SnowplowTracker::SelfDescribingJson)]
+ expect(fake_snowplow).to have_received(:event) do |category, provided_event_name, args|
+ expect(category).to eq('InternalEventTracking')
+ expect(provided_event_name).to eq(event_name)
+
+ contexts = args[:context]&.map(&:to_json)
+
+ # Verify Standard Context
+ standard_context = contexts.find do |c|
+ c[:schema] == Gitlab::Tracking::StandardContext::GITLAB_STANDARD_SCHEMA_URL
+ end
+
+ validate_standard_context(standard_context, expected_namespace)
+
+ # Verify Service Ping context
+ service_ping_context = contexts.find { |c| c[:schema] == Gitlab::Tracking::ServicePingContext::SCHEMA_URL }
- expect(fake_snowplow).to have_received(:event)
- .with('InternalEventTracking', event_name, context: contexts)
+ validate_service_ping_context(service_ping_context)
+ end
+ end
+
+ def validate_standard_context(standard_context, expected_namespace)
+ namespace = expected_namespace || project&.namespace
+ expect(standard_context).not_to eq(nil)
+ expect(standard_context[:data][:user_id]).to eq(user&.id)
+ expect(standard_context[:data][:namespace_id]).to eq(namespace&.id)
+ expect(standard_context[:data][:project_id]).to eq(project&.id)
+ end
+
+ def validate_service_ping_context(service_ping_context)
+ expect(service_ping_context).not_to eq(nil)
+ expect(service_ping_context[:data][:data_source]).to eq(:redis_hll)
+ expect(service_ping_context[:data][:event_name]).to eq(event_name)
end
let_it_be(:user) { build(:user, id: 1) }
- let_it_be(:project) { build(:project, id: 2) }
- let_it_be(:namespace) { project.namespace }
+ let_it_be(:project_namespace) { build(:namespace, id: 2) }
+ let_it_be(:project) { build(:project, id: 3, namespace: project_namespace) }
let(:redis) { instance_double('Redis') }
let(:fake_snowplow) { instance_double(Gitlab::Tracking::Destinations::Snowplow) }
let(:event_name) { 'g_edit_by_web_ide' }
+ let(:unique_property) { :user }
let(:unique_value) { user.id }
+ let(:redis_arguments) { [event_name, Date.today.strftime('%G-%V')] }
+
+ context 'when only user is passed' do
+ let(:project) { nil }
+ let(:namespace) { nil }
+
+ it 'updated all tracking methods' do
+ described_class.track_event(event_name, user: user)
+
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking
+ end
+ end
+
+ context 'when namespace is passed' do
+ let(:namespace) { build(:namespace, id: 4) }
+
+ it 'uses id from namespace' do
+ described_class.track_event(event_name, user: user, project: project, namespace: namespace)
+
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(namespace)
+ end
+ end
+
+ context 'when namespace is not passed' do
+ let(:unique_property) { :namespace }
+ let(:unique_value) { project.namespace.id }
+
+ it 'uses id from projects namespace' do
+ described_class.track_event(event_name, user: user, project: project)
+
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(project.namespace)
+ end
+ end
+
+ context 'when arguments are invalid' do
+ context 'when user is not an instance of User' do
+ let(:user) { 'a_string' }
+
+ it_behaves_like 'an event that logs an error' do
+ let(:event_kwargs) { { user: user, project: project.id } }
+ end
+ end
+
+ context 'when project is not an instance of Project' do
+ let(:project) { 42 }
+
+ it_behaves_like 'an event that logs an error' do
+ let(:event_kwargs) { { user: user.id, project: project } }
+ end
+ end
+
+ context 'when namespace is not an instance of Namespace' do
+ let(:namespace) { false }
+
+ it_behaves_like 'an event that logs an error' do
+ let(:event_kwargs) { { user: user.id, namespace: namespace } }
+ end
+ end
+ end
it 'updates Redis, RedisHLL and Snowplow', :aggregate_failures do
- params = { user: user, project: project, namespace: namespace }
- described_class.track_event(event_name, **params)
+ described_class.track_event(event_name, user: user, project: project)
- expect_redis_tracking(event_name)
- expect_redis_hll_tracking(event_name)
- expect_snowplow_tracking(event_name) # Add test for arguments
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking
end
it 'rescues error', :aggregate_failures do
- params = { user: user, project: project, namespace: namespace }
+ params = { user: user, project: project }
error = StandardError.new("something went wrong")
allow(fake_snowplow).to receive(:event).and_raise(error)
@@ -83,12 +190,12 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
expect { described_class.track_event('unknown_event') }.not_to raise_error
end
- it 'logs error on missing property', :aggregate_failures do
+ it 'logs warning on missing property', :aggregate_failures do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
- expect_redis_tracking(event_name)
- expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
- .with(described_class::InvalidPropertyError, event_name: event_name, kwargs: { merge_request_id: 1 })
+ expect_redis_tracking
+ expect(Gitlab::AppJsonLogger).to have_received(:warn)
+ .with(message: /should be triggered with a named parameter/)
end
context 'when unique property is missing' do
@@ -100,7 +207,7 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'logs error on missing unique property', :aggregate_failures do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
- expect_redis_tracking(event_name)
+ expect_redis_tracking
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
end
end
@@ -119,27 +226,17 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'is used when logging to RedisHLL', :aggregate_failures do
described_class.track_event(event_name, user: user, project: project)
- expect_redis_tracking(event_name)
- expect_redis_hll_tracking(event_name)
- expect_snowplow_tracking(event_name)
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking
end
context 'when property is missing' do
it 'logs error' do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
- expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
- .with(described_class::InvalidPropertyError, event_name: event_name, kwargs: { merge_request_id: 1 })
- end
- end
-
- context 'when method does not exist on property', :aggregate_failures do
- it 'logs error on missing method' do
- expect { described_class.track_event(event_name, project: "a_string") }.not_to raise_error
-
- expect_redis_tracking(event_name)
- expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
- .with(described_class::InvalidMethodError, event_name: event_name, kwargs: { project: 'a_string' })
+ expect(Gitlab::AppJsonLogger).to have_received(:warn)
+ .with(message: /should be triggered with a named parameter/)
end
end
@@ -147,8 +244,8 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'logs to Redis and RedisHLL but not Snowplow' do
described_class.track_event(event_name, send_snowplow_event: false, user: user, project: project)
- expect_redis_tracking(event_name)
- expect_redis_hll_tracking(event_name)
+ expect_redis_tracking
+ expect_redis_hll_tracking
expect(fake_snowplow).not_to have_received(:event)
end
end
@@ -166,9 +263,75 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'logs to Redis and Snowplow but not RedisHLL', :aggregate_failures do
described_class.track_event(event_name, user: user, project: project)
- expect_redis_tracking(event_name)
- expect_snowplow_tracking(event_name)
+ expect_redis_tracking
+ expect_snowplow_tracking(project.namespace)
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to have_received(:track_event)
end
end
+
+ describe 'Product Analytics tracking' do
+ let(:app_id) { 'foobar' }
+ let(:url) { 'http://localhost:4000' }
+ let(:sdk_client) { instance_double('GitlabSDK::Client') }
+ let(:event_kwargs) { { user: user, project: project } }
+
+ before do
+ described_class.clear_memoization(:gitlab_sdk_client)
+
+ stub_env('GITLAB_ANALYTICS_ID', app_id)
+ stub_env('GITLAB_ANALYTICS_URL', url)
+ end
+
+ subject(:track_event) { described_class.track_event(event_name, **event_kwargs) }
+
+ shared_examples 'does not send a Product Analytics event' do
+ it 'does not call the Product Analytics Ruby SDK' do
+ expect(GitlabSDK::Client).not_to receive(:new)
+
+ track_event
+ end
+ end
+
+ context 'when internal_events_for_product_analytics FF is enabled' do
+ before do
+ stub_feature_flags(internal_events_for_product_analytics: true)
+
+ allow(GitlabSDK::Client)
+ .to receive(:new)
+ .with(app_id: app_id, host: url)
+ .and_return(sdk_client)
+ end
+
+ it 'calls Product Analytics Ruby SDK', :aggregate_failures do
+ expect(sdk_client).to receive(:identify).with(user.id)
+ expect(sdk_client).to receive(:track)
+ .with(event_name, { project_id: project.id, namespace_id: project.namespace.id })
+
+ track_event
+ end
+
+ context 'when GITLAB_ANALYTICS_ID is nil' do
+ let(:app_id) { nil }
+
+ it_behaves_like 'does not send a Product Analytics event'
+ end
+
+ context 'when GITLAB_ANALYTICS_URL is nil' do
+ let(:url) { nil }
+
+ it_behaves_like 'does not send a Product Analytics event'
+ end
+ end
+
+ context 'when internal_events_for_product_analytics FF is disabled' do
+ let(:app_id) { 'foobar' }
+ let(:url) { 'http://localhost:4000' }
+
+ before do
+ stub_feature_flags(internal_events_for_product_analytics: false)
+ end
+
+ it_behaves_like 'does not send a Product Analytics event'
+ end
+ end
end
diff --git a/spec/lib/gitlab/issuables_count_for_state_spec.rb b/spec/lib/gitlab/issuables_count_for_state_spec.rb
index cc4ebba863d..e85dc890cbf 100644
--- a/spec/lib/gitlab/issuables_count_for_state_spec.rb
+++ b/spec/lib/gitlab/issuables_count_for_state_spec.rb
@@ -72,7 +72,6 @@ RSpec.describe Gitlab::IssuablesCountForState do
let_it_be(:group) { create(:group) }
let(:cache_options) { { expires_in: 1.hour } }
- let(:cache_key) { ['group', group.id, 'issues'] }
let(:threshold) { described_class::THRESHOLD }
let(:states_count) { { opened: 1, closed: 1, all: 2 } }
let(:params) { {} }
@@ -95,9 +94,7 @@ RSpec.describe Gitlab::IssuablesCountForState do
end
end
- context 'with Issues' do
- let(:finder) { IssuesFinder.new(user, params) }
-
+ shared_examples 'calculating counts for issuables' do
it 'returns -1 for the requested state' do
allow(finder).to receive(:count_by_state).and_raise(ActiveRecord::QueryCanceled)
expect(Rails.cache).not_to receive(:write)
@@ -162,6 +159,20 @@ RSpec.describe Gitlab::IssuablesCountForState do
end
end
+ context 'with Issues' do
+ let(:finder) { IssuesFinder.new(user, params) }
+ let(:cache_key) { ['group', group.id, 'issues'] }
+
+ it_behaves_like 'calculating counts for issuables'
+ end
+
+ context 'with Work Items' do
+ let(:finder) { ::WorkItems::WorkItemsFinder.new(user, params) }
+ let(:cache_key) { ['group', group.id, 'work_items'] }
+
+ it_behaves_like 'calculating counts for issuables'
+ end
+
context 'with Merge Requests' do
let(:finder) { MergeRequestsFinder.new(user, params) }
diff --git a/spec/lib/gitlab/kas/client_spec.rb b/spec/lib/gitlab/kas/client_spec.rb
index e8884ce352f..f2745d940de 100644
--- a/spec/lib/gitlab/kas/client_spec.rb
+++ b/spec/lib/gitlab/kas/client_spec.rb
@@ -45,25 +45,25 @@ RSpec.describe Gitlab::Kas::Client do
expect(token).to receive(:audience=).with(described_class::JWT_AUDIENCE)
end
- describe '#get_connected_agents' do
+ describe '#get_connected_agents_by_agent_ids' do
let(:stub) { instance_double(Gitlab::Agent::AgentTracker::Rpc::AgentTracker::Stub) }
- let(:request) { instance_double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsRequest) }
- let(:response) { double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsResponse, agents: connected_agents) }
+ let(:request) { instance_double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsByAgentIdsRequest) }
+ let(:response) { double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsByAgentIdsResponse, agents: connected_agents) }
let(:connected_agents) { [double] }
- subject { described_class.new.get_connected_agents(project: project) }
+ subject { described_class.new.get_connected_agents_by_agent_ids(agent_ids: [agent.id]) }
before do
expect(Gitlab::Agent::AgentTracker::Rpc::AgentTracker::Stub).to receive(:new)
.with('example.kas.internal', :this_channel_is_insecure, timeout: described_class::TIMEOUT)
.and_return(stub)
- expect(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsRequest).to receive(:new)
- .with(project_id: project.id)
+ expect(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsByAgentIdsRequest).to receive(:new)
+ .with(agent_ids: [agent.id])
.and_return(request)
- expect(stub).to receive(:get_connected_agents)
+ expect(stub).to receive(:get_connected_agents_by_agent_ids)
.with(request, metadata: { 'authorization' => 'bearer test-token' })
.and_return(response)
end
diff --git a/spec/lib/gitlab/markdown_cache/redis/store_spec.rb b/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
index 07a87b245c2..3bda8dfc52e 100644
--- a/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Store, :clean_gitlab_redis_cache do
results = described_class.bulk_read([storable])
- expect(results[storable.cache_key].value.symbolize_keys)
+ expect(results[storable.cache_key].symbolize_keys)
.to eq(field_1_html: "hello", field_2_html: "world", cached_markdown_version: "1")
end
end
diff --git a/spec/lib/gitlab/memory/watchdog_spec.rb b/spec/lib/gitlab/memory/watchdog_spec.rb
index c442208617f..9f97ef99a5d 100644
--- a/spec/lib/gitlab/memory/watchdog_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog_spec.rb
@@ -178,19 +178,6 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category:
watchdog.call
end
- context 'when enforce_memory_watchdog ops toggle is off' do
- before do
- stub_feature_flags(enforce_memory_watchdog: false)
- end
-
- it 'always uses the NullHandler' do
- expect(handler).not_to receive(:call)
- expect(described_class::Handlers::NullHandler.instance).to receive(:call).and_return(true)
-
- watchdog.call
- end
- end
-
context 'when multiple monitors exceeds allowed number of strikes' do
before do
watchdog.configure do |config|
diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb
deleted file mode 100644
index e4f53ab3f49..00000000000
--- a/spec/lib/gitlab/metrics/system_spec.rb
+++ /dev/null
@@ -1,363 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Metrics::System do
- context 'when /proc files exist' do
- # Modified column 22 to be 1000 (starttime ticks)
- let(:proc_stat) do
- <<~SNIP
- 2095 (ruby) R 0 2095 2095 34818 2095 4194560 211267 7897 2 0 287 51 10 1 20 0 5 0 1000 566210560 80885 18446744073709551615 94736211292160 94736211292813 140720919612064 0 0 0 0 0 1107394127 0 0 0 17 3 0 0 0 0 0 94736211303768 94736211304544 94736226689024 140720919619473 140720919619513 140720919619513 140720919621604 0
- SNIP
- end
-
- # Fixtures pulled from:
- # Linux carbon 5.3.0-7648-generic #41~1586789791~19.10~9593806-Ubuntu SMP Mon Apr 13 17:50:40 UTC x86_64 x86_64 x86_64 GNU/Linux
- let(:proc_status) do
- # most rows omitted for brevity
- <<~SNIP
- Name: less
- VmHWM: 2468 kB
- VmRSS: 2468 kB
- RssAnon: 260 kB
- RssFile: 1024 kB
- SNIP
- end
-
- let(:proc_smaps_rollup) do
- # full snapshot
- <<~SNIP
- Rss: 2564 kB
- Pss: 503 kB
- Pss_Anon: 312 kB
- Pss_File: 191 kB
- Pss_Shmem: 0 kB
- Shared_Clean: 2100 kB
- Shared_Dirty: 0 kB
- Private_Clean: 152 kB
- Private_Dirty: 312 kB
- Referenced: 2564 kB
- Anonymous: 312 kB
- LazyFree: 0 kB
- AnonHugePages: 0 kB
- ShmemPmdMapped: 0 kB
- Shared_Hugetlb: 0 kB
- Private_Hugetlb: 0 kB
- Swap: 0 kB
- SwapPss: 0 kB
- Locked: 0 kB
- SNIP
- end
-
- let(:proc_limits) do
- # full snapshot
- <<~SNIP
- Limit Soft Limit Hard Limit Units
- Max cpu time unlimited unlimited seconds
- Max file size unlimited unlimited bytes
- Max data size unlimited unlimited bytes
- Max stack size 8388608 unlimited bytes
- Max core file size 0 unlimited bytes
- Max resident set unlimited unlimited bytes
- Max processes 126519 126519 processes
- Max open files 1024 1048576 files
- Max locked memory 67108864 67108864 bytes
- Max address space unlimited unlimited bytes
- Max file locks unlimited unlimited locks
- Max pending signals 126519 126519 signals
- Max msgqueue size 819200 819200 bytes
- Max nice priority 0 0
- Max realtime priority 0 0
- Max realtime timeout unlimited unlimited us
- SNIP
- end
-
- let(:mem_info) do
- # full snapshot
- <<~SNIP
- MemTotal: 15362536 kB
- MemFree: 3403136 kB
- MemAvailable: 13044528 kB
- Buffers: 272188 kB
- Cached: 8171312 kB
- SwapCached: 0 kB
- Active: 3332084 kB
- Inactive: 6981076 kB
- Active(anon): 1603868 kB
- Inactive(anon): 9044 kB
- Active(file): 1728216 kB
- Inactive(file): 6972032 kB
- Unevictable: 18676 kB
- Mlocked: 18676 kB
- SwapTotal: 0 kB
- SwapFree: 0 kB
- Dirty: 6808 kB
- Writeback: 0 kB
- AnonPages: 1888300 kB
- Mapped: 166164 kB
- Shmem: 12932 kB
- KReclaimable: 1275120 kB
- Slab: 1495480 kB
- SReclaimable: 1275120 kB
- SUnreclaim: 220360 kB
- KernelStack: 7072 kB
- PageTables: 11936 kB
- NFS_Unstable: 0 kB
- Bounce: 0 kB
- WritebackTmp: 0 kB
- CommitLimit: 7681268 kB
- Committed_AS: 4976100 kB
- VmallocTotal: 34359738367 kB
- VmallocUsed: 25532 kB
- VmallocChunk: 0 kB
- Percpu: 23200 kB
- HardwareCorrupted: 0 kB
- AnonHugePages: 202752 kB
- ShmemHugePages: 0 kB
- ShmemPmdMapped: 0 kB
- FileHugePages: 0 kB
- FilePmdMapped: 0 kB
- CmaTotal: 0 kB
- CmaFree: 0 kB
- HugePages_Total: 0
- HugePages_Free: 0
- HugePages_Rsvd: 0
- HugePages_Surp: 0
- Hugepagesize: 2048 kB
- Hugetlb: 0 kB
- DirectMap4k: 4637504 kB
- DirectMap2M: 11087872 kB
- DirectMap1G: 2097152 kB
- SNIP
- end
-
- describe '.memory_usage_rss' do
- context 'without PID' do
- it "returns a hash containing RSS metrics in bytes for current process" do
- mock_existing_proc_file('/proc/self/status', proc_status)
-
- expect(described_class.memory_usage_rss).to eq(
- total: 2527232,
- anon: 266240,
- file: 1048576
- )
- end
- end
-
- context 'with PID' do
- it "returns a hash containing RSS metrics in bytes for given process" do
- mock_existing_proc_file('/proc/7/status', proc_status)
-
- expect(described_class.memory_usage_rss(pid: 7)).to eq(
- total: 2527232,
- anon: 266240,
- file: 1048576
- )
- end
- end
- end
-
- describe '.file_descriptor_count' do
- it 'returns the amount of open file descriptors' do
- expect(Dir).to receive(:glob).and_return(['/some/path', '/some/other/path'])
-
- expect(described_class.file_descriptor_count).to eq(2)
- end
- end
-
- describe '.max_open_file_descriptors' do
- it 'returns the max allowed open file descriptors' do
- mock_existing_proc_file('/proc/self/limits', proc_limits)
-
- expect(described_class.max_open_file_descriptors).to eq(1024)
- end
- end
-
- describe '.memory_usage_uss_pss' do
- context 'without PID' do
-      it "returns the current process' unique and proportional set size (USS/PSS) in bytes" do
- mock_existing_proc_file('/proc/self/smaps_rollup', proc_smaps_rollup)
-
- # (Private_Clean (152 kB) + Private_Dirty (312 kB) + Private_Hugetlb (0 kB)) * 1024
- expect(described_class.memory_usage_uss_pss).to eq(uss: 475136, pss: 515072)
- end
- end
-
- context 'with PID' do
-      it "returns the given process' unique and proportional set size (USS/PSS) in bytes" do
- mock_existing_proc_file('/proc/7/smaps_rollup', proc_smaps_rollup)
-
- # (Private_Clean (152 kB) + Private_Dirty (312 kB) + Private_Hugetlb (0 kB)) * 1024
- expect(described_class.memory_usage_uss_pss(pid: 7)).to eq(uss: 475136, pss: 515072)
- end
- end
- end
-
- describe '.memory_total' do
- it "returns the current process' resident set size (RSS) in bytes" do
- mock_existing_proc_file('/proc/meminfo', mem_info)
-
- expect(described_class.memory_total).to eq(15731236864)
- end
- end
-
- describe '.process_runtime_elapsed_seconds' do
- it 'returns the seconds elapsed since the process was started' do
- # sets process starttime ticks to 1000
- mock_existing_proc_file('/proc/self/stat', proc_stat)
- # system clock ticks/sec
- expect(Etc).to receive(:sysconf).with(Etc::SC_CLK_TCK).and_return(100)
- # system uptime in seconds
- expect(::Process).to receive(:clock_gettime).and_return(15)
-
- # uptime - (starttime_ticks / ticks_per_sec)
- expect(described_class.process_runtime_elapsed_seconds).to eq(5)
- end
-
- context 'when inputs are not available' do
- it 'returns 0' do
- mock_missing_proc_file
- expect(::Process).to receive(:clock_gettime).and_raise(NameError)
-
- expect(described_class.process_runtime_elapsed_seconds).to eq(0)
- end
- end
- end
-
- describe '.summary' do
- it 'contains a selection of the available fields' do
- stub_const('RUBY_DESCRIPTION', 'ruby-3.0-patch1')
- mock_existing_proc_file('/proc/self/status', proc_status)
- mock_existing_proc_file('/proc/self/smaps_rollup', proc_smaps_rollup)
-
- summary = described_class.summary
-
- expect(summary[:version]).to eq('ruby-3.0-patch1')
- expect(summary[:gc_stat].keys).to eq(GC.stat.keys)
- expect(summary[:memory_rss]).to eq(2527232)
- expect(summary[:memory_uss]).to eq(475136)
- expect(summary[:memory_pss]).to eq(515072)
- expect(summary[:time_cputime]).to be_a(Float)
- expect(summary[:time_realtime]).to be_a(Float)
- expect(summary[:time_monotonic]).to be_a(Float)
- end
- end
- end
-
- context 'when /proc files do not exist' do
- before do
- mock_missing_proc_file
- end
-
- describe '.memory_usage_rss' do
- it 'returns 0 for all components' do
- expect(described_class.memory_usage_rss).to eq(
- total: 0,
- anon: 0,
- file: 0
- )
- end
- end
-
- describe '.memory_usage_uss_pss' do
- it "returns 0 for all components" do
- expect(described_class.memory_usage_uss_pss).to eq(uss: 0, pss: 0)
- end
- end
-
- describe '.file_descriptor_count' do
- it 'returns 0' do
- expect(Dir).to receive(:glob).and_return([])
-
- expect(described_class.file_descriptor_count).to eq(0)
- end
- end
-
- describe '.max_open_file_descriptors' do
- it 'returns 0' do
- expect(described_class.max_open_file_descriptors).to eq(0)
- end
- end
-
- describe '.summary' do
- it 'returns only available fields' do
- summary = described_class.summary
-
- expect(summary[:version]).to be_a(String)
- expect(summary[:gc_stat].keys).to eq(GC.stat.keys)
- expect(summary[:memory_rss]).to eq(0)
- expect(summary[:memory_uss]).to eq(0)
- expect(summary[:memory_pss]).to eq(0)
- expect(summary[:time_cputime]).to be_a(Float)
- expect(summary[:time_realtime]).to be_a(Float)
- expect(summary[:time_monotonic]).to be_a(Float)
- end
- end
- end
-
- describe '.cpu_time' do
- it 'returns a Float' do
- expect(described_class.cpu_time).to be_an(Float)
- end
- end
-
- describe '.real_time' do
- it 'returns a Float' do
- expect(described_class.real_time).to be_an(Float)
- end
- end
-
- describe '.monotonic_time' do
- it 'returns a Float' do
- expect(described_class.monotonic_time).to be_an(Float)
- end
- end
-
- describe '.thread_cpu_time' do
- it 'returns cpu_time on supported platform' do
- stub_const("Process::CLOCK_THREAD_CPUTIME_ID", 16)
-
- expect(Process).to receive(:clock_gettime)
- .with(16, kind_of(Symbol)) { 0.111222333 }
-
- expect(described_class.thread_cpu_time).to eq(0.111222333)
- end
-
- it 'returns nil on unsupported platform' do
- hide_const("Process::CLOCK_THREAD_CPUTIME_ID")
-
- expect(described_class.thread_cpu_time).to be_nil
- end
- end
-
- describe '.thread_cpu_duration' do
- let(:start_time) { described_class.thread_cpu_time }
-
- it 'returns difference between start and current time' do
- stub_const("Process::CLOCK_THREAD_CPUTIME_ID", 16)
-
- expect(Process).to receive(:clock_gettime)
- .with(16, kind_of(Symbol))
- .and_return(
- 0.111222333,
- 0.222333833
- )
-
- expect(described_class.thread_cpu_duration(start_time)).to eq(0.1111115)
- end
-
- it 'returns nil on unsupported platform' do
- hide_const("Process::CLOCK_THREAD_CPUTIME_ID")
-
- expect(described_class.thread_cpu_duration(start_time)).to be_nil
- end
- end
-
- def mock_existing_proc_file(path, content)
- allow(File).to receive(:open).with(path) { |_path, &block| block.call(StringIO.new(content)) }
- end
-
- def mock_missing_proc_file
- allow(File).to receive(:open).and_raise(Errno::ENOENT)
- end
-end
diff --git a/spec/lib/gitlab/middleware/path_traversal_check_spec.rb b/spec/lib/gitlab/middleware/path_traversal_check_spec.rb
index 91081cc88ea..a0d7711c881 100644
--- a/spec/lib/gitlab/middleware/path_traversal_check_spec.rb
+++ b/spec/lib/gitlab/middleware/path_traversal_check_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shared do
using RSpec::Parameterized::TableSyntax
- let(:fake_response) { [200, { 'Content-Type' => 'text/plain' }, ['OK']] }
+ let(:fake_response_status) { 200 }
+ let(:fake_response) { [fake_response_status, { 'Content-Type' => 'text/plain' }, ['OK']] }
let(:fake_app) { ->(_) { fake_response } }
let(:middleware) { described_class.new(fake_app) }
@@ -31,8 +32,11 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
.and_call_original
expect(::Gitlab::AppLogger)
.to receive(:warn)
- .with({ class_name: described_class.name, duration_ms: instance_of(Float) })
- .and_call_original
+ .with({
+ class_name: described_class.name,
+ duration_ms: instance_of(Float),
+ status: fake_response_status
+ }).and_call_original
expect(subject).to eq(fake_response)
end
@@ -61,8 +65,11 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
.not_to receive(:check_path_traversal!)
expect(::Gitlab::AppLogger)
.to receive(:warn)
- .with({ class_name: described_class.name, duration_ms: instance_of(Float) })
- .and_call_original
+ .with({
+ class_name: described_class.name,
+ duration_ms: instance_of(Float),
+ status: fake_response_status
+ }).and_call_original
expect(subject).to eq(fake_response)
end
@@ -99,7 +106,8 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
duration_ms: instance_of(Float),
message: described_class::PATH_TRAVERSAL_MESSAGE,
fullpath: fullpath,
- method: method.upcase
+ method: method.upcase,
+ status: fake_response_status
}).and_call_original
expect(subject).to eq(fake_response)
@@ -124,7 +132,8 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
class_name: described_class.name,
message: described_class::PATH_TRAVERSAL_MESSAGE,
fullpath: fullpath,
- method: method.upcase
+ method: method.upcase,
+ status: fake_response_status
}).and_call_original
expect(subject).to eq(fake_response)
diff --git a/spec/lib/gitlab/middleware/request_context_spec.rb b/spec/lib/gitlab/middleware/request_context_spec.rb
index cd21209bcee..a0a6609c8bb 100644
--- a/spec/lib/gitlab/middleware/request_context_spec.rb
+++ b/spec/lib/gitlab/middleware/request_context_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
require 'rack'
require 'request_store'
-require_relative '../../../support/helpers/next_instance_of'
+require 'gitlab/rspec/next_instance_of'
RSpec.describe Gitlab::Middleware::RequestContext, feature_category: :application_instrumentation do
include NextInstanceOf
diff --git a/spec/lib/gitlab/nav/top_nav_menu_header_spec.rb b/spec/lib/gitlab/nav/top_nav_menu_header_spec.rb
deleted file mode 100644
index d9da3ba1e46..00000000000
--- a/spec/lib/gitlab/nav/top_nav_menu_header_spec.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe ::Gitlab::Nav::TopNavMenuHeader do
- describe '.build' do
-    it 'builds a hash with the given header' do
- title = 'Test Header'
- expected = {
- title: title,
- type: :header
- }
- expect(described_class.build(title: title)).to eq(expected)
- end
- end
-end
diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb
index 9b46b8eccc8..222a730a229 100644
--- a/spec/lib/gitlab/omniauth_initializer_spec.rb
+++ b/spec/lib/gitlab/omniauth_initializer_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::OmniauthInitializer do
+RSpec.describe Gitlab::OmniauthInitializer, feature_category: :system_access do
+ include LoginHelpers
+
let(:devise_config) { class_double(Devise) }
subject(:initializer) { described_class.new(devise_config) }
@@ -224,6 +226,119 @@ RSpec.describe Gitlab::OmniauthInitializer do
subject.execute([shibboleth_config])
end
+ context 'when SAML providers are configured' do
+ it 'configures default args for a single SAML provider' do
+ stub_omniauth_config(providers: [{ name: 'saml', args: { idp_sso_service_url: 'https://saml.example.com' } }])
+
+ expect(devise_config).to receive(:omniauth).with(
+ :saml,
+ {
+ idp_sso_service_url: 'https://saml.example.com',
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+
+ context 'when configuration provides matching keys' do
+ before do
+ stub_omniauth_config(
+ providers: [
+ {
+ name: 'saml',
+ args: { idp_sso_service_url: 'https://saml.example.com', attribute_statements: { email: ['custom_attr'] } }
+ }
+ ]
+ )
+ end
+
+ it 'merges arguments with user configuration preference' do
+ expect(devise_config).to receive(:omniauth).with(
+ :saml,
+ {
+ idp_sso_service_url: 'https://saml.example.com',
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ .merge({ email: ['custom_attr'] })
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+
+ it 'merges arguments with defaults preference when REVERT_OMNIAUTH_DEFAULT_MERGING is true' do
+ stub_env('REVERT_OMNIAUTH_DEFAULT_MERGING', 'true')
+
+ expect(devise_config).to receive(:omniauth).with(
+ :saml,
+ {
+ idp_sso_service_url: 'https://saml.example.com',
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+ end
+
+ it 'configures defaults args for multiple SAML providers' do
+ stub_omniauth_config(
+ providers: [
+ { name: 'saml', args: { idp_sso_service_url: 'https://saml.example.com' } },
+ {
+ name: 'saml2',
+ args: { strategy_class: 'OmniAuth::Strategies::SAML', idp_sso_service_url: 'https://saml2.example.com' }
+ }
+ ]
+ )
+
+ expect(devise_config).to receive(:omniauth).with(
+ :saml,
+ {
+ idp_sso_service_url: 'https://saml.example.com',
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ }
+ )
+ expect(devise_config).to receive(:omniauth).with(
+ :saml2,
+ {
+ idp_sso_service_url: 'https://saml2.example.com',
+ strategy_class: OmniAuth::Strategies::SAML,
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+
+ it 'merges arguments with user configuration preference for custom SAML provider' do
+ stub_omniauth_config(
+ providers: [
+ {
+ name: 'custom_saml',
+ args: {
+ strategy_class: 'OmniAuth::Strategies::SAML',
+ idp_sso_service_url: 'https://saml2.example.com',
+ attribute_statements: { email: ['custom_attr'] }
+ }
+ }
+ ]
+ )
+
+ expect(devise_config).to receive(:omniauth).with(
+ :custom_saml,
+ {
+ idp_sso_service_url: 'https://saml2.example.com',
+ strategy_class: OmniAuth::Strategies::SAML,
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ .merge({ email: ['custom_attr'] })
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+ end
+
it 'configures defaults for google_oauth2' do
google_config = {
'name' => 'google_oauth2',
diff --git a/spec/lib/gitlab/pages/deployment_update_spec.rb b/spec/lib/gitlab/pages/deployment_update_spec.rb
index 9a7564ddd59..48f5b27b761 100644
--- a/spec/lib/gitlab/pages/deployment_update_spec.rb
+++ b/spec/lib/gitlab/pages/deployment_update_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Pages::DeploymentUpdate, feature_category: :pages do
let_it_be(:project, refind: true) { create(:project, :repository) }
- let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
+ let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD~~').sha) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD~').sha) }
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload('spec/fixtures/dk.png') }
@@ -137,4 +137,35 @@ RSpec.describe Gitlab::Pages::DeploymentUpdate, feature_category: :pages do
expect(pages_deployment_update).to be_valid
end
end
+
+ context 'when validating if current build is outdated' do
+ before do
+ create(:ci_job_artifact, :correct_checksum, file: file, job: build)
+ create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)
+ build.reload
+ end
+
+ context 'when there is NOT a newer build' do
+ it 'does not fail' do
+ expect(pages_deployment_update).to be_valid
+ end
+ end
+
+ context 'when there is a newer build' do
+ before do
+ new_pipeline = create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha)
+ new_build = create(:ci_build, name: 'pages', pipeline: new_pipeline, ref: 'HEAD')
+ create(:ci_job_artifact, :correct_checksum, file: file, job: new_build)
+ create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: new_build)
+ create(:pages_deployment, project: project, ci_build: new_build)
+ new_build.reload
+ end
+
+ it 'fails with outdated reference message' do
+ expect(pages_deployment_update).not_to be_valid
+ expect(pages_deployment_update.errors.full_messages)
+ .to include('build SHA is outdated for this ref')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/pages/url_builder_spec.rb b/spec/lib/gitlab/pages/url_builder_spec.rb
index ae94bbadffe..1a97ca01c3e 100644
--- a/spec/lib/gitlab/pages/url_builder_spec.rb
+++ b/spec/lib/gitlab/pages/url_builder_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
let(:project_public) { true }
let(:unique_domain) { 'unique-domain' }
let(:unique_domain_enabled) { false }
+ let(:namespace_in_path) { false }
let(:project_setting) do
instance_double(
@@ -43,7 +44,8 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
protocol: 'http',
artifacts_server: artifacts_server,
access_control: access_control,
- port: port
+ port: port,
+ namespace_in_path: namespace_in_path
)
end
@@ -52,63 +54,131 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
it { is_expected.to eq('http://group.example.com/project') }
- context 'when namespace is upper cased' do
- let(:full_path) { 'Group/project' }
+ context 'when namespace_in_path is false' do
+ let(:namespace_in_path) { false }
- it { is_expected.to eq('http://group.example.com/project') }
- end
+ context 'when namespace is upper cased' do
+ let(:full_path) { 'Group/project' }
- context 'when project is in a nested group page' do
- let(:full_path) { 'group/subgroup/project' }
+ it { is_expected.to eq('http://group.example.com/project') }
+ end
- it { is_expected.to eq('http://group.example.com/subgroup/project') }
- end
+ context 'when project is in a nested group page' do
+ let(:full_path) { 'group/subgroup/project' }
+
+ it { is_expected.to eq('http://group.example.com/subgroup/project') }
+ end
+
+ context 'when using domain pages' do
+ let(:full_path) { 'group/group.example.com' }
+
+ it { is_expected.to eq('http://group.example.com') }
+
+ context 'in development mode' do
+ let(:port) { 3010 }
+
+ before do
+ stub_rails_env('development')
+ end
+
+ it { is_expected.to eq('http://group.example.com:3010') }
+ end
+ end
+
+ context 'when not using pages_unique_domain' do
+ subject(:pages_url) { builder.pages_url(with_unique_domain: false) }
- context 'when using domain pages' do
- let(:full_path) { 'group/group.example.com' }
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
- it { is_expected.to eq('http://group.example.com') }
+ it { is_expected.to eq('http://group.example.com/project') }
+ end
- context 'in development mode' do
- let(:port) { 3010 }
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
- before do
- stub_rails_env('development')
+ it { is_expected.to eq('http://group.example.com/project') }
end
+ end
+
+ context 'when using pages_unique_domain' do
+ subject(:pages_url) { builder.pages_url(with_unique_domain: true) }
+
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
+
+ it { is_expected.to eq('http://group.example.com/project') }
+ end
+
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
- it { is_expected.to eq('http://group.example.com:3010') }
+ it { is_expected.to eq('http://unique-domain.example.com') }
+ end
end
end
- context 'when not using pages_unique_domain' do
- subject(:pages_url) { builder.pages_url(with_unique_domain: false) }
+ context 'when namespace_in_path is true' do
+ let(:namespace_in_path) { true }
- context 'when pages_unique_domain_enabled is false' do
- let(:unique_domain_enabled) { false }
+ context 'when namespace is upper cased' do
+ let(:full_path) { 'Group/project' }
- it { is_expected.to eq('http://group.example.com/project') }
+ it { is_expected.to eq('http://example.com/group/project') }
end
- context 'when pages_unique_domain_enabled is true' do
- let(:unique_domain_enabled) { true }
+ context 'when project is in a nested group page' do
+ let(:full_path) { 'group/subgroup/project' }
- it { is_expected.to eq('http://group.example.com/project') }
+ it { is_expected.to eq('http://example.com/group/subgroup/project') }
end
- end
- context 'when using pages_unique_domain' do
- subject(:pages_url) { builder.pages_url(with_unique_domain: true) }
+ context 'when using domain pages' do
+ let(:full_path) { 'group/group.example.com' }
- context 'when pages_unique_domain_enabled is false' do
- let(:unique_domain_enabled) { false }
+ it { is_expected.to eq('http://example.com/group/group.example.com') }
- it { is_expected.to eq('http://group.example.com/project') }
+ context 'in development mode' do
+ let(:port) { 3010 }
+
+ before do
+ stub_rails_env('development')
+ end
+
+ it { is_expected.to eq('http://example.com:3010/group/group.example.com') }
+ end
+ end
+
+ context 'when not using pages_unique_domain' do
+ subject(:pages_url) { builder.pages_url(with_unique_domain: false) }
+
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
+
+ it { is_expected.to eq('http://example.com/group/project') }
+ end
+
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
+
+ it { is_expected.to eq('http://example.com/group/project') }
+ end
end
- context 'when pages_unique_domain_enabled is true' do
- let(:unique_domain_enabled) { true }
+ context 'when using pages_unique_domain' do
+ subject(:pages_url) { builder.pages_url(with_unique_domain: true) }
- it { is_expected.to eq('http://unique-domain.example.com') }
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
+
+ it { is_expected.to eq('http://example.com/group/project') }
+ end
+
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
+
+ it { is_expected.to eq('http://example.com/unique-domain') }
+ end
end
end
end
@@ -122,6 +192,12 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
it { is_expected.to be_nil }
end
+ context 'when namespace_in_path is true' do
+ let(:namespace_in_path) { true }
+
+ it { is_expected.to be_nil }
+ end
+
context 'when pages_unique_domain_enabled is true' do
let(:unique_domain_enabled) { true }
@@ -157,6 +233,19 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
it { is_expected.to eq("http://group.example.com:1234/-/project/-/jobs/1/artifacts/path/file.txt") }
end
end
+
+ context 'with namespace_in_path enabled and allowed extension' do
+ let(:artifact_name) { 'file.txt' }
+ let(:namespace_in_path) { true }
+
+ it { is_expected.to eq("http://example.com/group/-/project/-/jobs/1/artifacts/path/file.txt") }
+
+ context 'when port is configured' do
+ let(:port) { 1234 }
+
+ it { is_expected.to eq("http://example.com:1234/group/-/project/-/jobs/1/artifacts/path/file.txt") }
+ end
+ end
end
describe '#artifact_url_available?' do
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
index e5958549a81..009c7299e9e 100644
--- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -6,24 +6,24 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
subject { described_class }
describe '.available_for_type?' do
- it 'returns true for Group' do
- expect(subject.available_for_type?(Group.all)).to be_truthy
- end
+    it 'returns true when the class implements .supported_keyset_orderings' do
+ model = Class.new(ApplicationRecord) do
+ self.table_name = 'users'
- it 'returns true for Ci::Build' do
- expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
- end
+ def self.supported_keyset_orderings
+ { id: [:desc] }
+ end
+ end
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
+ expect(subject.available_for_type?(model.all)).to eq(true)
end
- it 'returns true for User' do
- expect(subject.available_for_type?(User.all)).to be_truthy
- end
+    it 'returns false when the class does not implement .supported_keyset_orderings' do
+ model = Class.new(ApplicationRecord) do
+ self.table_name = 'users'
+ end
- it 'return false for other types of relations' do
- expect(subject.available_for_type?(Issue.all)).to be_falsey
+ expect(subject.available_for_type?(model.all)).to eq(false)
end
end
@@ -68,53 +68,54 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
describe '.available?' do
let(:request_context) { double('request_context', params: { order_by: order_by, sort: sort }) }
let(:cursor_based_request_context) { Gitlab::Pagination::Keyset::CursorBasedRequestContext.new(request_context) }
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'users'
- context 'with order-by name asc' do
- let(:order_by) { :name }
- let(:sort) { :asc }
-
- it 'returns true for Group' do
- expect(subject.available?(cursor_based_request_context, Group.all)).to be_truthy
- end
-
- it 'return false for other types of relations' do
- expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
- expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
- expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_falsey
+ def self.supported_keyset_orderings
+ { id: [:desc] }
+ end
end
end
- context 'with order-by id desc' do
+ context 'when param order is supported by the model' do
let(:order_by) { :id }
let(:sort) { :desc }
- it 'returns true for Ci::Build' do
- expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
+ it 'returns true' do
+ expect(subject.available?(cursor_based_request_context, model.all)).to eq(true)
end
+ end
- it 'returns true for AuditEvent' do
- expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
- end
+ context 'when sort param is not supported by the model' do
+ let(:order_by) { :id }
+ let(:sort) { :asc }
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
+ it 'returns false' do
+ expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
end
+ end
+
+ context 'when order_by params is not supported by the model' do
+ let(:order_by) { :name }
+ let(:sort) { :desc }
- it 'returns true for User' do
- expect(subject.available?(cursor_based_request_context, User.all)).to be_truthy
+ it 'returns false' do
+ expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
end
end
- context 'with other order-by columns' do
- let(:order_by) { :path }
- let(:sort) { :asc }
-
- it 'returns false for Group' do
- expect(subject.available?(cursor_based_request_context, Group.all)).to be_falsey
+ context 'when model does not implement .supported_keyset_orderings' do
+ let(:order_by) { :id }
+ let(:sort) { :desc }
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'users'
+ end
end
- it 'return false for other types of relations' do
- expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
+ it 'returns false' do
+ expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
end
end
end
diff --git a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
index cb3f1fe86dc..914c1e7bb74 100644
--- a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
+++ b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
+RSpec.describe Gitlab::Pagination::GitalyKeysetPager, feature_category: :source_code_management do
let(:pager) { described_class.new(request_context, project) }
let_it_be(:project) { create(:project, :repository) }
@@ -101,12 +101,17 @@ RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
allow(request_context).to receive(:request).and_return(fake_request)
allow(BranchesFinder).to receive(:===).with(finder).and_return(true)
expect(finder).to receive(:execute).with(gitaly_pagination: true).and_return(branches)
+ allow(finder).to receive(:next_cursor)
end
context 'when next page could be available' do
let(:branches) { [branch1, branch2] }
+ let(:next_cursor) { branch2.name }
+ let(:expected_next_page_link) { %(<#{incoming_api_projects_url}?#{query.merge(page_token: next_cursor).to_query}>; rel="next") }
- let(:expected_next_page_link) { %(<#{incoming_api_projects_url}?#{query.merge(page_token: branch2.name).to_query}>; rel="next") }
+ before do
+ allow(finder).to receive(:next_cursor).and_return(next_cursor)
+ end
it 'uses keyset pagination and adds link headers' do
expect(request_context).to receive(:header).with('Link', expected_next_page_link)
diff --git a/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb b/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb
deleted file mode 100644
index cd3718f5dcc..00000000000
--- a/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Patch::SidekiqScheduledEnq, :clean_gitlab_redis_queues, feature_category: :scalability do
- describe '#enqueue_jobs' do
- let_it_be(:payload) { {} }
-
- before do
- allow(Sidekiq).to receive(:load_json).and_return(payload)
-
- # stub data in both namespaces
- Gitlab::Redis::Queues.with { |c| c.zadd('resque:gitlab:schedule', 100, 'dummy') }
- Gitlab::Redis::Queues.with { |c| c.zadd('schedule', 100, 'dummy') }
- end
-
- subject { Sidekiq::Scheduled::Enq.new.enqueue_jobs }
-
- it 'polls both namespaces by default' do
- expect(Sidekiq::Client).to receive(:push).with(payload).twice
-
- subject
-
- Sidekiq.redis do |conn|
- expect(conn.zcard('schedule')).to eq(0)
- end
-
- Gitlab::Redis::Queues.with do |conn|
- expect(conn.zcard('resque:gitlab:schedule')).to eq(0)
- end
- end
-
- context 'when SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING is disabled' do
- before do
- stub_env('SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING', 'false')
- end
-
- it 'polls via Sidekiq.redis only' do
- expect(Sidekiq::Client).to receive(:push).with(payload).once
-
- subject
-
- Sidekiq.redis do |conn|
- expect(conn.zcard('schedule')).to eq(0)
- end
-
- Gitlab::Redis::Queues.with do |conn|
- expect(conn.zcard('resque:gitlab:schedule')).to eq(1)
- end
- end
- end
-
- context 'when SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING is enabled' do
- before do
- stub_env('SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING', 'true')
- end
-
- it 'polls both sets' do
- expect(Sidekiq::Client).to receive(:push).with(payload).twice
-
- subject
-
- Sidekiq.redis do |conn|
- expect(conn.zcard('schedule')).to eq(0)
- end
-
- Gitlab::Redis::Queues.with do |conn|
- expect(conn.zcard('resque:gitlab:schedule')).to eq(0)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/puma/error_handler_spec.rb b/spec/lib/gitlab/puma/error_handler_spec.rb
index 5b7cdf37af1..bfcbf32e899 100644
--- a/spec/lib/gitlab/puma/error_handler_spec.rb
+++ b/spec/lib/gitlab/puma/error_handler_spec.rb
@@ -12,11 +12,10 @@ RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
describe '#execute' do
it 'captures the exception and returns a Rack response' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
- expect(Raven).to receive(:capture_exception).with(
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
ex,
- tags: { handler: 'puma_low_level' },
- extra: { puma_env: env, status_code: status_code }
+ { puma_env: env, status_code: status_code },
+ { handler: 'puma_low_level' }
).and_call_original
status, headers, message = subject.execute(ex, env, status_code)
@@ -26,25 +25,10 @@ RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
expect(message).to eq(described_class::PROD_ERROR_MESSAGE)
end
- context 'when capture is not allowed' do
- it 'returns a Rack response without capturing the exception' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(false)
- expect(Raven).not_to receive(:capture_exception)
-
- status, headers, message = subject.execute(ex, env, status_code)
-
- expect(status).to eq(500)
- expect(headers).to eq({})
- expect(message).to eq(described_class::PROD_ERROR_MESSAGE)
- end
- end
-
context 'when not in production' do
let(:is_production) { false }
it 'returns a Rack response with dev error message' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
-
status, headers, message = subject.execute(ex, env, status_code)
expect(status).to eq(500)
@@ -57,9 +41,6 @@ RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
let(:status_code) { 500 }
it 'defaults to error 500' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(false)
- expect(Raven).not_to receive(:capture_exception)
-
status, headers, message = subject.execute(ex, env, status_code)
expect(status).to eq(500)
@@ -72,8 +53,6 @@ RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
let(:status_code) { 404 }
it 'uses the provided status code in the response' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
-
status, headers, message = subject.execute(ex, env, status_code)
expect(status).to eq(404)
diff --git a/spec/lib/gitlab/quick_actions/extractor_spec.rb b/spec/lib/gitlab/quick_actions/extractor_spec.rb
index 063b416c514..bb0adbc87f1 100644
--- a/spec/lib/gitlab/quick_actions/extractor_spec.rb
+++ b/spec/lib/gitlab/quick_actions/extractor_spec.rb
@@ -3,13 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning do
+ using RSpec::Parameterized::TableSyntax
+
let(:definitions) do
Class.new do
include Gitlab::QuickActions::Dsl
- command(:reopen, :open) {}
+ command(:reopen, :open, :close) {}
command(:assign) {}
- command(:labels) {}
+ command(:label) {}
command(:power) {}
command(:noop_command)
substitution(:substitution) { 'foo' }
@@ -44,7 +46,7 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
it 'extracts command' do
msg, commands = extractor.extract_commands(original_msg)
- expect(commands).to eq [['labels', '~foo ~"bar baz" label']]
+ expect(commands).to match_array [['label', '~foo ~"bar baz" label']]
expect(msg).to eq final_msg
end
end
@@ -137,42 +139,42 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
describe 'command with multiple arguments' do
context 'at the start of content' do
it_behaves_like 'command with multiple arguments' do
- let(:original_msg) { %(/labels ~foo ~"bar baz" label\nworld) }
+ let(:original_msg) { %(/label ~foo ~"bar baz" label\nworld) }
let(:final_msg) { "world" }
end
end
context 'in the middle of content' do
it_behaves_like 'command with multiple arguments' do
- let(:original_msg) { %(hello\n/labels ~foo ~"bar baz" label\nworld) }
+ let(:original_msg) { %(hello\n/label ~foo ~"bar baz" label\nworld) }
let(:final_msg) { "hello\nworld" }
end
end
context 'in the middle of a line' do
it 'does not extract command' do
- msg = %(hello\nworld /labels ~foo ~"bar baz" label)
+ msg = %(hello\nworld /label ~foo ~"bar baz" label)
msg, commands = extractor.extract_commands(msg)
expect(commands).to be_empty
- expect(msg).to eq %(hello\nworld /labels ~foo ~"bar baz" label)
+ expect(msg).to eq %(hello\nworld /label ~foo ~"bar baz" label)
end
end
context 'at the end of content' do
it_behaves_like 'command with multiple arguments' do
- let(:original_msg) { %(hello\n/labels ~foo ~"bar baz" label) }
+ let(:original_msg) { %(hello\n/label ~foo ~"bar baz" label) }
let(:final_msg) { "hello" }
end
end
context 'when argument is not separated with a space' do
it 'does not extract command' do
- msg = %(hello\n/labels~foo ~"bar baz" label\nworld)
+ msg = %(hello\n/label~foo ~"bar baz" label\nworld)
msg, commands = extractor.extract_commands(msg)
expect(commands).to be_empty
- expect(msg).to eq %(hello\n/labels~foo ~"bar baz" label\nworld)
+ expect(msg).to eq %(hello\n/label~foo ~"bar baz" label\nworld)
end
end
end
@@ -291,98 +293,82 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
expect(msg).to eq "hello\nworld"
end
- it 'does not alter original content if no command is found' do
- msg = 'Fixes #123'
- msg, commands = extractor.extract_commands(msg)
-
- expect(commands).to be_empty
- expect(msg).to eq 'Fixes #123'
- end
-
- it 'does not get confused if command comes before an inline code' do
- msg = "/reopen\n`some inline code`\n/labels ~a\n`more inline code`"
- msg, commands = extractor.extract_commands(msg)
-
- expect(commands).to eq([['reopen'], ['labels', '~a']])
- expect(msg).to eq "`some inline code`\n`more inline code`"
- end
+ it 'extracts command when between HTML comment and HTML tags' do
+ msg = <<~MSG.strip
+ <!-- this is a comment -->
- it 'does not get confused if command comes before a blockcode' do
- msg = "/reopen\n```\nsome blockcode\n```\n/labels ~a\n```\nmore blockcode\n```"
- msg, commands = extractor.extract_commands(msg)
+ /label ~bug
- expect(commands).to eq([['reopen'], ['labels', '~a']])
- expect(msg).to eq "```\nsome blockcode\n```\n```\nmore blockcode\n```"
- end
+ <p>
+ </p>
+ MSG
- it 'does not extract commands inside a blockcode' do
- msg = "Hello\r\n```\r\nThis is some text\r\n/close\r\n/assign @user\r\n```\r\n\r\nWorld"
- expected = msg.delete("\r")
msg, commands = extractor.extract_commands(msg)
- expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(commands).to match_array [['label', '~bug']]
+ expect(msg).to eq "<!-- this is a comment -->\n\n<p>\n</p>"
end
- it 'does not extract commands inside a blockquote' do
- msg = "Hello\r\n>>>\r\nThis is some text\r\n/close\r\n/assign @user\r\n>>>\r\n\r\nWorld"
- expected = msg.delete("\r")
+ it 'does not alter original content if no command is found' do
+ msg = 'Fixes #123'
msg, commands = extractor.extract_commands(msg)
expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(msg).to eq 'Fixes #123'
end
- it 'does not extract commands inside a HTML tag' do
- msg = "Hello\r\n<div>\r\nThis is some text\r\n/close\r\n/assign @user\r\n</div>\r\n\r\nWorld"
- expected = msg.delete("\r")
+ it 'does not get confused if command comes before an inline code' do
+ msg = "/reopen\n`some inline code`\n/label ~a\n`more inline code`"
msg, commands = extractor.extract_commands(msg)
- expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(commands).to match_array([['reopen'], ['label', '~a']])
+ expect(msg).to eq "`some inline code`\n`more inline code`"
end
- it 'does not extract commands in multiline inline code on seperated rows' do
- msg = "Hello\r\n`\r\nThis is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
- expected = msg.delete("\r")
+ it 'does not get confused if command comes before a code block' do
+ msg = "/reopen\n```\nsome blockcode\n```\n/label ~a\n```\nmore blockcode\n```"
msg, commands = extractor.extract_commands(msg)
- expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(commands).to match_array([['reopen'], ['label', '~a']])
+ expect(msg).to eq "```\nsome blockcode\n```\n```\nmore blockcode\n```"
end
- it 'does not extract commands in multiline inline code starting from text' do
- msg = "Hello `This is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
- expected = msg.delete("\r")
- msg, commands = extractor.extract_commands(msg)
-
- expect(commands).to be_empty
- expect(msg).to eq expected
- end
+ context 'does not extract commands inside' do
+ where(:description, :text) do
+ 'block HTML tags' | "Hello\r\n<div>\r\nText\r\n/close\r\n/assign @user\r\n</div>\r\n\r\nWorld"
+ 'inline html on separated rows' | "Text\r\n<b>\r\n/close\r\n</b>"
+ 'HTML comments' | "<!--\n/assign @user\n-->"
+ 'blockquotes' | "> Text\r\n/reopen"
+ 'multiline blockquotes' | "Hello\r\n\r\n>>>\r\nText\r\n/close\r\n/assign @user\r\n>>>\r\n\r\nWorld"
+ 'code blocks' | "Hello\r\n```\r\nText\r\n/close\r\n/assign @user\r\n```\r\n\r\nWorld"
+ 'inline code on separated rows' | "Hello `Text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
+ end
- it 'does not extract commands in inline code' do
- msg = "`This is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
- expected = msg.delete("\r")
- msg, commands = extractor.extract_commands(msg)
+ with_them do
+ specify do
+ expected = text.delete("\r")
+ msg, commands = extractor.extract_commands(text)
- expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(commands).to be_empty
+ expect(msg).to eq expected
+ end
+ end
end
it 'limits to passed commands when they are passed' do
msg = <<~MSG.strip
Hello, we should only extract the commands passed
/reopen
- /labels hello world
+ /label hello world
/power
MSG
expected_msg = <<~EXPECTED.strip
Hello, we should only extract the commands passed
/power
EXPECTED
- expected_commands = [['reopen'], ['labels', 'hello world']]
+ expected_commands = [['reopen'], ['label', 'hello world']]
- msg, commands = extractor.extract_commands(msg, only: [:open, :labels])
+ msg, commands = extractor.extract_commands(msg, only: [:open, :label])
expect(commands).to eq(expected_commands)
expect(msg).to eq expected_msg
@@ -398,14 +384,13 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
end
describe '#redact_commands' do
- using RSpec::Parameterized::TableSyntax
-
where(:text, :expected) do
- "hello\n/labels ~label1 ~label2\nworld" | "hello\n`/labels ~label1 ~label2`\nworld"
- "hello\n/open\n/labels ~label1\nworld" | "hello\n`/open`\n`/labels ~label1`\nworld"
- "hello\n/reopen\nworld" | "hello\n`/reopen`\nworld"
- "/reopen\nworld" | "`/reopen`\nworld"
- "hello\n/open" | "hello\n`/open`"
+ "hello\n/label ~label1 ~label2\nworld" | "hello\n`/label ~label1 ~label2`\nworld"
+ "hello\n/open\n/label ~label1\nworld" | "hello\n`/open`\n`/label ~label1`\nworld"
+ "hello\n/reopen\nworld" | "hello\n`/reopen`\nworld"
+ "/reopen\nworld" | "`/reopen`\nworld"
+ "hello\n/open" | "hello\n`/open`"
+ "<!--\n/assign @user\n-->" | "<!--\n/assign @user\n-->"
end
with_them do
diff --git a/spec/lib/gitlab/redis/buffered_counter_spec.rb b/spec/lib/gitlab/redis/buffered_counter_spec.rb
new file mode 100644
index 00000000000..ef17b90d406
--- /dev/null
+++ b/spec/lib/gitlab/redis/buffered_counter_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::BufferedCounter, feature_category: :redis do
+ include_examples "redis_new_instance_shared_examples", 'buffered_counter', Gitlab::Redis::SharedState
+end
diff --git a/spec/lib/gitlab/redis/db_load_balancing_spec.rb b/spec/lib/gitlab/redis/db_load_balancing_spec.rb
index d3d3ced62a9..c3209ee12e5 100644
--- a/spec/lib/gitlab/redis/db_load_balancing_spec.rb
+++ b/spec/lib/gitlab/redis/db_load_balancing_spec.rb
@@ -5,40 +5,4 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::DbLoadBalancing, feature_category: :scalability do
include_examples "redis_new_instance_shared_examples", 'db_load_balancing', Gitlab::Redis::SharedState
include_examples "redis_shared_examples"
-
- describe '#pool' do
- let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
- let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
-
- subject { described_class.pool }
-
- before do
- allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
-
- # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
- allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(mktmpdir)
- allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
- end
-
- around do |example|
- clear_pool
- example.run
- ensure
- clear_pool
- end
-
- it 'instantiates an instance of MultiStore' do
- subject.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
-
- expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
- expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
-
- expect(redis_instance.instance_name).to eq('DbLoadBalancing')
- end
- end
-
- it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_db_load_balancing,
- :use_primary_store_as_default_for_db_load_balancing
- end
end
diff --git a/spec/lib/gitlab/redis/sidekiq_status_spec.rb b/spec/lib/gitlab/redis/sidekiq_status_spec.rb
deleted file mode 100644
index 45578030ca8..00000000000
--- a/spec/lib/gitlab/redis/sidekiq_status_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Redis::SidekiqStatus do
- # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
- # to move away from `Sidekiq.redis` for sidekiq status data. Thus, we use the
- # same store configuration as the former.
- let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
-
- include_examples "redis_shared_examples"
-
- describe '#pool' do
- let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
- let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
- let(:rails_root) { mktmpdir }
-
- subject { described_class.pool }
-
- before do
- # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
- allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(rails_root)
- allow(Gitlab::Redis::Queues).to receive(:rails_root).and_return(rails_root)
-
- allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
- allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
- end
-
- around do |example|
- clear_pool
- example.run
- ensure
- clear_pool
- end
-
- it 'instantiates an instance of MultiStore' do
- subject.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
-
- expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
- expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
-
- expect(redis_instance.instance_name).to eq('SidekiqStatus')
- end
- end
-
- it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sidekiq_status,
- :use_primary_store_as_default_for_sidekiq_status
- end
-
- describe '#store_name' do
- it 'returns the name of the SharedState store' do
- expect(described_class.store_name).to eq('SharedState')
- end
- end
-end
diff --git a/spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb b/spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb
index 4bd4455d1bd..f4f38a861ee 100644
--- a/spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb
+++ b/spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb
@@ -7,10 +7,11 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
let_it_be_with_reload(:group) { create(:group) }
let_it_be(:seed_count) { 2 }
let_it_be(:last_resource_id) { seed_count - 1 }
+ let(:publish) { true }
let(:group_path) { group.path }
- subject(:seeder) { described_class.new(group_path: group_path, seed_count: seed_count) }
+ subject(:seeder) { described_class.new(group_path: group_path, seed_count: seed_count, publish: publish) }
before_all do
group.add_owner(admin)
@@ -28,12 +29,26 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
end
context 'when project name already exists' do
- before do
- create(:project, namespace: group, name: "ci_seed_resource_0")
+ context 'in the same group' do
+ before do
+ create(:project, namespace: group, name: 'ci_seed_resource_0')
+ end
+
+ it 'skips that project creation and keeps seeding' do
+ expect { seed }.to change { Project.count }.by(seed_count - 1)
+ end
end
- it 'skips that project creation and keeps seeding' do
- expect { seed }.to change { Project.count }.by(seed_count - 1)
+ context 'in a different group' do
+ let(:new_group) { create(:group) }
+
+ before do
+ create(:project, namespace: new_group, name: 'ci_seed_resource_0')
+ end
+
+ it 'executes the project creation' do
+ expect { seed }.to change { Project.count }.by(seed_count)
+ end
end
end
@@ -65,6 +80,26 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
end
end
+ describe 'publish argument' do
+ context 'when false' do
+ let(:publish) { false }
+
+ it 'creates catalog resources in draft state' do
+ group.projects.each do |project|
+ expect(project.catalog_resource.state).to be('draft')
+ end
+ end
+ end
+
+ context 'when true' do
+ it 'creates catalog resources in published state' do
+ group.projects.each do |project|
+ expect(project.catalog_resource&.state).to be('published')
+ end
+ end
+ end
+ end
+
it 'skips seeding a project if the project name already exists' do
# We call the same command twice, as it means it would try to recreate
# projects that were already created!
@@ -87,12 +122,11 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
project = group.projects.last
default_branch = project.default_branch_or_main
- expect(project.repository.blob_at(default_branch, "README.md")).not_to be_nil
- expect(project.repository.blob_at(default_branch, "template.yml")).not_to be_nil
+ expect(project.repository.blob_at(default_branch, 'README.md')).not_to be_nil
+ expect(project.repository.blob_at(default_branch, 'templates/component.yml')).not_to be_nil
end
- # This should be run again when fixing: https://gitlab.com/gitlab-org/gitlab/-/issues/429649
- xit 'creates projects with CI catalog resources' do
+ it 'creates projects with CI catalog resources' do
expect { seed }.to change { Project.count }.by(seed_count)
expect(group.projects.all?(&:catalog_resource)).to eq true
diff --git a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb
index a15dbccc80c..930782dfadf 100644
--- a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb
+++ b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
NULL_LOGGER = Gitlab::JsonLogger.new('/dev/null')
TAG_LIST = Gitlab::Seeders::Ci::Runner::RunnerFleetSeeder::TAG_LIST.to_set
-RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetPipelineSeeder, feature_category: :runner_fleet do
+RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetPipelineSeeder, feature_category: :fleet_visibility do
subject(:seeder) do
described_class.new(NULL_LOGGER, projects_to_runners: projects_to_runners, job_count: job_count)
end
diff --git a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb
index 4597cc6b315..01cbce28159 100644
--- a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb
+++ b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
NULL_LOGGER = Gitlab::JsonLogger.new('/dev/null')
-RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetSeeder, feature_category: :runner_fleet do
+RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetSeeder, feature_category: :fleet_visibility do
let_it_be(:user) { create(:user, :admin, username: 'test-admin') }
subject(:seeder) do
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index dbfab116479..5724c58f1a4 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -106,21 +106,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
end
context 'when TTL option is not set' do
- context 'when reduce_duplicate_job_key_ttl is enabled' do
- let(:expected_ttl) { described_class::SHORT_DUPLICATE_KEY_TTL }
+ let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
- it_behaves_like 'sets Redis keys with correct TTL'
- end
-
- context 'when reduce_duplicate_job_key_ttl is disabled' do
- before do
- stub_feature_flags(reduce_duplicate_job_key_ttl: false)
- end
-
- let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
-
- it_behaves_like 'sets Redis keys with correct TTL'
- end
+ it_behaves_like 'sets Redis keys with correct TTL'
end
context 'when TTL option is set' do
@@ -270,7 +258,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
}
end
- let(:argv) { ['main', 9, 'loc1', 'ci', nil, 'loc2'] }
+ let(:argv) { ['main', 9, 'loc1', 'ci', '', 'loc2'] }
it 'only updates the main connection' do
subject
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control/strategies/click_house_migration_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control/strategies/click_house_migration_spec.rb
new file mode 100644
index 00000000000..470c860fb60
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control/strategies/click_house_migration_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::Strategies::ClickHouseMigration, feature_category: :database do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestPauseWorker'
+ end
+
+ include ::ApplicationWorker
+ include ::ClickHouseWorker
+
+ def perform(*); end
+ end
+ end
+
+ before do
+ stub_const('TestPauseWorker', worker_class)
+ end
+
+ describe '#call' do
+ include Gitlab::ExclusiveLeaseHelpers
+
+ shared_examples 'a worker being executed' do
+ it 'schedules the job' do
+ expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).not_to receive(:add_to_waiting_queue!)
+
+ worker_class.perform_async('args1')
+
+ expect(worker_class.jobs.count).to eq(1)
+ end
+ end
+
+ context 'when lock is not taken' do
+ it_behaves_like 'a worker being executed'
+ end
+
+ context 'when lock is taken' do
+ include ExclusiveLeaseHelpers
+
+ around do |example|
+ ClickHouse::MigrationSupport::ExclusiveLock.execute_migration do
+ example.run
+ end
+ end
+
+ it 'does not schedule the job' do
+ expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).to receive(:add_to_waiting_queue!).once
+
+ worker_class.perform_async('args1')
+
+ expect(worker_class.jobs.count).to eq(0)
+ end
+
+ context 'when pause_clickhouse_workers_during_migration FF is disabled' do
+ before do
+ stub_feature_flags(pause_clickhouse_workers_during_migration: false)
+ end
+
+ it_behaves_like 'a worker being executed'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control/workers_map_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control/workers_map_spec.rb
new file mode 100644
index 00000000000..1aa4b470db0
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control/workers_map_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::WorkersMap, feature_category: :global_search do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestPauseWorker'
+ end
+
+ include ApplicationWorker
+
+ pause_control :zoekt
+
+ def perform(*); end
+ end
+ end
+
+ before do
+ stub_const('TestPauseWorker', worker_class)
+ end
+
+ describe '.strategy_for' do
+ it 'accepts classname' do
+ expect(described_class.strategy_for(worker: worker_class)).to eq(:zoekt)
+ end
+
+ it 'accepts worker instance' do
+ expect(described_class.strategy_for(worker: worker_class.new)).to eq(:zoekt)
+ end
+
+ it 'returns nil for unknown worker' do
+ expect(described_class.strategy_for(worker: described_class)).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb
index a0cce0f61a0..2cb98b43051 100644
--- a/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb
@@ -1,19 +1,23 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'rspec-parameterized'
RSpec.describe Gitlab::SidekiqMiddleware::PauseControl, feature_category: :global_search do
describe '.for' do
- it 'returns the right class for `zoekt`' do
- expect(described_class.for(:zoekt)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt)
- end
+ using RSpec::Parameterized::TableSyntax
- it 'returns the right class for `none`' do
- expect(described_class.for(:none)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None)
+ where(:strategy_name, :expected_class) do
+ :none | ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None
+ :unknown | ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None
+ :click_house_migration | ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::ClickHouseMigration
+ :zoekt | ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt
end
- it 'returns nil when passing an unknown key' do
- expect(described_class.for(:unknown)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None)
+ with_them do
+ it 'returns the right class' do
+ expect(described_class.for(strategy_name)).to eq(expected_class)
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 5a38d1b7750..a5c6df5e9d5 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'sidekiq/testing'
-RSpec.describe Gitlab::SidekiqMiddleware do
+RSpec.describe Gitlab::SidekiqMiddleware, feature_category: :shared do
let(:job_args) { [0.01] }
let(:disabled_sidekiq_middlewares) { [] }
let(:chain) { Sidekiq::Middleware::Chain.new(Sidekiq) }
@@ -33,6 +33,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
configurator.call(chain)
stub_feature_flags("drop_sidekiq_jobs_#{worker_class.name}": false) # not dropping the job
end
+
it "passes through the right middlewares", :aggregate_failures do
enabled_sidekiq_middlewares.each do |middleware|
expect_next_instances_of(middleware, 1, true) do |middleware_instance|
@@ -68,6 +69,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
::Gitlab::SidekiqVersioning::Middleware,
::Gitlab::SidekiqStatus::ServerMiddleware,
::Gitlab::SidekiqMiddleware::WorkerContext::Server,
+ ::ClickHouse::MigrationSupport::SidekiqMiddleware,
::Gitlab::SidekiqMiddleware::DuplicateJobs::Server,
::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware,
::Gitlab::SidekiqMiddleware::SkipJobs
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index a555e6a828a..55e3885d257 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues,
+ :clean_gitlab_redis_shared_state do
shared_examples 'tracking status in redis' do
describe '.set' do
it 'stores the job ID' do
@@ -53,6 +54,31 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
end
end
+ describe '.expire' do
+ it 'refreshes the expiration time if key is present' do
+ described_class.set('123', 1.minute)
+ described_class.expire('123', 1.hour)
+
+ key = described_class.key_for('123')
+
+ with_redis do |redis|
+ expect(redis.exists?(key)).to eq(true)
+ expect(redis.ttl(key) > 5.minutes).to eq(true)
+ end
+ end
+
+ it 'does nothing if key is not present' do
+ described_class.expire('123', 1.minute)
+
+ key = described_class.key_for('123')
+
+ with_redis do |redis|
+ expect(redis.exists?(key)).to eq(false)
+ expect(redis.ttl(key)).to eq(-2)
+ end
+ end
+ end
+
describe '.all_completed?' do
it 'returns true if all jobs have been completed' do
expect(described_class.all_completed?(%w[123])).to eq(true)
@@ -133,11 +159,11 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
context 'with multi-store feature flags turned on' do
def with_redis(&block)
- Gitlab::Redis::SidekiqStatus.with(&block)
+ Gitlab::Redis::SharedState.with(&block)
end
- it 'uses Gitlab::Redis::SidekiqStatus.with' do
- expect(Gitlab::Redis::SidekiqStatus).to receive(:with).and_call_original
+ it 'uses Gitlab::Redis::SharedState.with' do
+ expect(Gitlab::Redis::SharedState).to receive(:with).and_call_original
expect(Sidekiq).not_to receive(:redis)
described_class.job_status(%w[123 456 789])
@@ -158,7 +184,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
it 'uses Sidekiq.redis' do
expect(Sidekiq).to receive(:redis).and_call_original
- expect(Gitlab::Redis::SidekiqStatus).not_to receive(:with)
+ expect(Gitlab::Redis::SharedState).not_to receive(:with)
described_class.job_status(%w[123 456 789])
end
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
index ea3c030541f..19b578a4d6d 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
+RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro, feature_category: :application_instrumentation do
include StubENV
let(:snowplow_micro_settings) do
@@ -18,6 +18,8 @@ RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
allow(Rails.env).to receive(:development?).and_return(true)
end
+ it { is_expected.to delegate_method(:flush).to(:tracker) }
+
describe '#hostname' do
context 'when snowplow_micro config is set' do
let(:address) { '127.0.0.1:9091' }
diff --git a/spec/lib/gitlab/tracking/event_definition_spec.rb b/spec/lib/gitlab/tracking/event_definition_spec.rb
index ab0660147e4..5e41c691da8 100644
--- a/spec/lib/gitlab/tracking/event_definition_spec.rb
+++ b/spec/lib/gitlab/tracking/event_definition_spec.rb
@@ -35,6 +35,33 @@ RSpec.describe Gitlab::Tracking::EventDefinition do
expect { described_class.definitions }.not_to raise_error
end
+ it 'has no duplicated actions in InternalEventTracking events', :aggregate_failures do
+ definitions_by_action = described_class.definitions
+ .select { |d| d.category == 'InternalEventTracking' }
+ .group_by(&:action)
+
+ definitions_by_action.each do |action, definitions|
+ expect(definitions.size).to eq(1),
+ "Multiple definitions use the action '#{action}': #{definitions.map(&:path).join(', ')}"
+ end
+ end
+
+ it 'has event definitions for all events used in Internal Events metric definitions', :aggregate_failures do
+ from_metric_definitions = Gitlab::Usage::MetricDefinition.definitions
+ .values
+ .select { |m| m.attributes[:data_source] == 'internal_events' }
+ .flat_map { |m| m.events&.keys }
+ .compact
+ .uniq
+
+ event_names = Gitlab::Tracking::EventDefinition.definitions.map { |e| e.attributes[:action] }
+
+ from_metric_definitions.each do |event|
+ expect(event_names).to include(event),
+ "Event '#{event}' is used in Internal Events but does not have an event definition yet. Please define it."
+ end
+ end
+
describe '#validate' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index f3e27c72143..46213532071 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation do
include StubENV
+ using RSpec::Parameterized::TableSyntax
before do
stub_application_setting(snowplow_enabled: true)
@@ -17,6 +18,8 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
described_class.instance_variable_set(:@tracker, nil)
end
+ it { is_expected.to delegate_method(:flush).to(:tracker) }
+
describe '.options' do
shared_examples 'delegates to destination' do |klass|
before do
@@ -295,29 +298,57 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
end
describe 'snowplow_micro_enabled?' do
- before do
- allow(Rails.env).to receive(:development?).and_return(true)
+ where(:development?, :micro_verification_enabled?, :snowplow_micro_enabled, :result) do
+ true | true | true | true
+ true | true | false | false
+ false | true | true | true
+ false | true | false | false
+ false | false | true | false
+ false | false | false | false
+ true | false | true | true
+ true | false | false | false
end
- it 'returns true when snowplow_micro is enabled' do
- stub_config(snowplow_micro: { enabled: true })
-
- expect(described_class).to be_snowplow_micro_enabled
- end
+ with_them do
+ before do
+ allow(Rails.env).to receive(:development?).and_return(development?)
+ allow(described_class).to receive(:micro_verification_enabled?).and_return(micro_verification_enabled?)
+ stub_config(snowplow_micro: { enabled: snowplow_micro_enabled })
+ end
- it 'returns false when snowplow_micro is disabled' do
- stub_config(snowplow_micro: { enabled: false })
+ subject { described_class.snowplow_micro_enabled? }
- expect(described_class).not_to be_snowplow_micro_enabled
+ it { is_expected.to be(result) }
end
it 'returns false when snowplow_micro is not configured' do
+ allow(Rails.env).to receive(:development?).and_return(true)
allow(Gitlab.config).to receive(:snowplow_micro).and_raise(GitlabSettings::MissingSetting)
expect(described_class).not_to be_snowplow_micro_enabled
end
end
+ describe '.micro_verification_enabled?' do
+ where(:verify_tracking, :result) do
+ nil | false
+ 'true' | true
+ 'false' | false
+ '0' | false
+ '1' | true
+ end
+
+ with_them do
+ before do
+ stub_env('VERIFY_TRACKING', verify_tracking)
+ end
+
+ subject { described_class.micro_verification_enabled? }
+
+ it { is_expected.to be(result) }
+ end
+ end
+
describe 'tracker' do
it 'returns a SnowPlowMicro instance in development' do
allow(Rails.env).to receive(:development?).and_return(true)
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 0f827921a66..9e98cdc05eb 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -265,6 +265,19 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
end
end
+ context 'when resolving runs into a timeout' do
+ let(:import_url) { 'http://example.com' }
+
+ before do
+ stub_const("#{described_class}::GETADDRINFO_TIMEOUT_SECONDS", 1)
+ allow(Addrinfo).to receive(:getaddrinfo) { sleep 2 }
+ end
+
+ it 'raises an error due to DNS timeout' do
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "execution expired")
+ end
+ end
+
context 'when the URL hostname is a domain' do
context 'when domain can be resolved' do
let(:import_url) { 'https://example.org' }
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 08adc031631..fb46d48c1bb 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -46,6 +46,34 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
end
end
+ describe '.instrumentation_class' do
+ context 'for non internal events' do
+ let(:attributes) { { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', data_source: 'redis_hll' } }
+
+ it 'returns class from the definition' do
+ expect(definition.instrumentation_class).to eq('RedisHLLMetric')
+ end
+ end
+
+ context 'for internal events' do
+ context 'for total counter' do
+ let(:attributes) { { key_path: 'metric1', data_source: 'internal_events', events: [{ name: 'a' }] } }
+
+ it 'returns TotalCounterMetric' do
+ expect(definition.instrumentation_class).to eq('TotalCountMetric')
+ end
+ end
+
+ context 'for uniq counter' do
+ let(:attributes) { { key_path: 'metric1', data_source: 'internal_events', events: [{ name: 'a', unique: :id }] } }
+
+ it 'returns RedisHLLMetric' do
+ expect(definition.instrumentation_class).to eq('RedisHLLMetric')
+ end
+ end
+ end
+ end
+
describe 'not_removed' do
let(:all_definitions) do
metrics_definitions = [
@@ -71,12 +99,13 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
describe '#with_instrumentation_class' do
let(:all_definitions) do
metrics_definitions = [
- { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'active' },
- { key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'broken' },
- { key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'active' },
- { key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: 'removed' },
- { key_path: 'metric5', status: 'active' },
- { key_path: 'metric_missing_status' }
+ { key_path: 'metric1', status: 'active', data_source: 'redis_hll', instrumentation_class: 'RedisHLLMetric' },
+ { key_path: 'metric2', status: 'active', data_source: 'internal_events' }, # class is defined by data_source
+
+ { key_path: 'metric3', status: 'active', data_source: 'redis_hll' },
+ { key_path: 'metric4', status: 'removed', instrumentation_class: 'RedisHLLMetric', data_source: 'redis_hll' },
+ { key_path: 'metric5', status: 'removed', data_source: 'internal_events' },
+ { key_path: 'metric_missing_status', data_source: 'internal_events' }
]
metrics_definitions.map { |definition| described_class.new(definition[:key_path], definition.symbolize_keys) }
end
@@ -86,15 +115,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
end
it 'includes definitions with instrumentation_class' do
- expect(described_class.with_instrumentation_class.count).to eq(3)
- end
-
- context 'with removed metric' do
- let(:metric_status) { 'removed' }
-
- it 'excludes removed definitions' do
- expect(described_class.with_instrumentation_class.count).to eq(3)
- end
+ expect(described_class.with_instrumentation_class.map(&:key_path)).to match_array(%w[metric1 metric2])
end
end
@@ -224,25 +245,9 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
where(:instrumentation_class, :options, :events, :is_valid) do
'AnotherClass' | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | false
- nil | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | true
+ 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | false
'RedisHLLMetric' | { events: ['a'] } | nil | false
- 'RedisHLLMetric' | nil | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a', unique: 'a' }] | false
- 'RedisHLLMetric' | { events: 'a' } | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: [2] } | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: ['a'], a: 'b' } | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a', unique: 'user.id', b: 'c' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ unique: 'user.id' }] | false
- 'TotalCountMetric' | { events: ['a'] } | [{ name: 'a' }] | true
- 'TotalCountMetric' | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | false
- 'TotalCountMetric' | { events: ['a'] } | nil | false
- 'TotalCountMetric' | nil | [{ name: 'a' }] | false
- 'TotalCountMetric' | { events: [2] } | [{ name: 'a' }] | false
- 'TotalCountMetric' | { events: ['a'] } | [{}] | false
- 'TotalCountMetric' | 'a' | [{ name: 'a' }] | false
- 'TotalCountMetric' | { events: ['a'], a: 'b' } | [{ name: 'a' }] | false
+ nil | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | true
end
with_them do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/bulk_imports_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/bulk_imports_users_metric_spec.rb
new file mode 100644
index 00000000000..90791bf223f
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/bulk_imports_users_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BulkImportsUsersMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) { "SELECT COUNT(DISTINCT \"bulk_imports\".\"user_id\") FROM \"bulk_imports\"" }
+
+ before_all do
+ import = create :bulk_import, created_at: 3.days.ago
+ create :bulk_import, created_at: 35.days.ago
+ create :bulk_import, created_at: 3.days.ago
+ create :bulk_import, created_at: 3.days.ago, user: import.user
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"bulk_imports\".\"user_id\") FROM \"bulk_imports\" " \
+ "WHERE \"bulk_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_service_desk_custom_email_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_service_desk_custom_email_enabled_metric_spec.rb
new file mode 100644
index 00000000000..6d10052ff66
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_service_desk_custom_email_enabled_metric_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountServiceDeskCustomEmailEnabledMetric, feature_category: :service_ping do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:credential) { create(:service_desk_custom_email_credential, project: project) }
+ let_it_be(:verification) { create(:service_desk_custom_email_verification, :finished, project: project) }
+ let_it_be(:setting) do
+ create(:service_desk_setting, project: project, custom_email: 'support@example.com', custom_email_enabled: true)
+ end
+
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/csv_imports_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/csv_imports_users_metric_spec.rb
new file mode 100644
index 00000000000..1f620c2502d
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/csv_imports_users_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CsvImportsUsersMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) { "SELECT COUNT(DISTINCT \"csv_issue_imports\".\"user_id\") FROM \"csv_issue_imports\"" }
+
+ before_all do
+ import = create :issue_csv_import, created_at: 3.days.ago
+ create :issue_csv_import, created_at: 35.days.ago
+ create :issue_csv_import, created_at: 3.days.ago
+ create :issue_csv_import, created_at: 3.days.ago, user: import.user
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"csv_issue_imports\".\"user_id\") FROM \"csv_issue_imports\" " \
+ "WHERE \"csv_issue_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_config_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_config_metric_spec.rb
new file mode 100644
index 00000000000..e9814f0cb51
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_config_metric_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GitlabConfigMetric, feature_category: :service_ping do
+ describe 'config metric' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:config_value, :expected_value) do
+ false | false
+ true | true
+ end
+
+ with_them do
+ before do
+ stub_config(artifacts: { object_store: { enabled: config_value } })
+ end
+
+ it_behaves_like 'a correct instrumented metric value', {
+ time_frame: 'none',
+ options: {
+ config: {
+ artifacts: {
+ object_store: 'enabled'
+ }
+ }
+ }
+ }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_settings_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_settings_metric_spec.rb
new file mode 100644
index 00000000000..26210b9febf
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_settings_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GitlabSettingsMetric, feature_category: :service_ping do
+ describe 'settings metric' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:setting_value, :expected_value) do
+ false | false
+ true | true
+ end
+
+ with_them do
+ before do
+ stub_application_setting(gravatar_enabled: setting_value)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', {
+ time_frame: 'none',
+ options: {
+ setting_method: 'gravatar_enabled'
+ }
+ }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/group_imports_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/group_imports_users_metric_spec.rb
new file mode 100644
index 00000000000..6b7962fda64
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/group_imports_users_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GroupImportsUsersMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) { "SELECT COUNT(DISTINCT \"group_import_states\".\"user_id\") FROM \"group_import_states\"" }
+
+ before_all do
+ import = create :group_import_state, created_at: 3.days.ago
+ create :group_import_state, created_at: 35.days.ago
+ create :group_import_state, created_at: 3.days.ago
+ create :group_import_state, created_at: 3.days.ago, user: import.user
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"group_import_states\".\"user_id\") FROM \"group_import_states\" " \
+ "WHERE \"group_import_states\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
deleted file mode 100644
index 91ad81c4291..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailCtaClickedMetric do
- using RSpec::Parameterized::TableSyntax
-
- let(:email_attributes) { { cta_clicked_at: Date.yesterday, track: 'verify', series: 0 } }
- let(:options) { { track: 'verify', series: 0 } }
- let(:expected_value) { 2 }
- let(:expected_query) do
- 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails" ' \
- 'WHERE "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL ' \
- 'AND "in_product_marketing_emails"."series" = 0 ' \
- 'AND "in_product_marketing_emails"."track" = 1'
- end
-
- before do
- create_list :in_product_marketing_email, 2, email_attributes
-
- create :in_product_marketing_email, email_attributes.merge(cta_clicked_at: nil)
- create :in_product_marketing_email, email_attributes.merge(track: 'team')
- create :in_product_marketing_email, email_attributes.merge(series: 1)
- end
-
- it_behaves_like 'a correct instrumented metric value and query', {
- options: { track: 'verify', series: 0 },
- time_frame: 'all'
- }
-
- where(:options_key, :valid_value, :invalid_value) do
- :track | 'admin_verify' | 'invite_team'
- :series | 1 | 5
- end
-
- with_them do
- it "raises an exception if option is not present" do
- expect do
- described_class.new(options: options.except(options_key), time_frame: 'all')
- end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
- end
-
- it "raises an exception if option has invalid value" do
- expect do
- options[options_key] = invalid_value
- described_class.new(options: options, time_frame: 'all')
- end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
- end
-
- it "doesn't raise exceptions if option has valid value" do
- options[options_key] = valid_value
- described_class.new(options: options, time_frame: 'all')
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
deleted file mode 100644
index 3c51368f396..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailSentMetric do
- using RSpec::Parameterized::TableSyntax
-
- let(:email_attributes) { { track: 'verify', series: 0 } }
- let(:expected_value) { 2 }
- let(:expected_query) do
- 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails" ' \
- 'WHERE "in_product_marketing_emails"."series" = 0 ' \
- 'AND "in_product_marketing_emails"."track" = 1'
- end
-
- before do
- create_list :in_product_marketing_email, 2, email_attributes
-
- create :in_product_marketing_email, email_attributes.merge(track: 'team')
- create :in_product_marketing_email, email_attributes.merge(series: 1)
- end
-
- it_behaves_like 'a correct instrumented metric value and query', {
- options: { track: 'verify', series: 0 },
- time_frame: 'all'
- }
-
- where(:options_key, :valid_value, :invalid_value) do
- :track | 'admin_verify' | 'invite_team'
- :series | 1 | 5
- end
-
- with_them do
- it "raises an exception if option is not present" do
- expect do
- described_class.new(options: email_attributes.except(options_key), time_frame: 'all')
- end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
- end
-
- it "raises an exception if option has invalid value" do
- expect do
- email_attributes[options_key] = invalid_value
- described_class.new(options: email_attributes, time_frame: 'all')
- end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
- end
-
- it "doesn't raise exceptions if option has valid value" do
- email_attributes[options_key] = valid_value
- described_class.new(options: email_attributes, time_frame: 'all')
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_users_metric_spec.rb
new file mode 100644
index 00000000000..86bc4d98372
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_users_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::JiraImportsUsersMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) { "SELECT COUNT(DISTINCT \"jira_imports\".\"user_id\") FROM \"jira_imports\"" }
+
+ before_all do
+ import = create :jira_import_state, created_at: 3.days.ago
+ create :jira_import_state, created_at: 35.days.ago
+ create :jira_import_state, created_at: 3.days.ago
+ create :jira_import_state, created_at: 3.days.ago, user: import.user
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"jira_imports\".\"user_id\") FROM \"jira_imports\" " \
+ "WHERE \"jira_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/omniauth_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/omniauth_enabled_metric_spec.rb
new file mode 100644
index 00000000000..20390e6abd9
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/omniauth_enabled_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::OmniauthEnabledMetric, feature_category: :service_ping do
+ before do
+ allow(Gitlab::Auth).to receive(:omniauth_enabled?).and_return(expected_value)
+ end
+
+ [true, false].each do |setting|
+ context "when the setting is #{setting}" do
+ let(:expected_value) { setting }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/project_imports_creators_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/project_imports_creators_metric_spec.rb
new file mode 100644
index 00000000000..2a0e0a1a591
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/project_imports_creators_metric_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ProjectImportsCreatorsMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"projects\".\"creator_id\") FROM \"projects\" " \
+ "WHERE \"projects\".\"import_type\" IS NOT NULL"
+ end
+
+ before_all do
+ project = create :project, import_type: :jira, created_at: 3.days.ago
+ create :project, import_type: :jira, created_at: 35.days.ago
+ create :project, import_type: :jira, created_at: 3.days.ago
+ create :project, created_at: 3.days.ago
+ create :project, import_type: :jira, created_at: 3.days.ago, creator: project.creator
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"projects\".\"creator_id\") FROM \"projects\" WHERE " \
+ "\"projects\".\"import_type\" IS NOT NULL AND \"projects\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_enabled_metric_spec.rb
new file mode 100644
index 00000000000..dbd44cc3309
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_enabled_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::PrometheusEnabledMetric, feature_category: :service_ping do
+ before do
+ allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(expected_value)
+ end
+
+ [true, false].each do |setting|
+ context "when the setting is #{setting}" do
+ let(:expected_value) { setting }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_metrics_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_metrics_enabled_metric_spec.rb
new file mode 100644
index 00000000000..3e6812f3b34
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_metrics_enabled_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::PrometheusMetricsEnabledMetric, feature_category: :service_ping do
+ before do
+ allow(Gitlab::Metrics).to receive(:prometheus_metrics_enabled?).and_return(expected_value)
+ end
+
+ [true, false].each do |setting|
+ context "when the setting is #{setting}" do
+ let(:expected_value) { setting }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/reply_by_email_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/reply_by_email_enabled_metric_spec.rb
new file mode 100644
index 00000000000..12eab4bb422
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/reply_by_email_enabled_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ReplyByEmailEnabledMetric, feature_category: :service_ping do
+ before do
+ allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(expected_value)
+ end
+
+ [true, false].each do |setting|
+ context "when the setting is #{setting}" do
+ let(:expected_value) { setting }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb
index f3aa1ba4f88..b357d6ea7e4 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb
@@ -9,32 +9,112 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric, :clea
end
context 'with multiple similar events' do
- let(:expected_value) { 10 }
-
before do
+ last_week = Date.today - 7.days
+ two_weeks_ago = last_week - 1.week
+
+ redis_counter_key = described_class.redis_key('my_event', last_week)
+ 2.times do
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
+ redis_counter_key = described_class.redis_key('my_event', two_weeks_ago)
+ 3.times do
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
10.times do
Gitlab::InternalEvents.track_event('my_event')
end
end
- it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', events: [{ name: 'my_event' }] }
+ context "with an 'all' time_frame" do
+ let(:expected_value) { 10 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', events: [{ name: 'my_event' }] }
+ end
+
+ context "with a 7d time_frame" do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', events: [{ name: 'my_event' }] }
+ end
+
+ context "with a 28d time_frame" do
+ let(:expected_value) { 5 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', events: [{ name: 'my_event' }] }
+ end
end
context 'with multiple different events' do
let(:expected_value) { 2 }
before do
+ last_week = Date.today - 7.days
+ two_weeks_ago = last_week - 1.week
+
+ 2.times do
+ redis_counter_key =
+ Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key('my_event1', last_week)
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
+ 3.times do
+ redis_counter_key =
+ Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key('my_event1', two_weeks_ago)
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
+ 4.times do
+ redis_counter_key =
+ Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key('my_event2', last_week)
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
Gitlab::InternalEvents.track_event('my_event1')
Gitlab::InternalEvents.track_event('my_event2')
end
- it_behaves_like 'a correct instrumented metric value',
- { time_frame: 'all', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ context "with an 'all' time_frame" do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value',
+ { time_frame: 'all', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ end
+
+ context "with a 7d time_frame" do
+ let(:expected_value) { 6 }
+
+ it_behaves_like 'a correct instrumented metric value',
+ { time_frame: '7d', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ end
+
+ context "with a 28d time_frame" do
+ let(:expected_value) { 9 }
+
+ it_behaves_like 'a correct instrumented metric value',
+ { time_frame: '28d', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ end
+ end
+
+ context "with an invalid time_frame" do
+ let(:metric) { described_class.new(time_frame: '14d', events: [{ name: 'my_event' }]) }
+
+ it 'raises an exception' do
+ expect { metric.value }.to raise_error(/Unknown time frame/)
+ end
end
describe '.redis_key' do
it 'adds the key prefix to the event name' do
expect(described_class.redis_key('my_event')).to eq('{event_counters}_my_event')
end
+
+ context "with a date" do
+ it 'adds the key prefix and suffix to the event name' do
+ expect(described_class.redis_key('my_event', Date.new(2023, 10, 19))).to eq("{event_counters}_my_event-2023-42")
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/unique_users_all_imports_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/unique_users_all_imports_metric_spec.rb
new file mode 100644
index 00000000000..4fdabb86e23
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/unique_users_all_imports_metric_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UniqueUsersAllImportsMetric, feature_category: :importers do
+ let(:expected_value) { 6 }
+ let(:expected_query) do
+ <<~SQL.squish
+ SELECT
+ (SELECT COUNT(DISTINCT "projects"."creator_id") FROM "projects" WHERE "projects"."import_type" IS NOT NULL) +
+ (SELECT COUNT(DISTINCT "bulk_imports"."user_id") FROM "bulk_imports") +
+ (SELECT COUNT(DISTINCT "jira_imports"."user_id") FROM "jira_imports") +
+ (SELECT COUNT(DISTINCT "csv_issue_imports"."user_id") FROM "csv_issue_imports") +
+ (SELECT COUNT(DISTINCT "group_import_states"."user_id") FROM "group_import_states")
+ SQL
+ end
+
+ before_all do
+ import = create :jira_import_state, created_at: 3.days.ago
+ create :jira_import_state, created_at: 35.days.ago
+ create :jira_import_state, created_at: 3.days.ago, user: import.user
+
+ create :group_import_state, created_at: 3.days.ago
+ create :issue_csv_import, created_at: 3.days.ago
+ create :bulk_import, created_at: 3.days.ago
+ create :project, import_type: :jira, created_at: 3.days.ago
+ end
+
+ before do
+ described_class::IMPORTS_METRICS.each do |submetric_class|
+ metric = submetric_class.new(time_frame: time_frame, options: options)
+ allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false)
+ end
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 5 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ <<~SQL.squish
+ SELECT
+ (SELECT COUNT(DISTINCT "projects"."creator_id") FROM "projects" WHERE "projects"."import_type" IS NOT NULL AND "projects"."created_at" BETWEEN '#{start}' AND '#{finish}') +
+ (SELECT COUNT(DISTINCT "bulk_imports"."user_id") FROM "bulk_imports" WHERE "bulk_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
+ (SELECT COUNT(DISTINCT "jira_imports"."user_id") FROM "jira_imports" WHERE "jira_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
+ (SELECT COUNT(DISTINCT "csv_issue_imports"."user_id") FROM "csv_issue_imports" WHERE "csv_issue_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
+ (SELECT COUNT(DISTINCT "group_import_states"."user_id") FROM "group_import_states" WHERE "group_import_states"."created_at" BETWEEN '#{start}' AND '#{finish}')
+ SQL
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 71e9e7a8e7d..cbf4d3c8261 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -19,9 +19,9 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
specify do
aggregate_failures do
- expect(track_action(author: user, project: project)).to be_truthy
- expect(track_action(author: user2, project: project)).to be_truthy
- expect(track_action(author: user3, project: project)).to be_truthy
+ track_action(author: user, project: project)
+ track_action(author: user2, project: project)
+ track_action(author: user3, project: project)
expect(count_unique(date_from: time.beginning_of_week, date_to: 1.week.from_now)).to eq(3)
end
@@ -30,7 +30,9 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
it_behaves_like 'internal event tracking'
it 'does not track edit actions if author is not present' do
- expect(track_action(author: nil, project: project)).to be_nil
+ track_action(author: nil, project: project)
+
+ expect(count_unique(date_from: time.beginning_of_week, date_to: 1.week.from_now)).to eq(0)
end
end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 6d30947167c..68af9cd9cfc 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -106,25 +106,4 @@ RSpec.describe Gitlab::UsageDataQueries do
expect(described_class.maximum_id(Project)).to eq(nil)
end
end
-
- describe 'sent_in_product_marketing_email_count' do
- it 'returns sql query that returns correct value' do
- expect(described_class.sent_in_product_marketing_email_count(nil, 0, 0)).to eq(
- 'SELECT COUNT("in_product_marketing_emails"."id") ' \
- 'FROM "in_product_marketing_emails" ' \
- 'WHERE "in_product_marketing_emails"."track" = 0 AND "in_product_marketing_emails"."series" = 0'
- )
- end
- end
-
- describe 'clicked_in_product_marketing_email_count' do
- it 'returns sql query that returns correct value' do
- expect(described_class.clicked_in_product_marketing_email_count(nil, 0, 0)).to eq(
- 'SELECT COUNT("in_product_marketing_emails"."id") ' \
- 'FROM "in_product_marketing_emails" ' \
- 'WHERE "in_product_marketing_emails"."track" = 0 AND "in_product_marketing_emails"."series" = 0 ' \
- 'AND "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL'
- )
- end
- end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index a1564318408..f43e49bd616 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -241,29 +241,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
)
end
- it 'includes import gmau usage data' do
- for_defined_days_back do
- user = create(:user)
- group = create(:group)
-
- group.add_owner(user)
-
- create(:project, import_type: :github, creator_id: user.id)
- create(:jira_import_state, :finished, project: create(:project, creator_id: user.id))
- create(:issue_csv_import, user: user)
- create(:group_import_state, group: group, user: user)
- create(:bulk_import, user: user)
- end
-
- expect(described_class.usage_activity_by_stage_manage({})).to include(
- unique_users_all_imports: 10
- )
-
- expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include(
- unique_users_all_imports: 5
- )
- end
-
it 'includes imports usage data', :clean_gitlab_redis_cache do
for_defined_days_back do
user = create(:user)
@@ -366,7 +343,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
create(:issue, project: project, author: Users::Internal.support_bot)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
- create(:jira_integration, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user))
create(:jira_integration, active: true, project: create(:project, :jira_dvcs_server, creator: user))
end
@@ -377,7 +353,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
service_desk_enabled_projects: 2,
service_desk_issues: 2,
projects_jira_active: 2,
- projects_jira_dvcs_cloud_active: 2,
projects_jira_dvcs_server_active: 2
)
expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(
@@ -387,7 +362,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
service_desk_enabled_projects: 1,
service_desk_issues: 1,
projects_jira_active: 1,
- projects_jira_dvcs_cloud_active: 1,
projects_jira_dvcs_server_active: 1
)
end
@@ -590,67 +564,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
end
context 'when not relying on database records' do
- describe '.features_usage_data_ce' do
- subject { described_class.features_usage_data_ce }
-
- it 'gathers feature usage data', :aggregate_failures do
- expect(subject[:instance_auto_devops_enabled]).to eq(Gitlab::CurrentSettings.auto_devops_enabled?)
- expect(subject[:mattermost_enabled]).to eq(Gitlab.config.mattermost.enabled)
- expect(subject[:signup_enabled]).to eq(Gitlab::CurrentSettings.allow_signup?)
- expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled)
- expect(subject[:gravatar_enabled]).to eq(Gitlab::CurrentSettings.gravatar_enabled?)
- expect(subject[:omniauth_enabled]).to eq(Gitlab::Auth.omniauth_enabled?)
- expect(subject[:reply_by_email_enabled]).to eq(Gitlab::Email::IncomingEmail.enabled?)
- expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled)
- expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled)
- expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
- expect(subject[:grafana_link_enabled]).to eq(Gitlab::CurrentSettings.grafana_enabled?)
- expect(subject[:gitpod_enabled]).to eq(Gitlab::CurrentSettings.gitpod_enabled?)
- end
-
- context 'with embedded Prometheus' do
- it 'returns true when embedded Prometheus is enabled' do
- allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(true)
-
- expect(subject[:prometheus_enabled]).to eq(true)
- end
-
- it 'returns false when embedded Prometheus is disabled' do
- allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
-
- expect(subject[:prometheus_enabled]).to eq(false)
- end
- end
-
- context 'with embedded grafana' do
- it 'returns true when embedded grafana is enabled' do
- stub_application_setting(grafana_enabled: true)
-
- expect(subject[:grafana_link_enabled]).to eq(true)
- end
-
- it 'returns false when embedded grafana is disabled' do
- stub_application_setting(grafana_enabled: false)
-
- expect(subject[:grafana_link_enabled]).to eq(false)
- end
- end
-
- context 'with Gitpod' do
- it 'returns true when is enabled' do
- stub_application_setting(gitpod_enabled: true)
-
- expect(subject[:gitpod_enabled]).to eq(true)
- end
-
- it 'returns false when is disabled' do
- stub_application_setting(gitpod_enabled: false)
-
- expect(subject[:gitpod_enabled]).to eq(false)
- end
- end
- end
-
describe '.components_usage_data' do
subject { described_class.components_usage_data }
diff --git a/spec/lib/gitlab/utils/file_info_spec.rb b/spec/lib/gitlab/utils/file_info_spec.rb
index 480036b2fd0..1f52fcb48b6 100644
--- a/spec/lib/gitlab/utils/file_info_spec.rb
+++ b/spec/lib/gitlab/utils/file_info_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Utils::FileInfo, feature_category: :shared do
describe '.linked?' do
it 'raises an error when file does not exist' do
- expect { subject.linked?('foo') }.to raise_error(Errno::ENOENT)
+ expect { subject.linked?("#{tmpdir}/foo") }.to raise_error(Errno::ENOENT)
end
shared_examples 'identifies a linked file' do
@@ -56,7 +56,7 @@ RSpec.describe Gitlab::Utils::FileInfo, feature_category: :shared do
describe '.shares_hard_link?' do
it 'raises an error when file does not exist' do
- expect { subject.shares_hard_link?('foo') }.to raise_error(Errno::ENOENT)
+ expect { subject.shares_hard_link?("#{tmpdir}/foo") }.to raise_error(Errno::ENOENT)
end
shared_examples 'identifies a file that shares a hard link' do
diff --git a/spec/lib/gitlab/web_ide/default_oauth_application_spec.rb b/spec/lib/gitlab/web_ide/default_oauth_application_spec.rb
new file mode 100644
index 00000000000..9bfdc799aec
--- /dev/null
+++ b/spec/lib/gitlab/web_ide/default_oauth_application_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WebIde::DefaultOauthApplication, feature_category: :web_ide do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:oauth_application) { create(:oauth_application, owner: nil) }
+
+ describe '#feature_enabled?' do
+ where(:vscode_web_ide, :web_ide_oauth, :expectation) do
+ [
+ [ref(:current_user), false, false],
+ [false, ref(:current_user), false],
+ [ref(:current_user), ref(:current_user), true]
+ ]
+ end
+
+ with_them do
+ it 'returns the expected value' do
+ stub_feature_flags(vscode_web_ide: vscode_web_ide, web_ide_oauth: web_ide_oauth)
+
+ expect(described_class.feature_enabled?(current_user)).to be(expectation)
+ end
+ end
+ end
+
+ describe '#oauth_application' do
+ it 'returns web_ide_oauth_application from application_settings' do
+ expect(described_class.oauth_application).to be_nil
+
+ stub_application_setting({ web_ide_oauth_application: oauth_application })
+
+ expect(described_class.oauth_application).to be(oauth_application)
+ end
+ end
+
+ describe '#oauth_callback_url' do
+ it 'returns route URL for oauth callback' do
+ expect(described_class.oauth_callback_url).to eq(Gitlab::Routing.url_helpers.ide_oauth_redirect_url)
+ end
+ end
+
+ describe '#ensure_oauth_application!' do
+ it 'if web_ide_oauth_application already exists, does nothing' do
+ expect(application_settings).not_to receive(:lock!)
+ expect(::Doorkeeper::Application).not_to receive(:new)
+
+ stub_application_setting({ web_ide_oauth_application: oauth_application })
+
+ described_class.ensure_oauth_application!
+ end
+
+ it 'if web_ide_oauth_application created while locked, does nothing' do
+ expect(application_settings).to receive(:lock!) do
+ stub_application_setting({ web_ide_oauth_application: oauth_application })
+ end
+ expect(::Doorkeeper::Application).not_to receive(:new)
+ expect(::Gitlab::CurrentSettings).not_to receive(:expire_current_application_settings)
+
+ described_class.ensure_oauth_application!
+ end
+
+ it 'creates web_ide_oauth_application' do
+ expect(application_settings).to receive(:transaction).and_call_original
+ expect(::Doorkeeper::Application).to receive(:new).and_call_original
+ expect(::Gitlab::CurrentSettings).to receive(:expire_current_application_settings).and_call_original
+
+ expect(application_settings.web_ide_oauth_application).to be_nil
+
+ described_class.ensure_oauth_application!
+
+ result = application_settings.web_ide_oauth_application
+ expect(result).not_to be_nil
+ expect(result).to have_attributes(
+ name: 'GitLab Web IDE',
+ redirect_uri: described_class.oauth_callback_url,
+ scopes: ['api'],
+ trusted: true,
+ confidential: false
+ )
+ end
+ end
+
+ def application_settings
+ ::Gitlab::CurrentSettings.current_application_settings
+ end
+end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index d77763f89be..0d5ec5690a9 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -480,6 +480,14 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
describe '.send_url' do
let(:url) { 'http://example.com' }
+ let(:expected_params) do
+ {
+ 'URL' => url,
+ 'AllowRedirects' => false,
+ 'Body' => '',
+ 'Method' => 'GET'
+ }
+ end
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(
@@ -488,12 +496,7 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-url")
- expect(params).to eq({
- 'URL' => url,
- 'AllowRedirects' => false,
- 'Body' => '',
- 'Method' => 'GET'
- }.deep_stringify_keys)
+ expect(params).to eq(expected_params)
end
context 'when body, headers and method are specified' do
@@ -501,6 +504,14 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
let(:headers) { { Authorization: ['Bearer token'] } }
let(:method) { 'POST' }
+ let(:expected_params) do
+ super().merge(
+ 'Body' => body,
+ 'Header' => headers,
+ 'Method' => method
+ ).deep_stringify_keys
+ end
+
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(
described_class.send_url(url, body: body, headers: headers, method: method)
@@ -508,13 +519,33 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-url")
- expect(params).to eq({
- 'URL' => url,
- 'AllowRedirects' => false,
- 'Body' => body,
- 'Header' => headers,
- 'Method' => method
- }.deep_stringify_keys)
+ expect(params).to eq(expected_params)
+ end
+ end
+
+ context 'when timeouts are set' do
+ let(:timeouts) { { open: '5', read: '5' } }
+ let(:expected_params) { super().merge('DialTimeout' => '5s', 'ResponseHeaderTimeout' => '5s') }
+
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(described_class.send_url(url, timeouts: timeouts))
+
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("send-url")
+ expect(params).to eq(expected_params)
+ end
+ end
+
+ context 'when response statuses are set' do
+ let(:response_statuses) { { error: :service_unavailable, timeout: :bad_request } }
+ let(:expected_params) { super().merge('ErrorResponseStatus' => 503, 'TimeoutResponseStatus' => 400) }
+
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(described_class.send_url(url, response_statuses: response_statuses))
+
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("send-url")
+ expect(params).to eq(expected_params)
end
end
end
diff --git a/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb b/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
new file mode 100644
index 00000000000..36fa350e46f
--- /dev/null
+++ b/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::GoogleCloudPlatform::ArtifactRegistry::Client, feature_category: :container_registry do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:rsa_key) { OpenSSL::PKey::RSA.generate(3072) }
+ let_it_be(:rsa_key_data) { rsa_key.to_s }
+
+ let(:gcp_project_id) { 'gcp_project_id' }
+ let(:gcp_location) { 'gcp_location' }
+ let(:gcp_repository) { 'gcp_repository' }
+ let(:gcp_wlif) { 'https://wlif.test' }
+
+ let(:user) { project.owner }
+ let(:client) do
+ described_class.new(
+ project: project,
+ user: user,
+ gcp_project_id: gcp_project_id,
+ gcp_location: gcp_location,
+ gcp_repository: gcp_repository,
+ gcp_wlif: gcp_wlif
+ )
+ end
+
+ describe '#list_docker_images' do
+ let(:page_token) { nil }
+
+ subject(:list) { client.list_docker_images(page_token: page_token) }
+
+ before do
+ stub_application_setting(ci_jwt_signing_key: rsa_key_data)
+ end
+
+ it 'calls glgo list docker images API endpoint' do
+ stub_list_docker_image(body: dummy_list_body)
+ expect(client).to receive(:encoded_jwt).with(wlif: gcp_wlif)
+
+ expect(list).to include(images: an_instance_of(Array), next_page_token: an_instance_of(String))
+ end
+
+ context 'with a page token set' do
+ let(:page_token) { 'token' }
+
+ it 'calls glgo list docker images API endpoint with a page token' do
+ stub_list_docker_image(body: dummy_list_body, page_token: page_token)
+
+ expect(list).to include(images: an_instance_of(Array), next_page_token: an_instance_of(String))
+ end
+ end
+
+ context 'with an erroneous response' do
+ it 'returns an empty hash' do
+ stub_list_docker_image(body: dummy_list_body, status_code: 400)
+
+ expect(list).to eq({})
+ end
+ end
+
+ private
+
+ def stub_list_docker_image(body:, page_token: nil, status_code: 200)
+ url = "#{described_class::GLGO_BASE_URL}/gcp/ar"
+ url << "/projects/#{gcp_project_id}"
+ url << "/locations/#{gcp_location}"
+ url << "/repositories/#{gcp_repository}/docker"
+ url << "?page_size=#{described_class::PAGE_SIZE}"
+ url << "&page_token=#{page_token}" if page_token.present?
+
+ stub_request(:get, url)
+ .to_return(status: status_code, body: body)
+ end
+
+ def dummy_list_body
+ <<-BODY
+ {
+ "images": [
+ {
+ "built_at": "2023-11-30T23:23:11.980068941Z",
+ "media_type": "application/vnd.docker.distribution.manifest.v2+json",
+ "name": "projects/project/locations/location/repositories/repo/dockerImages/image@sha256:6a0657acfef760bd9e293361c9b558e98e7d740ed0dffca823d17098a4ffddf5",
+ "size_bytes": 2827903,
+ "tags": [
+ "tag1",
+ "tag2"
+ ],
+ "updated_at": "2023-12-07T11:48:50.840751Z",
+ "uploaded_at": "2023-12-07T11:48:47.598511Z",
+ "uri": "location.pkg.dev/project/repo/image@sha256:6a0657acfef760bd9e293361c9b558e98e7d740ed0dffca823d17098a4ffddf5"
+ }
+ ],
+ "next_page_token": "next_page_token"
+ }
+ BODY
+ end
+ end
+end
diff --git a/spec/lib/integrations/google_cloud_platform/jwt_spec.rb b/spec/lib/integrations/google_cloud_platform/jwt_spec.rb
new file mode 100644
index 00000000000..51707c26a3a
--- /dev/null
+++ b/spec/lib/integrations/google_cloud_platform/jwt_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::GoogleCloudPlatform::Jwt, feature_category: :shared do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:claims) { { audience: 'http://sandbox.test', wlif: 'http://wlif.test' } }
+ let(:jwt) { described_class.new(project: project, user: user, claims: claims) }
+
+ describe '#encoded' do
+ let_it_be(:rsa_key) { OpenSSL::PKey::RSA.generate(3072) }
+ let_it_be(:rsa_key_data) { rsa_key.to_s }
+
+ subject(:encoded) { jwt.encoded }
+
+ before do
+ stub_application_setting(ci_jwt_signing_key: rsa_key_data)
+ end
+
+ it 'creates a valid jwt' do
+ payload, headers = JWT.decode(encoded, rsa_key.public_key, true, { algorithm: 'RS256' })
+
+ expect(payload).to include(
+ 'root_namespace_path' => project.root_namespace.full_path,
+ 'root_namespace_id' => project.root_namespace.id.to_s,
+ 'wlif' => claims[:wlif],
+ 'aud' => claims[:audience],
+ 'project_id' => project.id.to_s,
+ 'project_path' => project.full_path,
+ 'user_id' => user.id.to_s,
+ 'user_email' => user.email,
+ 'sub' => "project_#{project.id}_user_#{user.id}",
+ 'iss' => Gitlab.config.gitlab.url
+ )
+
+ expect(headers).to include(
+ 'kid' => rsa_key.public_key.to_jwk[:kid]
+ )
+ end
+
+ context 'with missing jwt audience' do
+ let(:claims) { { wlif: 'http://wlif.test' } }
+
+ it 'raises an ArgumentError' do
+ expect { encoded }.to raise_error(ArgumentError, described_class::JWT_OPTIONS_ERROR)
+ end
+ end
+
+ context 'with missing jwt wlif' do
+ let(:claims) { { audience: 'http://sandbox.test' } }
+
+ it 'raises an ArgumentError' do
+ expect { encoded }.to raise_error(ArgumentError, described_class::JWT_OPTIONS_ERROR)
+ end
+ end
+
+ context 'with no ci signing key' do
+ before do
+ stub_application_setting(ci_jwt_signing_key: nil)
+ end
+
+ it 'raises a NoSigningKeyError' do
+ expect { encoded }.to raise_error(described_class::NoSigningKeyError)
+ end
+ end
+
+ context 'with oidc_issuer_url feature flag disabled' do
+ before do
+ stub_feature_flags(oidc_issuer_url: false)
+ # Settings.gitlab.base_url and Gitlab.config.gitlab.url are the
+ # same for test. Changing that to assert the proper behavior here.
+ allow(Settings.gitlab).to receive(:base_url).and_return('test.dev')
+ end
+
+ it 'uses a different issuer' do
+ payload, _ = JWT.decode(encoded, rsa_key.public_key, true, { algorithm: 'RS256' })
+
+ expect(payload).to include(
+ 'iss' => Settings.gitlab.base_url
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/organization/current_organization_spec.rb b/spec/lib/organization/current_organization_spec.rb
new file mode 100644
index 00000000000..ffd37ac4de9
--- /dev/null
+++ b/spec/lib/organization/current_organization_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organization::CurrentOrganization, feature_category: :organization do
+ include described_class
+
+ after do
+ # Wipe thread variables between specs.
+ Thread.current[described_class::CURRENT_ORGANIZATION_THREAD_VAR] = nil
+ end
+
+ describe '.current_organization' do
+ subject { current_organization }
+
+ context 'when current organization is set' do
+ let(:some_organization) { create(:organization) }
+
+ before do
+ self.current_organization = some_organization
+ end
+
+ it { is_expected.to eq some_organization }
+ end
+
+ context 'when organization is not set' do
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '.current_organization=' do
+ subject(:setter) { self.current_organization = some_organization }
+
+ let(:some_organization) { create(:organization) }
+
+ it 'sets current organization' do
+ expect { setter }.to change { current_organization }.from(nil).to(some_organization)
+ end
+ end
+
+ describe '.with_current_organization' do
+ let(:some_organization) { create(:organization) }
+
+ it 'sets current organization within block' do
+ expect(current_organization).to be_nil
+ with_current_organization(some_organization) do
+ expect(current_organization).to eq some_organization
+ end
+ expect(current_organization).to be_nil
+ end
+
+ context 'when an error is raised' do
+ it 'resets current organization' do
+ begin
+ with_current_organization(some_organization) do
+ raise StandardError
+ end
+ rescue StandardError
+ nil
+ end
+
+ expect(current_organization).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/peek/views/click_house_spec.rb b/spec/lib/peek/views/click_house_spec.rb
index 1ff49afd728..f7cecbaac88 100644
--- a/spec/lib/peek/views/click_house_spec.rb
+++ b/spec/lib/peek/views/click_house_spec.rb
@@ -34,13 +34,11 @@ RSpec.describe Peek::Views::ClickHouse, :click_house, :request_store, feature_ca
}),
a_hash_including({
sql: 'INSERT INTO events (id) VALUES (1)',
- database: 'database: main',
- statistics: include('written_rows=>"1"')
+ database: 'database: main'
}),
a_hash_including({
sql: 'INSERT INTO events (id) FORMAT CSV',
- database: 'database: main',
- statistics: include('written_rows=>"2"')
+ database: 'database: main'
})
])
end
diff --git a/spec/lib/product_analytics/event_params_spec.rb b/spec/lib/product_analytics/event_params_spec.rb
deleted file mode 100644
index e560fd10dfd..00000000000
--- a/spec/lib/product_analytics/event_params_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ProductAnalytics::EventParams do
- describe '.parse_event_params' do
- subject { described_class.parse_event_params(raw_event) }
-
- let(:raw_event) { Gitlab::Json.parse(fixture_file('product_analytics/event.json')) }
-
- it 'extracts all params from raw event' do
- expected_params = {
- project_id: '1',
- platform: 'web',
- name_tracker: 'sp',
- v_tracker: 'js-2.14.0',
- event_id: 'fbf14096-74ee-47e4-883c-8a0d6cb72e37',
- domain_userid: '79543c31-cfc3-4479-a737-fafb9333c8ba',
- domain_sessionid: '54f6d3f3-f4f9-4fdc-87e0-a2c775234c1b',
- domain_sessionidx: 4,
- page_url: 'http://example.com/products/1',
- page_referrer: 'http://example.com/products/1',
- br_lang: 'en-US',
- br_cookies: true,
- os_timezone: 'America/Los_Angeles',
- doc_charset: 'UTF-8',
- se_category: 'category',
- se_action: 'action',
- se_label: 'label',
- se_property: 'property',
- se_value: 12.34
- }
-
- expect(subject).to include(expected_params)
- end
- end
-
- describe '.has_required_params?' do
- subject { described_class.has_required_params?(params) }
-
- context 'aid and eid are present' do
- let(:params) { { 'aid' => 1, 'eid' => 2 } }
-
- it { expect(subject).to be_truthy }
- end
-
- context 'aid and eid are missing' do
- let(:params) { {} }
-
- it { expect(subject).to be_falsey }
- end
-
- context 'eid is missing' do
- let(:params) { { 'aid' => 1 } }
-
- it { expect(subject).to be_falsey }
- end
- end
-end
diff --git a/spec/lib/sbom/package_url_spec.rb b/spec/lib/sbom/package_url_spec.rb
index 92490b184df..a62332b44ad 100644
--- a/spec/lib/sbom/package_url_spec.rb
+++ b/spec/lib/sbom/package_url_spec.rb
@@ -26,7 +26,7 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
-require_relative '../../support/helpers/next_instance_of'
+require 'gitlab/rspec/next_instance_of'
require_relative '../../support/shared_contexts/lib/sbom/package_url_shared_contexts'
RSpec.describe Sbom::PackageUrl, feature_category: :dependency_management do
diff --git a/spec/lib/sidebars/concerns/container_with_html_options_spec.rb b/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
index 588e89a80f7..6adbfce3087 100644
--- a/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
+++ b/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
@@ -18,10 +18,4 @@ RSpec.describe Sidebars::Concerns::ContainerWithHtmlOptions, feature_category: :
expect(subject.container_html_options).to eq(aria: { label: 'Foo' })
end
end
-
- describe '#collapsed_container_html_options' do
- it 'includes by default aria-label attribute' do
- expect(subject.collapsed_container_html_options).to eq(aria: { label: 'Foo' })
- end
- end
end
diff --git a/spec/lib/sidebars/explore/menus/catalog_menu_spec.rb b/spec/lib/sidebars/explore/menus/catalog_menu_spec.rb
index 2c4c4c48eae..543f9b26a66 100644
--- a/spec/lib/sidebars/explore/menus/catalog_menu_spec.rb
+++ b/spec/lib/sidebars/explore/menus/catalog_menu_spec.rb
@@ -10,31 +10,19 @@ RSpec.describe Sidebars::Explore::Menus::CatalogMenu, feature_category: :navigat
subject { described_class.new(context) }
- context 'when `global_ci_catalog` is enabled`' do
- it 'renders' do
- expect(subject.render?).to be(true)
- end
-
- it 'renders the correct link' do
- expect(subject.link).to match "explore/catalog"
- end
-
- it 'renders the correct title' do
- expect(subject.title).to eq "CI/CD Catalog"
- end
-
- it 'renders the correct icon' do
- expect(subject.sprite_icon).to eq "catalog-checkmark"
- end
+ it 'renders' do
+ expect(subject.render?).to be(true)
end
- context 'when `global_ci_catalog` FF is disabled' do
- before do
- stub_feature_flags(global_ci_catalog: false)
- end
+ it 'renders the correct link' do
+ expect(subject.link).to match "explore/catalog"
+ end
+
+ it 'renders the correct title' do
+ expect(subject.title).to eq "CI/CD Catalog"
+ end
- it 'does not render' do
- expect(subject.render?).to be(false)
- end
+ it 'renders the correct icon' do
+ expect(subject.sprite_icon).to eq "catalog-checkmark"
end
end
diff --git a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
index 2cce2d28e68..00083fcfbf1 100644
--- a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
@@ -8,12 +8,6 @@ RSpec.describe Sidebars::Groups::Menus::ScopeMenu, feature_category: :navigation
let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
let(:menu) { described_class.new(context) }
- describe '#extra_nav_link_html_options' do
- subject { menu.extra_nav_link_html_options }
-
- specify { is_expected.to match(hash_including(class: 'context-header has-tooltip', title: context.group.name)) }
- end
-
it_behaves_like 'serializable as super_sidebar_menu_args' do
let(:extra_attrs) do
{
diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb
index cc2809df85f..0ff9bbebdc3 100644
--- a/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Sidebars::Groups::SuperSidebarMenus::AnalyzeMenu, feature_categor
expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
expect(items.map(&:item_id)).to eq([
:analytics_dashboards,
- :dashboards_analytics,
:cycle_analytics,
:ci_cd_analytics,
:contribution_analytics,
diff --git a/spec/lib/sidebars/organizations/menus/manage_menu_spec.rb b/spec/lib/sidebars/organizations/menus/manage_menu_spec.rb
index 87346176a4c..7f1dab6a8b4 100644
--- a/spec/lib/sidebars/organizations/menus/manage_menu_spec.rb
+++ b/spec/lib/sidebars/organizations/menus/manage_menu_spec.rb
@@ -7,17 +7,15 @@ RSpec.describe Sidebars::Organizations::Menus::ManageMenu, feature_category: :na
let_it_be(:user) { build(:user) }
let_it_be(:context) { Sidebars::Context.new(current_user: user, container: organization) }
- let(:items) { subject.instance_variable_get(:@items) }
-
- subject { described_class.new(context) }
+ subject(:menu) { described_class.new(context) }
it 'has title and sprite_icon' do
- expect(subject.title).to eq(s_("Navigation|Manage"))
- expect(subject.sprite_icon).to eq("users")
+ expect(menu.title).to eq(s_("Navigation|Manage"))
+ expect(menu.sprite_icon).to eq("users")
end
describe 'Menu items' do
- subject { described_class.new(context).renderable_items.find { |e| e.item_id == item_id } }
+ subject(:item) { menu.renderable_items.find { |e| e.item_id == item_id } }
describe 'Groups and projects' do
let(:item_id) { :organization_groups_and_projects }
@@ -28,7 +26,15 @@ RSpec.describe Sidebars::Organizations::Menus::ManageMenu, feature_category: :na
describe 'Users' do
let(:item_id) { :organization_users }
- it { is_expected.not_to be_nil }
+ context 'when current user has permissions' do
+ let_it_be(:organization_user) { create(:organization_user, user: user, organization: organization) } # rubocop: disable RSpec/FactoryBot/AvoidCreate -- does not work with build_stubbed
+
+ it { is_expected.not_to be_nil }
+ end
+
+ context 'when current user does not have permissions' do
+ it { is_expected.to be_nil }
+ end
end
end
end
diff --git a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
index 1aa0ea30d0a..b29427d68dd 100644
--- a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Sidebars::Projects::Menus::RepositoryMenu, feature_category: :sou
end
end
- describe 'Contributor statistics' do
+ describe 'Contributor analytics' do
let_it_be(:item_id) { :contributors }
context 'when analytics is disabled' do
diff --git a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
index 108a98e28a4..fb1ec94dfe8 100644
--- a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
@@ -25,10 +25,4 @@ RSpec.describe Sidebars::Projects::Menus::ScopeMenu, feature_category: :navigati
specify { is_expected.to match(hash_including(class: 'shortcuts-project')) }
end
-
- describe '#extra_nav_link_html_options' do
- subject { described_class.new(context).extra_nav_link_html_options }
-
- specify { is_expected.to match(hash_including(class: 'context-header has-tooltip', title: context.project.name)) }
- end
end
diff --git a/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb b/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb
deleted file mode 100644
index e74647894fa..00000000000
--- a/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::ShimoMenu do
- let_it_be_with_reload(:project) { create(:project) }
-
- let(:context) { Sidebars::Projects::Context.new(current_user: project.first_owner, container: project) }
-
- subject(:shimo_menu) { described_class.new(context) }
-
- describe '#render?' do
- context 'without a valid Shimo integration' do
- it "doesn't render the menu" do
- expect(shimo_menu.render?).to be_falsey
- end
- end
-
- context 'with a valid Shimo integration' do
- let_it_be_with_reload(:shimo_integration) { create(:shimo_integration, project: project) }
-
- context 'when integration is active' do
- it 'renders the menu' do
- expect(shimo_menu.render?).to eq true
- end
-
- it 'renders menu link' do
- expected_url = Rails.application.routes.url_helpers.project_integrations_shimo_path(project)
- expect(shimo_menu.link).to eq expected_url
- end
- end
-
- context 'when integration is inactive' do
- before do
- shimo_integration.update!(active: false)
- end
-
- it "doesn't render the menu" do
- expect(shimo_menu.render?).to eq false
- end
- end
- end
- end
-end
diff --git a/spec/lib/sidebars/projects/panel_spec.rb b/spec/lib/sidebars/projects/panel_spec.rb
index ec1df438cf1..b6ff1acc176 100644
--- a/spec/lib/sidebars/projects/panel_spec.rb
+++ b/spec/lib/sidebars/projects/panel_spec.rb
@@ -30,28 +30,6 @@ RSpec.describe Sidebars::Projects::Panel, feature_category: :navigation do
expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::WikiMenu) }).to be_nil
end
end
-
- context 'shimo only' do
- let_it_be(:shimo) { create(:shimo_integration, active: true) }
-
- let(:project) { shimo.project }
-
- it 'contains Shimo menu item' do
- expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ShimoMenu) }).not_to be_nil
- end
- end
-
- context 'confluence & shimo' do
- let_it_be(:confluence) { create(:confluence_integration, active: true) }
- let_it_be(:shimo) { create(:shimo_integration, active: true) }
-
- let(:project) { confluence.project }
-
- it 'contains Confluence menu item, not Shimo' do
- expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ConfluenceMenu) }).not_to be_nil
- expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ShimoMenu) }).to be_nil
- end
- end
end
context 'when integration is not present' do
diff --git a/spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb
index fa33e7bedfb..eebd089ad3f 100644
--- a/spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::AccessTokensMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/personal_access_tokens',
+ link: '/-/user_settings/personal_access_tokens',
title: _('Access Tokens'),
icon: 'token',
active_routes: { controller: :personal_access_tokens }
diff --git a/spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb
index be5f826ee58..d4b9c359a98 100644
--- a/spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::ActiveSessionsMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/active_sessions',
+ link: '/-/user_settings/active_sessions',
title: _('Active Sessions'),
icon: 'monitor-lines',
active_routes: { controller: :active_sessions }
diff --git a/spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb
index eeda4fb844c..a0c175051df 100644
--- a/spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::ApplicationsMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/applications',
+ link: '/-/user_settings/applications',
title: _('Applications'),
icon: 'applications',
active_routes: { controller: 'oauth/applications' }
diff --git a/spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb
index 33be5050c37..5a154d7dafb 100644
--- a/spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb
@@ -4,10 +4,10 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::AuthenticationLogMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/audit_log',
+ link: '/-/user_settings/authentication_log',
title: _('Authentication Log'),
icon: 'log',
- active_routes: { path: 'profiles#audit_log' }
+ active_routes: { path: 'user_settings#authentication_log' }
it_behaves_like 'User settings menu #render? method'
end
diff --git a/spec/lib/sidebars/user_settings/menus/password_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/password_menu_spec.rb
index 168019fea5d..83e47fd120a 100644
--- a/spec/lib/sidebars/user_settings/menus/password_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/password_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::PasswordMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/password',
+ link: '/-/user_settings/password',
title: _('Password'),
icon: 'lock',
active_routes: { controller: :passwords }
diff --git a/spec/lib/system_check/base_check_spec.rb b/spec/lib/system_check/base_check_spec.rb
index 168bda07791..2478e6e84ea 100644
--- a/spec/lib/system_check/base_check_spec.rb
+++ b/spec/lib/system_check/base_check_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe SystemCheck::BaseCheck do
it 'responds to Gitlab::TaskHelpers methods' do
expect(subject).to respond_to :ask_to_continue, :os_name, :prompt, :run_and_match, :run_command,
:run_command!, :uid_for, :gid_for, :gitlab_user, :gitlab_user?, :warn_user_is_not_gitlab,
- :repository_storage_paths_args, :user_home, :checkout_or_clone_version, :clone_repo, :checkout_version
+ :user_home, :checkout_or_clone_version, :clone_repo, :checkout_version
end
end
end
diff --git a/spec/lib/system_check/orphans/namespace_check_spec.rb b/spec/lib/system_check/orphans/namespace_check_spec.rb
deleted file mode 100644
index 3964068b20c..00000000000
--- a/spec/lib/system_check/orphans/namespace_check_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe SystemCheck::Orphans::NamespaceCheck, :silence_stdout do
- let(:storages) { Gitlab.config.repositories.storages.reject { |key, _| key.eql? 'broken' } }
-
- before do
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
- allow(subject).to receive(:fetch_disk_namespaces).and_return(disk_namespaces)
- end
-
- describe '#multi_check' do
- context 'all orphans' do
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 repos/@hashed] }
-
- it 'prints list of all orphaned namespaces except @hashed' do
- expect_list_of_orphans(%w[orphan1 orphan2])
-
- subject.multi_check
- end
- end
-
- context 'few orphans with existing namespace' do
- let!(:first_level) { create(:group, path: 'my-namespace') }
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/@hashed] }
-
- it 'prints list of orphaned namespaces' do
- expect_list_of_orphans(%w[orphan1 orphan2])
-
- subject.multi_check
- end
- end
-
- context 'few orphans with existing namespace and parents with same name as orphans' do
- let!(:first_level) { create(:group, path: 'my-namespace') }
- let!(:second_level) { create(:group, path: 'second-level', parent: first_level) }
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/second-level /repos/@hashed] }
-
- it 'prints list of orphaned namespaces ignoring parents with same namespace as orphans' do
- expect_list_of_orphans(%w[orphan1 orphan2 second-level])
-
- subject.multi_check
- end
- end
-
- context 'no orphans' do
- let(:disk_namespaces) { %w[@hashed] }
-
- it 'prints an empty list ignoring @hashed' do
- expect_list_of_orphans([])
-
- subject.multi_check
- end
- end
- end
-
- def expect_list_of_orphans(orphans)
- expect(subject).to receive(:print_orphans).with(orphans, 'default')
- end
-end
diff --git a/spec/lib/system_check/orphans/repository_check_spec.rb b/spec/lib/system_check/orphans/repository_check_spec.rb
deleted file mode 100644
index 0504e133ab9..00000000000
--- a/spec/lib/system_check/orphans/repository_check_spec.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe SystemCheck::Orphans::RepositoryCheck, :silence_stdout do
- let(:storages) { Gitlab.config.repositories.storages.reject { |key, _| key.eql? 'broken' } }
-
- before do
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
- allow(subject).to receive(:fetch_disk_namespaces).and_return(disk_namespaces)
- allow(subject).to receive(:fetch_disk_repositories).and_return(disk_repositories)
- end
-
- describe '#multi_check' do
- context 'all orphans' do
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 repos/@hashed] }
- let(:disk_repositories) { %w[repo1.git repo2.git] }
-
- it 'prints list of all orphaned namespaces except @hashed' do
- expect_list_of_orphans(%w[orphan1/repo1.git orphan1/repo2.git orphan2/repo1.git orphan2/repo2.git])
-
- subject.multi_check
- end
- end
-
- context 'few orphans with existing namespace' do
- let!(:first_level) { create(:group, path: 'my-namespace') }
- let!(:project) { create(:project, path: 'repo', namespace: first_level) }
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/@hashed] }
- let(:disk_repositories) { %w[repo.git] }
-
- it 'prints list of orphaned namespaces' do
- expect_list_of_orphans(%w[orphan1/repo.git orphan2/repo.git])
-
- subject.multi_check
- end
- end
-
- context 'few orphans with existing namespace and parents with same name as orphans' do
- let!(:first_level) { create(:group, path: 'my-namespace') }
- let!(:second_level) { create(:group, path: 'second-level', parent: first_level) }
- let!(:project) { create(:project, path: 'repo', namespace: first_level) }
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/second-level /repos/@hashed] }
- let(:disk_repositories) { %w[repo.git] }
-
- it 'prints list of orphaned namespaces ignoring parents with same namespace as orphans' do
- expect_list_of_orphans(%w[orphan1/repo.git orphan2/repo.git second-level/repo.git])
-
- subject.multi_check
- end
- end
-
- context 'no orphans' do
- let(:disk_namespaces) { %w[@hashed] }
- let(:disk_repositories) { %w[repo.git] }
-
- it 'prints an empty list ignoring @hashed' do
- expect_list_of_orphans([])
-
- subject.multi_check
- end
- end
- end
-
- def expect_list_of_orphans(orphans)
- expect(subject).to receive(:print_orphans).with(orphans, 'default')
- end
-end
diff --git a/spec/lib/uploaded_file_spec.rb b/spec/lib/uploaded_file_spec.rb
index 721b3d70feb..3a77b12be82 100644
--- a/spec/lib/uploaded_file_spec.rb
+++ b/spec/lib/uploaded_file_spec.rb
@@ -294,4 +294,8 @@ RSpec.describe UploadedFile, feature_category: :package_registry do
it { expect(described_class.new(temp_file.path).sanitize_filename('..')).to eq('_..') }
it { expect(described_class.new(temp_file.path).sanitize_filename('')).to eq('unnamed') }
end
+
+ describe '#empty_size?' do
+ it { expect(described_class.new(temp_file.path).empty_size?).to eq(true) }
+ end
end
diff --git a/spec/lib/vite_gdk_spec.rb b/spec/lib/vite_gdk_spec.rb
new file mode 100644
index 00000000000..f54ede9d877
--- /dev/null
+++ b/spec/lib/vite_gdk_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+VITE_GDK_CONFIG_FILEPATH = "config/vite.gdk.json"
+
+RSpec.describe ViteGdk, feature_category: :tooling do
+ before do
+ allow(ViteRuby).to receive(:configure)
+ allow(ViteRuby.env).to receive(:[]=)
+ allow(YAML).to receive(:safe_load_file)
+ end
+
+ describe '#load_gdk_vite_config' do
+ context 'when not in production environment' do
+ before do
+ stub_env('RAILS_ENV', nil)
+ end
+
+ context 'when it loads file successfully' do
+ it 'configures ViteRuby' do
+ expect(File).to receive(:exist?) do |file_path|
+ expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
+ end.and_return(true)
+ expect(YAML).to receive(:safe_load_file) do |file_path|
+ expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
+ end.and_return('enabled' => true, 'port' => 3038, 'host' => 'gdk.test')
+ expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', port: 3038)
+ expect(ViteRuby.env).to receive(:[]=).with('VITE_ENABLED', 'true')
+
+ described_class.load_gdk_vite_config
+ end
+ end
+
+ context 'when config file is missing' do
+ it 'does nothing' do
+ expect(File).to receive(:exist?) do |file_path|
+ expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
+ end.and_return(false)
+ expect(ViteRuby).not_to receive(:configure)
+ expect(ViteRuby.env).not_to receive(:[]=).with('VITE_ENABLED', 'false')
+ expect(ViteRuby.env).not_to receive(:[]=).with('VITE_ENABLED', 'true')
+
+ described_class.load_gdk_vite_config
+ end
+ end
+ end
+
+ context 'when in production environment' do
+ before do
+ stub_env('RAILS_ENV', 'production')
+ end
+
+ it 'does not load and configure ViteRuby' do
+ expect(YAML).not_to receive(:safe_load_file)
+ expect(ViteRuby).not_to receive(:configure)
+ expect(ViteRuby.env).not_to receive(:[]=).with('VITE_ENABLED')
+
+ described_class.load_gdk_vite_config
+ end
+ end
+ end
+end
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index 7ddb4810d53..e19d4ceabd9 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -148,7 +148,7 @@ RSpec.describe Emails::Profile, feature_category: :user_profile do
end
it 'includes a link to personal access tokens page' do
- is_expected.to have_body_text /#{profile_personal_access_tokens_path}/
+ is_expected.to have_body_text /#{user_settings_personal_access_tokens_path}/
end
it 'includes the email reason' do
@@ -254,7 +254,7 @@ RSpec.describe Emails::Profile, feature_category: :user_profile do
end
it 'includes a link to personal access tokens page' do
- is_expected.to have_body_text /#{profile_personal_access_tokens_path}/
+ is_expected.to have_body_text /#{user_settings_personal_access_tokens_path}/
end
it 'includes the email reason' do
diff --git a/spec/mailers/emails/service_desk_spec.rb b/spec/mailers/emails/service_desk_spec.rb
index b700819ed2c..3ed531a16bc 100644
--- a/spec/mailers/emails/service_desk_spec.rb
+++ b/spec/mailers/emails/service_desk_spec.rb
@@ -146,16 +146,14 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
end
end
- shared_examples 'a custom email verification process result email with error' do |error_identifier, expected_text|
- context "when having #{error_identifier} error" do
- before do
- service_desk_setting.custom_email_verification.error = error_identifier
- end
+ shared_examples 'a custom email verification process result email with error' do
+ before do
+ service_desk_setting.custom_email_verification.error = error_identifier
+ end
- it 'contains correct error message headline in text part' do
- # look for text part because we can ignore HTML tags then
- expect(subject.text_part.body).to match(expected_text)
- end
+ it 'contains correct error message headline in text part' do
+ # look for text part because we can ignore HTML tags then
+ expect(subject.text_part.body).to match(expected_text)
end
end
@@ -179,6 +177,11 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
it 'uses SMTP delivery method and custom email settings' do
expect_service_desk_custom_email_delivery_options(service_desk_setting)
+ # Don't use ActionMailer::Base.smtp_settings, because it only contains explicitly set values.
+ merged_default_settings = Mail::SMTP.new({}).settings
+ # When forcibly used the configuration has a higher timeout. Ensure it's the default!
+ expect(subject.delivery_method.settings[:read_timeout]).to eq(merged_default_settings[:read_timeout])
+
expect(Gitlab::AppLogger).to have_received(:info).with({ category: 'custom_email' })
end
@@ -537,7 +540,7 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
}
end
- subject { Notify.service_desk_custom_email_verification_email(service_desk_setting) }
+ subject(:mail) { Notify.service_desk_custom_email_verification_email(service_desk_setting) }
it_behaves_like 'a custom email verification process email'
@@ -547,6 +550,7 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
it 'forcibly uses SMTP delivery method and has correct settings' do
expect_service_desk_custom_email_delivery_options(service_desk_setting)
+ expect(mail.delivery_method.settings[:read_timeout]).to eq(described_class::VERIFICATION_EMAIL_TIMEOUT)
# defaults are unchanged after email overrode settings
expect(Mail::SMTP.new({}).settings).to include(expected_delivery_method_defaults)
@@ -557,7 +561,7 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
end
it 'uses verification email address as recipient' do
- expect(subject.to).to eq([service_desk_setting.custom_email_address_for_verification])
+ expect(mail.to).to eq([service_desk_setting.custom_email_address_for_verification])
end
it 'contains verification token' do
@@ -591,10 +595,40 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'a custom email verification process email'
it_behaves_like 'a custom email verification process notification email'
- it_behaves_like 'a custom email verification process result email with error', 'smtp_host_issue', 'SMTP host issue'
- it_behaves_like 'a custom email verification process result email with error', 'invalid_credentials', 'Invalid credentials'
- it_behaves_like 'a custom email verification process result email with error', 'mail_not_received_within_timeframe', 'Verification email not received within timeframe'
- it_behaves_like 'a custom email verification process result email with error', 'incorrect_from', 'Incorrect From header'
- it_behaves_like 'a custom email verification process result email with error', 'incorrect_token', 'Incorrect verification token'
+
+ it_behaves_like 'a custom email verification process result email with error' do
+ let(:error_identifier) { 'smtp_host_issue' }
+ let(:expected_text) { 'SMTP host issue' }
+ end
+
+ it_behaves_like 'a custom email verification process result email with error' do
+ let(:error_identifier) { 'invalid_credentials' }
+ let(:expected_text) { 'Invalid credentials' }
+ end
+
+ it_behaves_like 'a custom email verification process result email with error' do
+ let(:error_identifier) { 'mail_not_received_within_timeframe' }
+ let(:expected_text) { 'Verification email not received within timeframe' }
+ end
+
+ it_behaves_like 'a custom email verification process result email with error' do
+ let(:error_identifier) { 'incorrect_from' }
+ let(:expected_text) { 'Incorrect From header' }
+ end
+
+ it_behaves_like 'a custom email verification process result email with error' do
+ let(:error_identifier) { 'incorrect_token' }
+ let(:expected_text) { 'Incorrect verification token' }
+ end
+
+ it_behaves_like 'a custom email verification process result email with error' do
+ let(:error_identifier) { 'read_timeout' }
+ let(:expected_text) { 'Read timeout' }
+ end
+
+ it_behaves_like 'a custom email verification process result email with error' do
+ let(:error_identifier) { 'incorrect_forwarding_target' }
+ let(:expected_text) { 'Incorrect forwarding target' }
+ end
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 9df89f84450..21878bc9b6d 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1512,7 +1512,6 @@ RSpec.describe Notify do
context 'for service desk issues' do
before do
- stub_feature_flags(service_desk_custom_email: false)
issue.update!(external_author: 'service.desk@example.com')
issue.issue_email_participants.create!(email: 'service.desk@example.com')
end
@@ -1558,42 +1557,30 @@ RSpec.describe Notify do
end
end
- context 'when service_desk_custom_email is active' do
- before do
- stub_feature_flags(service_desk_custom_email: true)
- end
+ context 'when custom email is enabled' do
+ let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
+ let_it_be(:verification) { create(:service_desk_custom_email_verification, project: project) }
- it_behaves_like 'a mail with default delivery method'
-
- it 'uses service bot name by default' do
- expect_sender(Users::Internal.support_bot)
+ let_it_be(:settings) do
+ create(
+ :service_desk_setting,
+ project: project,
+ custom_email: 'supersupport@example.com'
+ )
end
- context 'when custom email is enabled' do
- let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
- let_it_be(:verification) { create(:service_desk_custom_email_verification, project: project) }
-
- let_it_be(:settings) do
- create(
- :service_desk_setting,
- project: project,
- custom_email: 'supersupport@example.com'
- )
- end
-
- before_all do
- verification.mark_as_finished!
- project.reset
- settings.update!(custom_email_enabled: true)
- end
+ before_all do
+ verification.mark_as_finished!
+ project.reset
+ settings.update!(custom_email_enabled: true)
+ end
- it 'uses custom email and service bot name in "from" header' do
- expect_sender(Users::Internal.support_bot, sender_email: 'supersupport@example.com')
- end
+ it 'uses custom email and service bot name in "from" header' do
+ expect_sender(Users::Internal.support_bot, sender_email: 'supersupport@example.com')
+ end
- it 'uses SMTP delivery method and has correct settings' do
- expect_service_desk_custom_email_delivery_options(settings)
- end
+ it 'uses SMTP delivery method and has correct settings' do
+ expect_service_desk_custom_email_delivery_options(settings)
end
end
end
@@ -1623,42 +1610,30 @@ RSpec.describe Notify do
end
end
- context 'when service_desk_custom_email is active' do
- before do
- stub_feature_flags(service_desk_custom_email: true)
- end
-
- it_behaves_like 'a mail with default delivery method'
+ context 'when custom email is enabled' do
+ let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
+ let_it_be(:verification) { create(:service_desk_custom_email_verification, project: project) }
- it 'uses author\'s name in "from" header' do
- expect_sender(first_note.author)
+ let_it_be(:settings) do
+ create(
+ :service_desk_setting,
+ project: project,
+ custom_email: 'supersupport@example.com'
+ )
end
- context 'when custom email is enabled' do
- let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
- let_it_be(:verification) { create(:service_desk_custom_email_verification, project: project) }
-
- let_it_be(:settings) do
- create(
- :service_desk_setting,
- project: project,
- custom_email: 'supersupport@example.com'
- )
- end
-
- before_all do
- verification.mark_as_finished!
- project.reset
- settings.update!(custom_email_enabled: true)
- end
+ before_all do
+ verification.mark_as_finished!
+ project.reset
+ settings.update!(custom_email_enabled: true)
+ end
- it 'uses custom email and author\'s name in "from" header' do
- expect_sender(first_note.author, sender_email: project.service_desk_setting.custom_email)
- end
+ it 'uses custom email and author\'s name in "from" header' do
+ expect_sender(first_note.author, sender_email: project.service_desk_setting.custom_email)
+ end
- it 'uses SMTP delivery method and has correct settings' do
- expect_service_desk_custom_email_delivery_options(settings)
- end
+ it 'uses SMTP delivery method and has correct settings' do
+ expect_service_desk_custom_email_delivery_options(settings)
end
end
end
diff --git a/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb b/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb
deleted file mode 100644
index e3adea47273..00000000000
--- a/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FinalizeGroupMemberNamespaceIdMigration, :migration, feature_category: :groups_and_projects do
- let(:batched_migrations) { table(:batched_background_migrations) }
-
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- shared_examples 'finalizes the migration' do
- it 'finalizes the migration' do
- allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with(migration, :members, :id, [])
- end
- end
- end
-
- context 'when migration is missing' do
- before do
- batched_migrations.where(job_class_name: migration).delete_all
- end
-
- it 'warns migration not found' do
- expect(Gitlab::AppLogger)
- .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
-
- migrate!
- end
- end
-
- context 'with migration present' do
- let!(:group_member_namespace_id_backfill) do
- batched_migrations.create!(
- job_class_name: migration,
- table_name: :members,
- column_name: :id,
- job_arguments: [],
- interval: 2.minutes,
- min_value: 1,
- max_value: 2,
- batch_size: 1000,
- sub_batch_size: 200,
- gitlab_schema: :gitlab_main,
- status: 3 # finished
- )
- end
-
- context 'when migration finished successfully' do
- it 'does not raise exception' do
- expect { migrate! }.not_to raise_error
- end
- end
-
- context 'with different migration statuses' do
- using RSpec::Parameterized::TableSyntax
-
- where(:status, :description) do
- 0 | 'paused'
- 1 | 'active'
- 4 | 'failed'
- 5 | 'finalizing'
- end
-
- with_them do
- before do
- group_member_namespace_id_backfill.update!(status: status)
- end
-
- it_behaves_like 'finalizes the migration'
- end
- end
- end
- end
-end
diff --git a/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb b/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb
deleted file mode 100644
index d591b370d65..00000000000
--- a/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddObjectiveAndKeyresultToWorkItemTypes, :migration, feature_category: :team_planning do
- include MigrationHelpers::WorkItemTypesHelper
-
- let!(:work_item_types) { table(:work_item_types) }
-
- let(:base_types) do
- {
- issue: 0,
- incident: 1,
- test_case: 2,
- requirement: 3,
- task: 4,
- objective: 5,
- key_result: 6
- }
- end
-
- append_after(:all) do
- # Make sure base types are recreated after running the migration
- # because migration specs are not run in a transaction
- reset_work_item_types
- end
-
- it 'skips creating both objective & keyresult type record if it already exists' do
- reset_db_state_prior_to_migration
- work_item_types.find_or_create_by!(
- name: 'Key Result', namespace_id: nil, base_type: base_types[:key_result], icon_name: 'issue-type-keyresult'
- )
- work_item_types.find_or_create_by!(
- name: 'Objective', namespace_id: nil, base_type: base_types[:objective], icon_name: 'issue-type-objective'
- )
-
- expect do
- migrate!
- end.to not_change(work_item_types, :count)
- end
-
- it 'adds both objective & keyresult to base work item types' do
- reset_db_state_prior_to_migration
-
- expect do
- migrate!
- end.to change(work_item_types, :count).from(5).to(7)
-
- expect(work_item_types.all.pluck(:base_type)).to include(base_types[:objective])
- expect(work_item_types.all.pluck(:base_type)).to include(base_types[:key_result])
- end
-
- def reset_db_state_prior_to_migration
- # Database needs to be in a similar state as when this migration was created
- work_item_types.delete_all
- work_item_types.find_or_create_by!(
- name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue'
- )
- work_item_types.find_or_create_by!(
- name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident'
- )
- work_item_types.find_or_create_by!(
- name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case'
- )
- work_item_types.find_or_create_by!(
- name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements'
- )
- work_item_types.find_or_create_by!(
- name: 'Task', namespace_id: nil, base_type: base_types[:task], icon_name: 'issue-type-task'
- )
- end
-end
diff --git a/spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb b/spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb
deleted file mode 100644
index 9cca2a5adfc..00000000000
--- a/spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleBackfillProjectNamespaceDetails, schema: 20221018062308, feature_category: :groups_and_projects do
- context 'when on gitlab.com' do
- let!(:background_migration) { described_class::MIGRATION }
- let!(:migration) { described_class.new }
-
- before do
- migration.up
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of projects' do
- expect(background_migration).to(
- have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migration.down
-
- expect(described_class::MIGRATION).not_to have_scheduled_batched_migration
- end
- end
- end
-end
diff --git a/spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb b/spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb
deleted file mode 100644
index da1df92691e..00000000000
--- a/spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb
+++ /dev/null
@@ -1,95 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe EnsureTaskNoteRenamingBackgroundMigrationFinished, :migration, feature_category: :team_planning do
- let(:batched_migrations) { table(:batched_background_migrations) }
- let(:batch_failed_status) { 2 }
- let(:batch_finalized_status) { 3 }
-
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- shared_examples 'finalizes the migration' do
- it 'finalizes the migration' do
- expect do
- migrate!
-
- task_renaming_migration.reload
- failed_job.reload
- end.to change(task_renaming_migration, :status).from(task_renaming_migration.status).to(3).and(
- change(failed_job, :status).from(batch_failed_status).to(batch_finalized_status)
- )
- end
- end
-
- context 'when migration is missing' do
- before do
- batched_migrations.where(job_class_name: migration).delete_all
- end
-
- it 'warns migration not found' do
- expect(Gitlab::AppLogger)
- .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
-
- migrate!
- end
- end
-
- context 'with migration present' do
- let!(:task_renaming_migration) do
- batched_migrations.create!(
- job_class_name: migration,
- table_name: :system_note_metadata,
- column_name: :id,
- job_arguments: [],
- interval: 2.minutes,
- min_value: 1,
- max_value: 2,
- batch_size: 1000,
- sub_batch_size: 200,
- gitlab_schema: :gitlab_main,
- status: 3 # finished
- )
- end
-
- context 'when migration finished successfully' do
- it 'does not raise exception' do
- expect { migrate! }.not_to raise_error
- end
- end
-
- context 'with different migration statuses', :redis do
- using RSpec::Parameterized::TableSyntax
-
- where(:status, :description) do
- 0 | 'paused'
- 1 | 'active'
- 4 | 'failed'
- 5 | 'finalizing'
- end
-
- with_them do
- let!(:failed_job) do
- table(:batched_background_migration_jobs).create!(
- batched_background_migration_id: task_renaming_migration.id,
- status: batch_failed_status,
- min_value: 1,
- max_value: 10,
- attempts: 2,
- batch_size: 100,
- sub_batch_size: 10
- )
- end
-
- before do
- task_renaming_migration.update!(status: status)
- end
-
- it_behaves_like 'finalizes the migration'
- end
- end
- end
- end
-end
diff --git a/spec/migrations/20221021145820_create_routing_table_for_builds_metadata_v2_spec.rb b/spec/migrations/20221021145820_create_routing_table_for_builds_metadata_v2_spec.rb
deleted file mode 100644
index 235351956c4..00000000000
--- a/spec/migrations/20221021145820_create_routing_table_for_builds_metadata_v2_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CreateRoutingTableForBuildsMetadataV2, :migration, feature_category: :continuous_integration do
- let!(:migration) { described_class.new }
-
- describe '#up' do
- context 'when the table is already partitioned' do
- before do
- # `convert_table_to_first_list_partition` checks if it's being executed
- # inside a transaction, but we're using transactional fixtures here so we
- # need to tell it that it's not inside a transaction.
- # We toggle the behavior depending on how many transactions we have open
- # instead of just returning `false` because the migration could have the
- # DDL transaction enabled.
- #
- open_transactions = ActiveRecord::Base.connection.open_transactions
- allow(migration).to receive(:transaction_open?) do
- ActiveRecord::Base.connection.open_transactions > open_transactions
- end
-
- migration.convert_table_to_first_list_partition(
- table_name: :ci_builds_metadata,
- partitioning_column: :partition_id,
- parent_table_name: :p_ci_builds_metadata,
- initial_partitioning_value: 100)
- end
-
- it 'skips the migration' do
- expect { migrate! }.not_to raise_error
- end
- end
- end
-end
diff --git a/spec/migrations/20221025043930_change_default_value_on_password_last_changed_at_to_user_details_spec.rb b/spec/migrations/20221025043930_change_default_value_on_password_last_changed_at_to_user_details_spec.rb
deleted file mode 100644
index 0e5bb419e32..00000000000
--- a/spec/migrations/20221025043930_change_default_value_on_password_last_changed_at_to_user_details_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ChangeDefaultValueOnPasswordLastChangedAtToUserDetails, :migration, feature_category: :user_profile do
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:users) { table(:users) }
- let(:user_details) { table(:user_details) }
-
- it 'correctly migrates up and down' do
- user = create_user!(email: '1234@abc')
- user_details.create!(user_id: user.id, provisioned_by_group_id: namespace.id)
-
- expect(UserDetail.find_by(user_id: user.id).password_last_changed_at).to be_nil
-
- migrate!
-
- user = create_user!(email: 'abc@1234')
- user_details.create!(user_id: user.id, provisioned_by_group_id: namespace.id)
-
- expect(UserDetail.find_by(user_id: user.id).password_last_changed_at).not_to be_nil
- end
-
- private
-
- def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
- users.create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- user_type: user_type,
- confirmed_at: Time.current
- )
- end
-end
diff --git a/spec/migrations/20221028022627_add_index_on_password_last_changed_at_to_user_details_spec.rb b/spec/migrations/20221028022627_add_index_on_password_last_changed_at_to_user_details_spec.rb
deleted file mode 100644
index 332b3a5abba..00000000000
--- a/spec/migrations/20221028022627_add_index_on_password_last_changed_at_to_user_details_spec.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddIndexOnPasswordLastChangedAtToUserDetails, :migration, feature_category: :user_profile do
- let(:index_name) { 'index_user_details_on_password_last_changed_at' }
-
- it 'correctly migrates up and down' do
- expect(subject).not_to be_index_exists_by_name(:user_details, index_name)
-
- migrate!
-
- expect(subject).to be_index_exists_by_name(:user_details, index_name)
- end
-end
diff --git a/spec/migrations/20221101032521_add_default_preferred_language_to_application_settings_spec.rb b/spec/migrations/20221101032521_add_default_preferred_language_to_application_settings_spec.rb
deleted file mode 100644
index deca498146b..00000000000
--- a/spec/migrations/20221101032521_add_default_preferred_language_to_application_settings_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddDefaultPreferredLanguageToApplicationSettings, feature_category: :internationalization do
- let(:application_setting) { table(:application_settings).create! }
-
- describe "#up" do
- it 'allows to read default_preferred_language field' do
- migrate!
-
- expect(application_setting.attributes.keys).to include('default_preferred_language')
- expect(application_setting.default_preferred_language).to eq 'en'
- end
- end
-
- describe "#down" do
- it 'deletes default_preferred_language field' do
- migrate!
- schema_migrate_down!
-
- expect(application_setting.attributes.keys).not_to include('default_preferred_language')
- end
- end
-end
diff --git a/spec/migrations/20221101032600_add_text_limit_to_default_preferred_language_on_application_settings_spec.rb b/spec/migrations/20221101032600_add_text_limit_to_default_preferred_language_on_application_settings_spec.rb
deleted file mode 100644
index 3e36e99a0ca..00000000000
--- a/spec/migrations/20221101032600_add_text_limit_to_default_preferred_language_on_application_settings_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddTextLimitToDefaultPreferredLanguageOnApplicationSettings, feature_category: :internationalization do
- let(:application_setting) { table(:application_settings).create! }
- let(:too_long_text) { SecureRandom.alphanumeric(described_class::MAXIMUM_LIMIT + 1) }
-
- subject { application_setting.update_column(:default_preferred_language, too_long_text) }
-
- describe "#up" do
- it 'adds text limit to default_preferred_language' do
- migrate!
-
- expect { subject }.to raise_error ActiveRecord::StatementInvalid
- end
- end
-
- describe "#down" do
- it 'deletes text limit to default_preferred_language' do
- migrate!
- schema_migrate_down!
-
- expect { subject }.not_to raise_error
- end
- end
-end
diff --git a/spec/migrations/20221102090940_create_next_ci_partitions_record_spec.rb b/spec/migrations/20221102090940_create_next_ci_partitions_record_spec.rb
deleted file mode 100644
index dc6f365fe2b..00000000000
--- a/spec/migrations/20221102090940_create_next_ci_partitions_record_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CreateNextCiPartitionsRecord, migration: :gitlab_ci, feature_category: :continuous_integration do
- let(:migration) { described_class.new }
- let(:partitions) { table(:ci_partitions) }
-
- describe '#up' do
- context 'when on sass' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'creates next partitions record and resets the sequence' do
- expect { migrate! }
- .to change { partitions.where(id: 101).any? }
- .from(false).to(true)
-
- expect { partitions.create! }.not_to raise_error
- end
- end
-
- context 'when self-managed' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'does not create records' do
- expect { migrate! }.not_to change(partitions, :count)
- end
- end
- end
-
- describe '#down' do
- context 'when on sass' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'removes the record' do
- migrate!
-
- expect { migration.down }
- .to change { partitions.where(id: 101).any? }
- .from(true).to(false)
- end
- end
-
- context 'when self-managed' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true, false)
- end
-
- it 'does not remove the record' do
- expect { migrate! }.to change(partitions, :count).by(1)
-
- expect { migration.down }.not_to change(partitions, :count)
- end
- end
- end
-end
diff --git a/spec/migrations/20221102090943_create_second_partition_for_builds_metadata_spec.rb b/spec/migrations/20221102090943_create_second_partition_for_builds_metadata_spec.rb
deleted file mode 100644
index b4bd5136383..00000000000
--- a/spec/migrations/20221102090943_create_second_partition_for_builds_metadata_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CreateSecondPartitionForBuildsMetadata, :migration, feature_category: :continuous_integration do
- let(:migration) { described_class.new }
- let(:partitions) { table(:ci_partitions) }
-
- describe '#up' do
- context 'when on sass' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'creates a new partition' do
- expect { migrate! }.to change { partitions_count }.by(1)
- end
- end
-
- context 'when self-managed' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'does not create the partition' do
- expect { migrate! }.not_to change { partitions_count }
- end
- end
- end
-
- describe '#down' do
- context 'when on sass' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'removes the partition' do
- migrate!
-
- expect { migration.down }.to change { partitions_count }.by(-1)
- end
- end
-
- context 'when self-managed' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'does not change the partitions count' do
- migrate!
-
- expect { migration.down }.not_to change { partitions_count }
- end
- end
- end
-
- def partitions_count
- Gitlab::Database::PostgresPartition.for_parent_table(:p_ci_builds_metadata).size
- end
-end
diff --git a/spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb b/spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb
index c296ba24d9d..f12985bf6c9 100644
--- a/spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb
+++ b/spec/migrations/20230802085923_queue_fix_allow_descendants_override_disabled_shared_runners_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe QueueFixAllowDescendantsOverrideDisabledSharedRunners, feature_category: :runner_fleet do
+RSpec.describe QueueFixAllowDescendantsOverrideDisabledSharedRunners, feature_category: :fleet_visibility do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
diff --git a/spec/migrations/20231107092912_queue_backfill_branch_protection_namespace_setting_spec.rb b/spec/migrations/20231107092912_queue_backfill_branch_protection_namespace_setting_spec.rb
new file mode 100644
index 00000000000..ddf4fb1e1c4
--- /dev/null
+++ b/spec/migrations/20231107092912_queue_backfill_branch_protection_namespace_setting_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillBranchProtectionNamespaceSetting, feature_category: :database do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :namespace_settings,
+ column_name: :namespace_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230912105945_queue_backfill_finding_id_in_vulnerabilities_spec.rb b/spec/migrations/20231115025547_queue_backfill_merge_request_diffs_project_id_spec.rb
index 02c39408d40..6c80659e969 100644
--- a/spec/migrations/20230912105945_queue_backfill_finding_id_in_vulnerabilities_spec.rb
+++ b/spec/migrations/20231115025547_queue_backfill_merge_request_diffs_project_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe QueueBackfillFindingIdInVulnerabilities, feature_category: :vulnerability_management do
+RSpec.describe QueueBackfillMergeRequestDiffsProjectId, feature_category: :code_review_workflow do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
@@ -14,7 +14,7 @@ RSpec.describe QueueBackfillFindingIdInVulnerabilities, feature_category: :vulne
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :vulnerabilities,
+ table_name: :merge_request_diffs,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
diff --git a/spec/migrations/20231001105945_requeue_backfill_finding_id_in_vulnerabilities_spec.rb b/spec/migrations/20231129105945_requeue_backfill_finding_id_in_vulnerabilities3_spec.rb
index f89fc55b6b8..ca007e7487c 100644
--- a/spec/migrations/20231001105945_requeue_backfill_finding_id_in_vulnerabilities_spec.rb
+++ b/spec/migrations/20231129105945_requeue_backfill_finding_id_in_vulnerabilities3_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe RequeueBackfillFindingIdInVulnerabilities, feature_category: :vulnerability_management do
+RSpec.describe RequeueBackfillFindingIdInVulnerabilities3, feature_category: :vulnerability_management do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
diff --git a/spec/migrations/queue_backfill_user_details_fields_spec.rb b/spec/migrations/20231130140901_queue_backfill_vs_code_settings_uuid_spec.rb
index 4613a85be40..3e697d6b1f3 100644
--- a/spec/migrations/queue_backfill_user_details_fields_spec.rb
+++ b/spec/migrations/20231130140901_queue_backfill_vs_code_settings_uuid_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe QueueBackfillUserDetailsFields, feature_category: :user_profile do
+RSpec.describe QueueBackfillVsCodeSettingsUuid, feature_category: :web_ide do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
@@ -14,9 +14,11 @@ RSpec.describe QueueBackfillUserDetailsFields, feature_category: :user_profile d
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :users,
+ table_name: :vs_code_settings,
column_name: :id,
- interval: described_class::INTERVAL
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
)
}
end
diff --git a/spec/migrations/20231201171229_detect_and_fix_duplicate_organizations_path_spec.rb b/spec/migrations/20231201171229_detect_and_fix_duplicate_organizations_path_spec.rb
new file mode 100644
index 00000000000..259327bec86
--- /dev/null
+++ b/spec/migrations/20231201171229_detect_and_fix_duplicate_organizations_path_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DetectAndFixDuplicateOrganizationsPath, feature_category: :cell do
+ let!(:default_organization) { table(:organizations).create!(name: 'Default', path: 'Default') }
+
+ let(:duplicate_path_name) { 'some_path' }
+ let!(:organization) { table(:organizations).create!(name: '_name_', path: duplicate_path_name) }
+ let!(:organization_duplicate) { table(:organizations).create!(name: '_name_', path: duplicate_path_name.upcase) }
+ let!(:organization_multiple_duplicate) do
+ table(:organizations).create!(name: '_name_', path: duplicate_path_name.upcase_first)
+ end
+
+ describe '#up' do
+ it 'removes the duplication', :aggregate_failures do
+ expect(organization.path).to eq(duplicate_path_name)
+ expect(organization_duplicate.path).to eq(duplicate_path_name.upcase)
+ expect(organization_multiple_duplicate.path).to eq(duplicate_path_name.upcase_first)
+ expect(default_organization.path).to eq('Default')
+
+ migrate!
+
+ expect(organization.reload.path).to eq(duplicate_path_name)
+ expect(organization_duplicate.reload.path).to eq("#{duplicate_path_name.upcase}1")
+ expect(organization_multiple_duplicate.reload.path).to eq("#{duplicate_path_name.upcase_first}2")
+ expect(default_organization.reload.path).to eq('Default')
+ end
+ end
+end
diff --git a/spec/migrations/20231011142714_queue_backfill_has_remediations_of_vulnerability_reads_spec.rb b/spec/migrations/20231201204712_requeue2_backfill_has_remediations_of_vulnerability_reads_spec.rb
index 27ecc255a2a..4299fdc731f 100644
--- a/spec/migrations/20231011142714_queue_backfill_has_remediations_of_vulnerability_reads_spec.rb
+++ b/spec/migrations/20231201204712_requeue2_backfill_has_remediations_of_vulnerability_reads_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe QueueBackfillHasRemediationsOfVulnerabilityReads, feature_category: :database do
+RSpec.describe Requeue2BackfillHasRemediationsOfVulnerabilityReads, feature_category: :database do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
diff --git a/spec/migrations/20231207194620_backfill_catalog_resources_visibility_level_spec.rb b/spec/migrations/20231207194620_backfill_catalog_resources_visibility_level_spec.rb
new file mode 100644
index 00000000000..9023aa705a1
--- /dev/null
+++ b/spec/migrations/20231207194620_backfill_catalog_resources_visibility_level_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillCatalogResourcesVisibilityLevel, feature_category: :pipeline_composition do
+ let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
+
+ let(:project) do
+ table(:projects).create!(
+ visibility_level: Gitlab::VisibilityLevel::INTERNAL,
+ namespace_id: namespace.id, project_namespace_id: namespace.id
+ )
+ end
+
+ let(:resource) { table(:catalog_resources).create!(project_id: project.id) }
+
+ describe '#up' do
+ it 'updates the visibility_level to match the project' do
+ expect(resource.visibility_level).to eq(0)
+
+ migrate!
+
+ expect(resource.reload.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb b/spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb
deleted file mode 100644
index b270f2b100f..00000000000
--- a/spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupVulnerabilityStateTransitionsWithSameFromStateToState, :migration,
- feature_category: :vulnerability_management do
- let!(:namespace) { table(:namespaces).create!(name: 'namespace', type: 'Group', path: 'namespace') }
- let!(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) }
- let!(:project) do
- table(:projects).create!(
- path: 'project',
- namespace_id: namespace.id,
- project_namespace_id: namespace.id
- )
- end
-
- let!(:vulnerability) do
- table(:vulnerabilities).create!(
- project_id: project.id,
- author_id: user.id,
- title: 'test',
- severity: 7,
- confidence: 7,
- report_type: 0
- )
- end
-
- let!(:state_transitions) { table(:vulnerability_state_transitions) }
-
- let!(:state_transition_with_no_state_change) do
- state_transitions.create!(
- vulnerability_id: vulnerability.id,
- from_state: 2,
- to_state: 2
- )
- end
-
- let!(:state_transition_with_state_change) do
- state_transitions.create!(
- vulnerability_id: vulnerability.id,
- from_state: 1,
- to_state: 2
- )
- end
-
- it 'deletes state transitions with no state change' do
- expect { migrate! }.to change(state_transitions, :count).from(2).to(1)
- end
-end
diff --git a/spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb b/spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb
deleted file mode 100644
index 8a0c0250cdf..00000000000
--- a/spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-require_migration!
-
-RSpec.describe DeleteMigrateSharedVulnerabilityScanners, :migration, feature_category: :vulnerability_management do
- let(:batched_background_migrations) { table(:batched_background_migrations) }
- let(:batched_background_migration_jobs) { table(:batched_background_migration_jobs) }
-
- let(:migration) do
- batched_background_migrations.create!(
- created_at: Time.zone.now,
- updated_at: Time.zone.now,
- min_value: 1,
- max_value: 1,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: 100,
- interval: 300,
- status: 3,
- job_class_name: described_class::MIGRATION,
- batch_class_name: "PrimaryKeyBatchingStrategy",
- table_name: described_class::TABLE_NAME,
- column_name: described_class::BATCH_COLUMN,
- job_arguments: [],
- pause_ms: 100,
- max_batch_size: 1000,
- gitlab_schema: "gitlab_main"
- )
- end
-
- let(:jobs) do
- Array.new(10) do
- batched_background_migration_jobs.create!(
- batched_background_migration_id: migration.id,
- created_at: Time.zone.now,
- updated_at: Time.zone.now,
- min_value: 1,
- max_value: 1,
- batch_size: 1,
- sub_batch_size: 1,
- status: 0,
- attempts: 0,
- metrics: {},
- pause_ms: 100
- )
- end
- end
-
- describe "#up" do
- it "deletes jobs" do
- expect { migrate! }.to change(batched_background_migration_jobs, :count).from(jobs.count).to(0)
- end
-
- it "deletes the migration" do
- expect { migrate! }.to change { batched_background_migrations.find_by(id: migration.id) }.from(migration).to(nil)
- end
-
- context "when background migration does not exist" do
- before do
- migration.destroy!
- end
-
- it "does not delete jobs" do
- expect { migrate! }.not_to change(batched_background_migration_jobs, :count)
- end
-
- it "does not delete the migration" do
- expect { migrate! }.not_to change { batched_background_migrations.find_by(id: migration.id) }
- end
- end
- end
-end
diff --git a/spec/migrations/finalize_invalid_member_cleanup_spec.rb b/spec/migrations/finalize_invalid_member_cleanup_spec.rb
deleted file mode 100644
index ace973ea1af..00000000000
--- a/spec/migrations/finalize_invalid_member_cleanup_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FinalizeInvalidMemberCleanup, :migration, feature_category: :groups_and_projects do
- let(:batched_migrations) { table(:batched_background_migrations) }
-
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- shared_examples 'finalizes the migration' do
- it 'finalizes the migration' do
- allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with('DestroyInvalidMembers', :members, :id, [])
- end
- end
- end
-
- context 'when migration is missing' do
- before do
- batched_migrations.where(job_class_name: migration).delete_all
- end
-
- it 'warns migration not found' do
- expect(Gitlab::AppLogger)
- .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
-
- migrate!
- end
- end
-
- context 'with migration present' do
- let!(:destroy_invalid_member_migration) do
- batched_migrations.create!(
- job_class_name: 'DestroyInvalidMembers',
- table_name: :members,
- column_name: :id,
- job_arguments: [],
- interval: 2.minutes,
- min_value: 1,
- max_value: 2,
- batch_size: 1000,
- sub_batch_size: 200,
- gitlab_schema: :gitlab_main,
- status: 3 # finished
- )
- end
-
- context 'when migration finished successfully' do
- it 'does not raise exception' do
- expect { migrate! }.not_to raise_error
- end
- end
-
- context 'with different migration statuses' do
- using RSpec::Parameterized::TableSyntax
-
- where(:status, :description) do
- 0 | 'paused'
- 1 | 'active'
- 4 | 'failed'
- 5 | 'finalizing'
- end
-
- with_them do
- before do
- destroy_invalid_member_migration.update!(status: status)
- end
-
- it_behaves_like 'finalizes the migration'
- end
- end
- end
- end
-end
diff --git a/spec/migrations/fix_broken_user_achievements_awarded_spec.rb b/spec/migrations/fix_broken_user_achievements_awarded_spec.rb
new file mode 100644
index 00000000000..cb31ca44a9f
--- /dev/null
+++ b/spec/migrations/fix_broken_user_achievements_awarded_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe FixBrokenUserAchievementsAwarded, migration: :gitlab_main, feature_category: :user_profile do
+ let(:migration) { described_class.new }
+
+ let(:users_table) { table(:users) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:achievements_table) { table(:achievements) }
+ let(:user_achievements_table) { table(:user_achievements) }
+ let(:namespace) { namespaces_table.create!(name: 'something', path: generate(:username)) }
+ let(:achievement) { achievements_table.create!(name: 'something', namespace_id: namespace.id) }
+ let(:user) { users_table.create!(username: generate(:username), projects_limit: 0) }
+ let(:awarding_user) do
+ users_table.create!(username: generate(:username), email: generate(:email), projects_limit: 0)
+ end
+
+ let!(:user_achievement_invalid) do
+ user_achievements_table.create!(user_id: user.id, achievement_id: achievement.id,
+ awarded_by_user_id: awarding_user.id)
+ end
+
+ let!(:user_achievement_valid) do
+ user_achievements_table.create!(user_id: user.id, achievement_id: achievement.id,
+ awarded_by_user_id: user.id)
+ end
+
+ describe '#up' do
+ before do
+ awarding_user.delete
+ end
+
+ it 'migrates the invalid user achievement' do
+ expect { migrate! }
+ .to change { user_achievement_invalid.reload.awarded_by_user_id }
+ .from(nil).to(Users::Internal.ghost.id)
+ end
+
+ it 'does not migrate the valid user achievement' do
+ expect { migrate! }
+ .not_to change { user_achievement_valid.reload.awarded_by_user_id }
+ end
+ end
+end
diff --git a/spec/migrations/fix_broken_user_achievements_revoked_spec.rb b/spec/migrations/fix_broken_user_achievements_revoked_spec.rb
new file mode 100644
index 00000000000..34517ae67b4
--- /dev/null
+++ b/spec/migrations/fix_broken_user_achievements_revoked_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe FixBrokenUserAchievementsRevoked, migration: :gitlab_main, feature_category: :user_profile do
+ let(:migration) { described_class.new }
+
+ let(:users_table) { table(:users) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:achievements_table) { table(:achievements) }
+ let(:user_achievements_table) { table(:user_achievements) }
+ let(:namespace) { namespaces_table.create!(name: 'something', path: generate(:username)) }
+ let(:achievement) { achievements_table.create!(name: 'something', namespace_id: namespace.id) }
+ let(:user) { users_table.create!(username: generate(:username), projects_limit: 0) }
+ let(:revoked_invalid) do
+ user_achievements_table.create!(user_id: user.id, achievement_id: achievement.id, revoked_at: Time.current)
+ end
+
+ let(:revoked_valid) do
+ user_achievements_table.create!(user_id: user.id, achievement_id: achievement.id, revoked_at: Time.current,
+ revoked_by_user_id: user.id)
+ end
+
+ let(:not_revoked) { user_achievements_table.create!(user_id: user.id, achievement_id: achievement.id) }
+
+ describe '#up' do
+ it 'migrates the invalid user achievement' do
+ expect { migrate! }
+ .to change { revoked_invalid.reload.revoked_by_user_id }
+ .from(nil).to(Users::Internal.ghost.id)
+ end
+
+ it 'does not migrate valid revoked user achievement' do
+ expect { migrate! }
+ .not_to change { revoked_valid.reload.revoked_by_user_id }
+ end
+
+ it 'does not migrate the not revoked user achievement' do
+ expect { migrate! }
+ .not_to change { not_revoked.reload.revoked_by_user_id }
+ end
+ end
+end
diff --git a/spec/migrations/queue_populate_projects_star_count_spec.rb b/spec/migrations/queue_populate_projects_star_count_spec.rb
deleted file mode 100644
index b30bb6a578b..00000000000
--- a/spec/migrations/queue_populate_projects_star_count_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe QueuePopulateProjectsStarCount, feature_category: :user_profile do
- let!(:batched_migration) { described_class::MIGRATION }
-
- it 'schedules a new batched migration' do
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL
- )
- }
- end
- end
-end
diff --git a/spec/migrations/recount_epic_cache_counts_spec.rb b/spec/migrations/recount_epic_cache_counts_spec.rb
deleted file mode 100644
index d065389a726..00000000000
--- a/spec/migrations/recount_epic_cache_counts_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RecountEpicCacheCounts, :migration, feature_category: :portfolio_management do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules a batched background migration' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :epics,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/reschedule_migrate_shared_vulnerability_scanners_spec.rb b/spec/migrations/reschedule_migrate_shared_vulnerability_scanners_spec.rb
deleted file mode 100644
index 48422de81fe..00000000000
--- a/spec/migrations/reschedule_migrate_shared_vulnerability_scanners_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-require_migration!
-
-RSpec.describe RescheduleMigrateSharedVulnerabilityScanners, :migration, feature_category: :vulnerability_management do
- include Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers
-
- def connection
- ApplicationRecord.connection
- end
-
- describe "#up" do
- before do
- migrate!
- end
-
- it "schedules" do
- expect(described_class::MIGRATION).to have_scheduled_batched_migration(
- table_name: described_class::TABLE_NAME,
- column_name: described_class::BATCH_COLUMN,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE,
- gitlab_schema: :gitlab_main
- )
- end
- end
-
- describe '#down' do
- before do
- schema_migrate_down!
- end
-
- it "deletes" do
- expect(described_class::MIGRATION).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/set_email_confirmation_setting_from_send_user_confirmation_email_setting_spec.rb b/spec/migrations/set_email_confirmation_setting_from_send_user_confirmation_email_setting_spec.rb
deleted file mode 100644
index ef1ced530c9..00000000000
--- a/spec/migrations/set_email_confirmation_setting_from_send_user_confirmation_email_setting_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SetEmailConfirmationSettingFromSendUserConfirmationEmailSetting, feature_category: :user_profile do
- let(:migration) { described_class.new }
- let(:application_settings_table) { table(:application_settings) }
-
- describe '#up' do
- context "when 'send_user_confirmation_email' is set to 'true'" do
- it "updates 'email_confirmation_setting' to '2' (hard)" do
- application_settings_table.create!(send_user_confirmation_email: true, email_confirmation_setting: 0)
-
- migration.up
-
- expect(application_settings_table.last.email_confirmation_setting).to eq 2
- end
- end
-
- context "when 'send_user_confirmation_email' is set to 'false'" do
- it "updates 'email_confirmation_setting' to '0' (off)" do
- application_settings_table.create!(send_user_confirmation_email: false, email_confirmation_setting: 0)
-
- migration.up
-
- expect(application_settings_table.last.email_confirmation_setting).to eq 0
- end
- end
- end
-
- describe '#down' do
- it "updates 'email_confirmation_setting' to default value: '0' (off)" do
- application_settings_table.create!(send_user_confirmation_email: true, email_confirmation_setting: 2)
-
- migration.down
-
- expect(application_settings_table.last.email_confirmation_setting).to eq 0
- end
- end
-end
diff --git a/spec/migrations/sync_new_amount_used_for_ci_namespace_monthly_usages_spec.rb b/spec/migrations/sync_new_amount_used_for_ci_namespace_monthly_usages_spec.rb
deleted file mode 100644
index c60447d04a1..00000000000
--- a/spec/migrations/sync_new_amount_used_for_ci_namespace_monthly_usages_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe SyncNewAmountUsedForCiNamespaceMonthlyUsages, migration: :gitlab_ci,
- feature_category: :continuous_integration do
- let(:namespace_usages) { table(:ci_namespace_monthly_usages) }
-
- before do
- # Disabling the trigger temporarily to allow records being created with out-of-sync
- # `new_amount_used` and `amount_used`. This will simulate existing records before
- # we add the trigger.
- ActiveRecord::Base.connection
- .execute("ALTER TABLE ci_namespace_monthly_usages DISABLE TRIGGER sync_namespaces_amount_used_columns")
-
- this_month = Time.now.utc.beginning_of_month
- last_month = 1.month.ago.utc.beginning_of_month
- last_year = 1.year.ago.utc.beginning_of_month
-
- namespace_usages.create!(namespace_id: 1, date: last_year)
- namespace_usages.create!(namespace_id: 1, date: this_month, amount_used: 10, new_amount_used: 0)
- namespace_usages.create!(namespace_id: 1, date: last_month, amount_used: 20, new_amount_used: 0)
-
- namespace_usages.create!(namespace_id: 2, date: last_year)
- namespace_usages.create!(namespace_id: 2, date: this_month, amount_used: 30, new_amount_used: 0)
- namespace_usages.create!(namespace_id: 2, date: last_month, amount_used: 40, new_amount_used: 0)
-
- ActiveRecord::Base.connection
- .execute("ALTER TABLE ci_namespace_monthly_usages ENABLE TRIGGER sync_namespaces_amount_used_columns")
- end
-
- it 'updates `new_amount_used` with values from `amount_used`' do
- expect(namespace_usages.where(new_amount_used: 0).count).to eq(6)
-
- migrate!
-
- expect(namespace_usages.where(new_amount_used: 0).count).to eq(2)
- expect(namespace_usages.order(:id).pluck(:new_amount_used))
- .to contain_exactly(0, 0, 10, 20, 30, 40)
- end
-end
diff --git a/spec/migrations/sync_new_amount_used_for_ci_project_monthly_usages_spec.rb b/spec/migrations/sync_new_amount_used_for_ci_project_monthly_usages_spec.rb
deleted file mode 100644
index d7add66a97f..00000000000
--- a/spec/migrations/sync_new_amount_used_for_ci_project_monthly_usages_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe SyncNewAmountUsedForCiProjectMonthlyUsages, migration: :gitlab_ci,
- feature_category: :continuous_integration do
- let(:project_usages) { table(:ci_project_monthly_usages) }
-
- before do
- # Disabling the trigger temporarily to allow records being created with out-of-sync
- # `new_amount_used` and `amount_used`. This will simulate existing records before
- # we add the trigger.
- ActiveRecord::Base.connection
- .execute("ALTER TABLE ci_project_monthly_usages DISABLE TRIGGER sync_projects_amount_used_columns")
-
- this_month = Time.now.utc.beginning_of_month
- last_month = 1.month.ago.utc.beginning_of_month
- last_year = 1.year.ago.utc.beginning_of_month
-
- project_usages.create!(project_id: 1, date: last_year)
- project_usages.create!(project_id: 1, date: this_month, amount_used: 10, new_amount_used: 0)
- project_usages.create!(project_id: 1, date: last_month, amount_used: 20, new_amount_used: 0)
-
- project_usages.create!(project_id: 2, date: last_year)
- project_usages.create!(project_id: 2, date: this_month, amount_used: 30, new_amount_used: 0)
- project_usages.create!(project_id: 2, date: last_month, amount_used: 40, new_amount_used: 0)
-
- ActiveRecord::Base.connection
- .execute("ALTER TABLE ci_project_monthly_usages ENABLE TRIGGER sync_projects_amount_used_columns")
- end
-
- it 'updates `new_amount_used` with values from `amount_used`' do
- expect(project_usages.where(new_amount_used: 0).count).to eq(6)
-
- migrate!
-
- expect(project_usages.where(new_amount_used: 0).count).to eq(2)
- expect(project_usages.order(:id).pluck(:new_amount_used))
- .to contain_exactly(0, 0, 10, 20, 30, 40)
- end
-end
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index 6500e5fac02..4eb3bd32bfe 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -15,11 +15,12 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
describe 'associations' do
it { is_expected.to belong_to(:reporter).class_name('User').inverse_of(:reported_abuse_reports) }
it { is_expected.to belong_to(:resolved_by).class_name('User').inverse_of(:resolved_abuse_reports) }
- it { is_expected.to belong_to(:assignee).class_name('User').inverse_of(:assigned_abuse_reports) }
it { is_expected.to belong_to(:user).inverse_of(:abuse_reports) }
it { is_expected.to have_many(:events).class_name('ResourceEvents::AbuseReportEvent').inverse_of(:abuse_report) }
it { is_expected.to have_many(:notes) }
it { is_expected.to have_many(:user_mentions).class_name('Abuse::Reports::UserMention') }
+ it { is_expected.to have_many(:admin_abuse_report_assignees).class_name('Admin::AbuseReportAssignee') }
+ it { is_expected.to have_many(:assignees).class_name('User').through(:admin_abuse_report_assignees) }
it "aliases reporter to author" do
expect(subject.author).to be(subject.reporter)
diff --git a/spec/models/achievements/achievement_spec.rb b/spec/models/achievements/achievement_spec.rb
index d3e3e40fc0c..ac34efad5bf 100644
--- a/spec/models/achievements/achievement_spec.rb
+++ b/spec/models/achievements/achievement_spec.rb
@@ -4,18 +4,12 @@ require 'spec_helper'
RSpec.describe Achievements::Achievement, type: :model, feature_category: :user_profile do
describe 'associations' do
- it { is_expected.to belong_to(:namespace).required }
+ it { is_expected.to belong_to(:namespace).inverse_of(:achievements).required }
it { is_expected.to have_many(:user_achievements).inverse_of(:achievement) }
it { is_expected.to have_many(:users).through(:user_achievements).inverse_of(:achievements) }
end
- describe 'modules' do
- subject { described_class }
-
- it { is_expected.to include_module(Avatarable) }
- end
-
describe 'validations' do
subject { create(:achievement) }
@@ -27,10 +21,15 @@ RSpec.describe Achievements::Achievement, type: :model, feature_category: :user_
describe '#name' do
it 'strips name' do
- achievement = described_class.new(name: ' AchievementTest ')
+ achievement = build(:achievement, name: ' AchievementTest ')
+
achievement.valid?
expect(achievement.name).to eq('AchievementTest')
end
end
+
+ it_behaves_like Avatarable do
+ let(:model) { create(:achievement, :with_avatar) }
+ end
end
diff --git a/spec/models/activity_pub/releases_subscription_spec.rb b/spec/models/activity_pub/releases_subscription_spec.rb
index 0c873a5c18a..0633f293971 100644
--- a/spec/models/activity_pub/releases_subscription_spec.rb
+++ b/spec/models/activity_pub/releases_subscription_spec.rb
@@ -55,23 +55,37 @@ RSpec.describe ActivityPub::ReleasesSubscription, type: :model, feature_category
end
end
- describe '.find_by_subscriber_url' do
+ describe '.find_by_project_and_subscriber' do
let_it_be(:subscription) { create(:activity_pub_releases_subscription) }
it 'returns a record if arguments match' do
- result = described_class.find_by_subscriber_url(subscription.subscriber_url)
+ result = described_class.find_by_project_and_subscriber(subscription.project_id,
+ subscription.subscriber_url)
expect(result).to eq(subscription)
end
- it 'returns a record if arguments match case insensitively' do
- result = described_class.find_by_subscriber_url(subscription.subscriber_url.upcase)
+ it 'returns a record if subscriber url matches case insensitively' do
+ result = described_class.find_by_project_and_subscriber(subscription.project_id,
+ subscription.subscriber_url.upcase)
expect(result).to eq(subscription)
end
+ it 'returns nil if project and url do not match' do
+ result = described_class.find_by_project_and_subscriber(0, 'I really should not exist')
+
+ expect(result).to be(nil)
+ end
+
it 'returns nil if project does not match' do
- result = described_class.find_by_subscriber_url('I really should not exist')
+ result = described_class.find_by_project_and_subscriber(0, subscription.subscriber_url)
+
+ expect(result).to be(nil)
+ end
+
+ it 'returns nil if url does not match' do
+ result = described_class.find_by_project_and_subscriber(subscription.project_id, 'I really should not exist')
expect(result).to be(nil)
end
diff --git a/spec/models/admin/abuse_report_assignee_spec.rb b/spec/models/admin/abuse_report_assignee_spec.rb
new file mode 100644
index 00000000000..3b6233c358f
--- /dev/null
+++ b/spec/models/admin/abuse_report_assignee_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::AbuseReportAssignee, feature_category: :insider_threat do
+ let_it_be(:report) { create(:abuse_report) }
+ let_it_be(:user) { create(:admin) }
+
+ subject(:abuse_report_assignee) { report.admin_abuse_report_assignees.build(assignee: user) }
+
+ it { expect(abuse_report_assignee).to be_valid }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:abuse_report) }
+ it { is_expected.to belong_to(:assignee).class_name('User').with_foreign_key(:user_id) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_uniqueness_of(:assignee).scoped_to(:abuse_report_id) }
+ end
+
+ context 'with loose foreign key on abuse_report_assignees.user_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let_it_be(:parent) { user }
+ let_it_be(:report) { create(:abuse_report).tap { |r| r.update!(assignees: [parent]) } }
+ let_it_be(:model) { report.admin_abuse_report_assignees.first }
+ end
+ end
+end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index a2d6c60fbd0..d16a78be533 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { expect(setting.default_branch_protection_defaults).to eq({}) }
it { expect(setting.max_decompressed_archive_size).to eq(25600) }
it { expect(setting.decompress_archive_file_timeout).to eq(210) }
+ it { expect(setting.bulk_import_concurrent_pipeline_batch_limit).to eq(25) }
end
describe 'validations' do
@@ -162,6 +163,8 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_inclusion_of(:user_defaults_to_private_profile).in_array([true, false]) }
+ it { is_expected.to validate_inclusion_of(:can_create_organization).in_array([true, false]) }
+
it { is_expected.to validate_inclusion_of(:allow_project_creation_for_guest_and_below).in_array([true, false]) }
it { is_expected.to validate_inclusion_of(:deny_all_requests_except_allowed).in_array([true, false]) }
@@ -736,6 +739,24 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
+ describe '#repository_storages_with_default_weight' do
+ context 'with no extra storage set-up in the config file', fips_mode: false do
+ it 'keeps existing key restrictions' do
+ expect(setting.repository_storages_with_default_weight).to eq({ 'default' => 100 })
+ end
+ end
+
+ context 'with extra storage set-up in the config file', fips_mode: false do
+ before do
+ stub_storage_settings({ 'default' => {}, 'custom' => {} })
+ end
+
+ it 'keeps existing key restrictions' do
+ expect(setting.repository_storages_with_default_weight).to eq({ 'default' => 100, 'custom' => 0 })
+ end
+ end
+ end
+
describe 'setting validated as `addressable_url` configured with external URI' do
before do
# Use any property that has the `addressable_url` validation.
@@ -1321,17 +1342,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
expect(subject.errors.messages[:default_group_visibility].first).to eq("cannot be set to a restricted visibility level")
expect(subject.errors.messages[:default_project_visibility].first).to eq("cannot be set to a restricted visibility level")
end
-
- context 'when prevent_visibility_restriction FF is disabled' do
- before do
- stub_feature_flags(prevent_visibility_restriction: false)
- end
-
- it { is_expected.to allow_value(10).for(:default_group_visibility) }
- it { is_expected.to allow_value(10).for(:default_project_visibility) }
- it { is_expected.to allow_value(20).for(:default_group_visibility) }
- it { is_expected.to allow_value(20).for(:default_project_visibility) }
- end
end
describe 'sentry_clientside_traces_sample_rate' do
@@ -1342,6 +1352,13 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
.with_message("must be a value between 0 and 1")
end
end
+
+ describe 'bulk_import_concurrent_pipeline_batch_limit' do
+ it do
+ is_expected.to validate_numericality_of(:bulk_import_concurrent_pipeline_batch_limit)
+ .is_greater_than(0)
+ end
+ end
end
context 'restrict creating duplicates' do
@@ -1658,17 +1675,17 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
context 'with plaintext token only' do
- let(:token) { '' }
+ let(:plaintext_token) { Devise.friendly_token(20) }
- it 'ignores the plaintext token' do
+ it 'encrypts the plaintext token' do
subject
- described_class.update_all(static_objects_external_storage_auth_token: 'Test')
+ described_class.update!(static_objects_external_storage_auth_token: plaintext_token)
setting.reload
expect(setting[:static_objects_external_storage_auth_token]).to be_nil
- expect(setting[:static_objects_external_storage_auth_token_encrypted]).to be_nil
- expect(setting.static_objects_external_storage_auth_token).to be_nil
+ expect(setting[:static_objects_external_storage_auth_token_encrypted]).not_to be_nil
+ expect(setting.static_objects_external_storage_auth_token).to eq(plaintext_token)
end
end
end
@@ -1723,4 +1740,8 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
expect(setting.personal_access_tokens_disabled?).to eq(false)
end
end
+
+ context 'security txt content' do
+ it { is_expected.to validate_length_of(:security_txt_content).is_at_most(2048) }
+ end
end
diff --git a/spec/models/award_emoji_spec.rb b/spec/models/award_emoji_spec.rb
index b179f2df816..a901453ba9f 100644
--- a/spec/models/award_emoji_spec.rb
+++ b/spec/models/award_emoji_spec.rb
@@ -155,6 +155,8 @@ RSpec.describe AwardEmoji, feature_category: :team_planning do
end
it 'broadcasts updates on the note when destroyed' do
+ award_emoji.save!
+
expect(note).to receive(:broadcast_noteable_notes_changed)
expect(note).to receive(:trigger_note_subscription_update)
@@ -185,6 +187,8 @@ RSpec.describe AwardEmoji, feature_category: :team_planning do
end
it 'does not broadcast updates on the issue when destroyed' do
+ award_emoji.save!
+
expect(issue).not_to receive(:broadcast_noteable_notes_changed)
expect(issue).not_to receive(:trigger_note_subscription_update)
@@ -315,6 +319,17 @@ RSpec.describe AwardEmoji, feature_category: :team_planning do
expect(new_award.url).to eq(custom_emoji.url)
end
+ describe 'when inside subgroup' do
+ let_it_be(:subgroup) { create(:group, parent: custom_emoji.group) }
+ let_it_be(:project) { create(:project, namespace: subgroup) }
+
+ it 'is set for custom emoji' do
+ new_award = build_award(custom_emoji.name)
+
+ expect(new_award.url).to eq(custom_emoji.url)
+ end
+ end
+
context 'feature flag disabled' do
before do
stub_feature_flags(custom_emoji: false)
diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb
index 9c153f36d8b..598c91a64fb 100644
--- a/spec/models/blob_spec.rb
+++ b/spec/models/blob_spec.rb
@@ -387,6 +387,36 @@ RSpec.describe Blob do
expect(blob.auxiliary_viewer).to be_a(BlobViewer::License)
end
end
+
+ context 'when the blob is GitlabCiYml' do
+ it 'returns a matching viewer for .gitlab-ci.yml' do
+ blob = fake_blob(path: '.gitlab-ci.yml')
+
+ expect(blob.auxiliary_viewer).to be_a(BlobViewer::GitlabCiYml)
+ end
+
+ it 'returns nil for non .gitlab-ci.yml' do
+ blob = fake_blob(path: 'custom-ci.yml')
+
+ expect(blob.auxiliary_viewer).to be_nil
+ end
+
+ context 'when the project has a custom CI config path' do
+ let(:project) { build(:project, ci_config_path: 'custom-ci.yml') }
+
+ it 'returns a matching viewer for the custom CI file' do
+ blob = fake_blob(path: 'custom-ci.yml')
+
+ expect(blob.auxiliary_viewer).to be_a(BlobViewer::GitlabCiYml)
+ end
+
+ it 'returns nil for the incorrect CI file' do
+ blob = fake_blob(path: '.gitlab-ci.yml')
+
+ expect(blob.auxiliary_viewer).to be_nil
+ end
+ end
+ end
end
describe '#rendered_as_text?' do
diff --git a/spec/models/bulk_import_spec.rb b/spec/models/bulk_import_spec.rb
index ff24f57f7c4..57c6df39167 100644
--- a/spec/models/bulk_import_spec.rb
+++ b/spec/models/bulk_import_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe BulkImport, type: :model, feature_category: :importers do
- let_it_be(:created_bulk_import) { create(:bulk_import, :created) }
- let_it_be(:started_bulk_import) { create(:bulk_import, :started) }
- let_it_be(:finished_bulk_import) { create(:bulk_import, :finished) }
+ let_it_be(:created_bulk_import) { create(:bulk_import, :created, updated_at: 2.hours.ago) }
+ let_it_be(:started_bulk_import) { create(:bulk_import, :started, updated_at: 3.hours.ago) }
+ let_it_be(:finished_bulk_import) { create(:bulk_import, :finished, updated_at: 1.hour.ago) }
let_it_be(:failed_bulk_import) { create(:bulk_import, :failed) }
- let_it_be(:stale_created_bulk_import) { create(:bulk_import, :created, created_at: 3.days.ago) }
- let_it_be(:stale_started_bulk_import) { create(:bulk_import, :started, created_at: 3.days.ago) }
+ let_it_be(:stale_created_bulk_import) { create(:bulk_import, :created, updated_at: 3.days.ago) }
+ let_it_be(:stale_started_bulk_import) { create(:bulk_import, :started, updated_at: 2.days.ago) }
describe 'associations' do
it { is_expected.to belong_to(:user).required }
@@ -23,10 +23,27 @@ RSpec.describe BulkImport, type: :model, feature_category: :importers do
it { is_expected.to define_enum_for(:source_type).with_values(%i[gitlab]) }
end
- describe '.stale scope' do
- subject { described_class.stale }
+ describe 'scopes' do
+ describe '.stale' do
+ subject { described_class.stale }
- it { is_expected.to contain_exactly(stale_created_bulk_import, stale_started_bulk_import) }
+ it { is_expected.to contain_exactly(stale_created_bulk_import, stale_started_bulk_import) }
+ end
+
+ describe '.order_by_updated_at_and_id' do
+ subject { described_class.order_by_updated_at_and_id(:desc) }
+
+ it 'sorts by given direction' do
+ is_expected.to eq([
+ failed_bulk_import,
+ finished_bulk_import,
+ created_bulk_import,
+ started_bulk_import,
+ stale_started_bulk_import,
+ stale_created_bulk_import
+ ])
+ end
+ end
end
describe '.all_human_statuses' do
diff --git a/spec/models/bulk_imports/batch_tracker_spec.rb b/spec/models/bulk_imports/batch_tracker_spec.rb
index 336943228c7..1c7cbc0cb8c 100644
--- a/spec/models/bulk_imports/batch_tracker_spec.rb
+++ b/spec/models/bulk_imports/batch_tracker_spec.rb
@@ -13,4 +13,19 @@ RSpec.describe BulkImports::BatchTracker, type: :model, feature_category: :impor
it { is_expected.to validate_presence_of(:batch_number) }
it { is_expected.to validate_uniqueness_of(:batch_number).scoped_to(:tracker_id) }
end
+
+ describe 'scopes' do
+ describe '.in_progress' do
+ it 'returns only batches that are in progress' do
+ created = create(:bulk_import_batch_tracker, :created)
+ started = create(:bulk_import_batch_tracker, :started)
+ create(:bulk_import_batch_tracker, :finished)
+ create(:bulk_import_batch_tracker, :timeout)
+ create(:bulk_import_batch_tracker, :failed)
+ create(:bulk_import_batch_tracker, :skipped)
+
+ expect(described_class.in_progress).to contain_exactly(created, started)
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index b822786579b..ce143a1aa33 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -191,6 +191,24 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(described_class.by_user_id(user.id)).to contain_exactly(entity_1, entity_2)
end
end
+
+ describe '.stale' do
+ it 'returns entities that are stale' do
+ entity_1 = create(:bulk_import_entity, updated_at: 3.days.ago)
+ create(:bulk_import_entity)
+
+ expect(described_class.stale).to contain_exactly(entity_1)
+ end
+ end
+
+ describe '.order_by_updated_at_and_id' do
+ it 'returns entities ordered by updated_at and id' do
+ entity_1 = create(:bulk_import_entity, updated_at: 3.days.ago)
+ entity_2 = create(:bulk_import_entity, updated_at: 2.days.ago)
+
+ expect(described_class.order_by_updated_at_and_id(:desc)).to eq([entity_2, entity_1])
+ end
+ end
end
describe '.all_human_statuses' do
@@ -444,6 +462,13 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(entity.has_failures).to eq(true)
end
+
+ it 'sets the has_failures flag on the parent import' do
+ create(:bulk_import_failure, entity: entity)
+
+ expect { entity.update_has_failures }
+ .to change { entity.bulk_import.has_failures? }.from(false).to(true)
+ end
end
context 'when entity does not have failures' do
diff --git a/spec/models/bulk_imports/export_status_spec.rb b/spec/models/bulk_imports/export_status_spec.rb
index c3faa2db19c..aa3bce78534 100644
--- a/spec/models/bulk_imports/export_status_spec.rb
+++ b/spec/models/bulk_imports/export_status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::ExportStatus, feature_category: :importers do
+RSpec.describe BulkImports::ExportStatus, :clean_gitlab_redis_cache, feature_category: :importers do
let_it_be(:relation) { 'labels' }
let_it_be(:import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: import) }
@@ -100,7 +100,7 @@ RSpec.describe BulkImports::ExportStatus, feature_category: :importers do
end
it 'returns false' do
- expect(subject.started?).to eq(false)
+ expect(subject.failed?).to eq(false)
end
end
@@ -113,8 +113,8 @@ RSpec.describe BulkImports::ExportStatus, feature_category: :importers do
end
end
- it 'returns false' do
- expect(subject.started?).to eq(false)
+ it 'returns true' do
+ expect(subject.failed?).to eq(true)
end
end
end
@@ -156,7 +156,7 @@ RSpec.describe BulkImports::ExportStatus, feature_category: :importers do
end
it 'returns false' do
- expect(subject.started?).to eq(false)
+ expect(subject.empty?).to eq(false)
end
end
end
@@ -282,4 +282,111 @@ RSpec.describe BulkImports::ExportStatus, feature_category: :importers do
end
end
end
+
+ describe 'caching' do
+ let(:cached_status) do
+ subject.send(:status)
+ subject.send(:status_from_cache)
+ end
+
+ shared_examples 'does not result in a cached status' do
+ specify do
+ expect(cached_status).to be_nil
+ end
+ end
+
+ shared_examples 'results in a cached status' do
+ specify do
+ expect(cached_status).to include('status' => status)
+ end
+
+ context 'when something goes wrong during export status fetch' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).and_raise(
+ BulkImports::NetworkError.new("Unsuccessful response", response: nil)
+ )
+ end
+ end
+
+ include_examples 'does not result in a cached status'
+ end
+ end
+
+ context 'when export status is started' do
+ let(:status) { BulkImports::Export::STARTED }
+
+ it_behaves_like 'does not result in a cached status'
+ end
+
+ context 'when export status is failed' do
+ let(:status) { BulkImports::Export::FAILED }
+
+ it_behaves_like 'results in a cached status'
+ end
+
+ context 'when export status is finished' do
+ let(:status) { BulkImports::Export::FINISHED }
+
+ it_behaves_like 'results in a cached status'
+ end
+
+ context 'when export status is not present' do
+ let(:status) { nil }
+
+ it_behaves_like 'does not result in a cached status'
+ end
+
+ context 'when the cache is empty' do
+ let(:status) { BulkImports::Export::FAILED }
+
+ it 'fetches the status from the remote' do
+ expect(subject).to receive(:status_from_remote).and_call_original
+ expect(subject.send(:status)).to include('status' => status)
+ end
+ end
+
+ context 'when the cache is not empty' do
+ let(:status) { BulkImports::Export::FAILED }
+
+ before do
+ Gitlab::Cache::Import::Caching.write(
+ described_class.new(tracker, 'labels').send(:cache_key),
+ { 'status' => 'mock status' }.to_json
+ )
+ end
+
+ it 'does not fetch the status from the remote' do
+ expect(subject).not_to receive(:status_from_remote)
+ expect(subject.send(:status)).to eq({ 'status' => 'mock status' })
+ end
+
+ context 'with a different entity' do
+ before do
+ tracker.entity = create(:bulk_import_entity, bulk_import: import, source_full_path: 'foo')
+ end
+
+ it 'fetches the status from the remote' do
+ expect(subject).to receive(:status_from_remote).and_call_original
+ expect(subject.send(:status)).to include('status' => status)
+ end
+ end
+
+ context 'with a different relation' do
+ let_it_be(:relation) { 'merge_requests' }
+
+ let(:response_double) do
+ instance_double(HTTParty::Response, parsed_response: [
+ { 'relation' => 'labels', 'status' => status },
+ { 'relation' => 'merge_requests', 'status' => status }
+ ])
+ end
+
+ it 'fetches the status from the remote' do
+ expect(subject).to receive(:status_from_remote).and_call_original
+ expect(subject.send(:status)).to include('status' => status)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/export_upload_spec.rb b/spec/models/bulk_imports/export_upload_spec.rb
index d9ae41af0db..ca45fe73b0e 100644
--- a/spec/models/bulk_imports/export_upload_spec.rb
+++ b/spec/models/bulk_imports/export_upload_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::ExportUpload do
+RSpec.describe BulkImports::ExportUpload, type: :model, feature_category: :importers do
subject { described_class.new(export: create(:bulk_import_export)) }
describe 'associations' do
@@ -20,4 +20,18 @@ RSpec.describe BulkImports::ExportUpload do
expect(subject.public_send(method).url).to eq(url)
end
+
+ describe 'ActiveRecord callbacks' do
+ let(:after_save_callbacks) { described_class._save_callbacks.select { |cb| cb.kind == :after } }
+ let(:after_commit_callbacks) { described_class._commit_callbacks.select { |cb| cb.kind == :after } }
+
+ def find_callback(callbacks, key)
+ callbacks.find { |cb| cb.filter == key }
+ end
+
+ it 'export file is stored in after_commit callback' do
+ expect(find_callback(after_commit_callbacks, :store_export_file!)).to be_present
+ expect(find_callback(after_save_callbacks, :store_export_file!)).to be_nil
+ end
+ end
end
diff --git a/spec/models/bulk_imports/tracker_spec.rb b/spec/models/bulk_imports/tracker_spec.rb
index edd9adfa5f6..25cd5489a9f 100644
--- a/spec/models/bulk_imports/tracker_spec.rb
+++ b/spec/models/bulk_imports/tracker_spec.rb
@@ -83,5 +83,31 @@ RSpec.describe BulkImports::Tracker, type: :model, feature_category: :importers
"'InexistingPipeline' is not a valid BulkImport Pipeline"
)
end
+
+ context 'when using delegation methods' do
+ context 'with group pipelines' do
+ let(:entity) { create(:bulk_import_entity) }
+
+ it 'does not raise' do
+ entity.pipelines.each do |pipeline|
+ tracker = create(:bulk_import_tracker, entity: entity, pipeline_name: pipeline[:pipeline])
+ expect { tracker.abort_on_failure? }.not_to raise_error
+ expect { tracker.file_extraction_pipeline? }.not_to raise_error
+ end
+ end
+ end
+
+ context 'with project pipelines' do
+ let(:entity) { create(:bulk_import_entity, :project_entity) }
+
+ it 'does not raise' do
+ entity.pipelines.each do |pipeline|
+ tracker = create(:bulk_import_tracker, entity: entity, pipeline_name: pipeline[:pipeline])
+ expect { tracker.abort_on_failure? }.not_to raise_error
+ expect { tracker.file_extraction_pipeline? }.not_to raise_error
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 1d0c3bb5dee..ae8c5aea858 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -36,6 +36,24 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
expect(bridge).to have_one(:downstream_pipeline)
end
+ describe 'no-op methods for compatibility with Ci::Build' do
+ it 'returns an empty array job_artifacts' do
+ expect(bridge.job_artifacts).to eq(Ci::JobArtifact.none)
+ end
+
+ it 'return nil for artifacts_expire_at' do
+ expect(bridge.artifacts_expire_at).to be_nil
+ end
+
+ it 'return nil for runner' do
+ expect(bridge.runner).to be_nil
+ end
+
+ it 'returns an empty TagList for tag_list' do
+ expect(bridge.tag_list).to be_a(ActsAsTaggableOn::TagList)
+ end
+ end
+
describe '#retryable?' do
let(:bridge) { create(:ci_bridge, :success) }
@@ -595,6 +613,203 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
end
end
+ describe 'variables expansion' do
+ let(:options) do
+ {
+ trigger: {
+ project: 'my/project',
+ branch: 'master',
+ forward: { yaml_variables: true,
+ pipeline_variables: true }.compact
+ }
+ }
+ end
+
+ let(:yaml_variables) do
+ [
+ {
+ key: 'EXPANDED_PROJECT_VAR6',
+ value: 'project value6 $PROJECT_PROTECTED_VAR'
+ },
+ {
+ key: 'EXPANDED_GROUP_VAR6',
+ value: 'group value6 $GROUP_PROTECTED_VAR'
+ },
+
+ {
+ key: 'VAR7',
+ value: 'value7 $VAR1',
+ raw: true
+ }
+ ]
+ end
+
+ let_it_be(:downstream_creator_user) { create(:user) }
+ let_it_be(:bridge_creator_user) { create(:user) }
+
+ let_it_be(:bridge_group) { create(:group) }
+ let_it_be(:downstream_group) { create(:group) }
+ let_it_be(:downstream_project) { create(:project, creator: downstream_creator_user, group: downstream_group) }
+ let_it_be(:project) { create(:project, :repository, :in_group, creator: bridge_creator_user, group: bridge_group) }
+ let(:bridge) { build(:ci_bridge, :playable, pipeline: pipeline, downstream: downstream_project) }
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let!(:ci_variable) do
+ create(:ci_variable,
+ project: project,
+ key: 'PROJECT_PROTECTED_VAR',
+ value: 'this is a secret',
+ protected: is_variable_protected?)
+ end
+
+ let!(:ci_group_variable) do
+ create(:ci_group_variable,
+ group: bridge_group,
+ key: 'GROUP_PROTECTED_VAR',
+ value: 'this is a secret',
+ protected: is_variable_protected?)
+ end
+
+ before do
+ bridge.yaml_variables = yaml_variables
+ allow(bridge.project).to receive(:protected_for?).and_return(true)
+ end
+
+ shared_examples 'expands variables from a project downstream' do
+ it do
+ vars = bridge.downstream_variables
+ expect(vars).to include({ key: 'EXPANDED_PROJECT_VAR6', value: 'project value6 this is a secret' })
+ end
+ end
+
+ shared_examples 'expands variables from a group downstream' do
+ it do
+ vars = bridge.downstream_variables
+ expect(vars).to include({ key: 'EXPANDED_GROUP_VAR6', value: 'group value6 this is a secret' })
+ end
+ end
+
+ shared_examples 'expands project and group variables downstream' do
+ it_behaves_like 'expands variables from a project downstream'
+
+ it_behaves_like 'expands variables from a group downstream'
+ end
+
+ shared_examples 'does not expand variables from a project downstream' do
+ it do
+ vars = bridge.downstream_variables
+ expect(vars).not_to include({ key: 'EXPANDED_PROJECT_VAR6', value: 'project value6 this is a secret' })
+ end
+ end
+
+ shared_examples 'does not expand variables from a group downstream' do
+ it do
+ vars = bridge.downstream_variables
+ expect(vars).not_to include({ key: 'EXPANDED_GROUP_VAR6', value: 'group value6 this is a secret' })
+ end
+ end
+
+ shared_examples 'feature flag is disabled' do
+ before do
+ stub_feature_flags(exclude_protected_variables_from_multi_project_pipeline_triggers: false)
+ end
+
+ it_behaves_like 'expands project and group variables downstream'
+ end
+
+ shared_examples 'does not expand project and group variables downstream' do
+ it_behaves_like 'does not expand variables from a project downstream'
+
+ it_behaves_like 'does not expand variables from a group downstream'
+ end
+
+ context 'when they are protected' do
+ let!(:is_variable_protected?) { true }
+
+ context 'and downstream project group is different from bridge group' do
+ it_behaves_like 'does not expand project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+
+ context 'and there is no downstream project' do
+ let(:downstream_project) { nil }
+
+ it_behaves_like 'expands project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+
+ context 'and downstream project equals bridge project' do
+ let(:downstream_project) { project }
+
+ it_behaves_like 'expands project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+
+ context 'and downstream project group is equal to bridge project group' do
+ let_it_be(:downstream_project) { create(:project, creator: downstream_creator_user, group: bridge_group) }
+
+ it_behaves_like 'expands variables from a group downstream'
+
+ it_behaves_like 'does not expand variables from a project downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+
+ context 'and downstream project has no group' do
+ let_it_be(:downstream_project) { create(:project, creator: downstream_creator_user) }
+
+ it_behaves_like 'does not expand project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+ end
+
+ context 'when they are not protected' do
+ let!(:is_variable_protected?) { false }
+
+ context 'and downstream project group is different from bridge group' do
+ it_behaves_like 'expands project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+
+ context 'and there is no downstream project' do
+ let(:downstream_project) { nil }
+
+ it_behaves_like 'expands project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+
+ context 'and downstream project equals bridge project' do
+ let(:downstream_project) { project }
+
+ it_behaves_like 'expands project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+
+ context 'and downstream project group is equal to bridge project group' do
+ let_it_be(:downstream_project) { create(:project, creator: downstream_creator_user, group: bridge_group) }
+
+ it_behaves_like 'expands project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+
+ context 'and downstream project has no group' do
+ let_it_be(:downstream_project) { create(:project, creator: downstream_creator_user) }
+
+ it_behaves_like 'expands project and group variables downstream'
+
+ it_behaves_like 'feature flag is disabled'
+ end
+ end
+ end
+
describe '#forward_pipeline_variables?' do
using RSpec::Parameterized::TableSyntax
@@ -824,8 +1039,9 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
end
it 'creates the metadata record and assigns its partition' do
- # the factory doesn't use any metadatable setters by default
- # so the record will be initialized by the before_validation callback
+ # The record is initialized by the factory calling metadatable setters
+ bridge.metadata = nil
+
expect(bridge.metadata).to be_nil
expect(bridge.save!).to be_truthy
diff --git a/spec/models/ci/build_need_spec.rb b/spec/models/ci/build_need_spec.rb
index 4f76a7650ec..7ce3c63458f 100644
--- a/spec/models/ci/build_need_spec.rb
+++ b/spec/models/ci/build_need_spec.rb
@@ -11,11 +11,21 @@ RSpec.describe Ci::BuildNeed, model: true, feature_category: :continuous_integra
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
- describe '.artifacts' do
- let_it_be(:with_artifacts) { create(:ci_build_need, artifacts: true) }
- let_it_be(:without_artifacts) { create(:ci_build_need, artifacts: false) }
+ describe 'scopes' do
+ describe '.scoped_build' do
+ subject(:scoped_build) { described_class.scoped_build }
- it { expect(described_class.artifacts).to contain_exactly(with_artifacts) }
+ it 'includes partition_id filter' do
+ expect(scoped_build.where_values_hash).to match(a_hash_including('partition_id'))
+ end
+ end
+
+ describe '.artifacts' do
+ let_it_be(:with_artifacts) { create(:ci_build_need, artifacts: true) }
+ let_it_be(:without_artifacts) { create(:ci_build_need, artifacts: false) }
+
+ it { expect(described_class.artifacts).to contain_exactly(with_artifacts) }
+ end
end
describe 'BulkInsertSafe' do
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 2e552c8d524..18c7e57d464 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -987,6 +987,28 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
describe '#artifacts_public?' do
subject { build.artifacts_public? }
+ context 'artifacts with defaults - public' do
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'non public artifacts' do
+ let(:build) { create(:ci_build, :private_artifacts, pipeline: pipeline) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'no artifacts' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#artifact_is_public_in_config?' do
+ subject { build.artifact_is_public_in_config? }
+
context 'artifacts with defaults' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
@@ -994,10 +1016,22 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
context 'non public artifacts' do
- let(:build) { create(:ci_build, :artifacts, :with_private_artifacts_config, pipeline: pipeline) }
+ let(:build) { create(:ci_build, :with_private_artifacts_config, pipeline: pipeline) }
it { is_expected.to be_falsey }
end
+
+ context 'public artifacts' do
+ let(:build) { create(:ci_build, :with_public_artifacts_config, pipeline: pipeline) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'no artifacts' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it { is_expected.to be_truthy }
+ end
end
describe '#artifacts_expired?' do
diff --git a/spec/models/ci/catalog/components_project_spec.rb b/spec/models/ci/catalog/components_project_spec.rb
index 79e1a113e47..5f739c244a5 100644
--- a/spec/models/ci/catalog/components_project_spec.rb
+++ b/spec/models/ci/catalog/components_project_spec.rb
@@ -97,6 +97,7 @@ RSpec.describe Ci::Catalog::ComponentsProject, feature_category: :pipeline_compo
'dast' | 'image: alpine_2' | 'templates/dast/template.yml'
'template' | 'image: alpine_3' | 'templates/template.yml'
'blank-yaml' | '' | 'templates/blank-yaml.yml'
+ 'non/exist' | nil | nil
end
with_them do
diff --git a/spec/models/ci/catalog/listing_spec.rb b/spec/models/ci/catalog/listing_spec.rb
index 7a1e12165ac..2ffffb9112c 100644
--- a/spec/models/ci/catalog/listing_spec.rb
+++ b/spec/models/ci/catalog/listing_spec.rb
@@ -3,59 +3,61 @@
require 'spec_helper'
RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
- let_it_be(:namespace) { create(:group) }
- let_it_be(:project_x) { create(:project, namespace: namespace, name: 'X Project') }
- let_it_be(:project_a) { create(:project, :public, namespace: namespace, name: 'A Project') }
- let_it_be(:project_noaccess) { create(:project, namespace: namespace, name: 'C Project') }
- let_it_be(:project_ext) { create(:project, :public, name: 'TestProject') }
let_it_be(:user) { create(:user) }
+ let_it_be(:namespace) { create(:group) }
+ let_it_be(:public_namespace_project) do
+ create(:project, :public, namespace: namespace, name: 'A public namespace project')
+ end
- let_it_be(:project_b) do
+ let_it_be(:public_project) { create(:project, :public, name: 'B public test project') }
+ let_it_be(:namespace_project_a) { create(:project, namespace: namespace, name: 'Test namespace project') }
+ let_it_be(:namespace_project_b) { create(:project, namespace: namespace, name: 'X namespace Project') }
+ let_it_be(:project_noaccess) { create(:project, namespace: namespace, name: 'Project with no access') }
+ let_it_be(:internal_project) { create(:project, :internal, name: 'Internal project') }
+
+ let_it_be(:private_project) do
create(:project, namespace: namespace, name: 'B Project', description: 'Rspec test framework')
end
let(:list) { described_class.new(user) }
before_all do
- project_x.add_reporter(user)
- project_b.add_reporter(user)
- project_a.add_reporter(user)
- project_ext.add_reporter(user)
+ namespace_project_a.add_reporter(user)
+ namespace_project_b.add_reporter(user)
+ public_namespace_project.add_reporter(user)
+ public_project.add_reporter(user)
+ internal_project.add_owner(user)
end
describe '#resources' do
subject(:resources) { list.resources(**params) }
- context 'when user is anonymous' do
- let(:user) { nil }
- let(:params) { {} }
+ let(:params) { {} }
- let!(:resource_1) { create(:ci_catalog_resource, project: project_a) }
- let!(:resource_2) { create(:ci_catalog_resource, project: project_ext) }
- let!(:resource_3) { create(:ci_catalog_resource, project: project_b) }
+ let_it_be(:public_resource_a) { create(:ci_catalog_resource, :published, project: public_namespace_project) }
+ let_it_be(:public_resource_b) { create(:ci_catalog_resource, :published, project: public_project) }
+ let_it_be(:internal_resource) { create(:ci_catalog_resource, :published, project: internal_project) }
+ let_it_be(:private_namespace_resource) { create(:ci_catalog_resource, :published, project: namespace_project_a) }
+ let_it_be(:unpublished_resource) { create(:ci_catalog_resource, project: namespace_project_b) }
- it 'returns only resources for public projects' do
- is_expected.to contain_exactly(resource_1, resource_2)
- end
+ it 'by default returns all resources visible to the current user' do
+ is_expected.to contain_exactly(public_resource_a, public_resource_b, private_namespace_resource,
+ internal_resource)
+ end
- context 'when sorting is provided' do
- let(:params) { { sort: :name_desc } }
+ context 'when user is anonymous' do
+ let(:user) { nil }
- it 'returns only resources for public projects sorted by name DESC' do
- is_expected.to contain_exactly(resource_2, resource_1)
- end
+ it 'returns only published resources for public projects' do
+ is_expected.to contain_exactly(public_resource_a, public_resource_b)
end
end
context 'when search params are provided' do
let(:params) { { search: 'test' } }
- let!(:resource_1) { create(:ci_catalog_resource, project: project_a) }
- let!(:resource_2) { create(:ci_catalog_resource, project: project_ext) }
- let!(:resource_3) { create(:ci_catalog_resource, project: project_b) }
-
it 'returns the resources that match the search params' do
- is_expected.to contain_exactly(resource_2, resource_3)
+ is_expected.to contain_exactly(public_resource_b, private_namespace_resource)
end
context 'when search term is too small' do
@@ -65,117 +67,197 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
end
- context 'when namespace is provided' do
- let(:params) { { namespace: namespace } }
+ context 'when the scope is :namespaces' do
+ let_it_be(:public_resource_no_namespace) do
+ create(:ci_catalog_resource, project: create(:project, :public, name: 'public'))
+ end
- context 'when namespace is not a root namespace' do
- let(:namespace) { create(:group, :nested) }
+ let(:params) { { scope: :namespaces } }
- it 'raises an exception' do
- expect { resources }.to raise_error(ArgumentError, 'Namespace is not a root namespace')
+ context 'when the `ci_guard_query_for_catalog_resource_scope` ff is enabled' do
+ it "returns the catalog resources belonging to the user's authorized namespaces" do
+ is_expected.to contain_exactly(public_resource_a, public_resource_b, internal_resource,
+ private_namespace_resource)
end
end
- context 'when the user has access to all projects in the namespace' do
- context 'when the namespace has no catalog resources' do
- it { is_expected.to be_empty }
+ context 'when the `ci_guard_query_for_catalog_resource_scope` ff is disabled' do
+ before do
+ stub_feature_flags(ci_guard_for_catalog_resource_scope: false)
end
- context 'when the namespace has catalog resources' do
- let_it_be(:today) { Time.zone.now }
- let_it_be(:yesterday) { today - 1.day }
- let_it_be(:tomorrow) { today + 1.day }
+ it 'returns all resources visible to the current user' do
+ is_expected.to contain_exactly(
+ public_resource_a, public_resource_b, private_namespace_resource,
+ internal_resource)
+ end
+ end
+ end
- let_it_be(:resource_1) do
- create(:ci_catalog_resource, project: project_x, latest_released_at: yesterday, created_at: today)
- end
+ context 'with a sort parameter' do
+ let_it_be(:today) { Time.zone.now }
+ let_it_be(:yesterday) { today - 1.day }
+ let_it_be(:tomorrow) { today + 1.day }
- let_it_be(:resource_2) do
- create(:ci_catalog_resource, project: project_b, latest_released_at: today, created_at: yesterday)
- end
+ let(:params) { { sort: sort } }
- let_it_be(:resource_3) do
- create(:ci_catalog_resource, project: project_a, latest_released_at: nil, created_at: tomorrow)
- end
+ before_all do
+ public_resource_a.update!(created_at: today, latest_released_at: yesterday)
+ public_resource_b.update!(created_at: yesterday, latest_released_at: today)
+ private_namespace_resource.update!(created_at: tomorrow, latest_released_at: tomorrow)
+ internal_resource.update!(created_at: tomorrow + 1)
+ end
- let_it_be(:other_namespace_resource) do
- create(:ci_catalog_resource, project: project_ext, latest_released_at: tomorrow)
- end
+ context 'when the sort is created_at ascending' do
+ let_it_be(:sort) { :created_at_asc }
+
+ it 'contains catalog resources sorted by created_at ascending' do
+ is_expected.to eq([public_resource_b, public_resource_a, private_namespace_resource, internal_resource])
+ end
+ end
+
+ context 'when the sort is created_at descending' do
+ let_it_be(:sort) { :created_at_desc }
+
+ it 'contains catalog resources sorted by created_at descending' do
+ is_expected.to eq([internal_resource, private_namespace_resource, public_resource_a, public_resource_b])
+ end
+ end
+
+ context 'when the sort is name ascending' do
+ let_it_be(:sort) { :name_asc }
+
+ it 'contains catalog resources for projects sorted by name ascending' do
+ is_expected.to eq([public_resource_a, public_resource_b, internal_resource, private_namespace_resource])
+ end
+ end
+
+ context 'when the sort is name descending' do
+ let_it_be(:sort) { :name_desc }
+
+ it 'contains catalog resources for projects sorted by name descending' do
+ is_expected.to eq([private_namespace_resource, internal_resource, public_resource_b, public_resource_a])
+ end
+ end
- it 'contains only catalog resources for projects in that namespace' do
- is_expected.to contain_exactly(resource_1, resource_2, resource_3)
+ context 'when the sort is latest_released_at ascending' do
+ let_it_be(:sort) { :latest_released_at_asc }
+
+ it 'contains catalog resources sorted by latest_released_at ascending with nulls last' do
+ is_expected.to eq([public_resource_a, public_resource_b, private_namespace_resource, internal_resource])
+ end
+ end
+
+ context 'when the sort is latest_released_at descending' do
+ let_it_be(:sort) { :latest_released_at_desc }
+
+ it 'contains catalog resources sorted by latest_released_at descending with nulls last' do
+ is_expected.to eq([private_namespace_resource, public_resource_b, public_resource_a, internal_resource])
+ end
+ end
+ end
+
+ context 'when namespace is provided' do
+ let(:params) { { namespace: namespace } }
+
+ context 'when it is a root namespace' do
+ context 'when it has catalog resources' do
+ it 'returns resources in the namespace visible to the user' do
+ is_expected.to contain_exactly(public_resource_a, private_namespace_resource)
end
+ end
- context 'with a sort parameter' do
- let(:params) { { namespace: namespace, sort: sort } }
+ context 'when the namespace has no catalog resources' do
+ let(:namespace) { build(:namespace) }
- context 'when the sort is created_at ascending' do
- let_it_be(:sort) { :created_at_asc }
+ it { is_expected.to be_empty }
+ end
+ end
- it 'contains catalog resources sorted by created_at ascending' do
- is_expected.to eq([resource_2, resource_1, resource_3])
- end
- end
+ context 'when namespace is not a root namespace' do
+ let_it_be(:namespace) { create(:group, :nested) }
- context 'when the sort is created_at descending' do
- let_it_be(:sort) { :created_at_desc }
+ it 'raises an exception' do
+ expect { resources }.to raise_error(ArgumentError, 'Namespace is not a root namespace')
+ end
+ end
+ end
+ end
- it 'contains catalog resources sorted by created_at descending' do
- is_expected.to eq([resource_3, resource_1, resource_2])
- end
- end
+ describe '#find_resource' do
+ let_it_be(:accessible_resource) { create(:ci_catalog_resource, :published, project: public_project) }
+ let_it_be(:inaccessible_resource) { create(:ci_catalog_resource, :published, project: project_noaccess) }
+ let_it_be(:draft_resource) { create(:ci_catalog_resource, project: public_namespace_project, state: :draft) }
- context 'when the sort is name ascending' do
- let_it_be(:sort) { :name_asc }
+ context 'when using the ID argument' do
+ subject { list.find_resource(id: id) }
- it 'contains catalog resources for projects sorted by name ascending' do
- is_expected.to eq([resource_3, resource_2, resource_1])
- end
- end
+ context 'when the resource is published and visible to the user' do
+ let(:id) { accessible_resource.id }
- context 'when the sort is name descending' do
- let_it_be(:sort) { :name_desc }
+ it 'fetches the resource' do
+ is_expected.to eq(accessible_resource)
+ end
+ end
- it 'contains catalog resources for projects sorted by name descending' do
- is_expected.to eq([resource_1, resource_2, resource_3])
- end
- end
+ context 'when the resource is not found' do
+ let(:id) { 'not-an-id' }
- context 'when the sort is latest_released_at ascending' do
- let_it_be(:sort) { :latest_released_at_asc }
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
- it 'contains catalog resources sorted by latest_released_at ascending with nulls last' do
- is_expected.to eq([resource_1, resource_2, resource_3])
- end
- end
+ context 'when the resource is not published' do
+ let(:id) { draft_resource.id }
- context 'when the sort is latest_released_at descending' do
- let_it_be(:sort) { :latest_released_at_desc }
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
- it 'contains catalog resources sorted by latest_released_at descending with nulls last' do
- is_expected.to eq([resource_2, resource_1, resource_3])
- end
- end
- end
+ context "when the current user cannot read code on the resource's project" do
+ let(:id) { inaccessible_resource.id }
+
+ it 'returns nil' do
+ is_expected.to be_nil
end
end
+ end
- context 'when the user only has access to some projects in the namespace' do
- let!(:accessible_resource) { create(:ci_catalog_resource, project: project_x) }
- let!(:inaccessible_resource) { create(:ci_catalog_resource, project: project_noaccess) }
+ context 'when using the full_path argument' do
+ subject { list.find_resource(full_path: full_path) }
- it 'only returns catalog resources for projects the user has access to' do
- is_expected.to contain_exactly(accessible_resource)
+ context 'when the resource is published and visible to the user' do
+ let(:full_path) { accessible_resource.project.full_path }
+
+ it 'fetches the resource' do
+ is_expected.to eq(accessible_resource)
end
end
- context 'when the user does not have access to the namespace' do
- let!(:project) { create(:project) }
- let!(:resource) { create(:ci_catalog_resource, project: project) }
+ context 'when the resource is not found' do
+ let(:full_path) { 'not-a-path' }
- let(:namespace) { project.namespace }
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
- it { is_expected.to be_empty }
+ context 'when the resource is not published' do
+ let(:full_path) { draft_resource.project.full_path }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+
+ context "when the current user cannot read code on the resource's project" do
+ let(:full_path) { inaccessible_resource.project.full_path }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
end
end
end
diff --git a/spec/models/ci/catalog/resource_spec.rb b/spec/models/ci/catalog/resource_spec.rb
index 098772b1ea9..15d8b4f440b 100644
--- a/spec/models/ci/catalog/resource_spec.rb
+++ b/spec/models/ci/catalog/resource_spec.rb
@@ -3,50 +3,57 @@
require 'spec_helper'
RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
- let_it_be(:today) { Time.zone.now }
- let_it_be(:yesterday) { today - 1.day }
- let_it_be(:tomorrow) { today + 1.day }
+ let_it_be(:current_user) { create(:user) }
- let_it_be_with_reload(:project) { create(:project, name: 'A') }
- let_it_be(:project_2) { build(:project, name: 'Z') }
- let_it_be(:project_3) { build(:project, name: 'L', description: 'Z') }
- let_it_be_with_reload(:resource) { create(:ci_catalog_resource, project: project, latest_released_at: tomorrow) }
- let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2, latest_released_at: today) }
- let_it_be(:resource_3) { create(:ci_catalog_resource, project: project_3, latest_released_at: nil) }
+ let_it_be(:project_a) { create(:project, name: 'A') }
+ let_it_be(:project_b) { create(:project, name: 'B') }
+ let_it_be(:project_c) { create(:project, name: 'C', description: 'B') }
- let_it_be(:release1) { create(:release, project: project, released_at: yesterday) }
- let_it_be(:release2) { create(:release, project: project, released_at: today) }
- let_it_be(:release3) { create(:release, project: project, released_at: tomorrow) }
+ let_it_be_with_reload(:resource_a) do
+ create(:ci_catalog_resource, project: project_a, latest_released_at: '2023-02-01T00:00:00Z')
+ end
+
+ let_it_be(:resource_b) do
+ create(:ci_catalog_resource, project: project_b, latest_released_at: '2023-01-01T00:00:00Z')
+ end
+
+ let_it_be(:resource_c) { create(:ci_catalog_resource, project: project_c) }
it { is_expected.to belong_to(:project) }
it do
is_expected.to(
- have_many(:components).class_name('Ci::Catalog::Resources::Component').with_foreign_key(:catalog_resource_id)
- )
+ have_many(:components).class_name('Ci::Catalog::Resources::Component').with_foreign_key(:catalog_resource_id))
end
- it { is_expected.to have_many(:versions).class_name('Ci::Catalog::Resources::Version') }
+ it do
+ is_expected.to(
+ have_many(:versions).class_name('Ci::Catalog::Resources::Version').with_foreign_key(:catalog_resource_id))
+ end
+
+ it do
+ is_expected.to(
+ have_many(:sync_events).class_name('Ci::Catalog::Resources::SyncEvent').with_foreign_key(:catalog_resource_id))
+ end
it { is_expected.to delegate_method(:avatar_path).to(:project) }
it { is_expected.to delegate_method(:star_count).to(:project) }
- it { is_expected.to delegate_method(:forks_count).to(:project) }
it { is_expected.to define_enum_for(:state).with_values({ draft: 0, published: 1 }) }
describe '.for_projects' do
it 'returns catalog resources for the given project IDs' do
- resources_for_projects = described_class.for_projects(project.id)
+ resources_for_projects = described_class.for_projects(project_a.id)
- expect(resources_for_projects).to contain_exactly(resource)
+ expect(resources_for_projects).to contain_exactly(resource_a)
end
end
describe '.search' do
it 'returns catalog resources whose name or description match the search term' do
- resources = described_class.search('Z')
+ resources = described_class.search('B')
- expect(resources).to contain_exactly(resource_2, resource_3)
+ expect(resources).to contain_exactly(resource_b, resource_c)
end
end
@@ -54,7 +61,7 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
it 'returns catalog resources sorted by descending created at' do
ordered_resources = described_class.order_by_created_at_desc
- expect(ordered_resources.to_a).to eq([resource_3, resource_2, resource])
+ expect(ordered_resources.to_a).to eq([resource_c, resource_b, resource_a])
end
end
@@ -62,7 +69,7 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
it 'returns catalog resources sorted by ascending created at' do
ordered_resources = described_class.order_by_created_at_asc
- expect(ordered_resources.to_a).to eq([resource, resource_2, resource_3])
+ expect(ordered_resources.to_a).to eq([resource_a, resource_b, resource_c])
end
end
@@ -70,13 +77,13 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
subject(:ordered_resources) { described_class.order_by_name_desc }
it 'returns catalog resources sorted by descending name' do
- expect(ordered_resources.pluck(:name)).to eq(%w[Z L A])
+ expect(ordered_resources.pluck(:name)).to eq(%w[C B A])
end
it 'returns catalog resources sorted by descending name with nulls last' do
- resource.update!(name: nil)
+ resource_a.update!(name: nil)
- expect(ordered_resources.pluck(:name)).to eq(['Z', 'L', nil])
+ expect(ordered_resources.pluck(:name)).to eq(['C', 'B', nil])
end
end
@@ -84,13 +91,13 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
subject(:ordered_resources) { described_class.order_by_name_asc }
it 'returns catalog resources sorted by ascending name' do
- expect(ordered_resources.pluck(:name)).to eq(%w[A L Z])
+ expect(ordered_resources.pluck(:name)).to eq(%w[A B C])
end
it 'returns catalog resources sorted by ascending name with nulls last' do
- resource.update!(name: nil)
+ resource_a.update!(name: nil)
- expect(ordered_resources.pluck(:name)).to eq(['L', 'Z', nil])
+ expect(ordered_resources.pluck(:name)).to eq(['B', 'C', nil])
end
end
@@ -98,7 +105,7 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
it 'returns catalog resources sorted by latest_released_at descending with nulls last' do
ordered_resources = described_class.order_by_latest_released_at_desc
- expect(ordered_resources).to eq([resource, resource_2, resource_3])
+ expect(ordered_resources).to eq([resource_a, resource_b, resource_c])
end
end
@@ -106,96 +113,215 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
it 'returns catalog resources sorted by latest_released_at ascending with nulls last' do
ordered_resources = described_class.order_by_latest_released_at_asc
- expect(ordered_resources).to eq([resource_2, resource, resource_3])
+ expect(ordered_resources).to eq([resource_b, resource_a, resource_c])
+ end
+ end
+
+ describe 'authorized catalog resources' do
+ let_it_be(:namespace) { create(:group) }
+ let_it_be(:other_namespace) { create(:group) }
+ let_it_be(:other_user) { create(:user) }
+
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:internal_project) { create(:project, :internal) }
+ let_it_be(:internal_namespace_project) { create(:project, :internal, namespace: namespace) }
+ let_it_be(:private_namespace_project) { create(:project, namespace: namespace) }
+ let_it_be(:other_private_namespace_project) { create(:project, namespace: other_namespace) }
+
+ let_it_be(:public_resource) { create(:ci_catalog_resource, project: public_project) }
+ let_it_be(:internal_resource) { create(:ci_catalog_resource, project: internal_project) }
+ let_it_be(:internal_namespace_resource) { create(:ci_catalog_resource, project: internal_namespace_project) }
+ let_it_be(:private_namespace_resource) { create(:ci_catalog_resource, project: private_namespace_project) }
+
+ let_it_be(:other_private_namespace_resource) do
+ create(:ci_catalog_resource, project: other_private_namespace_project)
+ end
+
+ before_all do
+ namespace.add_reporter(current_user)
+ other_namespace.add_guest(other_user)
+ end
+
+ describe '.public_or_visible_to_user' do
+ subject(:resources) { described_class.public_or_visible_to_user(current_user) }
+
+ it 'returns all resources visible to the user' do
+ expect(resources).to contain_exactly(
+ public_resource, internal_resource, internal_namespace_resource, private_namespace_resource)
+ end
+
+ context 'with a different user' do
+ let(:current_user) { other_user }
+
+ it 'returns all resources visible to the user' do
+ expect(resources).to contain_exactly(
+ public_resource, internal_resource, internal_namespace_resource, other_private_namespace_resource)
+ end
+ end
+
+ context 'when the user is nil' do
+ let(:current_user) { nil }
+
+ it 'returns only public resources' do
+ expect(resources).to contain_exactly(public_resource)
+ end
+ end
+ end
+
+ describe '.visible_to_user' do
+ subject(:resources) { described_class.visible_to_user(current_user) }
+
+ it "returns resources belonging to the user's authorized namespaces" do
+ expect(resources).to contain_exactly(internal_namespace_resource, private_namespace_resource)
+ end
+
+ context 'with a different user' do
+ let(:current_user) { other_user }
+
+ it "returns resources belonging to the user's authorized namespaces" do
+ expect(resources).to contain_exactly(other_private_namespace_resource)
+ end
+ end
+
+ context 'when the user is nil' do
+ let(:current_user) { nil }
+
+ it 'does not return any resources' do
+ expect(resources).to be_empty
+ end
+ end
end
end
describe '#state' do
it 'defaults to draft' do
- expect(resource.state).to eq('draft')
+ expect(resource_a.state).to eq('draft')
end
end
describe '#publish!' do
context 'when the catalog resource is in draft state' do
it 'updates the state of the catalog resource to published' do
- expect(resource.state).to eq('draft')
+ expect(resource_a.state).to eq('draft')
- resource.publish!
+ resource_a.publish!
- expect(resource.reload.state).to eq('published')
+ expect(resource_a.reload.state).to eq('published')
end
end
- context 'when a catalog resource already has a published state' do
+ context 'when the catalog resource already has a published state' do
it 'leaves the state as published' do
- resource.update!(state: 'published')
+ resource_a.update!(state: :published)
+ expect(resource_a.state).to eq('published')
- resource.publish!
+ resource_a.publish!
- expect(resource.state).to eq('published')
+ expect(resource_a.state).to eq('published')
end
end
end
- describe '#unpublish!' do
- context 'when the catalog resource is in published state' do
- it 'updates the state to draft' do
- resource.update!(state: :published)
- expect(resource.state).to eq('published')
+ describe 'synchronizing denormalized columns with `projects` table', :sidekiq_inline do
+ let_it_be_with_reload(:project) { create(:project, name: 'Test project', description: 'Test description') }
- resource.unpublish!
+ context 'when the catalog resource is created' do
+ let(:resource) { build(:ci_catalog_resource, project: project) }
+
+ it 'updates the catalog resource columns to match the project' do
+ resource.save!
+ resource.reload
- expect(resource.reload.state).to eq('draft')
+ expect(resource.name).to eq(project.name)
+ expect(resource.description).to eq(project.description)
+ expect(resource.visibility_level).to eq(project.visibility_level)
end
end
- context 'when the catalog resource is already in draft state' do
- it 'leaves the state as draft' do
- expect(resource.state).to eq('draft')
+ context 'when the project is updated' do
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+
+ context 'when project name is updated' do
+ it 'updates the catalog resource name to match' do
+ project.update!(name: 'New name')
+
+ expect(resource.reload.name).to eq(project.name)
+ end
+ end
+
+ context 'when project description is updated' do
+ it 'updates the catalog resource description to match' do
+ project.update!(description: 'New description')
+
+ expect(resource.reload.description).to eq(project.description)
+ end
+ end
- resource.unpublish!
+ context 'when project visibility_level is updated' do
+ it 'updates the catalog resource visibility_level to match' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
- expect(resource.reload.state).to eq('draft')
+ expect(resource.reload.visibility_level).to eq(project.visibility_level)
+ end
end
end
end
- describe 'sync with project' do
- shared_examples 'denormalized columns of the catalog resource match the project' do
- it do
- expect(resource.name).to eq(project.name)
- expect(resource.description).to eq(project.description)
- expect(resource.visibility_level).to eq(project.visibility_level)
- end
+ describe '#update_latest_released_at! triggered in model callbacks' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+
+ let_it_be_with_refind(:january_release) do
+ create(:release, :with_catalog_resource_version, project: project, tag: 'v1', released_at: '2023-01-01T00:00:00Z')
end
- context 'when the catalog resource is created' do
- it_behaves_like 'denormalized columns of the catalog resource match the project'
+ let_it_be_with_refind(:february_release) do
+ create(:release, :with_catalog_resource_version, project: project, tag: 'v2', released_at: '2023-02-01T00:00:00Z')
end
- context 'when the project name is updated' do
- before do
- project.update!(name: 'My new project name')
- end
+ it 'has the expected latest_released_at value' do
+ expect(resource.reload.latest_released_at).to eq(february_release.released_at)
+ end
+
+ context 'when a new catalog resource version is created' do
+ it 'updates the latest_released_at value' do
+ march_release = create(:release, :with_catalog_resource_version, project: project, tag: 'v3',
+ released_at: '2023-03-01T00:00:00Z')
- it_behaves_like 'denormalized columns of the catalog resource match the project'
+ expect(resource.reload.latest_released_at).to eq(march_release.released_at)
+ end
end
- context 'when the project description is updated' do
- before do
- project.update!(description: 'My new description')
+ context 'when a catalog resource version is destroyed' do
+ it 'updates the latest_released_at value' do
+ february_release.catalog_resource_version.destroy!
+
+ expect(resource.reload.latest_released_at).to eq(january_release.released_at)
end
+ end
+
+ context 'when the released_at value of a release is updated' do
+ it 'updates the latest_released_at value' do
+ january_release.update!(released_at: '2024-01-01T00:00:00Z')
- it_behaves_like 'denormalized columns of the catalog resource match the project'
+ expect(resource.reload.latest_released_at).to eq(january_release.released_at)
+ end
end
- context 'when the project visibility_level is updated' do
- before do
- project.update!(visibility_level: 10)
+ context 'when a release is destroyed' do
+ it 'updates the latest_released_at value' do
+ february_release.destroy!
+ expect(resource.reload.latest_released_at).to eq(january_release.released_at)
end
+ end
- it_behaves_like 'denormalized columns of the catalog resource match the project'
+ context 'when all releases associated with the catalog resource are destroyed' do
+ it 'updates the latest_released_at value to nil' do
+ january_release.destroy!
+ february_release.destroy!
+
+ expect(resource.reload.latest_released_at).to be_nil
+ end
end
end
end
diff --git a/spec/models/ci/catalog/resources/sync_event_spec.rb b/spec/models/ci/catalog/resources/sync_event_spec.rb
new file mode 100644
index 00000000000..5d907aae9b6
--- /dev/null
+++ b/spec/models/ci/catalog/resources/sync_event_spec.rb
@@ -0,0 +1,190 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Catalog::Resources::SyncEvent, type: :model, feature_category: :pipeline_composition do
+ let_it_be_with_reload(:project1) { create(:project) }
+ let_it_be_with_reload(:project2) { create(:project) }
+ let_it_be(:resource1) { create(:ci_catalog_resource, project: project1) }
+
+ it { is_expected.to belong_to(:catalog_resource).class_name('Ci::Catalog::Resource') }
+ it { is_expected.to belong_to(:project) }
+
+ describe 'PG triggers' do
+ context 'when the associated project of a catalog resource is updated' do
+ context 'when project name is updated' do
+ it 'creates a sync event record' do
+ expect do
+ project1.update!(name: 'New name')
+ end.to change { described_class.count }.by(1)
+ end
+ end
+
+ context 'when project description is updated' do
+ it 'creates a sync event record' do
+ expect do
+ project1.update!(description: 'New description')
+ end.to change { described_class.count }.by(1)
+ end
+ end
+
+ context 'when project visibility_level is updated' do
+ it 'creates a sync event record' do
+ expect do
+ project1.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ end.to change { described_class.count }.by(1)
+ end
+ end
+ end
+
+ context 'when a project without an associated catalog resource is updated' do
+ it 'does not create a sync event record' do
+ expect do
+ project2.update!(name: 'New name')
+ end.not_to change { described_class.count }
+ end
+ end
+ end
+
+ describe 'when there are sync event records' do
+ let_it_be(:resource2) { create(:ci_catalog_resource, project: project2) }
+
+ before_all do
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1, status: :processed)
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1)
+ create_list(:ci_catalog_resource_sync_event, 2, catalog_resource: resource2)
+ end
+
+ describe '.unprocessed_events' do
+ it 'returns the events in pending status' do
+ # 1 pending event from resource1 + 2 pending events from resource2
+ expect(described_class.unprocessed_events.size).to eq(3)
+ end
+
+ it 'selects the partition attribute in the result' do
+ described_class.unprocessed_events.each do |event|
+ expect(event.partition).not_to be_nil
+ end
+ end
+ end
+
+ describe '.mark_records_processed' do
+ it 'updates the records to processed status' do
+ expect(described_class.status_pending.count).to eq(3)
+ expect(described_class.status_processed.count).to eq(1)
+
+ described_class.mark_records_processed(described_class.unprocessed_events)
+
+ expect(described_class.pluck(:status).uniq).to eq(['processed'])
+
+ expect(described_class.status_pending.count).to eq(0)
+ expect(described_class.status_processed.count).to eq(4)
+ end
+ end
+ end
+
+ describe '.upper_bound_count' do
+ it 'returns 0 when there are no records in the table' do
+ expect(described_class.upper_bound_count).to eq(0)
+ end
+
+ it 'returns an estimated number of unprocessed records' do
+ create_list(:ci_catalog_resource_sync_event, 5, catalog_resource: resource1)
+ described_class.order(:id).limit(2).update_all(status: :processed)
+
+ expect(described_class.upper_bound_count).to eq(3)
+ end
+ end
+
+ describe 'sliding_list partitioning' do
+ let(:partition_manager) { Gitlab::Database::Partitioning::PartitionManager.new(described_class) }
+
+ describe 'next_partition_if callback' do
+ let(:active_partition) { described_class.partitioning_strategy.active_partition }
+
+ subject(:value) { described_class.partitioning_strategy.next_partition_if.call(active_partition) }
+
+ context 'when the partition is empty' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the partition has records' do
+ before do
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1, status: :processed)
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the first record of the partition is older than PARTITION_DURATION' do
+ before do
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1)
+ described_class.first.update!(created_at: (described_class::PARTITION_DURATION + 1.day).ago)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ describe 'detach_partition_if callback' do
+ let(:active_partition) { described_class.partitioning_strategy.active_partition }
+
+ subject(:value) { described_class.partitioning_strategy.detach_partition_if.call(active_partition) }
+
+ before_all do
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1, status: :processed)
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1)
+ end
+
+ context 'when the partition contains unprocessed records' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the partition contains only processed records' do
+ before do
+ described_class.update_all(status: :processed)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ describe 'strategy behavior' do
+ it 'moves records to new partitions as time passes', :freeze_time do
+ # We start with partition 1
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1])
+
+ # Add one record so the initial partition is not empty
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1)
+
+ # It's not a day old yet so no new partitions are created
+ partition_manager.sync_partitions
+
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1])
+
+ # After traveling forward a day
+ travel(described_class::PARTITION_DURATION + 1.second)
+
+ # a new partition is created
+ partition_manager.sync_partitions
+
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to contain_exactly(1, 2)
+
+ # and we can insert to the new partition
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1)
+
+ # After processing records in partition 1
+ described_class.mark_records_processed(described_class.for_partition(1).select_with_partition)
+
+ partition_manager.sync_partitions
+
+ # partition 1 is removed
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([2])
+
+ # and we only have the newly created partition left.
+ expect(described_class.count).to eq(1)
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/catalog/resources/version_spec.rb b/spec/models/ci/catalog/resources/version_spec.rb
index 7114d2b6709..aafd51699b5 100644
--- a/spec/models/ci/catalog/resources/version_spec.rb
+++ b/spec/models/ci/catalog/resources/version_spec.rb
@@ -10,9 +10,6 @@ RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category:
it { is_expected.to belong_to(:project) }
it { is_expected.to have_many(:components).class_name('Ci::Catalog::Resources::Component') }
- it { is_expected.to delegate_method(:name).to(:release) }
- it { is_expected.to delegate_method(:description).to(:release) }
- it { is_expected.to delegate_method(:tag).to(:release) }
it { is_expected.to delegate_method(:sha).to(:release) }
it { is_expected.to delegate_method(:released_at).to(:release) }
it { is_expected.to delegate_method(:author_id).to(:release) }
@@ -104,4 +101,51 @@ RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category:
end
end
end
+
+ describe '#update_catalog_resource' do
+ let_it_be(:release) { create(:release, project: project1, tag: 'v1') }
+ let(:version) { build(:ci_catalog_resource_version, catalog_resource: resource1, release: release) }
+
+ context 'when a version is created' do
+ it 'calls update_catalog_resource' do
+ expect(version).to receive(:update_catalog_resource).once
+
+ version.save!
+ end
+ end
+
+ context 'when a version is destroyed' do
+ it 'calls update_catalog_resource' do
+ version.save!
+
+ expect(version).to receive(:update_catalog_resource).once
+
+ version.destroy!
+ end
+ end
+ end
+
+ describe '#name' do
+ it 'is equivalent to release.tag' do
+ release_v1_0.update!(name: 'Release v1.0')
+
+ expect(v1_0.name).to eq(release_v1_0.tag)
+ end
+ end
+
+ describe '#commit' do
+ subject(:commit) { v1_0.commit }
+
+ it 'returns a commit' do
+ is_expected.to be_a(Commit)
+ end
+
+ context 'when the sha is nil' do
+ it 'returns nil' do
+ release_v1_0.update!(sha: nil)
+
+ is_expected.to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 48d46824c11..e65c1e2f577 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -176,16 +176,6 @@ RSpec.describe Ci::JobArtifact, feature_category: :build_artifacts do
let!(:artifact) { build(:ci_job_artifact, :private) }
it { is_expected.to be_falsey }
-
- context 'and the non_public_artifacts feature flag is disabled' do
- let!(:artifact) { build(:ci_job_artifact, :private) }
-
- before do
- stub_feature_flags(non_public_artifacts: false)
- end
-
- it { is_expected.to be_truthy }
- end
end
end
diff --git a/spec/models/ci/job_token/scope_spec.rb b/spec/models/ci/job_token/scope_spec.rb
index d41286f5a45..adb9f461f63 100644
--- a/spec/models/ci/job_token/scope_spec.rb
+++ b/spec/models/ci/job_token/scope_spec.rb
@@ -160,18 +160,6 @@ RSpec.describe Ci::JobToken::Scope, feature_category: :continuous_integration, f
with_them do
it { is_expected.to eq(result) }
end
-
- context "with FF restrict_ci_job_token_for_public_and_internal_projects disabled" do
- before do
- stub_feature_flags(restrict_ci_job_token_for_public_and_internal_projects: false)
- end
-
- let(:accessed_project) { unscoped_public_project }
-
- it "restricts public and internal outbound projects not in allowlist" do
- is_expected.to eq(false)
- end
- end
end
end
end
diff --git a/spec/models/ci/pipeline_metadata_spec.rb b/spec/models/ci/pipeline_metadata_spec.rb
index 977c90bcc2a..1a426118063 100644
--- a/spec/models/ci/pipeline_metadata_spec.rb
+++ b/spec/models/ci/pipeline_metadata_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::PipelineMetadata do
+RSpec.describe Ci::PipelineMetadata, feature_category: :pipeline_composition do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:pipeline) }
@@ -10,5 +10,21 @@ RSpec.describe Ci::PipelineMetadata do
it { is_expected.to validate_length_of(:name).is_at_least(1).is_at_most(255) }
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:pipeline) }
+
+ it do
+ is_expected.to define_enum_for(
+ :auto_cancel_on_new_commit
+ ).with_values(
+ conservative: 0, interruptible: 1, disabled: 2
+ ).with_prefix
+ end
+
+ it do
+ is_expected.to define_enum_for(
+ :auto_cancel_on_job_failure
+ ).with_values(
+ none: 0, all: 1
+ ).with_prefix
+ end
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 9696ba7b3ee..024d3ae4240 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -86,6 +86,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
it { is_expected.to respond_to :short_sha }
it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
it { is_expected.to delegate_method(:name).to(:pipeline_metadata).allow_nil }
+ it { is_expected.to delegate_method(:auto_cancel_on_job_failure).to(:pipeline_metadata).allow_nil }
describe 'validations' do
it { is_expected.to validate_presence_of(:sha) }
@@ -163,7 +164,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
before do
stub_const('Ci::Refs::UnlockPreviousPipelinesWorker', unlock_previous_pipelines_worker_spy)
- stub_feature_flags(ci_stop_unlock_pipelines: false)
end
shared_examples 'not unlocking pipelines' do |event:|
@@ -202,42 +202,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
it_behaves_like 'unlocking pipelines', event: :skip
it_behaves_like 'unlocking pipelines', event: :cancel
it_behaves_like 'unlocking pipelines', event: :block
-
- context 'and ci_stop_unlock_pipelines is enabled' do
- before do
- stub_feature_flags(ci_stop_unlock_pipelines: true)
- end
-
- it_behaves_like 'not unlocking pipelines', event: :succeed
- it_behaves_like 'not unlocking pipelines', event: :drop
- it_behaves_like 'not unlocking pipelines', event: :skip
- it_behaves_like 'not unlocking pipelines', event: :cancel
- it_behaves_like 'not unlocking pipelines', event: :block
- end
-
- context 'and ci_unlock_non_successful_pipelines is disabled' do
- before do
- stub_feature_flags(ci_unlock_non_successful_pipelines: false)
- end
-
- it_behaves_like 'unlocking pipelines', event: :succeed
- it_behaves_like 'not unlocking pipelines', event: :drop
- it_behaves_like 'not unlocking pipelines', event: :skip
- it_behaves_like 'not unlocking pipelines', event: :cancel
- it_behaves_like 'not unlocking pipelines', event: :block
-
- context 'and ci_stop_unlock_pipelines is enabled' do
- before do
- stub_feature_flags(ci_stop_unlock_pipelines: true)
- end
-
- it_behaves_like 'not unlocking pipelines', event: :succeed
- it_behaves_like 'not unlocking pipelines', event: :drop
- it_behaves_like 'not unlocking pipelines', event: :skip
- it_behaves_like 'not unlocking pipelines', event: :cancel
- it_behaves_like 'not unlocking pipelines', event: :block
- end
- end
end
context 'when transitioning to a non-unlockable state' do
@@ -246,14 +210,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
it_behaves_like 'not unlocking pipelines', event: :run
-
- context 'and ci_unlock_non_successful_pipelines is disabled' do
- before do
- stub_feature_flags(ci_unlock_non_successful_pipelines: false)
- end
-
- it_behaves_like 'not unlocking pipelines', event: :run
- end
end
end
@@ -2028,17 +1984,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
- context 'when only_allow_merge_if_pipeline_succeeds? returns false and widget_pipeline_pass_subscription_update disabled' do
- let(:only_allow_merge_if_pipeline_succeeds?) { false }
-
- before do
- stub_feature_flags(widget_pipeline_pass_subscription_update: false)
- end
-
- it_behaves_like 'state transition not triggering GraphQL subscription mergeRequestMergeStatusUpdated'
- end
-
- context 'when only_allow_merge_if_pipeline_succeeds? returns false and widget_pipeline_pass_subscription_update enabled' do
+ context 'when only_allow_merge_if_pipeline_succeeds? returns false' do
let(:only_allow_merge_if_pipeline_succeeds?) { false }
it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
@@ -3848,6 +3794,44 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
+ describe '#set_failed' do
+ let(:pipeline) { build(:ci_pipeline) }
+
+ it 'marks the pipeline as failed with the given reason without saving', :aggregate_failures do
+ pipeline.set_failed(:filtered_by_rules)
+
+ expect(pipeline).to be_failed
+ expect(pipeline).to be_filtered_by_rules
+ expect(pipeline).not_to be_persisted
+ end
+ end
+
+ describe '#filtered_as_empty?' do
+ let(:pipeline) { build_stubbed(:ci_pipeline) }
+
+ subject { pipeline.filtered_as_empty? }
+
+ it { is_expected.to eq false }
+
+ context 'when the pipeline is failed' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:drop_reason, :expected) do
+ :unknown_failure | false
+ :filtered_by_rules | true
+ :filtered_by_workflow_rules | true
+ end
+
+ with_them do
+ before do
+ pipeline.set_failed(drop_reason)
+ end
+
+ it { is_expected.to eq expected }
+ end
+ end
+ end
+
describe '#has_yaml_errors?' do
let(:pipeline) { build_stubbed(:ci_pipeline) }
@@ -4065,8 +4049,8 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
describe '#builds_in_self_and_project_descendants' do
subject(:builds) { pipeline.builds_in_self_and_project_descendants }
- let(:pipeline) { create(:ci_pipeline) }
- let!(:build) { create(:ci_build, pipeline: pipeline) }
+ let_it_be_with_refind(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
context 'when pipeline is standalone' do
it 'returns the list of builds' do
@@ -4093,6 +4077,10 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
expect(builds).to contain_exactly(build, child_build, child_of_child_build)
end
end
+
+ it 'includes partition_id filter' do
+ expect(builds.where_values_hash).to match(a_hash_including('partition_id' => pipeline.partition_id))
+ end
end
describe '#build_with_artifacts_in_self_and_project_descendants' do
@@ -4118,7 +4106,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
describe '#jobs_in_self_and_project_descendants' do
subject(:jobs) { pipeline.jobs_in_self_and_project_descendants }
- let(:pipeline) { create(:ci_pipeline) }
+ let_it_be_with_refind(:pipeline) { create(:ci_pipeline) }
shared_examples_for 'fetches jobs in self and project descendant pipelines' do |factory_type|
let!(:job) { create(factory_type, pipeline: pipeline) }
@@ -4151,6 +4139,10 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
expect(jobs).to contain_exactly(job, child_job, child_of_child_job, child_source_bridge, child_of_child_source_bridge)
end
end
+
+ it 'includes partition_id filter' do
+ expect(jobs.where_values_hash).to match(a_hash_including('partition_id' => pipeline.partition_id))
+ end
end
context 'when job is build' do
@@ -5651,6 +5643,22 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
+ describe '.current_partition_value' do
+ subject { described_class.current_partition_value }
+
+ it { is_expected.to eq(101) }
+
+ it 'accepts an optional argument' do
+ expect(described_class.current_partition_value(build_stubbed(:project))).to eq(101)
+ end
+
+ it 'returns 100 when the flag is disabled' do
+ stub_feature_flags(ci_current_partition_value_101: false)
+
+ is_expected.to eq(100)
+ end
+ end
+
describe '#notes=' do
context 'when notes already exist' do
it 'does not create duplicate notes', :aggregate_failures do
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 8c0143d5f18..5d457c4f213 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
let(:clone_accessors) do
%i[pipeline project ref tag options name allow_failure stage stage_idx trigger_request yaml_variables
when environment coverage_regex description tag_list protected needs_attributes job_variables_attributes
- resource_group scheduling_type ci_stage partition_id id_tokens]
+ resource_group scheduling_type ci_stage partition_id id_tokens interruptible]
end
let(:reject_accessors) do
@@ -76,7 +76,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
- job_artifacts_requirements_v2
+ job_artifacts_requirements_v2 job_artifacts_repository_xray
job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx
job_annotations job_artifacts_annotations].freeze
end
@@ -114,7 +114,8 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
shared_examples_for 'clones the processable' do
before_all do
- processable.update!(stage: 'test', stage_id: stage.id)
+ processable.assign_attributes(stage: 'test', stage_id: stage.id, interruptible: true)
+ processable.save!
create(:ci_build_need, build: processable)
end
@@ -187,7 +188,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
- [:tag_list, :needs_attributes, :job_variables_attributes, :id_tokens]
+ [:tag_list, :needs_attributes, :job_variables_attributes, :id_tokens, :interruptible]
current_accessors.uniq!
diff --git a/spec/models/ci/runner_manager_build_spec.rb b/spec/models/ci/runner_manager_build_spec.rb
index 3a381313b76..a4dd3a2c748 100644
--- a/spec/models/ci/runner_manager_build_spec.rb
+++ b/spec/models/ci/runner_manager_build_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::RunnerManagerBuild, model: true, feature_category: :runner_fleet do
+RSpec.describe Ci::RunnerManagerBuild, model: true, feature_category: :fleet_visibility do
let_it_be(:runner) { create(:ci_runner) }
let_it_be(:runner_manager) { create(:ci_runner_machine, runner: runner) }
let_it_be(:build) { create(:ci_build, runner_manager: runner_manager) }
diff --git a/spec/models/ci/runner_manager_spec.rb b/spec/models/ci/runner_manager_spec.rb
index 01275ffd31c..02a72afe0c6 100644
--- a/spec/models/ci/runner_manager_spec.rb
+++ b/spec/models/ci/runner_manager_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::RunnerManager, feature_category: :runner_fleet, type: :model do
+RSpec.describe Ci::RunnerManager, feature_category: :fleet_visibility, type: :model do
it_behaves_like 'having unique enum values'
it_behaves_like 'it has loose foreign keys' do
diff --git a/spec/models/ci/runner_version_spec.rb b/spec/models/ci/runner_version_spec.rb
index bce1f2a6c39..32f840a8034 100644
--- a/spec/models/ci/runner_version_spec.rb
+++ b/spec/models/ci/runner_version_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::RunnerVersion, feature_category: :runner_fleet do
+RSpec.describe Ci::RunnerVersion, feature_category: :fleet_visibility do
let_it_be(:runner_version_upgrade_recommended) do
create(:ci_runner_version, version: 'abc234', status: :recommended)
end
diff --git a/spec/models/concerns/ci/partitionable_spec.rb b/spec/models/concerns/ci/partitionable_spec.rb
index 6daafc78cff..735b81f54bc 100644
--- a/spec/models/concerns/ci/partitionable_spec.rb
+++ b/spec/models/concerns/ci/partitionable_spec.rb
@@ -5,6 +5,12 @@ require 'spec_helper'
RSpec.describe Ci::Partitionable do
let(:ci_model) { Class.new(Ci::ApplicationRecord) }
+ around do |ex|
+ Gitlab::Database::SharedModel.using_connection(ci_model.connection) do
+ ex.run
+ end
+ end
+
describe 'partitionable models inclusion' do
subject { ci_model.include(described_class) }
@@ -61,10 +67,58 @@ RSpec.describe Ci::Partitionable do
context 'when partitioned is true' do
let(:partitioned) { true }
+ let(:partitioning_strategy) { ci_model.partitioning_strategy }
it { expect(ci_model.ancestors).to include(PartitionedTable) }
- it { expect(ci_model.partitioning_strategy).to be_a(Gitlab::Database::Partitioning::CiSlidingListStrategy) }
- it { expect(ci_model.partitioning_strategy.partitioning_key).to eq(:partition_id) }
+ it { expect(partitioning_strategy).to be_a(Gitlab::Database::Partitioning::CiSlidingListStrategy) }
+ it { expect(partitioning_strategy.partitioning_key).to eq(:partition_id) }
+
+ describe 'next_partition_if callback' do
+ let(:active_partition) { partitioning_strategy.active_partition }
+
+ let(:table_options) do
+ {
+ primary_key: [:id, :partition_id],
+ options: 'PARTITION BY LIST (partition_id)',
+ if_not_exists: false
+ }
+ end
+
+ before do
+ ci_model.connection.create_table(:_test_table_name, **table_options) do |t|
+ t.bigserial :id, null: false
+ t.bigint :partition_id, null: false
+ end
+
+ ci_model.table_name = :_test_table_name
+ end
+
+ subject(:value) { partitioning_strategy.next_partition_if.call(active_partition) }
+
+ context 'without any existing partitions' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'with initial partition attached' do
+ before do
+ ci_model.connection.execute(<<~SQL)
+ CREATE TABLE IF NOT EXISTS _test_table_name_100 PARTITION OF _test_table_name FOR VALUES IN (100);
+ SQL
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'with an existing partition for partition_id = 101' do
+ before do
+ ci_model.connection.execute(<<~SQL)
+ CREATE TABLE IF NOT EXISTS _test_table_name_101 PARTITION OF _test_table_name FOR VALUES IN (101);
+ SQL
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
context 'when partitioned is false' do
@@ -74,4 +128,30 @@ RSpec.describe Ci::Partitionable do
it { expect(ci_model).not_to respond_to(:partitioning_strategy) }
end
end
+
+ describe '.in_partition' do
+ before do
+ stub_const("#{described_class}::Testing::PARTITIONABLE_MODELS", [ci_model.name])
+ ci_model.table_name = :p_ci_builds
+ ci_model.include(described_class)
+ end
+
+ subject(:scope_values) { ci_model.in_partition(value).where_values_hash }
+
+ context 'with integer parameters' do
+ let(:value) { 101 }
+
+ it 'adds a partition_id filter' do
+ expect(scope_values).to include('partition_id' => 101)
+ end
+ end
+
+ context 'with partitionable records' do
+ let(:value) { build_stubbed(:ci_pipeline, partition_id: 101) }
+
+ it 'adds a partition_id filter' do
+ expect(scope_values).to include('partition_id' => 101)
+ end
+ end
+ end
end
diff --git a/spec/models/concerns/disables_sti_spec.rb b/spec/models/concerns/disables_sti_spec.rb
new file mode 100644
index 00000000000..07eea635289
--- /dev/null
+++ b/spec/models/concerns/disables_sti_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DisablesSti, feature_category: :shared do
+ describe '.allow_legacy_sti_class' do
+ it 'is nil by default' do
+ expect(ApplicationRecord.allow_legacy_sti_class).to eq(nil)
+ end
+
+ it 'is true on legacy models' do
+ expect(PersonalSnippet.allow_legacy_sti_class).to eq(true)
+ end
+ end
+end
diff --git a/spec/models/concerns/enums/sbom_spec.rb b/spec/models/concerns/enums/sbom_spec.rb
index e2f56cc637d..3bbdf619a8c 100644
--- a/spec/models/concerns/enums/sbom_spec.rb
+++ b/spec/models/concerns/enums/sbom_spec.rb
@@ -3,9 +3,9 @@
require "spec_helper"
RSpec.describe Enums::Sbom, feature_category: :dependency_management do
- describe '.purl_types' do
- using RSpec::Parameterized::TableSyntax
+ using RSpec::Parameterized::TableSyntax
+ describe '.purl_types' do
subject(:actual_purl_type) { described_class.purl_types[package_manager] }
where(:given_package_manager, :expected_purl_type) do
@@ -35,5 +35,63 @@ RSpec.describe Enums::Sbom, feature_category: :dependency_management do
expect(actual_purl_type).to eql(expected_purl_type)
end
end
+
+ it 'contains all of the dependency scanning and container scanning purl types' do
+ expect(described_class::DEPENDENCY_SCANNING_PURL_TYPES + described_class::CONTAINER_SCANNING_PURL_TYPES)
+ .to eql(described_class::PURL_TYPES.keys)
+ end
+ end
+
+ describe '.dependency_scanning_purl_type?' do
+ where(:purl_type, :expected) do
+ :composer | false
+ 'composer' | true
+ 'conan' | true
+ 'gem' | true
+ 'golang' | true
+ 'maven' | true
+ 'npm' | true
+ 'nuget' | true
+ 'pypi' | true
+ 'unknown' | false
+ 'apk' | false
+ 'rpm' | false
+ 'deb' | false
+ 'wolfi' | false
+ end
+
+ with_them do
+ it 'returns true if the purl_type is for dependency_scanning' do
+ actual = described_class.dependency_scanning_purl_type?(purl_type)
+ expect(actual).to eql(expected)
+ end
+ end
+ end
+
+ describe '.container_scanning_purl_type?' do
+ where(:purl_type, :expected) do
+ 'composer' | false
+ 'conan' | false
+ 'gem' | false
+ 'golang' | false
+ 'maven' | false
+ 'npm' | false
+ 'nuget' | false
+ 'pypi' | false
+ 'unknown' | false
+ :apk | false
+ 'apk' | true
+ 'rpm' | true
+ 'deb' | true
+ 'cbl-mariner' | true
+ 'wolfi' | true
+ end
+
+ with_them do
+ it 'returns true if the purl_type is for container_scanning' do
+ actual = described_class.container_scanning_purl_type?(purl_type)
+ expect(actual).to eql(expected)
+ end
+ end
end
end
diff --git a/spec/models/concerns/ignorable_columns_spec.rb b/spec/models/concerns/ignorable_columns_spec.rb
index 339f06f9c45..44dc0bb6da6 100644
--- a/spec/models/concerns/ignorable_columns_spec.rb
+++ b/spec/models/concerns/ignorable_columns_spec.rb
@@ -27,6 +27,12 @@ RSpec.describe IgnorableColumns do
expect { subject.ignore_columns(:name, remove_after: nil, remove_with: 12.6) }.to raise_error(ArgumentError, /Please indicate/)
end
+ it 'allows setting remove_never: true and not setting other remove options' do
+ expect do
+ subject.ignore_columns(%i[name created_at], remove_never: true)
+ end.to change { subject.ignored_columns }.from([]).to(%w[name created_at])
+ end
+
it 'requires remove_after attribute to be set' do
expect { subject.ignore_columns(:name, remove_after: "not a date", remove_with: 12.6) }.to raise_error(ArgumentError, /Please indicate/)
end
@@ -73,9 +79,11 @@ RSpec.describe IgnorableColumns do
end
describe IgnorableColumns::ColumnIgnore do
- subject { described_class.new(remove_after, remove_with) }
+ subject { described_class.new(remove_after, remove_with, remove_never) }
+ let(:remove_after) { nil }
let(:remove_with) { double }
+ let(:remove_never) { false }
describe '#safe_to_remove?' do
context 'after remove_after date has passed' do
@@ -93,6 +101,14 @@ RSpec.describe IgnorableColumns do
expect(subject.safe_to_remove?).to be_falsey
end
end
+
+ context 'with remove_never: true' do
+ let(:remove_never) { true }
+
+ it 'is false' do
+ expect(subject.safe_to_remove?).to be_falsey
+ end
+ end
end
end
end
diff --git a/spec/models/concerns/pg_full_text_searchable_spec.rb b/spec/models/concerns/pg_full_text_searchable_spec.rb
index 7da48489e12..f3289408643 100644
--- a/spec/models/concerns/pg_full_text_searchable_spec.rb
+++ b/spec/models/concerns/pg_full_text_searchable_spec.rb
@@ -87,6 +87,12 @@ RSpec.describe PgFullTextSearchable, feature_category: :global_search do
[english, with_accent, japanese].each(&:update_search_data!)
end
+ it 'builds a search query using `search_vector` from the search_data table' do
+ sql = model_class.pg_full_text_search('test').to_sql
+
+ expect(sql).to include('"issue_search_data"."search_vector" @@ to_tsquery')
+ end
+
it 'searches across all fields' do
expect(model_class.pg_full_text_search('title english')).to contain_exactly(english, japanese)
end
@@ -158,6 +164,14 @@ RSpec.describe PgFullTextSearchable, feature_category: :global_search do
end
end
+ describe '.pg_full_text_search_in_model' do
+ it 'builds a search query using `search_vector` from the model table' do
+ sql = model_class.pg_full_text_search_in_model('test').to_sql
+
+ expect(sql).to include('"issues"."search_vector" @@ to_tsquery')
+ end
+ end
+
describe '#update_search_data!' do
let(:model) { model_class.create!(project: project, namespace: project.project_namespace, title: 'title', description: 'description') }
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 7e324812b97..e71392f7bbc 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.shared_examples 'routable resource' do
- shared_examples_for '.find_by_full_path' do |has_cross_join: false|
+ shared_examples_for '.find_by_full_path' do
it 'finds records by their full path' do
expect(described_class.find_by_full_path(record.full_path)).to eq(record)
expect(described_class.find_by_full_path(record.full_path.upcase)).to eq(record)
@@ -46,22 +46,98 @@ RSpec.shared_examples 'routable resource' do
end
end
- if has_cross_join
- it 'has a cross-join' do
- expect(Gitlab::Database).to receive(:allow_cross_joins_across_databases)
+ it 'does not have cross-join' do
+ expect(Gitlab::Database).not_to receive(:allow_cross_joins_across_databases)
- described_class.find_by_full_path(record.full_path)
+ described_class.find_by_full_path(record.full_path)
+ end
+ end
+
+ it_behaves_like '.find_by_full_path', :aggregate_failures
+
+ shared_examples_for '.where_full_path_in' do
+ context 'without any paths' do
+ it 'returns an empty relation' do
+ expect(described_class.where_full_path_in([])).to eq([])
+ end
+ end
+
+ context 'without any valid paths' do
+ it 'returns an empty relation' do
+ expect(described_class.where_full_path_in(%w[unknown])).to eq([])
+ end
+ end
+
+ context 'with valid paths' do
+ it 'returns the entities matching the paths' do
+ result = described_class.where_full_path_in([record.full_path, record_2.full_path])
+
+ expect(result).to contain_exactly(record, record_2)
+ end
+
+ it 'returns entities regardless of the casing of paths' do
+ result = described_class.where_full_path_in([record.full_path.upcase, record_2.full_path.upcase])
+
+ expect(result).to contain_exactly(record, record_2)
+ end
+ end
+
+ context 'on the usage of `use_includes` parameter' do
+ let_it_be(:klass) { record.class.to_s.downcase }
+ let_it_be(:record_3) { create(:"#{klass}") }
+ let_it_be(:record_4) { create(:"#{klass}") }
+
+ context 'when use_includes: true' do
+ it 'includes route information when loading records' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ described_class.where_full_path_in([record.full_path, record_2.full_path], use_includes: true)
+ .map(&:route)
+ end
+
+ expect do
+ described_class.where_full_path_in(
+ [
+ record.full_path,
+ record_2.full_path,
+ record_3.full_path,
+ record_4.full_path
+ ], use_includes: true)
+ .map(&:route)
+ end.to issue_same_number_of_queries_as(control_count)
+ end
end
- else
- it 'does not have cross-join' do
- expect(Gitlab::Database).not_to receive(:allow_cross_joins_across_databases)
- described_class.find_by_full_path(record.full_path)
+ context 'when use_includes: false' do
+ it 'does not include route information when loading records' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ described_class.where_full_path_in([record.full_path, record_2.full_path], use_includes: false)
+ .map(&:route)
+ end
+
+ expect do
+ described_class.where_full_path_in(
+ [
+ record.full_path,
+ record_2.full_path,
+ record_3.full_path,
+ record_4.full_path
+ ], use_includes: false)
+ .map(&:route)
+ end.not_to issue_same_number_of_queries_as(control_count)
+ end
end
end
end
- it_behaves_like '.find_by_full_path', :aggregate_failures
+ it_behaves_like '.where_full_path_in', :aggregate_failures
+
+ context 'when the `optimize_where_full_path_in` feature flag is turned OFF' do
+ before do
+ stub_feature_flags(optimize_where_full_path_in: false)
+ end
+
+ it_behaves_like '.where_full_path_in', :aggregate_failures
+ end
end
RSpec.shared_examples 'routable resource with parent' do
@@ -105,10 +181,12 @@ RSpec.describe Group, 'Routable', :with_clean_rails_cache, feature_category: :gr
it_behaves_like 'routable resource' do
let_it_be(:record) { group }
+ let_it_be(:record_2) { nested_group }
end
it_behaves_like 'routable resource with parent' do
let_it_be(:record) { nested_group }
+ let_it_be(:record_2) { group }
end
describe 'Validations' do
@@ -169,34 +247,6 @@ RSpec.describe Group, 'Routable', :with_clean_rails_cache, feature_category: :gr
expect(group.route.namespace).to eq(group)
end
- describe '.where_full_path_in' do
- context 'without any paths' do
- it 'returns an empty relation' do
- expect(described_class.where_full_path_in([])).to eq([])
- end
- end
-
- context 'without any valid paths' do
- it 'returns an empty relation' do
- expect(described_class.where_full_path_in(%w[unknown])).to eq([])
- end
- end
-
- context 'with valid paths' do
- it 'returns the projects matching the paths' do
- result = described_class.where_full_path_in([group.to_param, nested_group.to_param])
-
- expect(result).to contain_exactly(group, nested_group)
- end
-
- it 'returns projects regardless of the casing of paths' do
- result = described_class.where_full_path_in([group.to_param.upcase, nested_group.to_param.upcase])
-
- expect(result).to contain_exactly(group, nested_group)
- end
- end
- end
-
describe '#parent_loaded?' do
before do
group.parent = create(:group)
@@ -232,9 +282,11 @@ end
RSpec.describe Project, 'Routable', :with_clean_rails_cache, feature_category: :groups_and_projects do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:project) { create(:project, namespace: namespace) }
+ let_it_be(:project_2) { create(:project) }
it_behaves_like 'routable resource with parent' do
let_it_be(:record) { project }
+ let_it_be(:record_2) { project_2 }
end
it 'creates route with namespace referencing project namespace' do
@@ -252,6 +304,17 @@ RSpec.describe Project, 'Routable', :with_clean_rails_cache, feature_category: :
expect(record).to be_nil
end
end
+
+ describe '.where_full_path_in' do
+ it 'does not return records if the sources are different, but the IDs match' do
+ group = create(:group, id: 1992)
+ project = create(:project, id: 1992)
+
+ records = described_class.where(id: project.id).where_full_path_in([group.full_path])
+
+ expect(records).to be_empty
+ end
+ end
end
RSpec.describe Namespaces::ProjectNamespace, 'Routable', :with_clean_rails_cache, feature_category: :groups_and_projects do
diff --git a/spec/models/concerns/transitionable_spec.rb b/spec/models/concerns/transitionable_spec.rb
index b80d363ef78..c8cba1ae226 100644
--- a/spec/models/concerns/transitionable_spec.rb
+++ b/spec/models/concerns/transitionable_spec.rb
@@ -22,19 +22,16 @@ RSpec.describe Transitionable, feature_category: :code_review_workflow do
let(:object) { klass.new(transitioning) }
describe '#transitioning?' do
- where(:transitioning, :feature_flag, :result) do
- true | true | true
- false | false | false
- true | false | false
- false | true | false
+ context "when transitioning" do
+ let(:transitioning) { true }
+
+ it { expect(object.transitioning?).to eq(true) }
end
- with_them do
- before do
- stub_feature_flags(skip_validations_during_transitions: feature_flag)
- end
+ context "when not transitioning" do
+ let(:transitioning) { false }
- it { expect(object.transitioning?).to eq(result) }
+ it { expect(object.transitioning?).to eq(false) }
end
end
end
diff --git a/spec/models/concerns/triggerable_hooks_spec.rb b/spec/models/concerns/triggerable_hooks_spec.rb
index 28cda269458..c209d6476f3 100644
--- a/spec/models/concerns/triggerable_hooks_spec.rb
+++ b/spec/models/concerns/triggerable_hooks_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe TriggerableHooks do
include TriggerableHooks # rubocop:disable RSpec/DescribedClass
triggerable_hooks [:push_hooks]
+ self.allow_legacy_sti_class = true
+
scope :executable, -> { all }
end
end
diff --git a/spec/models/concerns/vulnerability_finding_helpers_spec.rb b/spec/models/concerns/vulnerability_finding_helpers_spec.rb
deleted file mode 100644
index 023ecccb520..00000000000
--- a/spec/models/concerns/vulnerability_finding_helpers_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe VulnerabilityFindingHelpers do
- let(:cls) do
- Class.new do
- include VulnerabilityFindingHelpers
-
- attr_accessor :report_type
-
- def initialize(report_type)
- @report_type = report_type
- end
- end
- end
-
- describe '#requires_manual_resolution?' do
- it 'returns false if the finding does not require manual resolution' do
- expect(cls.new('sast').requires_manual_resolution?).to eq(false)
- end
-
- it 'returns true when the finding requires manual resolution' do
- expect(cls.new('secret_detection').requires_manual_resolution?).to eq(true)
- end
- end
-end
diff --git a/spec/models/container_registry/protection/rule_spec.rb b/spec/models/container_registry/protection/rule_spec.rb
index 9f162736efd..1706fcf76ae 100644
--- a/spec/models/container_registry/protection/rule_spec.rb
+++ b/spec/models/container_registry/protection/rule_spec.rb
@@ -38,9 +38,9 @@ RSpec.describe ContainerRegistry::Protection::Rule, type: :model, feature_catego
describe 'validations' do
subject { build(:container_registry_protection_rule) }
- describe '#container_path_pattern' do
- it { is_expected.to validate_presence_of(:container_path_pattern) }
- it { is_expected.to validate_length_of(:container_path_pattern).is_at_most(255) }
+ describe '#repository_path_pattern' do
+ it { is_expected.to validate_presence_of(:repository_path_pattern) }
+ it { is_expected.to validate_length_of(:repository_path_pattern).is_at_most(255) }
end
describe '#delete_protected_up_to_access_level' do
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 027fd20462b..fb32c796016 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -713,7 +713,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
{
name: 'latest',
digest: 'sha256:6c3c624b58dbbcd3c0dd82b4c53f04191247c6eebdaab7c610cf7d66709b3',
- config_digest: 'sha256:66b1132a0173910b01ee694462c99efbe1b9ab5bf8083231232312',
+ config_digest: nil,
media_type: 'application/vnd.oci.image.manifest.v1+json',
size_bytes: 1234567892,
created_at: 10.minutes.ago,
@@ -742,7 +742,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
expect(return_value[:pagination]).to eq(response_body[:pagination])
return_value[:tags].each_with_index do |tag, index|
- expected_revision = tags_response[index][:config_digest].to_s.split(':')[1]
+ expected_revision = tags_response[index][:config_digest].to_s.split(':')[1].to_s
expect(tag.is_a?(ContainerRegistry::Tag)).to eq(true)
expect(tag).to have_attributes(
diff --git a/spec/models/custom_emoji_spec.rb b/spec/models/custom_emoji_spec.rb
index 15655d08556..cbdf05cf28f 100644
--- a/spec/models/custom_emoji_spec.rb
+++ b/spec/models/custom_emoji_spec.rb
@@ -48,4 +48,45 @@ RSpec.describe CustomEmoji do
expect(emoji.errors.messages).to eq(file: ["is blocked: Only allowed schemes are http, https"])
end
end
+
+ describe '#for_resource' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:custom_emoji) { create(:custom_emoji, namespace: group) }
+
+ context 'when custom_emoji feature flag is disabled' do
+ before do
+ stub_feature_flags(custom_emoji: false)
+ end
+
+ it { expect(described_class.for_resource(group)).to eq([]) }
+ end
+
+ context 'when group is nil' do
+ let_it_be(:group) { nil }
+
+ it { expect(described_class.for_resource(group)).to eq([]) }
+ end
+
+ context 'when resource is a project' do
+ let_it_be(:project) { create(:project) }
+
+ it { expect(described_class.for_resource(project)).to eq([]) }
+ end
+
+ it { expect(described_class.for_resource(group)).to eq([custom_emoji]) }
+ end
+
+ describe '#for_namespaces' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:custom_emoji) { create(:custom_emoji, namespace: group, name: 'parrot') }
+
+ it { expect(described_class.for_namespaces([group.id])).to eq([custom_emoji]) }
+
+ context 'with subgroup' do
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subgroup_emoji) { create(:custom_emoji, namespace: subgroup, name: 'parrot') }
+
+ it { expect(described_class.for_namespaces([subgroup.id, group.id])).to eq([subgroup_emoji]) }
+ end
+ end
end
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index 1b8dd62455e..36479dffe21 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -473,4 +473,12 @@ RSpec.describe DeployToken, feature_category: :continuous_delivery do
expect(subject.impersonated?).to be(false)
end
end
+
+ describe '.token' do
+ # Specify a blank token_encrypted so that the model's method is used
+ # instead of the factory value
+ subject(:plaintext) { create(:deploy_token, token_encrypted: nil).token }
+
+ it { is_expected.to match(/gldt-[A-Za-z0-9_-]{20}/) }
+ end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index ee48e8cac6c..cb2c38c15e0 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -1270,10 +1270,15 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
shared_examples_for 'gracefully handling error' do
it 'tracks an exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- instance_of(described_class::StatusSyncError),
- deployment_id: deployment.id,
- job_id: job.id)
+ expect(Gitlab::ErrorTracking).to(
+ receive(:track_exception).with(
+ instance_of(described_class::StatusSyncError),
+ deployment_id: deployment.id,
+ job_id: job.id
+ ) do |error|
+ expect(error.backtrace).to be_present
+ end
+ )
is_expected.to eq(false)
diff --git a/spec/models/diff_viewer/base_spec.rb b/spec/models/diff_viewer/base_spec.rb
index 8ab7b090928..17925ef2576 100644
--- a/spec/models/diff_viewer/base_spec.rb
+++ b/spec/models/diff_viewer/base_spec.rb
@@ -100,6 +100,28 @@ RSpec.describe DiffViewer::Base do
end
end
+ describe '#generated?' do
+ before do
+ allow(diff_file).to receive(:generated?).and_return(generated)
+ end
+
+ context 'when the diff file is generated' do
+ let(:generated) { true }
+
+ it 'returns true' do
+ expect(viewer.generated?).to be_truthy
+ end
+ end
+
+ context 'when the diff file is not generated' do
+ let(:generated) { false }
+
+ it 'returns true' do
+ expect(viewer.generated?).to be_falsey
+ end
+ end
+ end
+
describe '#render_error' do
context 'when the combined blob size is larger than the size limit' do
before do
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 3e4fe57c59b..6eb69bf958a 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -68,15 +68,6 @@ RSpec.describe Event, feature_category: :user_profile do
end.not_to change { project.last_repository_updated_at }
end
end
-
- describe 'after_create UserInteractedProject.track' do
- let(:event) { build(:push_event, project: project, author: project.first_owner) }
-
- it 'passes event to UserInteractedProject.track' do
- expect(UserInteractedProject).to receive(:track).with(event)
- event.save!
- end
- end
end
describe 'validations' do
@@ -115,6 +106,18 @@ RSpec.describe Event, feature_category: :user_profile do
end
end
+ describe '.for_merge_request' do
+ let(:mr_event) { create(:event, :for_merge_request, project: project) }
+
+ before do
+ create(:event, :for_issue, project: project)
+ end
+
+ it 'returns events for MergeRequest target_type' do
+ expect(described_class.for_merge_request).to contain_exactly(mr_event)
+ end
+ end
+
describe '.created_at' do
it 'can find the right event' do
time = 1.day.ago
@@ -128,6 +131,21 @@ RSpec.describe Event, feature_category: :user_profile do
end
end
+ describe '.created_between' do
+ it 'returns events created between given timestamps' do
+ start_time = 2.days.ago
+ end_time = Date.today
+
+ create(:event, created_at: 3.days.ago)
+ e1 = create(:event, created_at: 2.days.ago)
+ e2 = create(:event, created_at: 1.day.ago)
+
+ found = described_class.created_between(start_time, end_time)
+
+ expect(found).to contain_exactly(e1, e2)
+ end
+ end
+
describe '.for_fingerprint' do
let_it_be(:with_fingerprint) { create(:event, fingerprint: 'aaa', project: project) }
@@ -152,16 +170,28 @@ RSpec.describe Event, feature_category: :user_profile do
end
describe '.contributions' do
- let!(:merge_request_event) { create(:event, :created, :for_merge_request, project: project) }
- let!(:issue_event) { create(:event, :created, :for_issue, project: project) }
+ let!(:merge_request_events) do
+ %i[created closed merged approved].map do |action|
+ create(:event, :for_merge_request, action: action, project: project)
+ end
+ end
+
let!(:work_item_event) { create(:event, :created, :for_work_item, project: project) }
- let!(:design_event) { create(:design_event, project: project) }
+ let!(:issue_events) do
+ %i[created closed].map { |action| create(:event, :for_issue, action: action, project: project) }
+ end
- it 'returns events for MergeRequest, Issue and WorkItem' do
+ let!(:push_event) { create_push_event(project, project.owner) }
+ let!(:comment_event) { create(:event, :commented, project: project) }
+
+ before do
+ create(:design_event, project: project) # should not be in scope
+ end
+
+ it 'returns events for MergeRequest, Issue, WorkItem and push, comment events' do
expect(described_class.contributions).to contain_exactly(
- merge_request_event,
- issue_event,
- work_item_event
+ *merge_request_events, *issue_events, work_item_event,
+ push_event, comment_event
)
end
end
@@ -881,7 +911,7 @@ RSpec.describe Event, feature_category: :user_profile do
context 'when a project was updated more than 1 hour ago', :clean_gitlab_redis_shared_state do
before do
::Gitlab::Redis::SharedState.with do |redis|
- redis.hset('inactive_projects_deletion_warning_email_notified', "project:#{project.id}", Date.current)
+ redis.hset('inactive_projects_deletion_warning_email_notified', "project:#{project.id}", Date.current.to_s)
end
end
@@ -1146,4 +1176,11 @@ RSpec.describe Event, feature_category: :user_profile do
event
end
+
+ context 'with loose foreign key on events.author_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let_it_be(:parent) { create(:user) }
+ let_it_be(:model) { create(:event, author: parent) }
+ end
+ end
end
diff --git a/spec/models/every_model_spec.rb b/spec/models/every_model_spec.rb
new file mode 100644
index 00000000000..479fc8d3dfa
--- /dev/null
+++ b/spec/models/every_model_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Every model', feature_category: :shared do
+ describe 'disallows STI', :eager_load do
+ include_examples 'Model disables STI' do
+ let(:models) { ApplicationRecord.descendants.reject(&:abstract_class?) }
+ end
+ end
+end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 7a31067732f..1fafa64a535 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -11,10 +11,11 @@ RSpec.describe Group, feature_category: :groups_and_projects do
describe 'associations' do
it { is_expected.to have_many :projects }
it { is_expected.to have_many(:all_group_members).dependent(:destroy) }
+ it { is_expected.to have_many(:all_owner_members) }
it { is_expected.to have_many(:group_members).dependent(:destroy) }
it { is_expected.to have_many(:namespace_members) }
it { is_expected.to have_many(:users).through(:group_members) }
- it { is_expected.to have_many(:owners).through(:all_group_members) }
+ it { is_expected.to have_many(:owners).through(:all_owner_members) }
it { is_expected.to have_many(:requesters).dependent(:destroy) }
it { is_expected.to have_many(:namespace_requesters) }
it { is_expected.to have_many(:members_and_requesters) }
@@ -391,12 +392,19 @@ RSpec.describe Group, feature_category: :groups_and_projects do
expect(internal_group.errors[:visibility_level]).to include('private is not allowed since this group contains projects with higher visibility.')
end
- it 'is valid if higher visibility project is deleted' do
+ it 'is valid if higher visibility project is currently undergoing deletion' do
internal_project.update_attribute(:pending_delete, true)
internal_group.visibility_level = Gitlab::VisibilityLevel::PRIVATE
expect(internal_group).to be_valid
end
+
+ it 'is valid if higher visibility project is pending deletion via marked_for_deletion_at' do
+ internal_project.update_attribute(:marked_for_deletion_at, Time.current)
+ internal_group.visibility_level = Gitlab::VisibilityLevel::PRIVATE
+
+ expect(internal_group).to be_valid
+ end
end
context 'when group has a higher visibility' do
@@ -917,6 +925,18 @@ RSpec.describe Group, feature_category: :groups_and_projects do
it { is_expected.to eq([group_1, group_2, group_4, group_3]) }
end
+ context 'when sort by path_asc' do
+ let(:sort) { 'path_asc' }
+
+ it { is_expected.to eq([group_1, group_2, group_3, group_4].sort_by(&:path)) }
+ end
+
+ context 'when sort by path_desc' do
+ let(:sort) { 'path_desc' }
+
+ it { is_expected.to eq([group_1, group_2, group_3, group_4].sort_by(&:path).reverse) }
+ end
+
context 'when sort by recently_created' do
let(:sort) { 'created_desc' }
@@ -1257,40 +1277,8 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
end
- describe '#avatar_type' do
- let(:user) { create(:user) }
-
- before do
- group.add_member(user, GroupMember::MAINTAINER)
- end
-
- it "is true if avatar is image" do
- group.update_attribute(:avatar, 'uploads/avatar.png')
- expect(group.avatar_type).to be_truthy
- end
-
- it "is false if avatar is html page" do
- group.update_attribute(:avatar, 'uploads/avatar.html')
- group.avatar_type
-
- expect(group.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
- end
- end
-
- describe '#avatar_url' do
- let!(:group) { create(:group, :with_avatar) }
- let(:user) { create(:user) }
-
- context 'when avatar file is uploaded' do
- before do
- group.add_maintainer(user)
- end
-
- it 'shows correct avatar url' do
- expect(group.avatar_url).to eq(group.avatar.url)
- expect(group.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, group.avatar.url].join)
- end
- end
+ it_behaves_like Avatarable do
+ let(:model) { create(:group, :with_avatar) }
end
describe '.search' do
@@ -3014,14 +3002,6 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
end
- describe '#activity_path' do
- it 'returns the group activity_path' do
- expected_path = "/groups/#{group.name}/-/activity"
-
- expect(group.activity_path).to eq(expected_path)
- end
- end
-
context 'with export' do
let(:group) { create(:group, :with_export) }
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index da4771d801d..d48b411c800 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe SystemHook, feature_category: :webhooks do
end
it 'group destroy hook' do
- group.destroy!
+ create(:group).destroy!
expect(WebMock).to have_requested(:post, system_hook.url).with(
body: /group_destroy/,
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 5af6a592c66..3a96de4efe2 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -81,10 +81,9 @@ RSpec.describe Integration, feature_category: :integrations do
let!(:integration1) { create(:jira_integration, project: project) }
let!(:integration2) { create(:redmine_integration, project: project) }
let!(:integration3) { create(:confluence_integration, project: project) }
- let!(:integration4) { create(:shimo_integration, project: project) }
it 'returns the right group integration' do
- expect(described_class.third_party_wikis).to contain_exactly(integration3, integration4)
+ expect(described_class.third_party_wikis).to contain_exactly(integration3)
end
end
diff --git a/spec/models/integrations/base_third_party_wiki_spec.rb b/spec/models/integrations/base_third_party_wiki_spec.rb
deleted file mode 100644
index 763f7131b94..00000000000
--- a/spec/models/integrations/base_third_party_wiki_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Integrations::BaseThirdPartyWiki, feature_category: :integrations do
- describe 'default values' do
- it { expect(subject.category).to eq(:third_party_wiki) }
- end
-
- describe 'Validations' do
- let_it_be_with_reload(:project) { create(:project) }
-
- describe 'only one third party wiki per project' do
- subject(:integration) { build(:shimo_integration, project: project, active: true) }
-
- before_all do
- create(:confluence_integration, project: project, active: true)
- end
-
- context 'when integration is changed manually by user' do
- it 'executes the validation' do
- valid = integration.valid?(:manual_change)
-
- expect(valid).to be_falsey
- error_message = 'Another third-party wiki is already in use. '\
- 'Only one third-party wiki integration can be active at a time'
- expect(integration.errors[:base]).to include _(error_message)
- end
- end
-
- context 'when integration is changed internally' do
- it 'does not execute the validation' do
- expect(integration.valid?).to be_truthy
- end
- end
-
- context 'when integration is not on the project level' do
- subject(:integration) { build(:shimo_integration, :instance, active: true) }
-
- it 'executes the validation' do
- expect(integration.valid?(:manual_change)).to be_truthy
- end
- end
- end
- end
-end
diff --git a/spec/models/integrations/field_spec.rb b/spec/models/integrations/field_spec.rb
index 49eaecd1b2e..22ad71135e7 100644
--- a/spec/models/integrations/field_spec.rb
+++ b/spec/models/integrations/field_spec.rb
@@ -186,6 +186,22 @@ RSpec.describe ::Integrations::Field, feature_category: :integrations do
end
end
+ describe '#api_type' do
+ it 'returns String' do
+ expect(field.api_type).to eq(String)
+ end
+
+ context 'when type is checkbox' do
+ before do
+ attrs[:type] = :checkbox
+ end
+
+ it 'returns Boolean' do
+ expect(field.api_type).to eq(::API::Integrations::Boolean)
+ end
+ end
+ end
+
describe '#key?' do
it { is_expected.to be_key(:type) }
it { is_expected.not_to be_key(:foo) }
diff --git a/spec/models/integrations/irker_spec.rb b/spec/models/integrations/irker_spec.rb
index e98b8b54e03..2a733e67a3c 100644
--- a/spec/models/integrations/irker_spec.rb
+++ b/spec/models/integrations/irker_spec.rb
@@ -5,7 +5,7 @@ require 'socket'
require 'timeout'
require 'json'
-RSpec.describe Integrations::Irker do
+RSpec.describe Integrations::Irker, feature_category: :integrations do
describe 'Validations' do
context 'when integration is active' do
before do
@@ -30,10 +30,7 @@ RSpec.describe Integrations::Irker do
let(:irker) { described_class.new }
let(:irker_server) { TCPServer.new('localhost', 0) }
- let(:sample_data) do
- Gitlab::DataBuilder::Push.build_sample(project, user)
- end
-
+ let(:sample_data) { Gitlab::DataBuilder::Push.build_sample(project, user) }
let(:recipients) { '#commits irc://test.net/#test ftp://bad' }
let(:colorize_messages) { '1' }
@@ -58,7 +55,7 @@ RSpec.describe Integrations::Irker do
it 'sends valid JSON messages to an Irker listener', :sidekiq_might_not_need_inline do
expect(Integrations::IrkerWorker).to receive(:perform_async)
- .with(project.id, irker.channels, colorize_messages, sample_data, irker.settings)
+ .with(project.id, irker.channels, colorize_messages, sample_data.deep_stringify_keys, irker.settings)
.and_call_original
irker.execute(sample_data)
diff --git a/spec/models/integrations/shimo_spec.rb b/spec/models/integrations/shimo_spec.rb
deleted file mode 100644
index 95289343d0d..00000000000
--- a/spec/models/integrations/shimo_spec.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Integrations::Shimo, feature_category: :integrations do
- describe '#fields' do
- let(:shimo_integration) { build(:shimo_integration) }
-
- it 'returns custom fields' do
- expect(shimo_integration.fields.pluck(:name)).to eq(%w[external_wiki_url])
- end
- end
-
- describe '#create' do
- let_it_be(:project) { create(:project, :repository) }
- let(:external_wiki_url) { 'https://shimo.example.com/desktop' }
- let(:params) { { active: true, project: project, external_wiki_url: external_wiki_url } }
-
- context 'with valid params' do
- it 'creates the Shimo integration' do
- shimo = described_class.create!(params)
-
- expect(shimo.valid?).to be true
- expect(shimo.render?).to be true
- expect(shimo.external_wiki_url).to eq(external_wiki_url)
- end
- end
-
- context 'with invalid params' do
- it 'cannot create the Shimo integration without external_wiki_url' do
- params['external_wiki_url'] = nil
- expect { described_class.create!(params) }.to raise_error(ActiveRecord::RecordInvalid)
- end
-
- it 'cannot create the Shimo integration with invalid external_wiki_url' do
- params['external_wiki_url'] = 'Fake Invalid URL'
- expect { described_class.create!(params) }.to raise_error(ActiveRecord::RecordInvalid)
- end
- end
- end
-
- describe 'Caching has_shimo on project_settings' do
- let_it_be(:project) { create(:project) }
-
- subject { project.project_setting.has_shimo? }
-
- it 'sets the property to true when integration is active' do
- create(:shimo_integration, project: project, active: true)
-
- is_expected.to be(true)
- end
-
- it 'sets the property to false when integration is not active' do
- create(:shimo_integration, project: project, active: false)
-
- is_expected.to be(false)
- end
-
- it 'creates a project_setting record if one was not already created' do
- expect { create(:shimo_integration) }.to change(ProjectSetting, :count).by(1)
- end
- end
-
- describe '#avatar_url' do
- it 'returns the avatar image path' do
- expect(subject.avatar_url).to eq(ActionController::Base.helpers.image_path('logos/shimo.svg'))
- end
- end
-end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 594492a160d..48e19cd0ad5 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -380,6 +380,16 @@ RSpec.describe Issue, feature_category: :team_planning do
end
end
+ describe '.in_namespaces' do
+ let(:group) { create(:group) }
+ let!(:group_work_item) { create(:issue, :group_level, namespace: group) }
+ let!(:project_work_item) { create(:issue, project: reusable_project) }
+
+ subject { described_class.in_namespaces(group) }
+
+ it { is_expected.to contain_exactly(group_work_item) }
+ end
+
describe '.with_issue_type' do
let_it_be(:issue) { create(:issue, project: reusable_project) }
let_it_be(:incident) { create(:incident, project: reusable_project) }
@@ -2195,4 +2205,21 @@ RSpec.describe Issue, feature_category: :team_planning do
end
end
end
+
+ describe '#gfm_reference' do
+ where(:issue_type, :expected_name) do
+ :issue | 'issue'
+ :incident | 'incident'
+ :test_case | 'test case'
+ :task | 'task'
+ end
+
+ with_them do
+ it 'uses the issue type as the reference name' do
+ issue = create(:issue, issue_type, project: reusable_project)
+
+ expect(issue.gfm_reference).to eq("#{expected_name} #{issue.to_reference}")
+ end
+ end
+ end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 7a46e5e7e53..dd4dbd53a94 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -348,56 +348,10 @@ RSpec.describe Key, :mailer do
end
end
- context 'validate it meets key restrictions' do
- where(:factory, :minimum, :result) do
- forbidden = ApplicationSetting::FORBIDDEN_KEY_VALUE
+ context 'ssh key' do
+ subject { build(:key) }
- [
- [:rsa_key_2048, 0, true],
- [:dsa_key_2048, 0, true],
- [:ecdsa_key_256, 0, true],
- [:ed25519_key_256, 0, true],
- [:ecdsa_sk_key_256, 0, true],
- [:ed25519_sk_key_256, 0, true],
-
- [:rsa_key_2048, 1024, true],
- [:rsa_key_2048, 2048, true],
- [:rsa_key_2048, 4096, false],
-
- [:dsa_key_2048, 1024, true],
- [:dsa_key_2048, 2048, true],
- [:dsa_key_2048, 4096, false],
-
- [:ecdsa_key_256, 256, true],
- [:ecdsa_key_256, 384, false],
-
- [:ed25519_key_256, 256, true],
- [:ed25519_key_256, 384, false],
-
- [:ecdsa_sk_key_256, 256, true],
- [:ecdsa_sk_key_256, 384, false],
-
- [:ed25519_sk_key_256, 256, true],
- [:ed25519_sk_key_256, 384, false],
-
- [:rsa_key_2048, forbidden, false],
- [:dsa_key_2048, forbidden, false],
- [:ecdsa_key_256, forbidden, false],
- [:ed25519_key_256, forbidden, false],
- [:ecdsa_sk_key_256, forbidden, false],
- [:ed25519_sk_key_256, forbidden, false]
- ]
- end
-
- with_them do
- subject(:key) { build(factory) }
-
- before do
- stub_application_setting("#{key.public_key.type}_key_restriction" => minimum)
- end
-
- it { expect(key.valid?).to eq(result) }
- end
+ it_behaves_like 'meets ssh key restrictions'
end
context 'callbacks' do
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index b4941c71d6a..db2ae319bc9 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -1184,8 +1184,8 @@ RSpec.describe Member, feature_category: :groups_and_projects do
context 'with loose foreign key on members.user_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:user) }
- let!(:model) { create(:group_member, user: parent) }
+ let_it_be(:parent) { create(:user) }
+ let_it_be(:model) { create(:group_member, user: parent) }
end
end
end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index a9725a796bf..70f843be0e1 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -57,6 +57,7 @@ RSpec.describe ProjectMember, feature_category: :groups_and_projects do
let_it_be(:developer) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:admin) { create(:admin) }
before do
project.add_owner(owner)
@@ -74,6 +75,30 @@ RSpec.describe ProjectMember, feature_category: :groups_and_projects do
end
end
+ context 'when member can manage owners via admin' do
+ let(:user) { admin }
+
+ context 'with admin mode', :enable_admin_mode do
+ it 'returns Gitlab::Access.options_with_owner' do
+ expect(access_levels).to eq(Gitlab::Access.options_with_owner)
+ end
+ end
+
+ context 'without admin mode' do
+ it 'returns empty hash' do
+ expect(access_levels).to eq({})
+ end
+ end
+ end
+
+ context 'when user is not a project member' do
+ let(:user) { create(:user) }
+
+ it 'return an empty hash' do
+ expect(access_levels).to eq({})
+ end
+ end
+
context 'when member cannot manage owners' do
let(:user) { maintainer }
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index bcab2029942..2e68cd9e74a 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -172,6 +172,30 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
end
+ describe '#ensure_project_id' do
+ let_it_be(:merge_request) { create(:merge_request, :without_diffs) }
+
+ let(:diff) { build(:merge_request_diff, merge_request: merge_request, project_id: project_id) }
+
+ subject { diff.save! }
+
+ context 'when project_id is null' do
+ let(:project_id) { nil }
+
+ it do
+ expect { subject }.to change(diff, :project_id).from(nil).to(merge_request.target_project_id)
+ end
+ end
+
+ context 'when project_id is already set' do
+ let(:project_id) { create(:project, :stubbed_repository).id }
+
+ it do
+ expect { subject }.not_to change(diff, :project_id)
+ end
+ end
+ end
+
describe '#update_external_diff_store' do
let_it_be(:merge_request) { create(:merge_request) }
@@ -366,9 +390,10 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
shared_examples_for 'merge request diffs' do
let(:merge_request) { create(:merge_request) }
- let!(:diff) { merge_request.merge_request_diff.reload }
context 'when it was not cleaned by the system' do
+ let!(:diff) { merge_request.merge_request_diff.reload }
+
it 'returns persisted diffs' do
expect(diff).to receive(:load_diffs).and_call_original
@@ -377,6 +402,8 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
context 'when diff was cleaned by the system' do
+ let!(:diff) { merge_request.merge_request_diff.reload }
+
before do
diff.clean!
end
@@ -737,6 +764,63 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
end
+ describe '#get_patch_id_sha' do
+ let(:mr_diff) { create(:merge_request).merge_request_diff }
+
+ context 'when the patch_id exists on the model' do
+ it 'returns the patch_id' do
+ expect(mr_diff.patch_id_sha).not_to be_nil
+ expect(mr_diff.get_patch_id_sha).to eq(mr_diff.patch_id_sha)
+ end
+ end
+
+ context 'when the patch_id does not exist on the model' do
+ it 'retrieves the patch id, saves the model, and returns it' do
+ expect(mr_diff.patch_id_sha).not_to be_nil
+
+ patch_id = mr_diff.patch_id_sha
+ mr_diff.update!(patch_id_sha: nil)
+
+ expect(mr_diff.get_patch_id_sha).to eq(patch_id)
+ expect(mr_diff.reload.patch_id_sha).to eq(patch_id)
+ end
+
+ context 'when base_sha is nil' do
+ before do
+ mr_diff.update!(patch_id_sha: nil)
+ allow(mr_diff).to receive(:base_commit_sha).and_return(nil)
+ end
+
+ it 'returns nil' do
+ expect(mr_diff.reload.get_patch_id_sha).to be_nil
+ end
+ end
+
+ context 'when head_sha is nil' do
+ before do
+ mr_diff.update!(patch_id_sha: nil)
+ allow(mr_diff).to receive(:head_commit_sha).and_return(nil)
+ end
+
+ it 'returns nil' do
+ expect(mr_diff.reload.get_patch_id_sha).to be_nil
+ end
+ end
+
+ context 'when base_sha and head_sha dont match' do
+ before do
+ mr_diff.update!(patch_id_sha: nil)
+ allow(mr_diff).to receive(:head_commit_sha).and_return('123123')
+ allow(mr_diff).to receive(:base_commit_sha).and_return('43121')
+ end
+
+ it 'returns nil' do
+ expect(mr_diff.reload.get_patch_id_sha).to be_nil
+ end
+ end
+ end
+ end
+
describe '#save_diffs' do
it 'saves collected state' do
mr_diff = create(:merge_request).merge_request_diff
@@ -825,6 +909,57 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
expect(diff_file.diff).to include(content)
end
end
+
+ context 'handling generated files' do
+ let(:project) { create(:project, :repository) }
+ let(:target_branch) { project.default_branch }
+ let(:source_branch) { 'test-generated-diff-file' }
+ let(:generated_file_name) { 'generated.txt' }
+ let(:regular_file_name) { 'regular.rb' }
+ let(:merge_request) do
+ create(
+ :merge_request,
+ target_project: project,
+ source_project: project,
+ source_branch: source_branch,
+ target_branch: target_branch
+ )
+ end
+
+ let(:diff_files) do
+ merge_request.merge_request_diff.merge_request_diff_files
+ end
+
+ before do
+ project.repository.update_file(
+ project.creator,
+ '.gitattributes',
+ '*.txt gitlab-generated',
+ message: 'Update',
+ branch_name: target_branch)
+
+ create_file_in_repo(project, target_branch, source_branch, generated_file_name, "generated text\n")
+ create_file_in_repo(project, source_branch, source_branch, regular_file_name, "something else\n")
+ end
+
+ context 'with collapse_generated_diff_files feature flag' do
+ it 'sets generated field correctly' do
+ expect(diff_files.find_by(new_path: generated_file_name)).to be_generated
+ expect(diff_files.find_by(new_path: regular_file_name)).not_to be_generated
+ end
+ end
+
+ context 'without collapse_generated_diff_files feature flag' do
+ before do
+ stub_feature_flags(collapse_generated_diff_files: false)
+ end
+
+ it 'sets generated field correctly' do
+ expect(diff_files.find_by(new_path: generated_file_name)).not_to be_generated
+ expect(diff_files.find_by(new_path: regular_file_name)).not_to be_generated
+ end
+ end
+ end
end
end
@@ -905,6 +1040,7 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
include_examples 'merge request diffs'
it 'stores up-to-date diffs in the database' do
+ diff = merge_request.merge_request_diff.reload
expect(diff).not_to be_stored_externally
end
@@ -921,7 +1057,8 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
it 'stores diffs for old MR versions in external storage' do
- old_diff = diff
+ old_diff = merge_request.merge_request_diff.reload
+
merge_request.create_merge_request_diff
old_diff.migrate_files_to_external_storage!
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 1c6a29f065f..2b5f4165d8c 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -404,7 +404,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
- context "with the skip_validations_during_transition_feature_flag" do
+ context "when transitioning between states" do
let(:merge_request) { build(:merge_request, transitioning: transitioning) }
where(:transitioning, :to_or_not_to) do
@@ -6026,47 +6026,11 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
subject(:current_patch_id_sha) { merge_request.current_patch_id_sha }
before do
- allow(merge_request).to receive(:merge_request_diff).and_return(merge_request_diff)
+ allow(merge_request).to receive(:latest_merge_request_diff).and_return(merge_request_diff)
allow(merge_request_diff).to receive(:patch_id_sha).and_return(patch_id)
end
it { is_expected.to eq(patch_id) }
-
- context 'when related merge_request_diff does not have a patch_id_sha' do
- let(:diff_refs) { instance_double(Gitlab::Diff::DiffRefs, base_sha: base_sha, head_sha: head_sha) }
- let(:base_sha) { 'abc123' }
- let(:head_sha) { 'def456' }
-
- before do
- allow(merge_request_diff).to receive(:patch_id_sha).and_return(nil)
- allow(merge_request).to receive(:diff_refs).and_return(diff_refs)
-
- allow(merge_request.project.repository)
- .to receive(:get_patch_id)
- .with(diff_refs.base_sha, diff_refs.head_sha)
- .and_return(patch_id)
- end
-
- it { is_expected.to eq(patch_id) }
-
- context 'when base_sha is nil' do
- let(:base_sha) { nil }
-
- it { is_expected.to be_nil }
- end
-
- context 'when head_sha is nil' do
- let(:head_sha) { nil }
-
- it { is_expected.to be_nil }
- end
-
- context 'when base_sha and head_sha match' do
- let(:head_sha) { base_sha }
-
- it { is_expected.to be_nil }
- end
- end
end
describe '#all_mergeability_checks_results' do
@@ -6138,4 +6102,29 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it { is_expected.to eq(false) }
end
end
+
+ describe '#allow_merge_without_pipeline?' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ subject(:result) { merge_request.allow_merge_without_pipeline? }
+
+ before do
+ allow(merge_request.project)
+ .to receive(:allow_merge_without_pipeline?)
+ .with(inherit_group_setting: true)
+ .and_return(allow_merge_without_pipeline?)
+ end
+
+ context 'when associated project allow_merge_without_pipeline? returns true' do
+ let(:allow_merge_without_pipeline?) { true }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when associated project allow_merge_without_pipeline? returns false' do
+ let(:allow_merge_without_pipeline?) { false }
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 15bcbb3962c..79b663807b4 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -772,4 +772,24 @@ RSpec.describe Milestone, feature_category: :team_planning do
it { is_expected.to eq(false) }
end
end
+
+ describe '.with_ids_or_title' do
+ subject(:milestones) { described_class.with_ids_or_title(ids: ids, title: title) }
+
+ let_it_be(:milestone1) { create(:milestone, title: 'Foo') }
+ let_it_be(:milestone2) { create(:milestone) }
+
+ let(:ids) { [milestone1.id] }
+ let(:title) { milestone2.title }
+
+ before do
+ # Milestones below should not be returned
+ create(:milestone, title: 'Bar')
+ create(:milestone, id: 10)
+ end
+
+ it 'returns milestones with matching id or title' do
+ expect(milestones).to contain_exactly(milestone1, milestone2)
+ end
+ end
end
diff --git a/spec/models/ml/candidate_spec.rb b/spec/models/ml/candidate_spec.rb
index d5b71e2c3f7..7b3dee2da7b 100644
--- a/spec/models/ml/candidate_spec.rb
+++ b/spec/models/ml/candidate_spec.rb
@@ -5,7 +5,12 @@ require 'spec_helper'
RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops do
let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params, :with_artifact, name: 'candidate0') }
let_it_be(:candidate2) do
- create(:ml_candidates, experiment: candidate.experiment, user: create(:user), name: 'candidate2')
+ create(:ml_candidates, experiment: candidate.experiment, name: 'candidate2', project: candidate.project)
+ end
+
+ let_it_be(:existing_model) { create(:ml_models, project: candidate2.project) }
+ let_it_be(:existing_model_version) do
+ create(:ml_model_versions, model: existing_model, candidate: candidate2)
end
let(:project) { candidate.project }
@@ -38,8 +43,8 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
describe 'validation' do
let_it_be(:model) { create(:ml_models, project: candidate.project) }
- let_it_be(:model_version1) { create(:ml_model_versions, model: model) }
- let_it_be(:model_version2) { create(:ml_model_versions, model: model) }
+ let_it_be(:model_version1) { create(:ml_model_versions, model: model, candidate: nil) }
+ let_it_be(:model_version2) { create(:ml_model_versions, model: model, candidate: nil) }
let_it_be(:validation_candidate) do
create(:ml_candidates, model_version: model_version1, project: candidate.project)
end
@@ -231,6 +236,14 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
end
end
+ describe '#without_model_version' do
+ subject { described_class.without_model_version }
+
+ it 'finds only candidates without model version' do
+ expect(subject).to match_array([candidate])
+ end
+ end
+
describe 'from_ci?' do
subject { candidate }
@@ -272,8 +285,8 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
context 'with loose foreign key on ml_candidates.ci_build_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:ci_build) }
- let!(:model) { create(:ml_candidates, ci_build: parent) }
+ let_it_be(:parent) { create(:ci_build) }
+ let_it_be(:model) { create(:ml_candidates, ci_build: parent) }
end
end
end
diff --git a/spec/models/ml/model_spec.rb b/spec/models/ml/model_spec.rb
index ae7c3f163f3..e1de44b0030 100644
--- a/spec/models/ml/model_spec.rb
+++ b/spec/models/ml/model_spec.rb
@@ -63,6 +63,19 @@ RSpec.describe Ml::Model, feature_category: :mlops do
end
end
+ describe 'candidates' do
+ let_it_be(:candidate1) { create(:ml_model_versions, model: existing_model).candidate }
+ let_it_be(:candidate2) do
+ create(:ml_candidates, experiment: existing_model.default_experiment, project: project1)
+ end
+
+ let_it_be(:candidate3) { create(:ml_candidates, project: project1) }
+
+ it 'returns only the candidates for default experiment that do not belong to a model version' do
+ expect(existing_model.candidates).to match_array([candidate2])
+ end
+ end
+
describe '.by_project' do
subject { described_class.by_project(project1) }
@@ -128,4 +141,27 @@ RSpec.describe Ml::Model, feature_category: :mlops do
it { is_expected.to be(nil) }
end
end
+
+ describe '#all_packages' do
+ it 'returns an empty array when no model versions exist' do
+ expect(existing_model.all_packages).to eq([])
+ end
+
+ it 'returns one package when a single model version exists' do
+ version = create(:ml_model_versions, :with_package, model: existing_model)
+
+ all_packages = existing_model.all_packages
+ expect(all_packages.length).to be(1)
+ expect(all_packages.first).to eq(version.package)
+ end
+
+ it 'returns multiple packages when multiple model versions exist' do
+ version1 = create(:ml_model_versions, :with_package, model: existing_model)
+ version2 = create(:ml_model_versions, :with_package, model: existing_model)
+
+ all_packages = existing_model.all_packages
+ expect(all_packages.length).to be(2)
+ expect(all_packages).to match_array([version1.package, version2.package])
+ end
+ end
end
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
index e6096bc9267..f06490d7999 100644
--- a/spec/models/namespace/package_setting_spec.rb
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -16,6 +16,9 @@ RSpec.describe Namespace::PackageSetting, feature_category: :package_registry do
it { is_expected.to validate_inclusion_of(:nuget_duplicates_allowed).in_array([true, false]) }
end
+ it { is_expected.to allow_value(true, false).for(:nuget_symbol_server_enabled) }
+ it { is_expected.not_to allow_value(nil).for(:nuget_symbol_server_enabled) }
+
describe 'regex values' do
let_it_be(:package_settings) { create(:namespace_package_setting) }
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index 4b66b7532a7..9a66b4745c0 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
describe '.for_namespace_ids' do
it 'returns only requested namespaces' do
stats = create_list(:namespace_root_storage_statistics, 3)
- namespace_ids = stats[0..1].map { |s| s.namespace_id }
+ namespace_ids = stats[0..1].map(&:namespace_id)
requested_stats = described_class.for_namespace_ids(namespace_ids).pluck(:namespace_id)
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 85569a68252..0e6513764f5 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -34,6 +34,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
it { is_expected.to have_many :namespace_members }
it { is_expected.to have_one :cluster_enabled_grant }
it { is_expected.to have_many(:work_items) }
+ it { is_expected.to have_many(:work_items_dates_source) }
it { is_expected.to have_many :achievements }
it { is_expected.to have_many(:namespace_commit_emails).class_name('Users::NamespaceCommitEmail') }
it { is_expected.to have_many(:cycle_analytics_stages) }
@@ -82,7 +83,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
describe 'validations' do
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
- it { is_expected.to validate_length_of(:description).is_at_most(255) }
+ it { is_expected.to validate_length_of(:description).is_at_most(500) }
it { is_expected.to validate_presence_of(:path) }
it { is_expected.to validate_length_of(:path).is_at_most(255) }
it { is_expected.to validate_presence_of(:owner) }
@@ -2387,8 +2388,8 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
context 'with loose foreign key on organization_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:organization) }
- let!(:model) { create(:namespace, organization: parent) }
+ let_it_be(:parent) { create(:organization) }
+ let_it_be(:model) { create(:namespace, organization: parent) }
end
end
end
diff --git a/spec/models/notification_recipient_spec.rb b/spec/models/notification_recipient_spec.rb
index f19c0a68f87..65bf7aec269 100644
--- a/spec/models/notification_recipient_spec.rb
+++ b/spec/models/notification_recipient_spec.rb
@@ -440,8 +440,8 @@ RSpec.describe NotificationRecipient, feature_category: :team_planning do
described_class.new(user, :participating, custom_action: :issue_due, target: target, project: project)
end
- it 'returns true' do
- expect(recipient.suitable_notification_level?).to eq true
+ it 'returns false' do
+ expect(recipient.suitable_notification_level?).to eq false
end
end
end
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index 9bf95051730..cb1bbb91a67 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -193,7 +193,11 @@ RSpec.describe NotificationSetting do
end
it 'includes EXCLUDED_WATCHER_EVENTS' do
- expect(subject).to include(*described_class::EXCLUDED_WATCHER_EVENTS)
+ expect(subject).to include(
+ :push_to_merge_request,
+ :issue_due,
+ :success_pipeline
+ )
end
end
@@ -224,8 +228,8 @@ RSpec.describe NotificationSetting do
context 'with loose foreign key on notification_settings.user_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:user) }
- let!(:model) { create(:notification_setting, user: parent) }
+ let_it_be(:parent) { create(:user) }
+ let_it_be(:model) { create(:notification_setting, user: parent) }
end
end
end
diff --git a/spec/models/onboarding/progress_spec.rb b/spec/models/onboarding/progress_spec.rb
index c45d8c97385..9c5a55d3313 100644
--- a/spec/models/onboarding/progress_spec.rb
+++ b/spec/models/onboarding/progress_spec.rb
@@ -292,28 +292,4 @@ RSpec.describe Onboarding::Progress do
it { is_expected.to eq(:subscription_created_at) }
end
-
- describe '#number_of_completed_actions' do
- subject do
- build(:onboarding_progress, actions.map { |x| { x => Time.current } }.inject(:merge)).number_of_completed_actions
- end
-
- context 'with 0 completed actions' do
- let(:actions) { [:created_at, :updated_at] }
-
- it { is_expected.to eq(0) }
- end
-
- context 'with 1 completed action' do
- let(:actions) { [:created_at, :subscription_created_at] }
-
- it { is_expected.to eq(1) }
- end
-
- context 'with 2 completed actions' do
- let(:actions) { [:subscription_created_at, :git_write_at] }
-
- it { is_expected.to eq(2) }
- end
- end
end
diff --git a/spec/models/organizations/organization_detail_spec.rb b/spec/models/organizations/organization_detail_spec.rb
new file mode 100644
index 00000000000..3f44a9cc637
--- /dev/null
+++ b/spec/models/organizations/organization_detail_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::OrganizationDetail, type: :model, feature_category: :cell do
+ describe 'associations' do
+ it { is_expected.to belong_to(:organization).inverse_of(:organization_detail) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:organization) }
+ it { is_expected.to validate_length_of(:description).is_at_most(1024) }
+ end
+
+ it_behaves_like Avatarable do
+ let(:model) { create(:organization_detail) }
+ end
+
+ context 'with uploads' do
+ it_behaves_like 'model with uploads', false do
+ let(:model_object) { create(:organization_detail) }
+ let(:upload_attribute) { :avatar }
+ let(:uploader_class) { AttachmentUploader }
+ end
+ end
+end
diff --git a/spec/models/organizations/organization_spec.rb b/spec/models/organizations/organization_spec.rb
index 0670002135c..756024b6437 100644
--- a/spec/models/organizations/organization_spec.rb
+++ b/spec/models/organizations/organization_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
let_it_be(:default_organization) { create(:organization, :default) }
describe 'associations' do
+ it { is_expected.to have_one(:organization_detail).inverse_of(:organization).autosave(true) }
+
it { is_expected.to have_many :namespaces }
it { is_expected.to have_many :groups }
it { is_expected.to have_many(:users).through(:organization_users).inverse_of(:organizations) }
@@ -21,6 +23,7 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_presence_of(:path) }
it { is_expected.to validate_length_of(:path).is_at_least(2).is_at_most(255) }
+ it { is_expected.to validate_uniqueness_of(:path).case_insensitive }
describe 'path validator' do
using RSpec::Parameterized::TableSyntax
@@ -54,6 +57,16 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
end
end
+ describe 'delegations' do
+ it { is_expected.to delegate_method(:description).to(:organization_detail) }
+ it { is_expected.to delegate_method(:avatar).to(:organization_detail) }
+ it { is_expected.to delegate_method(:avatar_url).to(:organization_detail) }
+ end
+
+ describe 'nested attributes' do
+ it { is_expected.to accept_nested_attributes_for(:organization_detail) }
+ end
+
context 'when using scopes' do
describe '.without_default' do
it 'excludes default organization' do
@@ -120,6 +133,13 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
end
end
+ describe '#organization_detail' do
+ it 'ensures organization has organization_detail upon initialization' do
+ expect(organization.organization_detail).to be_present
+ expect(organization.organization_detail).not_to be_persisted
+ end
+ end
+
describe '#default?' do
context 'when organization is default' do
it 'returns true' do
diff --git a/spec/models/packages/nuget/symbol_spec.rb b/spec/models/packages/nuget/symbol_spec.rb
index f43f3a3bdeb..bae8f90c7d5 100644
--- a/spec/models/packages/nuget/symbol_spec.rb
+++ b/spec/models/packages/nuget/symbol_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Packages::Nuget::Symbol, type: :model, feature_category: :package
it { is_expected.to be_a FileStoreMounter }
it { is_expected.to be_a ShaAttribute }
+ it { is_expected.to be_a Packages::Destructible }
describe 'relationships' do
it { is_expected.to belong_to(:package).inverse_of(:nuget_symbols) }
@@ -26,6 +27,63 @@ RSpec.describe Packages::Nuget::Symbol, type: :model, feature_category: :package
it { is_expected.to delegate_method(:project_id).to(:package) }
end
+ describe 'scopes' do
+ describe '.stale' do
+ subject { described_class.stale }
+
+ let_it_be(:symbol) { create(:nuget_symbol) }
+ let_it_be(:stale_symbol) { create(:nuget_symbol, :stale) }
+
+ it { is_expected.to contain_exactly(stale_symbol) }
+ end
+
+ describe '.pending_destruction' do
+ subject { described_class.pending_destruction }
+
+ let_it_be(:symbol) { create(:nuget_symbol, :stale, :processing) }
+ let_it_be(:stale_symbol) { create(:nuget_symbol, :stale) }
+
+ it { is_expected.to contain_exactly(stale_symbol) }
+ end
+
+ describe '.with_signature' do
+ subject(:with_signature) { described_class.with_signature(signature) }
+
+ let_it_be(:signature) { 'signature' }
+ let_it_be(:symbol) { create(:nuget_symbol, signature: signature) }
+
+ it 'returns symbols with the given signature' do
+ expect(with_signature).to eq([symbol])
+ end
+ end
+
+ describe '.with_file_name' do
+ subject(:with_file_name) { described_class.with_file_name(file_name) }
+
+ let_it_be(:file_name) { 'file_name' }
+ let_it_be(:symbol) { create(:nuget_symbol) }
+
+ before do
+ symbol.update_column(:file, file_name)
+ end
+
+ it 'returns symbols with the given file_name' do
+ expect(with_file_name).to eq([symbol])
+ end
+ end
+
+ describe '.with_file_sha256' do
+ subject(:with_file_sha256) { described_class.with_file_sha256(checksums) }
+
+ let_it_be(:checksums) { OpenSSL::Digest.hexdigest('SHA256', 'checksums') }
+ let_it_be(:symbol) { create(:nuget_symbol, file_sha256: checksums) }
+
+ it 'returns symbols with the given checksums' do
+ expect(with_file_sha256).to eq([symbol])
+ end
+ end
+ end
+
describe 'callbacks' do
describe 'before_validation' do
describe '#set_object_storage_key' do
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 8e3b97e55f3..0ed6f058768 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -1355,6 +1355,30 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
+ describe '#sync_npm_metadata_cache' do
+ let_it_be(:package) { create(:npm_package) }
+
+ subject { package.sync_npm_metadata_cache }
+
+ it 'enqueues a sync worker job' do
+ expect(::Packages::Npm::CreateMetadataCacheWorker)
+ .to receive(:perform_async).with(package.project_id, package.name)
+
+ subject
+ end
+
+ context 'with a non npm package' do
+ let_it_be(:package) { create(:maven_package) }
+
+ it 'does not enqueue a sync worker job' do
+ expect(::Packages::Npm::CreateMetadataCacheWorker)
+ .not_to receive(:perform_async)
+
+ subject
+ end
+ end
+ end
+
describe '#mark_package_files_for_destruction' do
let_it_be(:package) { create(:npm_package, :pending_destruction) }
diff --git a/spec/models/packages/tag_spec.rb b/spec/models/packages/tag_spec.rb
index 6842d1946e5..2d045615756 100644
--- a/spec/models/packages/tag_spec.rb
+++ b/spec/models/packages/tag_spec.rb
@@ -1,9 +1,10 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe Packages::Tag, type: :model, feature_category: :package_registry do
- let!(:project) { create(:project) }
- let!(:package) { create(:npm_package, version: '1.0.2', project: project, updated_at: 3.days.ago) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package) { create(:npm_package, version: '1.0.2', project: project, updated_at: 3.days.ago) }
describe '#ensure_project_id' do
it 'sets the project_id before saving' do
@@ -83,4 +84,25 @@ RSpec.describe Packages::Tag, type: :model, feature_category: :package_registry
it { is_expected.to contain_exactly(tag1, tag3) }
end
end
+
+ describe '.for_package_ids_with_distinct_names' do
+ let_it_be(:package2) { create(:package, project: project) }
+ let_it_be(:package3) { create(:package, project: project) }
+ let_it_be(:tag1) { create(:packages_tag, name: 'latest', package: package, updated_at: 4.days.ago) }
+ let_it_be(:tag2) { create(:packages_tag, name: 'latest', package: package2, updated_at: 3.days.ago) }
+ let_it_be(:tag3) { create(:packages_tag, name: 'latest', package: package2, updated_at: 2.days.ago) }
+ let_it_be(:tag4) { create(:packages_tag, name: 'tag4', package: package3, updated_at: 5.days.ago) }
+ let_it_be(:tag5) { create(:packages_tag, name: 'tag5', package: package3, updated_at: 4.days.ago) }
+ let_it_be(:tag6) { create(:packages_tag, name: 'tag6', package: package3, updated_at: 6.days.ago) }
+
+ subject { described_class.for_package_ids_with_distinct_names(project.packages) }
+
+ before do
+ stub_const("#{described_class}::FOR_PACKAGES_TAGS_LIMIT", 3)
+ end
+
+ # `tag3` is returned because it's the most recently updated with the name `latest`.
+ # `tag5` is returned before `tag4` because it was updated more recently than `tag4`.
+ it { is_expected.to eq([tag3, tag5, tag4]) }
+ end
end
diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb
index 570c369016b..ccca6e7a48a 100644
--- a/spec/models/pages/lookup_path_spec.rb
+++ b/spec/models/pages/lookup_path_spec.rb
@@ -3,19 +3,38 @@
require 'spec_helper'
RSpec.describe Pages::LookupPath, feature_category: :pages do
- let(:project) { create(:project, :pages_private, pages_https_only: true) }
let(:trim_prefix) { nil }
- let(:domain) { nil }
+ let(:path_prefix) { nil }
+ let(:file_store) { ::ObjectStorage::Store::REMOTE }
+ let(:group) { build(:group, path: 'mygroup') }
+ let(:deployment) do
+ build(
+ :pages_deployment,
+ id: 1,
+ project: project,
+ path_prefix: path_prefix,
+ file_store: file_store)
+ end
+
+ let(:project) do
+ build(
+ :project,
+ :pages_private,
+ group: group,
+ path: 'myproject',
+ pages_https_only: true)
+ end
- subject(:lookup_path) { described_class.new(project, trim_prefix: trim_prefix, domain: domain) }
+ subject(:lookup_path) { described_class.new(deployment: deployment, trim_prefix: trim_prefix) }
before do
stub_pages_setting(
+ enabled: true,
access_control: true,
external_https: ["1.1.1.1:443"],
url: 'http://example.com',
- protocol: 'http'
- )
+ protocol: 'http')
+
stub_pages_object_storage(::Pages::DeploymentUploader)
end
@@ -32,7 +51,11 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
end
describe '#https_only' do
+ subject(:lookup_path) { described_class.new(deployment: deployment, domain: domain) }
+
context 'when no domain provided' do
+ let(:domain) { nil }
+
it 'delegates to Project#pages_https_only?' do
expect(lookup_path.https_only).to eq(true)
end
@@ -48,66 +71,55 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
end
describe '#source' do
- let(:source) { lookup_path.source }
-
- it 'returns nil' do
- expect(source).to eq(nil)
+ it 'uses deployment from object storage', :freeze_time do
+ expect(lookup_path.source).to eq(
+ type: 'zip',
+ path: deployment.file.url(expire_at: 1.day.from_now),
+ global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
+ sha256: deployment.file_sha256,
+ file_size: deployment.size,
+ file_count: deployment.file_count
+ )
end
- context 'when there is pages deployment' do
- let!(:deployment) { create(:pages_deployment, project: project) }
-
- it 'uses deployment from object storage' do
- freeze_time do
- expect(source).to eq(
- type: 'zip',
- path: deployment.file.url(expire_at: 1.day.from_now),
- global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
- sha256: deployment.file_sha256,
- file_size: deployment.size,
- file_count: deployment.file_count
- )
- end
- end
+ it 'does not recreate source hash' do
+ expect(deployment.file).to receive(:url_or_file_path).once
+
+ 2.times { lookup_path.source }
+ end
- context 'when deployment is in the local storage' do
- before do
- deployment.file.migrate!(::ObjectStorage::Store::LOCAL)
- end
-
- it 'uses file protocol' do
- freeze_time do
- expect(source).to eq(
- type: 'zip',
- path: "file://#{deployment.file.path}",
- global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
- sha256: deployment.file_sha256,
- file_size: deployment.size,
- file_count: deployment.file_count
- )
- end
- end
+ context 'when deployment is in the local storage' do
+ let(:file_store) { ::ObjectStorage::Store::LOCAL }
+
+ it 'uses file protocol', :freeze_time do
+ expect(lookup_path.source).to eq(
+ type: 'zip',
+ path: "file://#{deployment.file.path}",
+ global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
+ sha256: deployment.file_sha256,
+ file_size: deployment.size,
+ file_count: deployment.file_count
+ )
end
end
end
describe '#prefix' do
- let(:trim_prefix) { 'mygroup' }
-
- context 'when pages group root projects' do
- let(:project) { instance_double(Project, full_path: "namespace/namespace.example.com") }
+ using RSpec::Parameterized::TableSyntax
- it 'returns "/"' do
- expect(lookup_path.prefix).to eq('/')
- end
+ where(:full_path, :trim_prefix, :path_prefix, :result) do
+ 'mygroup/myproject' | nil | nil | '/'
+ 'mygroup/myproject' | 'mygroup' | nil | '/myproject/'
+ 'mygroup/myproject' | nil | 'PREFIX' | '/PREFIX/'
+ 'mygroup/myproject' | 'mygroup' | 'PREFIX' | '/myproject/PREFIX/'
end
- context 'when pages in the given prefix' do
- let(:project) { instance_double(Project, full_path: 'mygroup/myproject') }
-
- it 'returns the project full path with the provided prefix removed' do
- expect(lookup_path.prefix).to eq('/myproject/')
+ with_them do
+ before do
+ allow(project).to receive(:full_path).and_return(full_path)
end
+
+ it { expect(lookup_path.prefix).to eq(result) }
end
end
@@ -122,6 +134,16 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
end
end
+ context 'when namespace_in_path is enabled' do
+ before do
+ stub_pages_setting(namespace_in_path: true)
+ end
+
+ it 'returns nil' do
+ expect(lookup_path.unique_host).to be_nil
+ end
+ end
+
context 'when unique domain is enabled' do
it 'returns the project unique domain' do
project.project_setting.pages_unique_domain_enabled = true
@@ -129,26 +151,12 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
expect(lookup_path.unique_host).to eq('unique-domain.example.com')
end
-
- context 'when there is domain provided' do
- let(:domain) { instance_double(PagesDomain) }
-
- it 'returns nil' do
- expect(lookup_path.unique_host).to eq(nil)
- end
- end
end
end
describe '#root_directory' do
- context 'when there is no deployment' do
- it 'returns nil' do
- expect(lookup_path.root_directory).to be_nil
- end
- end
-
context 'when there is a deployment' do
- let!(:deployment) { create(:pages_deployment, project: project, root_directory: 'foo') }
+ let(:deployment) { build_stubbed(:pages_deployment, project: project, root_directory: 'foo') }
it 'returns the deployment\'s root_directory' do
expect(lookup_path.root_directory).to eq('foo')
diff --git a/spec/models/pages/virtual_domain_spec.rb b/spec/models/pages/virtual_domain_spec.rb
index 02e3fd67f2d..5925b662ee8 100644
--- a/spec/models/pages/virtual_domain_spec.rb
+++ b/spec/models/pages/virtual_domain_spec.rb
@@ -3,9 +3,30 @@
require 'spec_helper'
RSpec.describe Pages::VirtualDomain, feature_category: :pages do
+ let(:domain) { nil }
+ let(:trim_prefix) { nil }
+
+ let_it_be(:group) { create(:group, path: 'mygroup') }
+ let_it_be(:project_a) { create(:project, group: group) }
+ let_it_be(:project_a_main_deployment) { create(:pages_deployment, project: project_a, path_prefix: nil) }
+ let_it_be(:project_a_versioned_deployment) { create(:pages_deployment, project: project_a, path_prefix: 'v1') }
+ let_it_be(:project_b) { create(:project, group: group) }
+ let_it_be(:project_b_main_deployment) { create(:pages_deployment, project: project_b, path_prefix: nil) }
+ let_it_be(:project_b_versioned_deployment) { create(:pages_deployment, project: project_b, path_prefix: 'v1') }
+ let_it_be(:project_c) { create(:project, group: group) }
+ let_it_be(:project_c_main_deployment) { create(:pages_deployment, project: project_c, path_prefix: nil) }
+ let_it_be(:project_c_versioned_deployment) { create(:pages_deployment, project: project_c, path_prefix: 'v1') }
+
+ before_all do
+ # Those deployments are created to ensure that deactivated deployments won't be returned on the queries
+ deleted_at = 1.hour.ago
+ create(:pages_deployment, project: project_a, path_prefix: 'v2', deleted_at: deleted_at)
+ create(:pages_deployment, project: project_b, path_prefix: 'v2', deleted_at: deleted_at)
+ create(:pages_deployment, project: project_c, path_prefix: 'v2', deleted_at: deleted_at)
+ end
+
describe '#certificate and #key pair' do
- let(:domain) { nil }
- let(:project) { instance_double(Project) }
+ let(:project) { project_a }
subject(:virtual_domain) { described_class.new(projects: [project], domain: domain) }
@@ -25,51 +46,52 @@ RSpec.describe Pages::VirtualDomain, feature_category: :pages do
end
describe '#lookup_paths' do
- let(:domain) { nil }
- let(:trim_prefix) { nil }
- let(:project_a) { instance_double(Project) }
- let(:project_b) { instance_double(Project) }
- let(:project_c) { instance_double(Project) }
- let(:pages_lookup_path_a) { instance_double(Pages::LookupPath, prefix: 'aaa', source: { type: 'zip', path: 'https://example.com' }) }
- let(:pages_lookup_path_b) { instance_double(Pages::LookupPath, prefix: 'bbb', source: { type: 'zip', path: 'https://example.com' }) }
- let(:pages_lookup_path_without_source) { instance_double(Pages::LookupPath, prefix: 'ccc', source: nil) }
+ let(:project_list) { [project_a, project_b, project_c] }
subject(:virtual_domain) do
described_class.new(projects: project_list, domain: domain, trim_prefix: trim_prefix)
end
- before do
- allow(Pages::LookupPath)
- .to receive(:new)
- .with(project_a, domain: domain, trim_prefix: trim_prefix)
- .and_return(pages_lookup_path_a)
-
- allow(Pages::LookupPath)
- .to receive(:new)
- .with(project_b, domain: domain, trim_prefix: trim_prefix)
- .and_return(pages_lookup_path_b)
-
- allow(Pages::LookupPath)
- .to receive(:new)
- .with(project_c, domain: domain, trim_prefix: trim_prefix)
- .and_return(pages_lookup_path_without_source)
- end
+ context 'when pages multiple versions is disabled' do
+ before do
+ allow(::Gitlab::Pages)
+ .to receive(:multiple_versions_enabled_for?)
+ .and_return(false)
+ end
- context 'when there is pages domain provided' do
- let(:domain) { instance_double(PagesDomain) }
- let(:project_list) { [project_a, project_b, project_c] }
+ it 'returns only the main deployments for each project' do
+ global_ids = virtual_domain.lookup_paths.map do |lookup_path|
+ lookup_path.source[:global_id]
+ end
- it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
- expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_b, pages_lookup_path_a])
+ expect(global_ids).to match_array([
+ project_a_main_deployment.to_gid.to_s,
+ project_b_main_deployment.to_gid.to_s,
+ project_c_main_deployment.to_gid.to_s
+ ])
end
end
- context 'when there is trim_prefix provided' do
- let(:trim_prefix) { 'group/' }
- let(:project_list) { [project_a, project_b] }
+ context 'when pages multiple versions is enabled' do
+ before do
+ allow(::Gitlab::Pages)
+ .to receive(:multiple_versions_enabled_for?)
+ .and_return(true)
+ end
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
- expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_b, pages_lookup_path_a])
+ global_ids = virtual_domain.lookup_paths.map do |lookup_path|
+ lookup_path.source[:global_id]
+ end
+
+ expect(global_ids).to match_array([
+ project_a_main_deployment.to_gid.to_s,
+ project_a_versioned_deployment.to_gid.to_s,
+ project_b_main_deployment.to_gid.to_s,
+ project_b_versioned_deployment.to_gid.to_s,
+ project_c_main_deployment.to_gid.to_s,
+ project_c_versioned_deployment.to_gid.to_s
+ ])
end
end
end
diff --git a/spec/models/preloaders/runner_manager_policy_preloader_spec.rb b/spec/models/preloaders/runner_manager_policy_preloader_spec.rb
index 1977e2c5787..b1950273380 100644
--- a/spec/models/preloaders/runner_manager_policy_preloader_spec.rb
+++ b/spec/models/preloaders/runner_manager_policy_preloader_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Preloaders::RunnerManagerPolicyPreloader, feature_category: :runner_fleet do
+RSpec.describe Preloaders::RunnerManagerPolicyPreloader, feature_category: :fleet_visibility do
let_it_be(:user) { create(:user) }
let_it_be(:runner1) { create(:ci_runner) }
let_it_be(:runner2) { create(:ci_runner) }
diff --git a/spec/models/product_analytics_event_spec.rb b/spec/models/product_analytics_event_spec.rb
deleted file mode 100644
index 801e6dd5e10..00000000000
--- a/spec/models/product_analytics_event_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe ProductAnalyticsEvent, type: :model do
- it { is_expected.to belong_to(:project) }
- it { expect(described_class).to respond_to(:order_by_time) }
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:project_id) }
- it { is_expected.to validate_presence_of(:event_id) }
- it { is_expected.to validate_presence_of(:v_collector) }
- it { is_expected.to validate_presence_of(:v_etl) }
- end
-
- describe '.timerange' do
- let_it_be(:event_1) { create(:product_analytics_event, collector_tstamp: Time.zone.now - 1.day) }
- let_it_be(:event_2) { create(:product_analytics_event, collector_tstamp: Time.zone.now - 5.days) }
- let_it_be(:event_3) { create(:product_analytics_event, collector_tstamp: Time.zone.now - 15.days) }
-
- it { expect(described_class.timerange(3.days)).to match_array([event_1]) }
- it { expect(described_class.timerange(7.days)).to match_array([event_1, event_2]) }
- it { expect(described_class.timerange(30.days)).to match_array([event_1, event_2, event_3]) }
- end
-
- describe '.count_by_graph' do
- let_it_be(:events) do
- [
- create(:product_analytics_event, platform: 'web'),
- create(:product_analytics_event, platform: 'web'),
- create(:product_analytics_event, platform: 'app'),
- create(:product_analytics_event, platform: 'mobile', collector_tstamp: Time.zone.now - 10.days)
- ]
- end
-
- it { expect(described_class.count_by_graph('platform', 7.days)).to eq({ 'app' => 1, 'web' => 2 }) }
- it { expect(described_class.count_by_graph('platform', 30.days)).to eq({ 'app' => 1, 'mobile' => 1, 'web' => 2 }) }
- end
-
- describe '.count_collector_tstamp_by_day' do
- let_it_be(:time_now) { Time.zone.now }
- let_it_be(:time_ago) { Time.zone.now - 5.days }
-
- let_it_be(:events) do
- create_list(:product_analytics_event, 3, collector_tstamp: time_now) +
- create_list(:product_analytics_event, 2, collector_tstamp: time_ago)
- end
-
- subject { described_class.count_collector_tstamp_by_day(7.days) }
-
- it { is_expected.to eq({ time_now.beginning_of_day => 3, time_ago.beginning_of_day => 2 }) }
- end
-end
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index a5f29fcbe8b..00376e1a871 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -34,7 +34,12 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
end
context 'with duplicate user and project authorization' do
- subject { project_auth.dup }
+ subject do
+ project_auth.dup.tap do |auth|
+ auth.project = project_1
+ auth.user = user
+ end
+ end
it { is_expected.to be_invalid }
@@ -52,6 +57,8 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
context 'with multiple access levels for the same user and project' do
subject do
project_auth.dup.tap do |auth|
+ auth.project = project_1
+ auth.user = user
auth.access_level = Gitlab::Access::MAINTAINER
end
end
@@ -103,6 +110,23 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
end
end
+ describe '.owners' do
+ let_it_be(:project_original_owner_authorization) { project.owner.project_authorizations.first }
+ let_it_be(:project_authorization_owner) { create(:project_authorization, :owner, project: project) }
+
+ before_all do
+ create(:project_authorization, :guest, project: project)
+ create(:project_authorization, :developer, project: project)
+ end
+
+ it 'returns all records which only have Owners access' do
+ expect(described_class.owners.map(&:attributes)).to match_array([
+ project_original_owner_authorization,
+ project_authorization_owner
+ ].map(&:attributes))
+ end
+ end
+
describe '.for_project' do
let_it_be(:project_2) { create(:project, namespace: user.namespace) }
let_it_be(:project_3) { create(:project, namespace: user.namespace) }
@@ -146,4 +170,11 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
end
end
+
+ context 'with loose foreign key on project_authorizations.user_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let_it_be(:parent) { create(:user) }
+ let_it_be(:model) { create(:project_authorization, user: parent) }
+ end
+ end
end
diff --git a/spec/models/project_authorizations/changes_spec.rb b/spec/models/project_authorizations/changes_spec.rb
index 5f4dd963fb3..d6ccfccbcbe 100644
--- a/spec/models/project_authorizations/changes_spec.rb
+++ b/spec/models/project_authorizations/changes_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
.with(data: project_data)
.and_return(project_event)
+ allow(::Gitlab::EventStore).to receive(:publish)
expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
end
@@ -44,9 +45,51 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
end
end
- shared_examples_for 'does not publishes AuthorizationsChangedEvent' do
- it 'does not publishes a AuthorizationsChangedEvent event' do
+ shared_examples_for 'publishes AuthorizationsRemovedEvent' do
+ it 'publishes a AuthorizationsRemovedEvent event with project id' do
+ project_ids.each do |project_id|
+ project_data = { project_id: project_id, user_ids: user_ids }
+ project_event = instance_double('::ProjectAuthorizations::AuthorizationsRemovedEvent', data: project_data)
+
+ allow(::ProjectAuthorizations::AuthorizationsRemovedEvent).to receive(:new)
+ .with(data: project_data)
+ .and_return(project_event)
+
+ allow(::Gitlab::EventStore).to receive(:publish)
+ expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
+ end
+
+ apply_project_authorization_changes
+ end
+
+ context 'when feature flag "user_approval_rules_removal" is disabled' do
+ before do
+ stub_feature_flags(user_approval_rules_removal: false)
+ end
+
+ it 'does not publish a AuthorizationsRemovedEvent event' do
+ expect(::Gitlab::EventStore).not_to(
+ receive(:publish).with(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
+ )
+
+ apply_project_authorization_changes
+ end
+ end
+ end
+
+ shared_examples_for 'does not publish AuthorizationsChangedEvent' do
+ it 'does not publish a AuthorizationsChangedEvent event' do
+ expect(::Gitlab::EventStore).not_to receive(:publish)
+ .with(an_instance_of(::ProjectAuthorizations::AuthorizationsChangedEvent))
+
+ apply_project_authorization_changes
+ end
+ end
+
+ shared_examples_for 'does not publish AuthorizationsRemovedEvent' do
+ it 'does not publish a AuthorizationsRemovedEvent event' do
expect(::Gitlab::EventStore).not_to receive(:publish)
+ .with(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
apply_project_authorization_changes
end
@@ -112,6 +155,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsRemovedEvent'
context 'when the GitLab installation does not have a replica database configured' do
before do
@@ -122,6 +166,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
end
@@ -133,6 +178,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
end
@@ -195,6 +241,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsRemovedEvent'
context 'when the GitLab installation does not have a replica database configured' do
before do
@@ -205,6 +252,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'removes project authorizations of the users in the current project, without a delay'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsRemovedEvent'
end
end
@@ -216,20 +264,23 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'removes project authorizations of the users in the current project, without a delay'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsRemovedEvent'
end
context 'when the user_ids list is empty' do
let(:user_ids) { [] }
it_behaves_like 'does not removes project authorizations of the users in the current project'
- it_behaves_like 'does not publishes AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
context 'when the user_ids list is nil' do
let(:user_ids) { nil }
it_behaves_like 'does not removes project authorizations of the users in the current project'
- it_behaves_like 'does not publishes AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
end
@@ -241,6 +292,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
let_it_be(:project_4) { create(:project) }
let(:project_ids) { [project_1.id, project_2.id, project_3.id] }
+ let(:user_ids) { [user.id] }
let(:project_authorization_changes) do
ProjectAuthorizations::Changes.new do |changes|
@@ -291,6 +343,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsRemovedEvent'
context 'when the GitLab installation does not have a replica database configured' do
before do
@@ -301,6 +354,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'removes project authorizations of projects from the current user, without a delay'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsRemovedEvent'
end
end
@@ -312,20 +366,23 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'removes project authorizations of projects from the current user, without a delay'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsRemovedEvent'
end
context 'when the project_ids list is empty' do
let(:project_ids) { [] }
it_behaves_like 'does not removes any project authorizations from the current user'
- it_behaves_like 'does not publishes AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
context 'when the user_ids list is nil' do
let(:project_ids) { nil }
it_behaves_like 'does not removes any project authorizations from the current user'
- it_behaves_like 'does not publishes AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsChangedEvent'
+ it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
end
end
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index c0a78ff2f53..149b0d4df8c 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe ProjectFeature, feature_category: :groups_and_projects do
specify { expect(subject.package_registry_access_level).to eq(ProjectFeature::ENABLED) }
specify { expect(subject.container_registry_access_level).to eq(ProjectFeature::ENABLED) }
specify { expect(subject.model_experiments_access_level).to eq(ProjectFeature::ENABLED) }
+ specify { expect(subject.model_registry_access_level).to eq(ProjectFeature::ENABLED) }
end
describe 'PRIVATE_FEATURES_MIN_ACCESS_LEVEL_FOR_PRIVATE_PROJECT' do
diff --git a/spec/models/project_group_link_spec.rb b/spec/models/project_group_link_spec.rb
index f141b8e83d6..bfbb7c91f47 100644
--- a/spec/models/project_group_link_spec.rb
+++ b/spec/models/project_group_link_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectGroupLink do
+RSpec.describe ProjectGroupLink, feature_category: :groups_and_projects do
describe "Associations" do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:project) }
@@ -18,6 +18,7 @@ RSpec.describe ProjectGroupLink do
it { is_expected.to validate_uniqueness_of(:group_id).scoped_to(:project_id).with_message(/already shared/) }
it { is_expected.to validate_presence_of(:group) }
it { is_expected.to validate_presence_of(:group_access) }
+ it { is_expected.to validate_inclusion_of(:group_access).in_array(Gitlab::Access.all_values) }
it "doesn't allow a project to be shared with the group it is in" do
project_group_link.group = group
@@ -30,12 +31,6 @@ RSpec.describe ProjectGroupLink do
expect(project_group_link).not_to be_valid
end
-
- it 'does not allow a project to be shared with `OWNER` access level' do
- project_group_link.group_access = Gitlab::Access::OWNER
-
- expect(project_group_link).not_to be_valid
- end
end
describe 'scopes' do
@@ -62,4 +57,27 @@ RSpec.describe ProjectGroupLink do
it { expect(described_class.search(group.name)).to eq([project_group_link]) }
it { expect(described_class.search('not-a-group-name')).to be_empty }
end
+
+ describe '#owner_access?' do
+ it 'returns true for links with OWNER access' do
+ link = create(:project_group_link, :owner)
+
+ expect(link.owner_access?).to eq(true)
+ end
+
+ it 'returns false for links without OWNER access' do
+ link = create(:project_group_link, :guest)
+
+ expect(link.owner_access?).to eq(false)
+ end
+ end
+
+ describe '#human_access' do
+ it 'delegates to Gitlab::Access' do
+ project_group_link = create(:project_group_link, :reporter)
+ expect(Gitlab::Access).to receive(:human_access).with(project_group_link.group_access).and_call_original
+
+ expect(project_group_link.human_access).to eq('Reporter')
+ end
+ end
end
diff --git a/spec/models/project_repository_spec.rb b/spec/models/project_repository_spec.rb
index eba908b0fdb..a38782cfd51 100644
--- a/spec/models/project_repository_spec.rb
+++ b/spec/models/project_repository_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectRepository do
+RSpec.describe ProjectRepository, feature_category: :source_code_management do
describe 'associations' do
it { is_expected.to belong_to(:shard) }
it { is_expected.to belong_to(:project) }
@@ -25,4 +25,28 @@ RSpec.describe ProjectRepository do
expect(described_class.find_project('@@unexisting/path/to/project')).to be_nil
end
end
+
+ describe '#object_format' do
+ subject { project_repository.object_format }
+
+ let(:project_repository) { build(:project_repository, object_format: object_format) }
+
+ context 'when object format is sha1' do
+ let(:object_format) { 'sha1' }
+
+ it { is_expected.to eq 'sha1' }
+ end
+
+ context 'when object format is sha256' do
+ let(:object_format) { 'sha256' }
+
+ it { is_expected.to eq 'sha256' }
+ end
+
+ context 'when object format is not set' do
+ let(:project_repository) { build(:project_repository) }
+
+ it { is_expected.to eq 'sha1' }
+ end
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 3ea5f6ea0ae..c256c4f10f8 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -50,6 +50,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to have_one(:catalog_resource) }
it { is_expected.to have_many(:ci_components).class_name('Ci::Catalog::Resources::Component') }
it { is_expected.to have_many(:catalog_resource_versions).class_name('Ci::Catalog::Resources::Version') }
+ it { is_expected.to have_many(:catalog_resource_sync_events).class_name('Ci::Catalog::Resources::SyncEvent') }
it { is_expected.to have_one(:microsoft_teams_integration) }
it { is_expected.to have_one(:mattermost_integration) }
it { is_expected.to have_one(:hangouts_chat_integration) }
@@ -1128,6 +1129,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:environments_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:model_experiments_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:model_registry_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:feature_flags_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:releases_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:infrastructure_access_level).to(:project_feature) }
@@ -2049,50 +2051,31 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- describe '#avatar_type' do
- let(:project) { create(:project) }
-
- it 'is true if avatar is image' do
- project.update_attribute(:avatar, 'uploads/avatar.png')
- expect(project.avatar_type).to be_truthy
- end
-
- it 'is false if avatar is html page' do
- project.update_attribute(:avatar, 'uploads/avatar.html')
- project.avatar_type
-
- expect(project.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
+ context 'with avatar' do
+ it_behaves_like Avatarable do
+ let(:model) { create(:project, :with_avatar) }
end
- end
- describe '#avatar_url' do
- subject { project.avatar_url }
+ describe '#avatar_url' do
+ subject { project.avatar_url }
- let(:project) { create(:project) }
+ let(:project) { create(:project) }
- context 'when avatar file is uploaded' do
- let(:project) { create(:project, :public, :with_avatar) }
+ context 'when avatar file in git' do
+ before do
+ allow(project).to receive(:avatar_in_git) { true }
+ end
- it 'shows correct url' do
- expect(project.avatar_url).to eq(project.avatar.url)
- expect(project.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, project.avatar.url].join)
- end
- end
+ let(:avatar_path) { "/#{project.full_path}/-/avatar" }
- context 'when avatar file in git' do
- before do
- allow(project).to receive(:avatar_in_git) { true }
+ it { is_expected.to eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" }
end
- let(:avatar_path) { "/#{project.full_path}/-/avatar" }
-
- it { is_expected.to eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" }
- end
-
- context 'when git repo is empty' do
- let(:project) { create(:project) }
+ context 'when git repo is empty' do
+ let(:project) { create(:project) }
- it { is_expected.to eq nil }
+ it { is_expected.to eq nil }
+ end
end
end
@@ -2176,19 +2159,9 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- describe '.with_jira_dvcs_cloud' do
- it 'returns the correct project' do
- jira_dvcs_cloud_project = create(:project, :jira_dvcs_cloud)
- create(:project, :jira_dvcs_server)
-
- expect(described_class.with_jira_dvcs_cloud).to contain_exactly(jira_dvcs_cloud_project)
- end
- end
-
describe '.with_jira_dvcs_server' do
it 'returns the correct project' do
jira_dvcs_server_project = create(:project, :jira_dvcs_server)
- create(:project, :jira_dvcs_cloud)
expect(described_class.with_jira_dvcs_server).to contain_exactly(jira_dvcs_server_project)
end
@@ -2470,17 +2443,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it 'returns custom email address' do
expect(subject).to eq(custom_email)
end
-
- context 'when feature flag service_desk_custom_email is disabled' do
- before do
- stub_feature_flags(service_desk_custom_email: false)
- end
-
- it 'returns custom email address' do
- # Don't check for a specific value. Just make sure it's not the custom email
- expect(subject).not_to eq(custom_email)
- end
- end
end
end
@@ -3102,23 +3064,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect(project.repository).not_to eq(previous_repository)
end
-
- context 'when "replicate_object_pool_on_move" FF is disabled' do
- before do
- stub_feature_flags(replicate_object_pool_on_move: false)
- end
-
- it 'does not update a memoized repository value' do
- previous_repository = project.repository
-
- allow(project).to receive(:disk_path).and_return('fancy/new/path')
- allow(project).to receive(:repository_storage).and_return('foo')
-
- project.track_project_repository
-
- expect(project.repository).to eq(previous_repository)
- end
- end
end
end
@@ -3151,7 +3096,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
it 'passes through default branch' do
- expect(project.repository).to receive(:create_repository).with('pineapple')
+ expect(project.repository).to receive(:create_repository).with('pineapple', object_format: nil)
expect(project.create_repository(default_branch: 'pineapple')).to eq(true)
end
@@ -3165,6 +3110,13 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
project.create_repository
end
end
+
+ context 'using a SHA256 repository' do
+ it 'creates the repository' do
+ expect(project.repository).to receive(:create_repository).with(nil, object_format: Repository::FORMAT_SHA256)
+ expect(project.create_repository(object_format: Repository::FORMAT_SHA256)).to eq(true)
+ end
+ end
end
describe '#ensure_repository' do
@@ -6949,12 +6901,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
let(:repository_storage) { shard_to.name }
before do
- stub_storage_settings(
- 'test_second_storage' => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- }
- )
+ stub_storage_settings('test_second_storage' => {})
project.update!(pool_repository: project_pool, repository_storage: repository_storage)
end
@@ -6969,14 +6916,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect { swap_pool_repository! }.to change { project.reload.pool_repository }.from(pool1).to(pool2)
end
- context 'when feature flag replicate_object_pool_on_move is disabled' do
- before do
- stub_feature_flags(replicate_object_pool_on_move: false)
- end
-
- it_behaves_like 'no pool repository swap'
- end
-
context 'when repository does not exist' do
let(:project) { build(:project) }
@@ -7071,18 +7010,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
subject
end
-
- context 'when feature flag replicate_object_pool_on_move is disabled' do
- before do
- stub_feature_flags(replicate_object_pool_on_move: false)
- end
-
- it 'links pool repository to project repository' do
- expect(pool).to receive(:link_repository).with(project.repository)
-
- subject
- end
- end
end
end
@@ -7963,14 +7890,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
end
-
- describe '#activity_path' do
- it 'returns the project activity_path' do
- expected_path = "/#{project.full_path}/activity"
-
- expect(project.activity_path).to eq(expected_path)
- end
- end
end
describe '#default_branch_or_main' do
@@ -9000,62 +8919,53 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- # TODO: Remove/update this spec after background syncing is implemented. See https://gitlab.com/gitlab-org/gitlab/-/issues/429376.
- describe '#update_catalog_resource' do
- let_it_be_with_reload(:project) { create(:project, name: 'My project name', description: 'My description') }
- let_it_be_with_reload(:resource) { create(:ci_catalog_resource, project: project) }
+ describe 'catalog resource process sync events worker' do
+ let_it_be_with_reload(:project) { create(:project, name: 'Test project', description: 'Test description') }
- shared_examples 'name, description, and visibility_level of the catalog resource match the project' do
- it do
- expect(project).to receive(:update_catalog_resource).once.and_call_original
-
- project.save!
+ context 'when the project has a catalog resource' do
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
- expect(resource.name).to eq(project.name)
- expect(resource.description).to eq(project.description)
- expect(resource.visibility_level).to eq(project.visibility_level)
- end
- end
+ context 'when project name is updated' do
+ it 'enqueues Ci::Catalog::Resources::ProcessSyncEventsWorker' do
+ expect(Ci::Catalog::Resources::ProcessSyncEventsWorker).to receive(:perform_async).once
- context 'when the project name is updated' do
- before do
- project.name = 'My new project name'
+ project.update!(name: 'New name')
+ end
end
- it_behaves_like 'name, description, and visibility_level of the catalog resource match the project'
- end
+ context 'when project description is updated' do
+ it 'enqueues Ci::Catalog::Resources::ProcessSyncEventsWorker' do
+ expect(Ci::Catalog::Resources::ProcessSyncEventsWorker).to receive(:perform_async).once
- context 'when the project description is updated' do
- before do
- project.description = 'My new description'
+ project.update!(description: 'New description')
+ end
end
- it_behaves_like 'name, description, and visibility_level of the catalog resource match the project'
- end
+ context 'when project visibility_level is updated' do
+ it 'enqueues Ci::Catalog::Resources::ProcessSyncEventsWorker' do
+ expect(Ci::Catalog::Resources::ProcessSyncEventsWorker).to receive(:perform_async).once
- context 'when the project visibility_level is updated' do
- before do
- project.visibility_level = 10
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ end
end
- it_behaves_like 'name, description, and visibility_level of the catalog resource match the project'
- end
-
- context 'when neither the project name, description, nor visibility_level are updated' do
- it 'does not call update_catalog_resource' do
- expect(project).not_to receive(:update_catalog_resource)
+ context 'when neither the project name, description, nor visibility_level are updated' do
+ it 'does not enqueue Ci::Catalog::Resources::ProcessSyncEventsWorker' do
+ expect(Ci::Catalog::Resources::ProcessSyncEventsWorker).not_to receive(:perform_async)
- project.update!(path: 'path')
+ project.update!(path: 'path')
+ end
end
end
context 'when the project does not have a catalog resource' do
- let_it_be(:project2) { create(:project) }
-
- it 'does not call update_catalog_resource' do
- expect(project2).not_to receive(:update_catalog_resource)
+ it 'does not enqueue Ci::Catalog::Resources::ProcessSyncEventsWorker' do
+ expect(Ci::Catalog::Resources::ProcessSyncEventsWorker).not_to receive(:perform_async)
- project.update!(name: 'name')
+ project.update!(
+ name: 'New name',
+ description: 'New description',
+ visibility_level: Gitlab::VisibilityLevel::INTERNAL)
end
end
end
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index dd7989244d4..211ac257c53 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe ProjectStatistics do
describe '.for_project_ids' do
it 'returns only requested projects' do
stats = create_list(:project_statistics, 3)
- project_ids = stats[0..1].map { |s| s.project_id }
- expected_ids = stats[0..1].map { |s| s.id }
+ project_ids = stats[0..1].map(&:project_id)
+ expected_ids = stats[0..1].map(&:id)
requested_stats = described_class.for_project_ids(project_ids).pluck(:id)
diff --git a/spec/models/projects/repository_storage_move_spec.rb b/spec/models/projects/repository_storage_move_spec.rb
index c5fbc92176f..afa9a8c4319 100644
--- a/spec/models/projects/repository_storage_move_spec.rb
+++ b/spec/models/projects/repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::RepositoryStorageMove, type: :model do
+RSpec.describe Projects::RepositoryStorageMove, type: :model, feature_category: :source_code_management do
it_behaves_like 'handles repository moves' do
let_it_be_with_refind(:container) { create(:project) }
diff --git a/spec/models/projects/topic_spec.rb b/spec/models/projects/topic_spec.rb
index b3a55ccd370..ebe53f3761d 100644
--- a/spec/models/projects/topic_spec.rb
+++ b/spec/models/projects/topic_spec.rb
@@ -96,31 +96,8 @@ RSpec.describe Projects::Topic do
end
end
- describe '#avatar_type' do
- it "is true if avatar is image" do
- topic.update_attribute(:avatar, 'uploads/avatar.png')
- expect(topic.avatar_type).to be_truthy
- end
-
- it "is false if avatar is html page" do
- topic.update_attribute(:avatar, 'uploads/avatar.html')
- topic.avatar_type
-
- expect(topic.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
- end
- end
-
- describe '#avatar_url' do
- context 'when avatar file is uploaded' do
- before do
- topic.update!(avatar: fixture_file_upload("spec/fixtures/dk.png"))
- end
-
- it 'shows correct avatar url' do
- expect(topic.avatar_url).to eq(topic.avatar.url)
- expect(topic.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, topic.avatar.url].join)
- end
- end
+ it_behaves_like Avatarable do
+ let(:model) { create(:topic, :with_avatar) }
end
describe '#title_or_name' do
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 164cef95cb6..bff9f73e44a 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -39,6 +39,24 @@ RSpec.describe Release, feature_category: :release_orchestration do
end
end
+ describe 'scopes' do
+ let_it_be(:another_project) { create(:project) }
+ let_it_be(:release) { create(:release, project: project, author: user, tag: 'v1') }
+ let_it_be(:another_release) { create(:release, project: another_project, tag: 'v2') }
+
+ describe '.for_projects' do
+ it 'returns releases for the given projects' do
+ expect(described_class.for_projects([project])).to eq([release])
+ end
+ end
+
+ describe '.by_tag' do
+ it 'returns releases with the given tag' do
+ expect(described_class.by_tag(release.tag)).to eq([release])
+ end
+ end
+ end
+
context 'when description of a release is longer than the limit' do
let(:description) { 'a' * (Gitlab::Database::MAX_TEXT_SIZE_LIMIT + 1) }
let(:release) { build(:release, project: project, description: description) }
@@ -86,6 +104,56 @@ RSpec.describe Release, feature_category: :release_orchestration do
end
end
+ describe '#update_catalog_resource' do
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_refind(:release) { create(:release, project: project, author: user) }
+
+ context 'when the project is a catalog resource' do
+ before_all do
+ create(:ci_catalog_resource, project: project)
+ end
+
+ context 'when released_at is updated' do
+ it 'calls update_catalog_resource' do
+ expect(release).to receive(:update_catalog_resource).once
+
+ release.update!(released_at: release.released_at + 1.day)
+ end
+ end
+
+ context 'when the release is destroyed' do
+ it 'calls update_catalog_resource' do
+ expect(release).to receive(:update_catalog_resource).once
+
+ release.destroy!
+ end
+ end
+ end
+
+ context 'when the project is not a catalog resource' do
+ it 'does not call update_catalog_resource' do
+ expect(release).not_to receive(:update_catalog_resource)
+
+ release.update!(released_at: release.released_at + 1.day)
+ release.destroy!
+ end
+ end
+ end
+
+ describe 'tagged' do
+ # We only test for empty string since there's a not null constraint at the database level
+ it 'does not return the tagless release' do
+ empty_string_tag = create(:release, tag: 'v99.0.0')
+ empty_string_tag.update_column(:tag, '')
+
+ expect(described_class.tagged).not_to include(empty_string_tag)
+ end
+
+ it 'does return the tagged releases' do
+ expect(described_class.tagged).to include(release)
+ end
+ end
+
describe 'latest releases' do
let_it_be(:yesterday) { Time.zone.now - 1.day }
let_it_be(:tomorrow) { Time.zone.now + 1.day }
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 606c4ea05b9..eeb0bbb8e7d 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1585,6 +1585,29 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
+ describe "#jenkinsfile?" do
+ let_it_be(:project) { create(:project, :repository) }
+
+ it 'returns valid file' do
+ files = [TestBlob.new('file'), TestBlob.new('Jenkinsfile'), TestBlob.new('copying')]
+ expect(repository.tree).to receive(:blobs).and_return(files)
+
+ expect(repository.jenkinsfile?).to be(true)
+ end
+
+ it 'is case-insensitive' do
+ files = [TestBlob.new('file'), TestBlob.new('JENKINSFILE'), TestBlob.new('copying')]
+ expect(repository.tree).to receive(:blobs).and_return(files)
+
+ expect(repository.jenkinsfile?).to be(true)
+ end
+
+ it 'returns false if it does not exist' do
+ expect(repository.tree).to receive(:blobs).and_return([])
+ expect(repository.jenkinsfile?).to be(false)
+ end
+ end
+
describe '#ambiguous_ref?' do
subject { repository.ambiguous_ref?(ref) }
@@ -2201,46 +2224,82 @@ RSpec.describe Repository, feature_category: :source_code_management do
describe 'rolling back the `rebase_commit_sha`' do
let(:new_sha) { Digest::SHA1.hexdigest('foo') }
- it 'does not rollback when there are no errors' do
- second_response = double(pre_receive_error: nil, git_error: nil)
- mock_gitaly(second_response)
+ context 'when there are no errors' do
+ before do
+ responses = [
+ double(rebase_sha: new_sha).as_null_object,
+ double
+ ]
+
+ expect_any_instance_of(
+ Gitaly::OperationService::Stub
+ ).to receive(:user_rebase_confirmable).and_return(responses.each)
+ end
- repository.rebase(user, merge_request)
+ it 'does not rollback when there are no errors' do
+ repository.rebase(user, merge_request)
- expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
+ expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
+ end
end
- it 'does rollback when a PreReceiveError is encountered in the second step' do
- second_response = double(pre_receive_error: 'my_error', git_error: nil)
- mock_gitaly(second_response)
+ context 'when there was an error' do
+ let(:first_response) do
+ double(rebase_sha: new_sha).as_null_object
+ end
- expect do
- repository.rebase(user, merge_request)
- end.to raise_error(Gitlab::Git::PreReceiveError)
+ before do
+ request_enum = double(push: nil).as_null_object
+ allow(Gitlab::GitalyClient::QueueEnumerator).to receive(:new).and_return(request_enum)
- expect(merge_request.reload.rebase_commit_sha).to be_nil
- end
+ expect_any_instance_of(
+ Gitaly::OperationService::Stub
+ ).to receive(:user_rebase_confirmable).and_return(first_response)
+
+ # Faking second request failure
+ allow(request_enum).to receive(:push)
+ .with(Gitaly::UserRebaseConfirmableRequest.new(apply: true)) { raise(error) }
+ end
- it 'does rollback when a GitError is encountered in the second step' do
- second_response = double(pre_receive_error: nil, git_error: 'git error')
- mock_gitaly(second_response)
+ context 'when a PreReceiveError is encountered in the second step' do
+ let(:error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserRebaseConfirmableError.new(
+ access_check: Gitaly::AccessCheckError.new(
+ error_message: 'something went wrong'
+ )))
+ end
- expect do
- repository.rebase(user, merge_request)
- end.to raise_error(Gitlab::Git::Repository::GitError)
+ it 'does rollback' do
+ expect do
+ repository.rebase(user, merge_request)
+ end.to raise_error(Gitlab::Git::PreReceiveError)
- expect(merge_request.reload.rebase_commit_sha).to be_nil
- end
+ expect(merge_request.reload.rebase_commit_sha).to be_nil
+ end
+ end
- def mock_gitaly(second_response)
- responses = [
- double(rebase_sha: new_sha).as_null_object,
- second_response
- ]
+ context 'when a GitError is encountered in the second step' do
+ let(:error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserSquashError.new(
+ rebase_conflict: Gitaly::MergeConflictError.new(
+ conflicting_files: ['conflicting-file']
+ )))
+ end
- expect_any_instance_of(
- Gitaly::OperationService::Stub
- ).to receive(:user_rebase_confirmable).and_return(responses.each)
+ it 'does rollback' do
+ expect do
+ repository.rebase(user, merge_request)
+ end.to raise_error(Gitlab::Git::Repository::GitError)
+
+ expect(merge_request.reload.rebase_commit_sha).to be_nil
+ end
+ end
end
end
end
@@ -3626,12 +3685,8 @@ RSpec.describe Repository, feature_category: :source_code_management do
describe '.pick_storage_shard', :request_store do
before do
- storages = {
- 'default' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories'),
- 'picked' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories')
- }
+ stub_storage_settings('picked' => {})
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
Gitlab::CurrentSettings.current_application_settings
@@ -3955,6 +4010,28 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
+ describe '#object_format' do
+ subject { repository.object_format }
+
+ context 'for SHA1 repository' do
+ it { is_expected.to eq('sha1') }
+ end
+
+ context 'for SHA256 repository' do
+ let(:project) { create(:project, :empty_repo, object_format: Repository::FORMAT_SHA256) }
+
+ it { is_expected.to eq('sha256') }
+ end
+
+ context 'for missing repository' do
+ before do
+ allow(repository).to receive(:exists?).and_return(false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe '#get_file_attributes' do
let(:project) do
create(:project, :custom_repo, files: {
diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb
index aa5fc231e14..7cada013636 100644
--- a/spec/models/route_spec.rb
+++ b/spec/models/route_spec.rb
@@ -311,8 +311,8 @@ RSpec.describe Route do
context 'with loose foreign key on routes.namespace_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:namespace) }
- let!(:model) { parent.route }
+ let_it_be(:parent) { create(:namespace) }
+ let_it_be(:model) { parent.route }
end
end
end
diff --git a/spec/models/service_desk/custom_email_credential_spec.rb b/spec/models/service_desk/custom_email_credential_spec.rb
index dbf47a8f6a7..6ba5906e264 100644
--- a/spec/models/service_desk/custom_email_credential_spec.rb
+++ b/spec/models/service_desk/custom_email_credential_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe ServiceDesk::CustomEmailCredential, feature_category: :service_de
it { is_expected.not_to allow_value('/example').for(:smtp_address) }
it { is_expected.not_to allow_value('localhost').for(:smtp_address) }
it { is_expected.not_to allow_value('127.0.0.1').for(:smtp_address) }
- it { is_expected.not_to allow_value('192.168.12.12').for(:smtp_address) } # disallow local network
+ it { is_expected.to allow_value('192.168.12.12').for(:smtp_address) } # allow local network on self-managed
it { is_expected.to validate_presence_of(:smtp_port) }
it { is_expected.to validate_numericality_of(:smtp_port).only_integer.is_greater_than(0) }
@@ -31,6 +31,10 @@ RSpec.describe ServiceDesk::CustomEmailCredential, feature_category: :service_de
it { is_expected.to validate_presence_of(:smtp_password) }
it { is_expected.to validate_length_of(:smtp_password).is_at_least(8).is_at_most(128) }
+
+ context 'when SaaS', :saas do
+ it { is_expected.not_to allow_value('192.168.12.12').for(:smtp_address) } # Disallow local network on .com
+ end
end
describe 'encrypted #smtp_username' do
diff --git a/spec/models/snippets/repository_storage_move_spec.rb b/spec/models/snippets/repository_storage_move_spec.rb
index ed518faf6ff..b01ad823d68 100644
--- a/spec/models/snippets/repository_storage_move_spec.rb
+++ b/spec/models/snippets/repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Snippets::RepositoryStorageMove, type: :model do
+RSpec.describe Snippets::RepositoryStorageMove, type: :model, feature_category: :source_code_management do
it_behaves_like 'handles repository moves' do
let_it_be_with_refind(:container) { create(:snippet) }
diff --git a/spec/models/tree_spec.rb b/spec/models/tree_spec.rb
index 20d786f311f..302cd9e9f10 100644
--- a/spec/models/tree_spec.rb
+++ b/spec/models/tree_spec.rb
@@ -2,69 +2,58 @@
require 'spec_helper'
-RSpec.describe Tree do
- let_it_be(:repository) { create(:project, :repository).repository }
-
- let(:sha) { repository.root_ref }
-
+RSpec.describe Tree, feature_category: :source_code_management do
subject(:tree) { described_class.new(repository, '54fcc214') }
- describe '#readme' do
- before do
- stub_const('FakeBlob', Class.new)
- FakeBlob.class_eval do
- attr_reader :name
+ let_it_be(:repository) { create(:project, :repository).repository }
- def initialize(name)
- @name = name
- end
+ describe '#readme' do
+ subject { tree.readme }
- def readme?
- name =~ /^readme/i
- end
- end
+ before do
+ allow(tree).to receive(:blobs).and_return(files)
end
- it 'returns nil when repository does not contains a README file' do
- files = [FakeBlob.new('file'), FakeBlob.new('license'), FakeBlob.new('copying')]
- expect(subject).to receive(:blobs).and_return(files)
+    context 'when repository does not contain a README file' do
+ let(:files) { [fake_blob('file'), fake_blob('license'), fake_blob('copying')] }
- expect(subject.readme).to eq nil
+ it { is_expected.to be_nil }
end
- it 'returns nil when repository does not contains a previewable README file' do
- files = [FakeBlob.new('file'), FakeBlob.new('README.pages'), FakeBlob.new('README.png')]
- expect(subject).to receive(:blobs).and_return(files)
+    context 'when repository does not contain a previewable README file' do
+ let(:files) { [fake_blob('file'), fake_blob('README.pages'), fake_blob('README.png')] }
- expect(subject.readme).to eq nil
+ it { is_expected.to be_nil }
end
- it 'returns README when repository contains a previewable README file' do
- files = [FakeBlob.new('README.png'), FakeBlob.new('README'), FakeBlob.new('file')]
- expect(subject).to receive(:blobs).and_return(files)
+ context 'when repository contains a previewable README file' do
+ let(:files) { [fake_blob('README.png'), fake_blob('README'), fake_blob('file')] }
- expect(subject.readme.name).to eq 'README'
+ it { is_expected.to have_attributes(name: 'README') }
end
- it 'returns first previewable README when repository contains more than one' do
- files = [FakeBlob.new('file'), FakeBlob.new('README.md'), FakeBlob.new('README.asciidoc')]
- expect(subject).to receive(:blobs).and_return(files)
+ context 'when repository contains more than one README file' do
+ let(:files) { [fake_blob('file'), fake_blob('README.md'), fake_blob('README.asciidoc')] }
- expect(subject.readme.name).to eq 'README.md'
- end
+ it 'returns first previewable README' do
+ is_expected.to have_attributes(name: 'README.md')
+ end
- it 'returns first plain text README when repository contains more than one' do
- files = [FakeBlob.new('file'), FakeBlob.new('README'), FakeBlob.new('README.txt')]
- expect(subject).to receive(:blobs).and_return(files)
+ context 'when only plain-text READMEs' do
+ let(:files) { [fake_blob('file'), fake_blob('README'), fake_blob('README.txt')] }
- expect(subject.readme.name).to eq 'README'
+ it 'returns first plain text README' do
+ is_expected.to have_attributes(name: 'README')
+ end
+ end
end
- it 'prioritizes previewable README file over one in plain text' do
- files = [FakeBlob.new('file'), FakeBlob.new('README'), FakeBlob.new('README.md')]
- expect(subject).to receive(:blobs).and_return(files)
+ context 'when the repository has a previewable and plain text READMEs' do
+ let(:files) { [fake_blob('file'), fake_blob('README'), fake_blob('README.md')] }
- expect(subject.readme.name).to eq 'README.md'
+ it 'prefers previewable README file' do
+ is_expected.to have_attributes(name: 'README.md')
+ end
end
end
@@ -73,4 +62,10 @@ RSpec.describe Tree do
it { is_expected.to be_an_instance_of(Gitaly::PaginationCursor) }
end
+
+ private
+
+ def fake_blob(name)
+ instance_double(Gitlab::Git::Blob, name: name)
+ end
end
diff --git a/spec/models/user_custom_attribute_spec.rb b/spec/models/user_custom_attribute_spec.rb
index 4c27e8d8944..6eaa1452651 100644
--- a/spec/models/user_custom_attribute_spec.rb
+++ b/spec/models/user_custom_attribute_spec.rb
@@ -122,4 +122,80 @@ RSpec.describe UserCustomAttribute, feature_category: :user_profile do
expect(user.custom_attributes.find_by(key: 'arkose_custom_score').value).to eq(custom_score)
end
end
+
+ describe '.set_deleted_own_account_at' do
+ let_it_be(:user) { create(:user) }
+
+ subject(:method_call) { described_class.set_deleted_own_account_at(user) }
+
+ it 'creates a custom attribute with "deleted_own_account_at" key associated to the user' do
+ freeze_time do
+ expect { method_call }.to change { user.custom_attributes.count }.by(1)
+
+ record = user.custom_attributes.find_by_key(UserCustomAttribute::DELETED_OWN_ACCOUNT_AT)
+ expect(record.value).to eq Time.zone.now.to_s
+ end
+ end
+
+ context 'when passed in user is nil' do
+ let(:user) { nil }
+
+ it 'does nothing' do
+ expect { method_call }.not_to change { UserCustomAttribute.count }
+ end
+ end
+ end
+
+ describe '.set_skipped_account_deletion_at' do
+ let_it_be(:user) { create(:user) }
+
+ subject(:method_call) { described_class.set_skipped_account_deletion_at(user) }
+
+ it 'creates a custom attribute with "skipped_account_deletion_at" key associated to the user' do
+ freeze_time do
+ expect { method_call }.to change { user.custom_attributes.count }.by(1)
+
+ record = user.custom_attributes.find_by_key(UserCustomAttribute::SKIPPED_ACCOUNT_DELETION_AT)
+ expect(record.value).to eq Time.zone.now.to_s
+ end
+ end
+
+ context 'when passed in user is nil' do
+ let(:user) { nil }
+
+ it 'does nothing' do
+ expect { method_call }.not_to change { UserCustomAttribute.count }
+ end
+ end
+ end
+
+ describe '.set_assumed_high_risk_reason' do
+ let_it_be(:user) { create(:user) }
+ let(:reason) { 'Because' }
+
+ subject(:call_method) { described_class.set_assumed_high_risk_reason(user: user, reason: reason) }
+
+ it 'creates a custom attribute with correct attribute values for the user' do
+ expect { call_method }.to change { user.custom_attributes.count }.by(1)
+
+ record = user.custom_attributes.find_by_key(UserCustomAttribute::ASSUMED_HIGH_RISK_REASON)
+ expect(record.value).to eq 'Because'
+ end
+
+ context 'when passed in user is nil' do
+ let(:user) { nil }
+
+ it 'does nothing' do
+ expect { call_method }.not_to change { UserCustomAttribute.count }
+ end
+ end
+
+ context 'when there is no reason passed in' do
+ let(:reason) { nil }
+
+ it 'does nothing' do
+ expect { call_method }.not_to change { UserCustomAttribute.count }
+ end
+ end
+ end
end
diff --git a/spec/models/user_highest_role_spec.rb b/spec/models/user_highest_role_spec.rb
index 7ef04466b6f..d8cf09e7fd4 100644
--- a/spec/models/user_highest_role_spec.rb
+++ b/spec/models/user_highest_role_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe UserHighestRole do
+RSpec.describe UserHighestRole, feature_category: :sm_provisioning do
describe 'associations' do
it { is_expected.to belong_to(:user).required }
end
@@ -26,4 +26,22 @@ RSpec.describe UserHighestRole do
end
end
end
+
+ describe '.allowed_values' do
+ let(:expected_allowed_values) do
+ [
+ Gitlab::Access::GUEST,
+ Gitlab::Access::REPORTER,
+ Gitlab::Access::DEVELOPER,
+ Gitlab::Access::MAINTAINER,
+ Gitlab::Access::OWNER
+ ]
+ end
+
+ it 'returns all access values' do
+ expected_allowed_values << Gitlab::Access::MINIMAL_ACCESS if Gitlab.ee?
+
+ expect(::UserHighestRole.allowed_values).to eq(expected_allowed_values)
+ end
+ end
end
diff --git a/spec/models/user_interacted_project_spec.rb b/spec/models/user_interacted_project_spec.rb
deleted file mode 100644
index aa038b06d8d..00000000000
--- a/spec/models/user_interacted_project_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe UserInteractedProject do
- let_it_be(:project) { create(:project) }
- let_it_be(:author) { project.creator }
-
- describe '.track' do
- subject { described_class.track(event) }
-
- let(:event) { build(:event, project: project, author: author) }
-
- Event.actions.each_key do |action|
- context "for all actions (event types)" do
- let(:event) { build(:event, project: project, author: author, action: action) }
-
- it 'creates a record' do
- expect { subject }.to change { described_class.count }.from(0).to(1)
- end
- end
- end
-
- it 'sets project accordingly' do
- subject
- expect(described_class.first.project).to eq(event.project)
- end
-
- it 'sets user accordingly' do
- subject
- expect(described_class.first.user).to eq(event.author)
- end
-
- it 'only creates a record once per user/project' do
- expect do
- subject
- described_class.track(event)
- end.to change { described_class.count }.from(0).to(1)
- end
-
- describe 'with an event without a project' do
- let(:event) { build(:event, project: nil) }
-
- it 'ignores the event' do
- expect { subject }.not_to change { described_class.count }
- end
- end
- end
-
- it { is_expected.to validate_presence_of(:project_id) }
- it { is_expected.to validate_presence_of(:user_id) }
-end
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index 343576de4d3..ee3fbb97e47 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -64,6 +64,10 @@ RSpec.describe UserPreference, feature_category: :user_profile do
end
end
+ describe 'associations' do
+ it { is_expected.to belong_to(:home_organization).class_name('Organizations::Organization').optional }
+ end
+
describe 'notes filters global keys' do
it 'contains expected values' do
expect(UserPreference::NOTES_FILTERS.keys).to match_array([:all_notes, :only_comments, :only_activity])
@@ -291,4 +295,30 @@ RSpec.describe UserPreference, feature_category: :user_profile do
expect(pref.read_attribute(:render_whitespace_in_code)).to eq(true)
end
end
+
+ describe '#user_belongs_to_home_organization' do
+ let_it_be(:organization) { create(:organization) }
+
+ context 'when user is an organization user' do
+ before do
+ create(:organization_user, organization: organization, user: user)
+ end
+
+ it 'does not add any validation errors' do
+ user_preference.home_organization = organization
+
+ expect(user_preference).to be_valid
+ expect(user_preference.errors).to be_empty
+ end
+ end
+
+ context 'when user is not an organization user' do
+ it 'adds a validation error' do
+ user_preference.home_organization = organization
+
+ expect(user_preference).to be_invalid
+ expect(user_preference.errors.messages[:user].first).to eq(_("is not part of the given organization"))
+ end
+ end
+ end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index fe229ce836f..cc0ea69401e 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -95,6 +95,10 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to delegate_method(:achievements_enabled).to(:user_preference) }
it { is_expected.to delegate_method(:achievements_enabled=).to(:user_preference).with_arguments(:args) }
+ it { is_expected.to delegate_method(:home_organization).to(:user_preference) }
+ it { is_expected.to delegate_method(:home_organization_id).to(:user_preference) }
+ it { is_expected.to delegate_method(:home_organization_id=).to(:user_preference).with_arguments(:args) }
+
it { is_expected.to delegate_method(:job_title).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:job_title=).to(:user_detail).with_arguments(:args).allow_nil }
@@ -175,7 +179,6 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to have_many(:uploads) }
it { is_expected.to have_many(:abuse_reports).dependent(:nullify).inverse_of(:user) }
it { is_expected.to have_many(:reported_abuse_reports).dependent(:nullify).class_name('AbuseReport').inverse_of(:reporter) }
- it { is_expected.to have_many(:assigned_abuse_reports).class_name('AbuseReport').inverse_of(:assignee) }
it { is_expected.to have_many(:resolved_abuse_reports).class_name('AbuseReport').inverse_of(:resolved_by) }
it { is_expected.to have_many(:abuse_events).class_name('Abuse::Event').inverse_of(:user) }
it { is_expected.to have_many(:custom_attributes).class_name('UserCustomAttribute') }
@@ -199,6 +202,13 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to have_many(:abuse_trust_scores).class_name('Abuse::TrustScore') }
it { is_expected.to have_many(:issue_assignment_events).class_name('ResourceEvents::IssueAssignmentEvent') }
it { is_expected.to have_many(:merge_request_assignment_events).class_name('ResourceEvents::MergeRequestAssignmentEvent') }
+ it { is_expected.to have_many(:admin_abuse_report_assignees).class_name('Admin::AbuseReportAssignee') }
+
+ it do
+ is_expected.to have_many(:assigned_abuse_reports).class_name('AbuseReport')
+ .through(:admin_abuse_report_assignees)
+ .source(:abuse_report)
+ end
it do
is_expected.to have_many(:organization_users).class_name('Organizations::OrganizationUser').inverse_of(:user)
@@ -461,7 +471,7 @@ RSpec.describe User, feature_category: :user_profile do
describe 'validations' do
describe 'password' do
- let!(:user) { build_stubbed(:user) }
+ let!(:user) { build(:user) }
before do
allow(Devise).to receive(:password_length).and_return(8..128)
@@ -541,9 +551,7 @@ RSpec.describe User, feature_category: :user_profile do
context 'namespace_move_dir_allowed' do
context 'when the user is not a new record' do
- before do
- expect(user.new_record?).to eq(false)
- end
+ let!(:user) { create(:user) }
it 'checks when username changes' do
expect(user).to receive(:namespace_move_dir_allowed)
@@ -1437,6 +1445,25 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ describe '#user_belongs_to_organization?' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:organization) { create(:organization) }
+
+ subject { user.user_belongs_to_organization?(organization) }
+
+ context 'when user is an organization user' do
+ before do
+ create(:organization_user, organization: organization, user: user)
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when user is not an organization user' do
+ it { is_expected.to eq false }
+ end
+ end
+
context 'strip attributes' do
context 'name' do
let(:user) { described_class.new(name: ' John Smith ') }
@@ -3537,32 +3564,8 @@ RSpec.describe User, feature_category: :user_profile do
end
end
- describe '#avatar_type' do
- let(:user) { create(:user) }
-
- it 'is true if avatar is image' do
- user.update_attribute(:avatar, 'uploads/avatar.png')
-
- expect(user.avatar_type).to be_truthy
- end
-
- it 'is false if avatar is html page' do
- user.update_attribute(:avatar, 'uploads/avatar.html')
- user.avatar_type
-
- expect(user.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
- end
- end
-
- describe '#avatar_url' do
- let(:user) { create(:user, :with_avatar) }
-
- context 'when avatar file is uploaded' do
- it 'shows correct avatar url' do
- expect(user.avatar_url).to eq(user.avatar.url)
- expect(user.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, user.avatar.url].join)
- end
- end
+ it_behaves_like Avatarable do
+ let(:model) { create(:user, :with_avatar) }
end
describe '#clear_avatar_caches' do
@@ -5430,8 +5433,7 @@ RSpec.describe User, feature_category: :user_profile do
before do
shared_project.project_group_links.create!(
- group: group2,
- group_access: ProjectGroupLink.default_access
+ group: group2
)
group2.add_member(user, GroupMember::OWNER)
@@ -5682,21 +5684,35 @@ RSpec.describe User, feature_category: :user_profile do
describe '#ensure_namespace_correct' do
context 'for a new user' do
- let(:user) { build(:user) }
+ let(:user) { described_class.new attributes_for(:user) }
- it 'creates the namespace' do
+ it 'does not create the namespace' do
expect(user.namespace).to be_nil
- user.save!
+ user.valid?
- expect(user.namespace).not_to be_nil
- expect(user.namespace).to be_kind_of(Namespaces::UserNamespace)
+ expect(user.namespace).to be_nil
end
- it 'creates the namespace setting' do
- user.save!
+ context 'when create_personal_ns_outside_model feature flag is disabled' do
+ before do
+ stub_feature_flags(create_personal_ns_outside_model: false)
+ end
- expect(user.namespace.namespace_settings).to be_persisted
+ it 'creates the namespace' do
+ expect(user.namespace).to be_nil
+
+ user.save!
+
+ expect(user.namespace).to be_present
+ expect(user.namespace).to be_kind_of(Namespaces::UserNamespace)
+ end
+
+ it 'creates the namespace setting' do
+ user.save!
+
+ expect(user.namespace.namespace_settings).to be_persisted
+ end
end
end
@@ -5764,6 +5780,37 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ describe '#assign_personal_namespace' do
+ subject(:personal_namespace) { user.assign_personal_namespace }
+
+ context 'when namespace exists' do
+ let(:user) { build(:user) }
+
+ it 'leaves the namespace untouched' do
+ expect { personal_namespace }.not_to change(user, :namespace)
+ end
+
+ it 'returns the personal namespace' do
+ expect(personal_namespace).to eq(user.namespace)
+ end
+ end
+
+ context 'when namespace does not exist' do
+ let(:user) { described_class.new attributes_for(:user) }
+
+ it 'builds a new namespace' do
+ subject
+
+ expect(user.namespace).to be_kind_of(Namespaces::UserNamespace)
+ expect(user.namespace.namespace_settings).to be_present
+ end
+
+ it 'returns the personal namespace' do
+ expect(personal_namespace).to eq(user.namespace)
+ end
+ end
+ end
+
describe '#username_changed_hook' do
context 'for a new user' do
let(:user) { build(:user) }
@@ -6130,6 +6177,15 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ it 'adds a custom attribute that indicates the user deleted their own account' do
+ freeze_time do
+ expect { user.delete_async(deleted_by: deleted_by) }.to change { user.custom_attributes.count }.by(1)
+
+ expect(user.custom_attributes.last.key).to eq UserCustomAttribute::DELETED_OWN_ACCOUNT_AT
+ expect(user.custom_attributes.last.value).to eq Time.zone.now.to_s
+ end
+ end
+
context 'when delay_delete_own_user feature flag is disabled' do
before do
stub_feature_flags(delay_delete_own_user: false)
@@ -6142,6 +6198,10 @@ RSpec.describe User, feature_category: :user_profile do
it 'does not update the note' do
expect { user.delete_async(deleted_by: deleted_by) }.not_to change { user.note }
end
+
+      it 'does not add any new custom attribute' do
+ expect { user.delete_async(deleted_by: deleted_by) }.not_to change { user.custom_attributes.count }
+ end
end
describe '#trusted?' do
@@ -6330,6 +6390,34 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ describe '#max_member_access_for_group' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+
+ context 'when user has no access' do
+ it 'returns Gitlab::Access::NO_ACCESS' do
+ expect(user.max_member_access_for_group(group.id)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
+
+ context 'when user has access via a single permission' do
+ it 'returns Gitlab::Access::DEVELOPER' do
+ group.add_developer(user)
+
+ expect(user.max_member_access_for_group(group.id)).to eq(Gitlab::Access::DEVELOPER)
+ end
+ end
+
+    context 'when user has access via multiple groups' do
+ it 'returns Gitlab::Access::MAINTAINER' do
+ group.add_developer(user)
+ group.add_maintainer(user)
+
+ expect(user.max_member_access_for_group(group.id)).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+ end
+
context 'changing a username' do
let(:user) { create(:user, username: 'foo') }
@@ -6381,6 +6469,49 @@ RSpec.describe User, feature_category: :user_profile do
end
end
end
+
+ context 'with multiple versions of terms' do
+ shared_examples 'terms acceptance' do
+ let(:another_term) { create :term }
+ let(:required_terms_are_accepted) { !required_terms_not_accepted }
+
+ context 'when the latest term is not accepted' do
+ before do
+ accept_terms(user)
+ another_term
+ end
+
+ it { expect(required_terms_are_accepted).to be result_for_latest_not_accepted }
+ end
+
+ context 'when the latest term is accepted' do
+ before do
+ another_term
+ accept_terms(user)
+ end
+
+ it { expect(required_terms_are_accepted).to be result_for_latest_accepted }
+ end
+ end
+
+ context 'when enforce_acceptance_of_changed_terms is enabled' do
+ let(:result_for_latest_not_accepted) { false }
+ let(:result_for_latest_accepted) { true }
+
+ include_examples 'terms acceptance'
+ end
+
+ context 'when enforce_acceptance_of_changed_terms is disabled' do
+ let(:result_for_latest_not_accepted) { true }
+ let(:result_for_latest_accepted) { true }
+
+ before do
+ stub_feature_flags(enforce_acceptance_of_changed_terms: false)
+ end
+
+ include_examples 'terms acceptance'
+ end
+ end
end
end
@@ -7966,4 +8097,24 @@ RSpec.describe User, feature_category: :user_profile do
end
end
end
+
+ describe '#deleted_own_account?' do
+ let_it_be(:user) { create(:user) }
+
+ subject(:result) { user.deleted_own_account? }
+
+ context 'when user has a DELETED_OWN_ACCOUNT_AT custom attribute' do
+ let_it_be(:custom_attr) do
+ create(:user_custom_attribute, user: user, key: UserCustomAttribute::DELETED_OWN_ACCOUNT_AT, value: 'now')
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when user does not have a DELETED_OWN_ACCOUNT_AT custom attribute' do
+ let_it_be(:user) { create(:user) }
+
+ it { is_expected.to eq false }
+ end
+ end
end
diff --git a/spec/models/users/in_product_marketing_email_spec.rb b/spec/models/users/in_product_marketing_email_spec.rb
index d333a51ae3b..b1642383e42 100644
--- a/spec/models/users/in_product_marketing_email_spec.rb
+++ b/spec/models/users/in_product_marketing_email_spec.rb
@@ -134,15 +134,4 @@ RSpec.describe Users::InProductMarketingEmail, type: :model, feature_category: :
end
end
end
-
- describe '.ACTIVE_TRACKS' do
- it 'has an entry for every track' do
- tracks = Namespaces::InProductMarketingEmailsService::TRACKS.keys
- expect(tracks).to match_array(described_class::ACTIVE_TRACKS.keys.map(&:to_sym))
- end
-
- it 'does not include INACTIVE_TRACK_NAMES' do
- expect(described_class::ACTIVE_TRACKS.keys).not_to include(*described_class::INACTIVE_TRACK_NAMES)
- end
- end
end
diff --git a/spec/models/users/phone_number_validation_spec.rb b/spec/models/users/phone_number_validation_spec.rb
index e41719d8ca3..15bbb507dee 100644
--- a/spec/models/users/phone_number_validation_spec.rb
+++ b/spec/models/users/phone_number_validation_spec.rb
@@ -39,16 +39,26 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
end
context 'when banned user has the same international dial code and phone number' do
- before do
- create(:phone_number_validation, user: banned_user)
+ context 'and the matching record has not been verified' do
+ before do
+ create(:phone_number_validation, user: banned_user)
+ end
+
+ it { is_expected.to eq(false) }
end
- it { is_expected.to eq(true) }
+ context 'and the matching record has been verified' do
+ before do
+ create(:phone_number_validation, :validated, user: banned_user)
+ end
+
+ it { is_expected.to eq(true) }
+ end
end
context 'when banned user has the same international dial code and phone number, but different country code' do
before do
- create(:phone_number_validation, user: banned_user, country: 'CA')
+ create(:phone_number_validation, :validated, user: banned_user, country: 'CA')
end
it { is_expected.to eq(true) }
@@ -56,7 +66,7 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'when banned user does not have the same international dial code' do
before do
- create(:phone_number_validation, user: banned_user, international_dial_code: 61)
+ create(:phone_number_validation, :validated, user: banned_user, international_dial_code: 61)
end
it { is_expected.to eq(false) }
@@ -64,7 +74,7 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'when banned user does not have the same phone number' do
before do
- create(:phone_number_validation, user: banned_user, phone_number: '666')
+ create(:phone_number_validation, :validated, user: banned_user, phone_number: '666')
end
it { is_expected.to eq(false) }
@@ -72,7 +82,7 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'when not-banned user has the same international dial code and phone number' do
before do
- create(:phone_number_validation, user: user)
+ create(:phone_number_validation, :validated, user: user)
end
it { is_expected.to eq(false) }
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 476d346db10..68e9e8ee50d 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -17,6 +17,13 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
.with_foreign_key('work_item_id')
end
+ it 'has one `dates_source`' do
+ is_expected.to have_one(:dates_source)
+ .class_name('WorkItems::DatesSource')
+ .with_foreign_key('issue_id')
+ .inverse_of(:work_item)
+ end
+
it 'has many `work_item_children`' do
is_expected.to have_many(:work_item_children)
.class_name('WorkItem')
@@ -79,16 +86,6 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
end
end
- describe '.in_namespaces' do
- let(:group) { create(:group) }
- let!(:group_work_item) { create(:work_item, namespace: group) }
- let!(:project_work_item) { create(:work_item, project: reusable_project) }
-
- subject { described_class.in_namespaces(group) }
-
- it { is_expected.to contain_exactly(group_work_item) }
- end
-
describe '.with_confidentiality_check' do
let(:user) { create(:user) }
let!(:authored_work_item) { create(:work_item, :confidential, project: reusable_project, author: user) }
diff --git a/spec/models/work_items/dates_source_spec.rb b/spec/models/work_items/dates_source_spec.rb
new file mode 100644
index 00000000000..d75500cab16
--- /dev/null
+++ b/spec/models/work_items/dates_source_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::DatesSource, feature_category: :portfolio_management do
+  describe 'associations' do
+ it { is_expected.to belong_to(:namespace).inverse_of(:work_items_dates_source) }
+ it { is_expected.to belong_to(:work_item).with_foreign_key('issue_id').inverse_of(:dates_source) }
+ it { is_expected.to belong_to(:due_date_sourcing_work_item).class_name('WorkItem') }
+ it { is_expected.to belong_to(:start_date_sourcing_work_item).class_name('WorkItem') }
+ it { is_expected.to belong_to(:due_date_sourcing_milestone).class_name('Milestone') }
+ it { is_expected.to belong_to(:start_date_sourcing_milestone).class_name('Milestone') }
+ end
+
+ it 'ensures to use work_item namespace' do
+ work_item = create(:work_item)
+ date_source = described_class.new(work_item: work_item)
+
+ date_source.valid?
+
+ expect(date_source.namespace).to eq(work_item.namespace)
+ end
+end
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index 7f836ce4e90..135b7b97ce9 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -24,6 +24,43 @@ RSpec.describe WorkItems::Type, feature_category: :team_planning do
expect(type.enabled_widget_definitions).to match_array([widget1])
end
+
+ it 'has many `child_restrictions`' do
+ is_expected.to have_many(:child_restrictions)
+ .class_name('WorkItems::HierarchyRestriction')
+ .with_foreign_key('parent_type_id')
+ end
+
+ describe 'allowed_child_types_by_name' do
+ it 'defines association' do
+ is_expected.to have_many(:allowed_child_types_by_name)
+ .through(:child_restrictions)
+ .class_name('::WorkItems::Type')
+ .with_foreign_key(:child_type_id)
+ end
+
+ it 'sorts by name ascending' do
+ expected_type_names = %w[Atype Ztype gtype]
+ parent_type = create(:work_item_type)
+
+ expected_type_names.shuffle.each do |name|
+ create(:hierarchy_restriction, parent_type: parent_type, child_type: create(:work_item_type, name: name))
+ end
+
+ expect(parent_type.allowed_child_types_by_name.pluck(:name)).to match_array(expected_type_names)
+ end
+ end
+ end
+
+ describe 'callbacks' do
+ describe 'after_save' do
+ subject(:work_item_type) { build(:work_item_type) }
+
+ it 'calls #clear_reactive_cache!' do
+ is_expected.to receive(:clear_reactive_cache!)
+ work_item_type.save!(name: 'foo')
+ end
+ end
end
describe 'scopes' do
@@ -166,4 +203,48 @@ RSpec.describe WorkItems::Type, feature_category: :team_planning do
end
end
end
+
+ describe '#allowed_child_types' do
+ let_it_be(:work_item_type) { create(:work_item_type) }
+ let_it_be(:child_type) { create(:work_item_type) }
+ let_it_be(:restriction) { create(:hierarchy_restriction, parent_type: work_item_type, child_type: child_type) }
+
+ subject { work_item_type.allowed_child_types(cache: cached) }
+
+ context 'when cache is true' do
+ let(:cached) { true }
+
+ before do
+ allow(work_item_type).to receive(:with_reactive_cache).and_call_original
+ end
+
+ it 'returns the cached data' do
+ expect(work_item_type).to receive(:with_reactive_cache)
+ expect(Rails.cache).to receive(:exist?).with("work_items_type:#{work_item_type.id}:alive")
+ is_expected.to eq([child_type])
+ end
+ end
+
+ context 'when cache is false' do
+ let(:cached) { false }
+
+ it 'returns queried data' do
+ expect(work_item_type).not_to receive(:with_reactive_cache)
+ is_expected.to eq([child_type])
+ end
+ end
+ end
+
+ describe '#calculate_reactive_cache' do
+ let(:work_item_type) { build(:work_item_type) }
+
+ subject { work_item_type.calculate_reactive_cache }
+
+ it 'returns cache data for allowed child types' do
+ child_types = create_list(:work_item_type, 2)
+ expect(work_item_type).to receive(:allowed_child_types_by_name).and_return(child_types)
+
+ is_expected.to eq(child_types)
+ end
+ end
end
diff --git a/spec/models/work_items/widgets/assignees_spec.rb b/spec/models/work_items/widgets/assignees_spec.rb
index 19c17658ce4..b44246855ce 100644
--- a/spec/models/work_items/widgets/assignees_spec.rb
+++ b/spec/models/work_items/widgets/assignees_spec.rb
@@ -17,6 +17,32 @@ RSpec.describe WorkItems::Widgets::Assignees do
it { is_expected.to include(:assignee_ids) }
end
+ describe '.can_invite_members?' do
+ let(:user) { build_stubbed(:user) }
+
+ subject(:execute) { described_class.can_invite_members?(user, resource_parent) }
+
+ context 'when resource_parent is a project' do
+ let(:resource_parent) { build_stubbed(:project) }
+
+ it 'checks the ability with the correct permission' do
+ expect(user).to receive(:can?).with(:admin_project_member, resource_parent)
+
+ execute
+ end
+ end
+
+ context 'when resource_parent is a group' do
+ let(:resource_parent) { build_stubbed(:group) }
+
+ it 'checks the ability with the correct permission' do
+ expect(user).to receive(:can?).with(:admin_group_member, resource_parent)
+
+ execute
+ end
+ end
+ end
+
describe '#type' do
subject { described_class.new(work_item).type }
diff --git a/spec/policies/abuse_report_policy_spec.rb b/spec/policies/abuse_report_policy_spec.rb
index 01ab29d1cf1..3cc92749ab4 100644
--- a/spec/policies/abuse_report_policy_spec.rb
+++ b/spec/policies/abuse_report_policy_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe AbuseReportPolicy, feature_category: :insider_threat do
it 'cannot read_abuse_report' do
expect(policy).to be_disallowed(:read_abuse_report)
+ expect(policy).to be_disallowed(:read_note)
expect(policy).to be_disallowed(:create_note)
end
end
@@ -21,6 +22,7 @@ RSpec.describe AbuseReportPolicy, feature_category: :insider_threat do
it 'can read_abuse_report' do
expect(policy).to be_allowed(:read_abuse_report)
+ expect(policy).to be_allowed(:read_note)
expect(policy).to be_allowed(:create_note)
end
end
diff --git a/spec/policies/ci/runner_manager_policy_spec.rb b/spec/policies/ci/runner_manager_policy_spec.rb
index d7004033ceb..82894e729bf 100644
--- a/spec/policies/ci/runner_manager_policy_spec.rb
+++ b/spec/policies/ci/runner_manager_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::RunnerManagerPolicy, feature_category: :runner_fleet do
+RSpec.describe Ci::RunnerManagerPolicy, feature_category: :fleet_visibility do
let_it_be(:owner) { create(:user) }
describe 'ability :read_runner_manager' do
@@ -13,10 +13,15 @@ RSpec.describe Ci::RunnerManagerPolicy, feature_category: :runner_fleet do
let_it_be_with_reload(:group) { create(:group, name: 'top-level', path: 'top-level') }
let_it_be_with_reload(:subgroup) { create(:group, name: 'subgroup', path: 'subgroup', parent: group) }
let_it_be_with_reload(:project) { create(:project, group: subgroup) }
+ let_it_be_with_reload(:group_without_project) { create(:group, name: 'top-level2', path: 'top-level2') }
let_it_be(:instance_runner) { create(:ci_runner, :instance, :with_runner_manager) }
let_it_be(:group_runner) { create(:ci_runner, :group, :with_runner_manager, groups: [group]) }
+ let_it_be(:subgroup_runner) { create(:ci_runner, :group, :with_runner_manager, groups: [subgroup]) }
let_it_be(:project_runner) { create(:ci_runner, :project, :with_runner_manager, projects: [project]) }
+ let_it_be(:runner_on_group_without_project) do
+ create(:ci_runner, :group, :with_runner_manager, groups: [group_without_project])
+ end
let(:runner_manager) { runner.runner_managers.first }
@@ -31,39 +36,116 @@ RSpec.describe Ci::RunnerManagerPolicy, feature_category: :runner_fleet do
shared_examples 'a policy allowing reading instance runner manager depending on runner sharing' do
context 'with instance runner' do
- let(:runner) { instance_runner }
+ using RSpec::Parameterized::TableSyntax
- it { expect_allowed :read_runner_manager }
+ where(:shared_runners_enabled_on_group, :shared_runners_enabled_on_project, :expect_can_read) do
+ false | false | false
+ false | true | true
+ true | false | true
+ true | true | true
+ end
+
+ with_them do
+ let(:runner) { instance_runner }
- context 'with shared runners disabled on projects' do
before do
- project.update!(shared_runners_enabled: false)
+ group.update!(shared_runners_enabled: shared_runners_enabled_on_group)
+ project.update!(shared_runners_enabled: shared_runners_enabled_on_project)
+ end
+
+ specify do
+ if expect_can_read
+ expect_allowed :read_runner_manager
+ else
+ expect_disallowed :read_runner_manager
+ end
end
+ end
+ end
+ end
+
+ shared_examples 'a policy allowing reading group runner manager depending on runner sharing' do |user_role|
+ let(:group_runners_enabled_on_project) { true }
+
+ before do
+ project.update!(group_runners_enabled: group_runners_enabled_on_project)
+ end
+
+ context 'with group runner' do
+ let(:runner) { group_runner }
+
+ # NOTE: The user is allowed to read the runner manager because:
+ # - the user is a developer+ in the runner's group
+ # - the user is a developer+ in `group/subgroup/project`, and the runner is shared to that project
+ it { expect_allowed :read_runner_manager }
+
+ context 'with sharing of group runners disabled' do
+ let(:group_runners_enabled_on_project) { false }
it { expect_allowed :read_runner_manager }
end
- context 'with shared runners disabled for groups and projects' do
- before do
- group.update!(shared_runners_enabled: false)
- project.update!(shared_runners_enabled: false)
+ context 'when user belongs to subgroup only' do
+ let_it_be(:subgroup_member) { create(:user) }
+
+ let(:user) { subgroup_member }
+
+ context 'with runner visible to group project' do
+ # NOTE: The user is allowed to read the runner manager because the user is a developer+
+ # in `group/subgroup/project`, and the runner is shared to that project
+ it { expect_allowed :read_runner_manager }
+
+ before_all do
+ subgroup.add_member(subgroup_member, user_role)
+ end
+
+ context 'with sharing of group runners disabled' do
+ let(:group_runners_enabled_on_project) { false }
+
+ it { expect_disallowed :read_runner_manager }
+ end
end
+ context 'without projects in group' do
+ let(:runner) { runner_on_group_without_project }
+
+ before_all do
+ subgroup.add_member(subgroup_member, user_role)
+ end
+
+ it { expect_disallowed :read_runner_manager }
+ end
+ end
+
+ context "when user is not #{user_role} in associated group" do
+ let_it_be(:user_with_role) { create(:user) }
+
+ let(:user) { user_with_role }
+
it { expect_disallowed :read_runner_manager }
+
+ context "when user is #{user_role} in a group invited to group as #{user_role}" do
+ let_it_be(:invited_group) { create(:group, name: "#{user_role}s", path: "#{user_role}s") }
+
+ before_all do
+ invited_group.add_developer(user_with_role)
+ create(:group_group_link, :developer, shared_group: group, shared_with_group: invited_group)
+ end
+
+ it { expect_allowed :read_runner_manager }
+ end
end
end
- end
- shared_examples 'a policy allowing reading group runner manager depending on runner sharing' do
- context 'with group runner' do
- let(:runner) { group_runner }
+ context 'when runner is in subgroup' do
+ let(:runner) { subgroup_runner }
+ # NOTE: The user is allowed to read the runner manager because the user is a developer+ in
+ # `group/subgroup/project`, and the runner is shared to that project
it { expect_allowed :read_runner_manager }
context 'with sharing of group runners disabled' do
- before do
- project.update!(group_runners_enabled: false)
- end
+ let(:group_runners_enabled_on_project) { false }
it { expect_disallowed :read_runner_manager }
end
@@ -124,12 +206,34 @@ RSpec.describe Ci::RunnerManagerPolicy, feature_category: :runner_fleet do
it_behaves_like 'a policy allowing reading instance runner manager depending on runner sharing'
- it_behaves_like 'a policy allowing reading group runner manager depending on runner sharing'
+ it_behaves_like 'a policy allowing reading group runner manager depending on runner sharing', :developer
context 'with project runner' do
let(:runner) { project_runner }
- it { expect_disallowed :read_runner_manager }
+ it { expect_allowed :read_runner_manager }
+
+ context 'when user is not developer in parent group' do
+ let_it_be(:developers_group_developer) { create(:user) }
+ let_it_be_with_reload(:developers_group) { create(:group, name: 'developers', path: 'developers') }
+
+ let(:user) { developers_group_developer }
+
+ before_all do
+ create(:project_group_link, :developer, group: developers_group, project: project)
+ developers_group.add_reporter(developers_group_developer)
+ end
+
+ it { expect_disallowed :read_runner_manager }
+
+ context 'when user is developer in a group invited to project as developer' do
+ before_all do
+ developers_group.add_developer(developers_group_developer)
+ end
+
+ it { expect_allowed :read_runner_manager }
+ end
+ end
end
end
@@ -138,7 +242,7 @@ RSpec.describe Ci::RunnerManagerPolicy, feature_category: :runner_fleet do
it_behaves_like 'a policy allowing reading instance runner manager depending on runner sharing'
- it_behaves_like 'a policy allowing reading group runner manager depending on runner sharing'
+ it_behaves_like 'a policy allowing reading group runner manager depending on runner sharing', :maintainer
context 'with project runner' do
let(:runner) { project_runner }
diff --git a/spec/policies/ci/runner_policy_spec.rb b/spec/policies/ci/runner_policy_spec.rb
index e0a9e3c2870..6259cf6d247 100644
--- a/spec/policies/ci/runner_policy_spec.rb
+++ b/spec/policies/ci/runner_policy_spec.rb
@@ -7,21 +7,26 @@ RSpec.describe Ci::RunnerPolicy, feature_category: :runner do
describe 'ability :read_runner' do
let_it_be(:guest) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:maintainer) { create(:user) }
let_it_be_with_reload(:group) { create(:group, name: 'top-level', path: 'top-level') }
let_it_be_with_reload(:subgroup) { create(:group, name: 'subgroup', path: 'subgroup', parent: group) }
let_it_be_with_reload(:project) { create(:project, group: subgroup) }
+ let_it_be_with_reload(:group_without_project) { create(:group, name: 'top-level2', path: 'top-level2') }
let_it_be(:instance_runner) { create(:ci_runner, :instance) }
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
+ let_it_be(:subgroup_runner) { create(:ci_runner, :group, groups: [subgroup]) }
let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
+ let_it_be(:runner_on_group_without_project) { create(:ci_runner, :group, groups: [group_without_project]) }
subject(:policy) { described_class.new(user, runner) }
before_all do
group.add_guest(guest)
+ group.add_reporter(reporter)
group.add_developer(developer)
group.add_maintainer(maintainer)
group.add_owner(owner)
@@ -29,39 +34,121 @@ RSpec.describe Ci::RunnerPolicy, feature_category: :runner do
shared_examples 'a policy allowing reading instance runner depending on runner sharing' do
context 'with instance runner' do
- let(:runner) { instance_runner }
+ using RSpec::Parameterized::TableSyntax
- it { expect_allowed :read_runner }
+ where(:shared_runners_enabled_on_group, :shared_runners_enabled_on_project, :expect_can_read) do
+ false | false | false
+ false | true | true
+ true | false | true
+ true | true | true
+ end
+
+ with_them do
+ let(:runner) { instance_runner }
- context 'with shared runners disabled on projects' do
before do
- project.update!(shared_runners_enabled: false)
+ group.update!(shared_runners_enabled: shared_runners_enabled_on_group)
+ project.update!(shared_runners_enabled: shared_runners_enabled_on_project)
end
+ specify do
+ if expect_can_read
+ expect_allowed :read_runner
+ else
+ expect_disallowed :read_runner
+ end
+ end
+ end
+ end
+ end
+
+ shared_examples 'a policy allowing reading group runner depending on runner sharing' do |user_role|
+ let(:group_runners_enabled_on_project) { true }
+
+ before do
+ project.update!(group_runners_enabled: group_runners_enabled_on_project)
+ end
+
+ context 'with group runner' do
+ let(:runner) { group_runner }
+
+ # NOTE: The user is allowed to read the runner because:
+ # - the user is a developer+ in the runner's group
+ # - the user is a developer+ in `group/subgroup/project`, and the runner is shared to that project
+ it { expect_allowed :read_runner }
+
+ context 'with sharing of group runners disabled' do
+ let(:group_runners_enabled_on_project) { false }
+
it { expect_allowed :read_runner }
end
- context 'with shared runners disabled for groups and projects' do
- before do
- group.update!(shared_runners_enabled: false)
- project.update!(shared_runners_enabled: false)
+ context 'when user belongs to subgroup only' do
+ let_it_be(:subgroup_member) do
+ create(:user).tap { |subgroup_member| subgroup.add_member(subgroup_member, user_role) }
+ end
+
+ let(:user) { subgroup_member }
+
+ context 'with runner visible to group project' do
+ # NOTE: The user is allowed to read the runner because the user is a developer+ in `group/subgroup/project`,
+ # and the runner is shared to that project
+ it { expect_allowed :read_runner }
+
+ context 'with sharing of group runners disabled' do
+ let(:group_runners_enabled_on_project) { false }
+
+ it { expect_disallowed :read_runner }
+ end
end
+ context 'without projects in group' do
+ let(:runner) { runner_on_group_without_project }
+
+ it { expect_disallowed :read_runner }
+ end
+ end
+
+ context "when user is not #{user_role} in associated group" do
+ let_it_be(:user_with_role) { create(:user) }
+
+ let(:user) { user_with_role }
+
it { expect_disallowed :read_runner }
+
+ context "when user is #{user_role} in a group invited to group as #{user_role}" do
+ let_it_be(:invited_group) { create(:group, name: "#{user_role}s", path: "#{user_role}s") }
+
+ before_all do
+ invited_group.add_member(user_with_role, user_role)
+ create(:group_group_link, :developer, shared_group: group, shared_with_group: invited_group)
+ end
+
+ it { expect_allowed :read_runner }
+ end
+
+ context "when user is a reporter in a group invited to group as #{user_role}" do
+ let_it_be(:invited_group) { create(:group, name: "#{user_role}s", path: "#{user_role}s") }
+
+ before_all do
+ invited_group.add_reporter(user_with_role)
+ create(:group_group_link, user_role, shared_group: group, shared_with_group: invited_group)
+ end
+
+ it { expect_disallowed :read_runner }
+ end
end
end
- end
- shared_examples 'a policy allowing reading group runner depending on runner sharing' do
- context 'with group runner' do
- let(:runner) { group_runner }
+ context 'when runner is in subgroup' do
+ let(:runner) { subgroup_runner }
+ # NOTE: The user is allowed to read the runner because the user is a developer+ in `group/subgroup/project`,
+ # and the runner is shared to that project
it { expect_allowed :read_runner }
context 'with sharing of group runners disabled' do
- before do
- project.update!(group_runners_enabled: false)
- end
+ let(:group_runners_enabled_on_project) { false }
it { expect_disallowed :read_runner }
end
@@ -89,6 +176,17 @@ RSpec.describe Ci::RunnerPolicy, feature_category: :runner do
it { expect_disallowed :read_runner }
+ context 'with group invited as maintainer to group containing runner' do
+ let_it_be(:invited_group) { create(:group) }
+ let_it_be(:runner) { create(:ci_runner, :group, groups: [invited_group]) }
+
+ before_all do
+ create(:group_group_link, :maintainer, shared_group: group, shared_with_group: invited_group)
+ end
+
+ it { expect_disallowed :read_runner }
+ end
+
context 'with sharing of group runners disabled' do
before do
project.update!(group_runners_enabled: false)
@@ -117,17 +215,45 @@ RSpec.describe Ci::RunnerPolicy, feature_category: :runner do
it_behaves_like 'does not allow reading runners on any scope'
end
+ context 'with reporter access' do
+ let(:user) { reporter }
+
+ it_behaves_like 'does not allow reading runners on any scope'
+ end
+
context 'with developer access' do
let(:user) { developer }
it_behaves_like 'a policy allowing reading instance runner depending on runner sharing'
- it_behaves_like 'a policy allowing reading group runner depending on runner sharing'
+ it_behaves_like 'a policy allowing reading group runner depending on runner sharing', :developer
context 'with project runner' do
let(:runner) { project_runner }
- it { expect_disallowed :read_runner }
+ it { expect_allowed :read_runner }
+
+ context 'when user is not developer in parent group' do
+ let_it_be(:developers_group_developer) { create(:user) }
+ let_it_be_with_reload(:developers_group) { create(:group, name: 'developers', path: 'developers') }
+
+ let(:user) { developers_group_developer }
+
+ before_all do
+ create(:project_group_link, :developer, group: developers_group, project: project)
+ developers_group.add_reporter(developers_group_developer)
+ end
+
+ it { expect_disallowed :read_runner }
+
+ context 'when user is developer in a group invited to project as developer' do
+ before_all do
+ developers_group.add_developer(developers_group_developer)
+ end
+
+ it { expect_allowed :read_runner }
+ end
+ end
end
end
@@ -136,7 +262,7 @@ RSpec.describe Ci::RunnerPolicy, feature_category: :runner do
it_behaves_like 'a policy allowing reading instance runner depending on runner sharing'
- it_behaves_like 'a policy allowing reading group runner depending on runner sharing'
+ it_behaves_like 'a policy allowing reading group runner depending on runner sharing', :maintainer
context 'with project runner' do
let(:runner) { project_runner }
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 52fea8d782e..1d58b941d41 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -694,6 +694,14 @@ RSpec.describe GlobalPolicy, feature_category: :shared do
let(:current_user) { user }
it { is_expected.to be_allowed(:create_organization) }
+
+ context 'when disallowed by admin' do
+ before do
+ stub_application_setting(can_create_organization: false)
+ end
+
+ it { is_expected.to be_disallowed(:create_organization) }
+ end
end
context 'with anonymous' do
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index cb7884b141e..4632fcca12a 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -18,7 +18,6 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
expect_disallowed(*developer_permissions)
expect_disallowed(*maintainer_permissions)
expect_disallowed(*owner_permissions)
- expect_disallowed(:read_namespace)
expect_disallowed(:read_namespace_via_membership)
end
end
@@ -34,7 +33,6 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
expect_disallowed(*developer_permissions)
expect_disallowed(*maintainer_permissions)
expect_disallowed(*owner_permissions)
- expect_disallowed(:read_namespace)
expect_disallowed(:read_namespace_via_membership)
end
end
@@ -1110,85 +1108,148 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:admin_dependency_proxy) }
end
+ shared_examples 'disallows all dependency proxy access' do
+ it { is_expected.to be_disallowed(:read_dependency_proxy) }
+ it { is_expected.to be_disallowed(:admin_dependency_proxy) }
+ end
+
+ shared_examples 'allows dependency proxy read access but not admin' do
+ it { is_expected.to be_allowed(:read_dependency_proxy) }
+ it { is_expected.to be_disallowed(:admin_dependency_proxy) }
+ end
+
context 'feature disabled' do
let(:current_user) { owner }
- it { is_expected.to be_disallowed(:read_dependency_proxy) }
- it { is_expected.to be_disallowed(:admin_dependency_proxy) }
+ before do
+ stub_config(dependency_proxy: { enabled: false })
+ end
+
+ it_behaves_like 'disallows all dependency proxy access'
end
context 'feature enabled' do
before do
- stub_config(dependency_proxy: { enabled: true })
+ stub_config(dependency_proxy: { enabled: true }, registry: { enabled: true })
end
- context 'reporter' do
- let(:current_user) { reporter }
+ context 'human user' do
+ context 'reporter' do
+ let(:current_user) { reporter }
- it { is_expected.to be_allowed(:read_dependency_proxy) }
- it { is_expected.to be_disallowed(:admin_dependency_proxy) }
- end
+ it_behaves_like 'allows dependency proxy read access but not admin'
+ end
- context 'developer' do
- let(:current_user) { developer }
+ context 'developer' do
+ let(:current_user) { developer }
+
+ it_behaves_like 'allows dependency proxy read access but not admin'
+ end
+
+ context 'maintainer' do
+ let(:current_user) { maintainer }
+
+ it_behaves_like 'allows dependency proxy read access but not admin'
+ it_behaves_like 'disabling admin_package feature flag'
+ end
+
+ context 'owner' do
+ let(:current_user) { owner }
- it { is_expected.to be_allowed(:read_dependency_proxy) }
- it { is_expected.to be_disallowed(:admin_dependency_proxy) }
+ it { is_expected.to be_allowed(:read_dependency_proxy) }
+ it { is_expected.to be_allowed(:admin_dependency_proxy) }
+
+ it_behaves_like 'disabling admin_package feature flag'
+ end
end
- context 'maintainer' do
- let(:current_user) { maintainer }
+ context 'deploy token user' do
+ let!(:group_deploy_token) do
+ create(:group_deploy_token, group: group, deploy_token: deploy_token)
+ end
+
+ subject { described_class.new(deploy_token, group) }
- it { is_expected.to be_allowed(:read_dependency_proxy) }
- it { is_expected.to be_disallowed(:admin_dependency_proxy) }
+ context 'with insufficient scopes' do
+ let_it_be(:deploy_token) { create(:deploy_token, :group) }
+
+ it_behaves_like 'disallows all dependency proxy access'
+ end
- it_behaves_like 'disabling admin_package feature flag'
+ context 'with sufficient scopes' do
+ let_it_be(:deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+
+ it_behaves_like 'allows dependency proxy read access but not admin'
+ end
end
- context 'owner' do
- let(:current_user) { owner }
+ context 'group access token user' do
+ let_it_be(:bot_user) { create(:user, :project_bot) }
+ let_it_be(:token) { create(:personal_access_token, user: bot_user, scopes: [Gitlab::Auth::READ_API_SCOPE]) }
+
+ subject { described_class.new(bot_user, group) }
- it { is_expected.to be_allowed(:read_dependency_proxy) }
- it { is_expected.to be_allowed(:admin_dependency_proxy) }
+ context 'not a member of the group' do
+ it_behaves_like 'disallows all dependency proxy access'
+ end
+
+ context 'a member of the group' do
+ before do
+ group.add_guest(bot_user)
+ end
- it_behaves_like 'disabling admin_package feature flag'
+ it_behaves_like 'allows dependency proxy read access but not admin'
+ end
end
- end
- end
- context 'deploy token access' do
- let!(:group_deploy_token) do
- create(:group_deploy_token, group: group, deploy_token: deploy_token)
- end
+ context 'all other user types' do
+ User::USER_TYPES.except(:human, :project_bot).each_value do |user_type|
+ context "with user_type #{user_type}" do
+ before do
+ current_user.update!(user_type: user_type)
+ end
- subject { described_class.new(deploy_token, group) }
+ context 'when the user has sufficient access' do
+ let(:current_user) { guest }
- context 'a deploy token with read_package_registry scope' do
- let(:deploy_token) { create(:deploy_token, :group, read_package_registry: true) }
+ it_behaves_like 'allows dependency proxy read access but not admin'
+ end
- it { is_expected.to be_allowed(:read_package) }
- it { is_expected.to be_allowed(:read_group) }
- it { is_expected.to be_disallowed(:create_package) }
+ context 'when the user does not have sufficient access' do
+ let(:current_user) { non_group_member }
+
+ it_behaves_like 'disallows all dependency proxy access'
+ end
+ end
+ end
+ end
end
+ end
- context 'a deploy token with write_package_registry scope' do
- let(:deploy_token) { create(:deploy_token, :group, write_package_registry: true) }
+ context 'package registry' do
+ context 'deploy token user' do
+ let!(:group_deploy_token) do
+ create(:group_deploy_token, group: group, deploy_token: deploy_token)
+ end
- it { is_expected.to be_allowed(:create_package) }
- it { is_expected.to be_allowed(:read_package) }
- it { is_expected.to be_allowed(:read_group) }
- it { is_expected.to be_disallowed(:destroy_package) }
- end
+ subject { described_class.new(deploy_token, group) }
- context 'a deploy token with dependency proxy scopes' do
- let_it_be(:deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+ context 'with read_package_registry scope' do
+ let(:deploy_token) { create(:deploy_token, :group, read_package_registry: true) }
- before do
- stub_config(dependency_proxy: { enabled: true })
+ it { is_expected.to be_allowed(:read_package) }
+ it { is_expected.to be_allowed(:read_group) }
+ it { is_expected.to be_disallowed(:create_package) }
end
- it { is_expected.to be_allowed(:read_dependency_proxy) }
- it { is_expected.to be_disallowed(:admin_dependency_proxy) }
+ context 'with write_package_registry scope' do
+ let(:deploy_token) { create(:deploy_token, :group, write_package_registry: true) }
+
+ it { is_expected.to be_allowed(:create_package) }
+ it { is_expected.to be_allowed(:read_package) }
+ it { is_expected.to be_allowed(:read_group) }
+ it { is_expected.to be_disallowed(:destroy_package) }
+ end
end
end
diff --git a/spec/policies/merge_request_policy_spec.rb b/spec/policies/merge_request_policy_spec.rb
index 285f52956eb..c21e1244402 100644
--- a/spec/policies/merge_request_policy_spec.rb
+++ b/spec/policies/merge_request_policy_spec.rb
@@ -462,37 +462,6 @@ RSpec.describe MergeRequestPolicy do
end
end
- context 'when enabling generate diff summary permission' do
- let_it_be(:project) { create(:project) }
- let_it_be(:mr) { create(:merge_request, target_project: project, source_project: project) }
- let_it_be(:user) { create(:user) }
- let(:policy) { permissions(user, mr) }
-
- context 'when can read_merge_request' do
- before do
- project.add_developer(user)
- end
-
- it 'allows to generate_diff_summary' do
- expect(policy).to be_allowed(:generate_diff_summary)
- end
- end
-
- context 'when can not read_merge_request' do
- it 'does not allow to generate_diff_summary' do
- expect(policy).not_to be_allowed(:generate_diff_summary)
- end
-
- context 'and when is the LLM bot' do
- let(:user) { create(:user, :llm_bot) }
-
- it 'allows to generate_diff_summary' do
- expect(policy).to be_allowed(:generate_diff_summary)
- end
- end
- end
- end
-
context 'when the author of the merge request is banned', feature_category: :insider_threat do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create(:user, :admin) }
diff --git a/spec/policies/organizations/organization_policy_spec.rb b/spec/policies/organizations/organization_policy_spec.rb
index 3fcfa63b1b2..7eed497d644 100644
--- a/spec/policies/organizations/organization_policy_spec.rb
+++ b/spec/policies/organizations/organization_policy_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
let_it_be(:current_user) { nil }
it { is_expected.to be_allowed(:read_organization) }
+ it { is_expected.to be_disallowed(:admin_organization) }
end
context 'when the user is an admin' do
@@ -34,11 +35,13 @@ RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
create :organization_user, organization: organization, user: current_user
end
- it { is_expected.to be_allowed(:read_organization_user) }
+ it { is_expected.to be_allowed(:admin_organization) }
it { is_expected.to be_allowed(:read_organization) }
+ it { is_expected.to be_allowed(:read_organization_user) }
end
context 'when the user is not part of the organization' do
+ it { is_expected.to be_disallowed(:admin_organization) }
it { is_expected.to be_disallowed(:read_organization_user) }
# All organizations are currently public, and hence they are allowed to be read
# even if the user is not a part of the organization.
diff --git a/spec/policies/project_group_link_policy_spec.rb b/spec/policies/project_group_link_policy_spec.rb
index 1047d3acb1e..9fcd6ead524 100644
--- a/spec/policies/project_group_link_policy_spec.rb
+++ b/spec/policies/project_group_link_policy_spec.rb
@@ -7,21 +7,75 @@ RSpec.describe ProjectGroupLinkPolicy, feature_category: :system_access do
let_it_be(:group2) { create(:group, :private) }
let_it_be(:project) { create(:project, :private) }
- let(:project_group_link) do
- create(:project_group_link, project: project, group: group2, group_access: Gitlab::Access::DEVELOPER)
+ subject(:policy) { described_class.new(user, project_group_link) }
+
+ describe 'destroy_project_group_link' do
+ let_it_be(:project_group_link) do
+ create(:project_group_link, project: project, group: group2, group_access: Gitlab::Access::DEVELOPER)
+ end
+
+ context 'when the user is a group owner' do
+ before_all do
+ group2.add_owner(user)
+ end
+
+ it 'can destroy group_project_link' do
+ expect(policy).to be_allowed(:destroy_project_group_link)
+ end
+
+ context 'when group link has owner access' do
+ it 'can destroy group_project_link' do
+ project_group_link.update!(group_access: Gitlab::Access::OWNER)
+
+ expect(policy).to be_allowed(:destroy_project_group_link)
+ end
+ end
+ end
+
+ context 'when user is a project maintainer' do
+ before do
+ project_group_link.project.add_maintainer(user)
+ end
+
+ context 'when group link has owner access' do
+ it 'cannot destroy group_project_link' do
+ project_group_link.update!(group_access: Gitlab::Access::OWNER)
+
+ expect(policy).to be_disallowed(:destroy_project_group_link)
+ end
+ end
+
+ context 'when group link has maintainer access' do
+ it 'can destroy group_project_link' do
+ project_group_link.update!(group_access: Gitlab::Access::MAINTAINER)
+
+ expect(policy).to be_allowed(:destroy_project_group_link)
+ end
+ end
+ end
+
+ context 'when user is not a project maintainer' do
+ it 'cannot destroy group_project_link' do
+ project_group_link.project.add_developer(user)
+
+ expect(policy).to be_disallowed(:destroy_project_group_link)
+ end
+ end
end
- subject(:policy) { described_class.new(user, project_group_link) }
+ describe 'manage_destroy' do
+ let_it_be(:project_group_link) do
+ create(:project_group_link, project: project, group: group2, group_access: Gitlab::Access::DEVELOPER)
+ end
- describe 'admin_project_group_link' do
context 'when the user is a group owner' do
before_all do
group2.add_owner(user)
end
context 'when user is not project maintainer' do
- it 'can admin group_project_link' do
- expect(policy).to be_allowed(:admin_project_group_link)
+ it 'can manage_destroy' do
+ expect(policy).to be_allowed(:manage_destroy)
end
end
@@ -30,32 +84,50 @@ RSpec.describe ProjectGroupLinkPolicy, feature_category: :system_access do
project_group_link.project.add_maintainer(user)
end
- it 'can admin group_project_link' do
- expect(policy).to be_allowed(:admin_project_group_link)
+ it 'can admin manage_destroy' do
+ expect(policy).to be_allowed(:manage_destroy)
end
end
end
context 'when user is not a group owner' do
context 'when user is a project maintainer' do
- it 'can admin group_project_link' do
+ before do
project_group_link.project.add_maintainer(user)
+ end
+
+ context 'when group link has owner access' do
+ it 'can manage_destroy' do
+ project_group_link.update!(group_access: Gitlab::Access::OWNER)
- expect(policy).to be_allowed(:admin_project_group_link)
+ expect(policy).to be_allowed(:manage_destroy)
+ end
+ end
+
+ context 'when group link has maintainer access' do
+ it 'can manage_destroy' do
+ project_group_link.update!(group_access: Gitlab::Access::MAINTAINER)
+
+ expect(policy).to be_allowed(:manage_destroy)
+ end
end
end
context 'when user is not a project maintainer' do
- it 'cannot admin group_project_link' do
+ it 'cannot manage_destroy' do
project_group_link.project.add_developer(user)
- expect(policy).to be_disallowed(:admin_project_group_link)
+ expect(policy).to be_disallowed(:manage_destroy)
end
end
end
end
describe 'read_shared_with_group' do
+ let_it_be(:project_group_link) do
+ create(:project_group_link, project: project, group: group2, group_access: Gitlab::Access::MAINTAINER)
+ end
+
context 'when the user is a project member' do
before_all do
project.add_guest(user)
@@ -83,9 +155,9 @@ RSpec.describe ProjectGroupLinkPolicy, feature_category: :system_access do
end
context 'when the group is public' do
- let_it_be(:group2) { create(:group, :public) }
-
it 'can read_shared_with_group' do
+ group2.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
expect(policy).to be_allowed(:read_shared_with_group)
end
end
@@ -102,4 +174,68 @@ RSpec.describe ProjectGroupLinkPolicy, feature_category: :system_access do
end
end
end
+
+ describe 'manage_owners' do
+ let_it_be(:project_group_link) do
+ create(:project_group_link, project: project, group: group2, group_access: Gitlab::Access::MAINTAINER)
+ end
+
+ context 'when the user is a project owner' do
+ before_all do
+ project.add_owner(user)
+ end
+
+ it 'can manage_owners' do
+ expect(policy).to be_allowed(:manage_owners)
+ end
+ end
+
+ context 'when the user is a project maintainer' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ it 'cannot manage_owners' do
+ expect(policy).to be_disallowed(:manage_owners)
+ end
+ end
+ end
+
+ describe 'manage_group_link_with_owner_access' do
+ context 'when group link has owner access' do
+ let_it_be(:project_group_link) do
+ create(:project_group_link, project: project, group: group2, group_access: Gitlab::Access::OWNER)
+ end
+
+ context 'when the user is a project owner' do
+ before_all do
+ project.add_owner(user)
+ end
+
+ it 'can manage_group_link_with_owner_access' do
+ expect(policy).to be_allowed(:manage_group_link_with_owner_access)
+ end
+ end
+
+ context 'when the user is a project maintainer' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ it 'cannot manage_group_link_with_owner_access' do
+ expect(policy).to be_disallowed(:manage_group_link_with_owner_access)
+ end
+ end
+ end
+
+ context 'when group link has maintainer access' do
+ let_it_be(:project_group_link) do
+ create(:project_group_link, project: project, group: group2, group_access: Gitlab::Access::MAINTAINER)
+ end
+
+ it 'can manage_group_link_with_owner_access' do
+ expect(policy).to be_allowed(:manage_group_link_with_owner_access)
+ end
+ end
+ end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index fda889ff422..a9a4575d747 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -2573,7 +2573,7 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
RSpec.shared_examples 'CI_JOB_TOKEN enforces the expected permissions' do
with_them do
let(:current_user) { public_send(user_role) }
- let(:project) { public_send("#{project_visibility}_project") }
+ let(:project) { public_project }
let(:job) { build_stubbed(:ci_build, project: scope_project, user: current_user) }
let(:scope_project) do
@@ -2607,20 +2607,19 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
end
end
- # Remove project_visibility on FF restrict_ci_job_token_for_public_and_internal_projects cleanup
- where(:project_visibility, :user_role, :external_user, :scope_project_type, :token_scope_enabled, :result) do
- :public | :reporter | false | :same | true | true
- :public | :reporter | true | :same | true | true
- :public | :reporter | false | :same | false | true
- :public | :reporter | false | :different | true | false
- :public | :reporter | true | :different | true | false
- :public | :reporter | false | :different | false | true
- :public | :guest | false | :same | true | true
- :public | :guest | true | :same | true | true
- :public | :guest | false | :same | false | true
- :public | :guest | false | :different | true | false
- :public | :guest | true | :different | true | false
- :public | :guest | false | :different | false | true
+ where(:user_role, :external_user, :scope_project_type, :token_scope_enabled, :result) do
+ :reporter | false | :same | true | true
+ :reporter | true | :same | true | true
+ :reporter | false | :same | false | true
+ :reporter | false | :different | true | false
+ :reporter | true | :different | true | false
+ :reporter | false | :different | false | true
+ :guest | false | :same | true | true
+ :guest | true | :same | true | true
+ :guest | false | :same | false | true
+ :guest | false | :different | true | false
+ :guest | true | :different | true | false
+ :guest | false | :different | false | true
end
include_examples "CI_JOB_TOKEN enforces the expected permissions"
@@ -2663,61 +2662,8 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
permissions.each { |p| expect_disallowed(p) }
end
-
- context "with restrict_ci_job_token_for_public_and_internal_projects disabled" do
- before do
- stub_feature_flags(restrict_ci_job_token_for_public_and_internal_projects: false)
- end
-
- it 'allows all permissions for private' do
- project.project_feature.update!("#{feature}_access_level": ProjectFeature::PRIVATE)
-
- permissions.each { |p| expect_allowed(p) }
- end
- end
end
end
-
- context "with FF restrict_ci_job_token_for_public_and_internal_projects disabled" do
- before do
- stub_feature_flags(restrict_ci_job_token_for_public_and_internal_projects: false)
- end
-
- where(:project_visibility, :user_role, :external_user, :scope_project_type, :token_scope_enabled, :result) do
- :private | :reporter | false | :same | true | true
- :private | :reporter | false | :same | false | true
- :private | :reporter | false | :different | true | false
- :private | :reporter | false | :different | false | true
- :private | :guest | false | :same | true | true
- :private | :guest | false | :same | false | true
- :private | :guest | false | :different | true | false
- :private | :guest | false | :different | false | true
-
- :internal | :reporter | false | :same | true | true
- :internal | :reporter | true | :same | true | true
- :internal | :reporter | false | :same | false | true
- :internal | :reporter | false | :different | true | true
- :internal | :reporter | true | :different | true | false
- :internal | :reporter | false | :different | false | true
- :internal | :guest | false | :same | true | true
- :internal | :guest | true | :same | true | true
- :internal | :guest | false | :same | false | true
- :internal | :guest | false | :different | true | true
- :internal | :guest | true | :different | true | false
- :internal | :guest | false | :different | false | true
-
- :public | :reporter | false | :same | true | true
- :public | :reporter | false | :same | false | true
- :public | :reporter | false | :different | true | true
- :public | :reporter | false | :different | false | true
- :public | :guest | false | :same | true | true
- :public | :guest | false | :same | false | true
- :public | :guest | false | :different | true | true
- :public | :guest | false | :different | false | true
- end
-
- include_examples "CI_JOB_TOKEN enforces the expected permissions"
- end
end
describe 'container_image policies' do
@@ -3321,37 +3267,46 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
end
describe 'read_model_registry' do
- let(:project_with_feature) { project }
- let(:current_user) { owner }
-
- before do
- stub_feature_flags(model_registry: false)
- stub_feature_flags(model_registry: project_with_feature) if project_with_feature
- end
+ using RSpec::Parameterized::TableSyntax
- context 'feature flag is enabled' do
- specify { is_expected.to be_allowed(:read_model_registry) }
+ where(:feature_flag_enabled, :current_user, :access_level, :allowed) do
+ false | ref(:owner) | Featurable::ENABLED | false
+ true | ref(:guest) | Featurable::ENABLED | true
+ true | ref(:guest) | Featurable::PRIVATE | true
+ true | ref(:guest) | Featurable::DISABLED | false
+ true | ref(:non_member) | Featurable::ENABLED | true
+ true | ref(:non_member) | Featurable::PRIVATE | false
+ true | ref(:non_member) | Featurable::DISABLED | false
end
+ with_them do
+ before do
+ stub_feature_flags(model_registry: feature_flag_enabled)
+ project.project_feature.update!(model_registry_access_level: access_level)
+ end
- context 'feature flag is disabled' do
- let(:project_with_feature) { nil }
-
- specify { is_expected.not_to be_allowed(:read_model_registry) }
+ if params[:allowed]
+ it { expect_allowed(:read_model_registry) }
+ else
+ it { expect_disallowed(:read_model_registry) }
+ end
end
end
describe 'write_model_registry' do
using RSpec::Parameterized::TableSyntax
- where(:ff_model_registry_enabled, :current_user, :allowed) do
- true | ref(:reporter) | true
- true | ref(:guest) | false
- false | ref(:owner) | false
+ where(:feature_flag_enabled, :current_user, :access_level, :allowed) do
+ false | ref(:owner) | Featurable::ENABLED | false
+ true | ref(:reporter) | Featurable::ENABLED | true
+ true | ref(:reporter) | Featurable::PRIVATE | true
+ true | ref(:reporter) | Featurable::DISABLED | false
+ true | ref(:guest) | Featurable::ENABLED | false
+ true | ref(:non_member) | Featurable::ENABLED | false
end
with_them do
before do
- stub_feature_flags(model_registry: false)
- stub_feature_flags(model_registry: project) if ff_model_registry_enabled
+ stub_feature_flags(model_registry: feature_flag_enabled)
+ project.project_feature.update!(model_registry_access_level: access_level)
end
if params[:allowed]
diff --git a/spec/policies/protected_branch_access_policy_spec.rb b/spec/policies/protected_branch_access_policy_spec.rb
index 68a130d666a..6725cde0cb1 100644
--- a/spec/policies/protected_branch_access_policy_spec.rb
+++ b/spec/policies/protected_branch_access_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProtectedBranchAccessPolicy do
+RSpec.describe ProtectedBranchAccessPolicy, feature_category: :source_code_management do
let(:user) { create(:user) }
let(:protected_branch_access) { create(:protected_branch_merge_access_level) }
let(:project) { protected_branch_access.protected_branch.project }
@@ -14,9 +14,7 @@ RSpec.describe ProtectedBranchAccessPolicy do
project.add_maintainer(user)
end
- it 'can be read' do
- is_expected.to be_allowed(:read_protected_branch)
- end
+ it_behaves_like 'allows protected branch crud'
end
context 'as guests' do
@@ -24,8 +22,6 @@ RSpec.describe ProtectedBranchAccessPolicy do
project.add_guest(user)
end
- it 'can not be read' do
- is_expected.to be_disallowed(:read_protected_branch)
- end
+ it_behaves_like 'disallows protected branch crud'
end
end
diff --git a/spec/policies/protected_branch_policy_spec.rb b/spec/policies/protected_branch_policy_spec.rb
index d676de14735..d2040a0d334 100644
--- a/spec/policies/protected_branch_policy_spec.rb
+++ b/spec/policies/protected_branch_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProtectedBranchPolicy do
+RSpec.describe ProtectedBranchPolicy, feature_category: :source_code_management do
let(:user) { create(:user) }
let(:name) { 'feature' }
let(:protected_branch) { create(:protected_branch, name: name) }
@@ -10,47 +10,27 @@ RSpec.describe ProtectedBranchPolicy do
subject { described_class.new(user, protected_branch) }
- context 'as maintainers' do
+ context 'as a maintainer' do
before do
project.add_maintainer(user)
end
- it 'can be read' do
- is_expected.to be_allowed(:read_protected_branch)
- end
-
- it 'can be created' do
- is_expected.to be_allowed(:create_protected_branch)
- end
+ it_behaves_like 'allows protected branch crud'
+ end
- it 'can be updated' do
- is_expected.to be_allowed(:update_protected_branch)
+ context 'as a developer' do
+ before do
+ project.add_developer(user)
end
- it 'can be destroyed' do
- is_expected.to be_allowed(:destroy_protected_branch)
- end
+ it_behaves_like 'disallows protected branch crud'
end
- context 'as guests' do
+ context 'as a guest' do
before do
project.add_guest(user)
end
- it 'can be read' do
- is_expected.to be_disallowed(:read_protected_branch)
- end
-
- it 'can be created' do
- is_expected.to be_disallowed(:create_protected_branch)
- end
-
- it 'can be updated' do
- is_expected.to be_disallowed(:update_protected_branch)
- end
-
- it 'cannot be destroyed' do
- is_expected.to be_disallowed(:destroy_protected_branch)
- end
+ it_behaves_like 'disallows protected branch crud'
end
end
diff --git a/spec/presenters/issue_presenter_spec.rb b/spec/presenters/issue_presenter_spec.rb
index 07a9f8015e9..6c971a55e74 100644
--- a/spec/presenters/issue_presenter_spec.rb
+++ b/spec/presenters/issue_presenter_spec.rb
@@ -73,8 +73,8 @@ RSpec.describe IssuePresenter do
end
end
- describe '#project_emails_disabled?' do
- subject { presenter.project_emails_disabled? }
+ describe '#parent_emails_disabled?' do
+ subject { presenter.parent_emails_disabled? }
it 'returns false when emails notifications is enabled for project' do
is_expected.to be(false)
@@ -87,6 +87,22 @@ RSpec.describe IssuePresenter do
it { is_expected.to be(true) }
end
+
+ context 'for group-level issue' do
+ let(:presented_issue) { create(:issue, :group_level, namespace: group) }
+
+ it 'returns false when email notifications are enabled for group' do
+ is_expected.to be(false)
+ end
+
+ context 'when email notifications are disabled for group' do
+ before do
+ allow(group).to receive(:emails_disabled?).and_return(true)
+ end
+
+ it { is_expected.to be(true) }
+ end
+ end
end
describe '#service_desk_reply_to' do
diff --git a/spec/presenters/ml/candidate_details_presenter_spec.rb b/spec/presenters/ml/candidate_details_presenter_spec.rb
index 34de1e66a8a..98b152f727c 100644
--- a/spec/presenters/ml/candidate_details_presenter_spec.rb
+++ b/spec/presenters/ml/candidate_details_presenter_spec.rb
@@ -3,13 +3,18 @@
require 'spec_helper'
RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
- let_it_be(:user) { create(:user, :with_avatar) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
- let_it_be(:project) { create(:project, :private, creator: user) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
- let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
+ let_it_be(:user) { build_stubbed(:user, :with_avatar) }
+ let_it_be(:project) { build_stubbed(:project, :private, creator: user) }
+ let_it_be(:experiment) { build_stubbed(:ml_experiments, user: user, project: project, iid: 100) }
let_it_be(:candidate) do
- create(:ml_candidates, :with_artifact, experiment: experiment, user: user, project: project) # rubocop:disable RSpec/FactoryBot/AvoidCreate
+ build_stubbed(:ml_candidates, :with_artifact, experiment: experiment, user: user, project: project,
+ internal_id: 100)
end
+ let_it_be(:pipeline) { build_stubbed(:ci_pipeline, project: project, user: user) }
+ let_it_be(:build) { candidate.ci_build = build_stubbed(:ci_build, pipeline: pipeline, user: user) }
+ let_it_be(:mr) { pipeline.merge_request = build_stubbed(:merge_request, source_project: project) }
+
let_it_be(:metrics) do
[
build_stubbed(:ml_candidate_metrics, name: 'metric1', value: 0.1, candidate: candidate),
@@ -27,16 +32,61 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
]
end
- let(:include_ci_job) { true }
-
- subject { Gitlab::Json.parse(described_class.new(candidate, include_ci_job).present)['candidate'] }
+ let(:can_user_read_build) { true }
before do
allow(candidate).to receive(:metrics).and_return(metrics)
allow(candidate).to receive(:params).and_return(params)
+
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_build, candidate.ci_build)
+ .and_return(can_user_read_build)
+ end
+
+ describe '#present' do
+ subject { described_class.new(candidate, user).present }
+
+ it 'presents the candidate correctly' do
+ is_expected.to eq(
+ {
+ candidate: {
+ info: {
+ iid: candidate.iid,
+ eid: candidate.eid,
+ path_to_artifact: "/#{project.full_path}/-/packages/#{candidate.artifact.id}",
+ experiment_name: candidate.experiment.name,
+ path_to_experiment: "/#{project.full_path}/-/ml/experiments/#{experiment.iid}",
+ path: "/#{project.full_path}/-/ml/candidates/#{candidate.iid}",
+ status: candidate.status,
+ ci_job: {
+ merge_request: {
+ iid: mr.iid,
+ path: "/#{project.full_path}/-/merge_requests/#{mr.iid}",
+ title: mr.title
+ },
+ name: "test",
+ path: "/#{project.full_path}/-/jobs/#{build.id}",
+ user: {
+ avatar: user.avatar_url,
+ name: pipeline.user.name,
+ path: "/#{pipeline.user.username}",
+ username: pipeline.user.username
+ }
+ }
+ },
+ params: params,
+ metrics: metrics,
+ metadata: []
+ }
+ }
+ )
+ end
end
- describe '#execute' do
+ describe '#present_as_json' do
+ subject { Gitlab::Json.parse(described_class.new(candidate, user).present_as_json)['candidate'] }
+
context 'when candidate has metrics, params and artifacts' do
it 'generates the correct params' do
expect(subject['params']).to include(
@@ -59,9 +109,9 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
expected_info = {
'iid' => candidate.iid,
'eid' => candidate.eid,
- 'path_to_artifact' => "/#{project.full_path}/-/packages/#{candidate.artifact.id}",
- 'experiment_name' => candidate.experiment.name,
- 'path_to_experiment' => "/#{project.full_path}/-/ml/experiments/#{experiment.iid}",
+ 'pathToArtifact' => "/#{project.full_path}/-/packages/#{candidate.artifact.id}",
+ 'experimentName' => candidate.experiment.name,
+ 'pathToExperiment' => "/#{project.full_path}/-/ml/experiments/#{experiment.iid}",
'status' => 'running',
'path' => "/#{project.full_path}/-/ml/candidates/#{candidate.iid}"
}
@@ -71,11 +121,6 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
end
context 'when candidate has job' do
- let_it_be(:pipeline) { build_stubbed(:ci_pipeline, project: project, user: user) }
- let_it_be(:build) { candidate.ci_build = build_stubbed(:ci_build, pipeline: pipeline, user: user) }
-
- let(:can_read_build) { true }
-
it 'generates the correct ci' do
expected_info = {
'path' => "/#{project.full_path}/-/jobs/#{build.id}",
@@ -88,25 +133,18 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
}
}
- expect(subject.dig('info', 'ci_job')).to include(expected_info)
+ expect(subject.dig('info', 'ciJob')).to include(expected_info)
end
context 'when build user is nil' do
it 'does not include build user info' do
- expected_info = {
- 'path' => "/#{project.full_path}/-/jobs/#{build.id}",
- 'name' => 'test'
- }
-
allow(build).to receive(:user).and_return(nil)
- expect(subject.dig('info', 'ci_job')).to eq(expected_info)
+ expect(subject.dig('info', 'ciJob')).not_to include(:user)
end
end
context 'and job is from MR' do
- let_it_be(:mr) { pipeline.merge_request = build_stubbed(:merge_request, source_project: project) }
-
it 'generates the correct ci' do
expected_info = {
'path' => "/#{project.full_path}/-/merge_requests/#{mr.iid}",
@@ -114,15 +152,15 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
'title' => mr.title
}
- expect(subject.dig('info', 'ci_job', 'merge_request')).to include(expected_info)
+ expect(subject.dig('info', 'ciJob', 'mergeRequest')).to include(expected_info)
end
end
context 'when ci job is not to be added' do
- let(:include_ci_job) { false }
+ let(:can_user_read_build) { false }
- it 'ci_job is nil' do
- expect(subject.dig('info', 'ci_job')).to be_nil
+ it 'ciJob is nil' do
+ expect(subject.dig('info', 'ciJob')).to be_nil
end
end
end
diff --git a/spec/presenters/ml/candidate_presenter_spec.rb b/spec/presenters/ml/candidate_presenter_spec.rb
new file mode 100644
index 00000000000..f24b53d84e8
--- /dev/null
+++ b/spec/presenters/ml/candidate_presenter_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::CandidatePresenter, feature_category: :mlops do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:candidate) { build_stubbed(:ml_candidates, :with_artifact, internal_id: 1, project: project) }
+ let_it_be(:presenter) { candidate.present }
+
+ describe '#path' do
+ subject { presenter.path }
+
+ it { is_expected.to eq("/#{project.full_path}/-/ml/candidates/#{candidate.iid}") }
+ end
+
+ describe '#artifact_path' do
+ subject { presenter.artifact_path }
+
+ it { is_expected.to eq("/#{project.full_path}/-/packages/#{candidate.package_id}") }
+ end
+end
diff --git a/spec/presenters/ml/model_presenter_spec.rb b/spec/presenters/ml/model_presenter_spec.rb
index 31bf4e7ad6c..92398a8bb4d 100644
--- a/spec/presenters/ml/model_presenter_spec.rb
+++ b/spec/presenters/ml/model_presenter_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe Ml::ModelPresenter, feature_category: :mlops do
let_it_be(:model2) { build_stubbed(:ml_models, :with_latest_version_and_package, project: project) }
let_it_be(:model3) { build_stubbed(:ml_models, :with_versions, project: project) }
+ let_it_be(:model4) { build_stubbed(:ml_models, project: project) }
+
describe '#latest_version_name' do
subject { model.present.latest_version_name }
@@ -42,6 +44,18 @@ RSpec.describe Ml::ModelPresenter, feature_category: :mlops do
end
end
+ describe '#candidate_count' do
+ let(:candidates) { build_stubbed_list(:ml_candidates, 2, experiment: model4.default_experiment) }
+
+ before do
+ allow(model4).to receive(:candidates).and_return(candidates)
+ end
+
+ subject { model4.present.candidate_count }
+
+ it { is_expected.to eq(2) }
+ end
+
describe '#latest_package_path' do
subject { model.present.latest_package_path }
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 48db41ea8e3..a7e6bbf19d9 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -228,12 +228,8 @@ RSpec.describe ProjectPresenter do
let_it_be(:project) { create(:project, :empty_repo) }
describe '#storage_anchor_data' do
- it 'returns storage data' do
- expect(presenter.storage_anchor_data).to have_attributes(
- is_link: true,
- label: a_string_including('0 B'),
- link: nil
- )
+ it 'does not return storage data' do
+ expect(presenter.storage_anchor_data).to be_nil
end
end
@@ -282,12 +278,8 @@ RSpec.describe ProjectPresenter do
let(:presenter) { described_class.new(project, current_user: user) }
describe '#storage_anchor_data' do
- it 'returns storage data without usage quotas link for non-admin users' do
- expect(presenter.storage_anchor_data).to have_attributes(
- is_link: true,
- label: a_string_including('0 B'),
- link: nil
- )
+ it 'does not return storage data for non-admin users' do
+ expect(presenter.storage_anchor_data).to be(nil)
end
it 'returns storage data with usage quotas link for admin users' do
@@ -428,6 +420,10 @@ RSpec.describe ProjectPresenter do
end
describe '#new_file_anchor_data' do
+ before do
+ stub_feature_flags(project_overview_reorg: false)
+ end
+
it 'returns new file data if user can push' do
project.add_developer(user)
@@ -751,6 +747,7 @@ RSpec.describe ProjectPresenter do
subject(:empty_repo_statistics_buttons) { presenter.empty_repo_statistics_buttons }
before do
+ stub_feature_flags(project_overview_reorg: false)
allow(project).to receive(:auto_devops_enabled?).and_return(false)
end
diff --git a/spec/requests/acme_challenges_controller_spec.rb b/spec/requests/acme_challenges_controller_spec.rb
new file mode 100644
index 00000000000..f37aefed488
--- /dev/null
+++ b/spec/requests/acme_challenges_controller_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AcmeChallengesController, type: :request, feature_category: :pages do
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get acme_challenge_path }
+ end
+end
diff --git a/spec/requests/api/admin/dictionary_spec.rb b/spec/requests/api/admin/dictionary_spec.rb
index effd3572423..b35aacd6ba0 100644
--- a/spec/requests/api/admin/dictionary_spec.rb
+++ b/spec/requests/api/admin/dictionary_spec.rb
@@ -29,29 +29,13 @@ RSpec.describe API::Admin::Dictionary, feature_category: :database do
end
end
- context 'with a malicious table_name' do
- it 'returns an error' do
- get api("/admin/databases/main/dictionary/tables/%2E%2E%2Fpasswords.yml", admin, admin_mode: true)
-
- expect(response).to have_gitlab_http_status(:error)
- end
- end
-
context 'when the params are correct' do
- let(:dictionary_dir) { Rails.root.join('spec/fixtures') }
- let(:path_file) { Rails.root.join(dictionary_dir, 'achievements.yml') }
-
it 'fetches the table dictionary' do
- allow(Gitlab::Database::GitlabSchema).to receive(:dictionary_paths).and_return([dictionary_dir])
-
- expect(Gitlab::PathTraversal).to receive(:check_allowed_absolute_path_and_path_traversal!).twice.with(
- path_file.to_s, [dictionary_dir.to_s]).and_call_original
-
show_table_dictionary
aggregate_failures "testing response" do
expect(json_response['table_name']).to eq('achievements')
- expect(json_response['feature_categories']).to eq(['feature_category_example'])
+ expect(json_response['feature_categories']).to eq(['user_profile'])
end
end
end
diff --git a/spec/requests/api/ci/job_artifacts_spec.rb b/spec/requests/api/ci/job_artifacts_spec.rb
index b96ba356855..a8c09a5191d 100644
--- a/spec/requests/api/ci/job_artifacts_spec.rb
+++ b/spec/requests/api/ci/job_artifacts_spec.rb
@@ -187,7 +187,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
end
context 'when project is public with artifacts that are non public' do
- let(:job) { create(:ci_build, :artifacts, :with_private_artifacts_config, pipeline: pipeline) }
+ let(:job) { create(:ci_build, :private_artifacts, :with_private_artifacts_config, pipeline: pipeline) }
it 'rejects access to artifacts' do
project.update_column(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
@@ -197,21 +197,6 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
expect(response).to have_gitlab_http_status(:forbidden)
end
-
- context 'with the non_public_artifacts feature flag disabled' do
- before do
- stub_feature_flags(non_public_artifacts: false)
- end
-
- it 'allows access to artifacts' do
- project.update_column(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
- project.update_column(:public_builds, true)
-
- get_artifact_file(artifact)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
end
context 'when project is public with builds access disabled' do
@@ -433,7 +418,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
context 'when public project guest and artifacts are non public' do
let(:api_user) { guest }
- let(:job) { create(:ci_build, :artifacts, :with_private_artifacts_config, pipeline: pipeline) }
+ let(:job) { create(:ci_build, :private_artifacts, :with_private_artifacts_config, pipeline: pipeline) }
before do
project.update_column(:visibility_level,
@@ -445,17 +430,6 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
it 'rejects access and hides existence of artifacts' do
expect(response).to have_gitlab_http_status(:forbidden)
end
-
- context 'with the non_public_artifacts feature flag disabled' do
- before do
- stub_feature_flags(non_public_artifacts: false)
- get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
- end
-
- it 'allows access to artifacts' do
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
end
it 'does not return job artifacts if not uploaded' do
@@ -639,7 +613,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
end
context 'when project is public with non public artifacts' do
- let(:job) { create(:ci_build, :artifacts, :with_private_artifacts_config, pipeline: pipeline, user: api_user) }
+ let(:job) { create(:ci_build, :private_artifacts, :with_private_artifacts_config, pipeline: pipeline, user: api_user) }
let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
let(:public_builds) { true }
@@ -651,18 +625,6 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
end
-
- context 'with the non_public_artifacts feature flag disabled' do
- before do
- stub_feature_flags(non_public_artifacts: false)
- end
-
- it 'allows access to artifacts', :sidekiq_might_not_need_inline do
- get_artifact_file(artifact)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
end
context 'when project is private' do
diff --git a/spec/requests/api/ci/pipeline_schedules_spec.rb b/spec/requests/api/ci/pipeline_schedules_spec.rb
index a4bb379d01c..f534b093b7c 100644
--- a/spec/requests/api/ci/pipeline_schedules_spec.rb
+++ b/spec/requests/api/ci/pipeline_schedules_spec.rb
@@ -241,7 +241,7 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
let(:url) { "/projects/#{project.id}/pipeline_schedules/#{pipeline_schedule.id}/pipelines" }
matcher :return_pipeline_schedule_pipelines_successfully do
- match_unless_raises do |reponse|
+ match_unless_raises do |response|
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/pipelines')
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 2a870a25ea6..3d6d86335eb 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -272,16 +272,19 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(json_response['job_info']).to include(expected_job_info)
expect(json_response['git_info']).to eq(expected_git_info)
expect(json_response['image']).to eq(
- { 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [], 'pull_policy' => nil }
+ { 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [], 'executor_opts' => {},
+ 'pull_policy' => nil }
)
expect(json_response['services']).to eq(
[
{ 'name' => 'postgres', 'entrypoint' => nil, 'alias' => nil, 'command' => nil, 'ports' => [],
- 'variables' => nil, 'pull_policy' => nil },
- { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh', 'alias' => 'docker', 'command' => 'sleep 30',
- 'ports' => [], 'variables' => [], 'pull_policy' => nil },
+ 'variables' => nil, 'executor_opts' => {}, 'pull_policy' => nil },
+ { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh', 'alias' => 'docker',
+ 'command' => 'sleep 30', 'ports' => [], 'variables' => [], 'executor_opts' => {},
+ 'pull_policy' => nil },
{ 'name' => 'mysql:latest', 'entrypoint' => nil, 'alias' => nil, 'command' => nil, 'ports' => [],
- 'variables' => [{ 'key' => 'MYSQL_ROOT_PASSWORD', 'value' => 'root123.' }], 'pull_policy' => nil }
+ 'variables' => [{ 'key' => 'MYSQL_ROOT_PASSWORD', 'value' => 'root123.' }], 'executor_opts' => {},
+ 'pull_policy' => nil }
])
expect(json_response['steps']).to eq(expected_steps)
expect(json_response['hooks']).to eq(expected_hooks)
@@ -920,6 +923,41 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end
+ context 'when image has docker options' do
+ let(:job) { create(:ci_build, :pending, :queued, pipeline: pipeline, options: options) }
+
+ let(:options) do
+ {
+ image: {
+ name: 'ruby',
+ executor_opts: {
+ docker: {
+ platform: 'amd64'
+ }
+ }
+ }
+ }
+ end
+
+ it 'returns the image with docker options' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'image' => { 'name' => 'ruby',
+ 'executor_opts' => {
+ 'docker' => {
+ 'platform' => 'amd64'
+ }
+ },
+ 'pull_policy' => nil,
+ 'entrypoint' => nil,
+ 'ports' => [] }
+ )
+ end
+ end
+
context 'when image has pull_policy' do
let(:job) { create(:ci_build, :pending, :queued, pipeline: pipeline, options: options) }
@@ -938,7 +976,11 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include(
'id' => job.id,
- 'image' => { 'name' => 'ruby', 'pull_policy' => ['if-not-present'], 'entrypoint' => nil, 'ports' => [] }
+ 'image' => { 'name' => 'ruby',
+ 'executor_opts' => {},
+ 'pull_policy' => ['if-not-present'],
+ 'entrypoint' => nil,
+ 'ports' => [] }
)
end
end
@@ -962,7 +1004,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(json_response).to include(
'id' => job.id,
'services' => [{ 'alias' => nil, 'command' => nil, 'entrypoint' => nil, 'name' => 'postgres:11.9',
- 'ports' => [], 'pull_policy' => ['if-not-present'], 'variables' => [] }]
+ 'ports' => [], 'executor_opts' => {}, 'pull_policy' => ['if-not-present'],
+ 'variables' => [] }]
)
end
end
diff --git a/spec/requests/api/ci/runner/jobs_request_yamls_spec.rb b/spec/requests/api/ci/runner/jobs_request_yamls_spec.rb
new file mode 100644
index 00000000000..f399c3e310e
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_request_yamls_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :continuous_integration do
+ include StubGitlabCalls
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, shared_runners_enabled: false) }
+ let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
+
+ let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
+
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ Dir[Rails.root.join("spec/requests/api/ci/runner/yamls/*.yml")].each do |yml_file|
+ context "for #{File.basename(yml_file)}" do
+ let(:yaml_content) { YAML.load_file(yml_file) }
+ let(:gitlab_ci_yml) { yaml_content.fetch("gitlab_ci") }
+ let(:request_response) { yaml_content.fetch("request_response") }
+
+ it 'runs a job' do
+ stub_ci_pipeline_yaml_file(YAML.dump(gitlab_ci_yml))
+
+ pipeline_response = create_pipeline!
+ expect(pipeline_response).to be_success, pipeline_response.message
+ expect(pipeline_response.payload).to be_created_successfully
+ expect(pipeline_response.payload.builds).to be_one
+
+ build = pipeline_response.payload.builds.first
+
+ process_pipeline!(pipeline_response.payload)
+ expect(build.reload).to be_pending
+
+ request_job(runner.token)
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response.headers['Content-Type']).to eq('application/json')
+ expect(json_response).to include('id' => build.id, 'token' => build.token)
+ expect(json_response).to include(request_response)
+ end
+ end
+ end
+
+ def create_pipeline!
+ params = { ref: 'master',
+ before: '00000000',
+ after: project.commit.id,
+ commits: [{ message: 'some commit' }] }
+
+ Ci::CreatePipelineService.new(project, user, params).execute(:push)
+ end
+
+ def process_pipeline!(pipeline)
+ PipelineProcessWorker.new.perform(pipeline.id)
+ end
+
+ def request_job(token, **params)
+ new_params = params.merge(token: token)
+ post api('/jobs/request'), params: new_params.to_json,
+ headers: { 'User-Agent' => user_agent, 'Content-Type': 'application/json' }
+ end
+end
diff --git a/spec/requests/api/ci/runner/runners_delete_spec.rb b/spec/requests/api/ci/runner/runners_delete_spec.rb
index d1488828bad..61420afd578 100644
--- a/spec/requests/api/ci/runner/runners_delete_spec.rb
+++ b/spec/requests/api/ci/runner/runners_delete_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :runner_fleet do
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :fleet_visibility do
include StubGitlabCalls
include RedisHelpers
include WorkhorseHelpers
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index 1490172d1c3..748efe3cd54 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :runner_fleet do
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :fleet_visibility do
describe '/api/v4/runners' do
describe 'POST /api/v4/runners' do
it_behaves_like 'runner migrations backoff' do
diff --git a/spec/requests/api/ci/runner/runners_reset_spec.rb b/spec/requests/api/ci/runner/runners_reset_spec.rb
index 03cb6238fc1..92de1276dbb 100644
--- a/spec/requests/api/ci/runner/runners_reset_spec.rb
+++ b/spec/requests/api/ci/runner/runners_reset_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :runner_fleet do
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :fleet_visibility do
include StubGitlabCalls
include RedisHelpers
include WorkhorseHelpers
diff --git a/spec/requests/api/ci/runner/yamls/README.md b/spec/requests/api/ci/runner/yamls/README.md
new file mode 100644
index 00000000000..db8ef51ff9c
--- /dev/null
+++ b/spec/requests/api/ci/runner/yamls/README.md
@@ -0,0 +1,15 @@
+# .gitlab-ci.yml end-to-end tests
+
+The purpose of this folder is to provide a single job `.gitlab-ci.yml`
+that will be validated against the end-to-end response that is sent to the runner.
+
+This allows us to easily test, end-to-end, all CI job transformations
+and their impact on how such a job is rendered to be executed by the GitLab Runner.
+
+```yaml
+gitlab_ci:
+ # .gitlab-ci.yml to stub
+
+request_response:
+ # exact payload that is checked as returned by `/api/v4/jobs/request`
+```
diff --git a/spec/requests/api/ci/runner/yamls/image-basic.yml b/spec/requests/api/ci/runner/yamls/image-basic.yml
new file mode 100644
index 00000000000..0c01dbc6e8b
--- /dev/null
+++ b/spec/requests/api/ci/runner/yamls/image-basic.yml
@@ -0,0 +1,19 @@
+gitlab_ci:
+ rspec:
+ image: alpine:latest
+ script: echo Hello World
+
+request_response:
+ image:
+ name: alpine:latest
+ entrypoint: null
+ executor_opts: {}
+ ports: []
+ pull_policy: null
+ steps:
+ - name: script
+ script: ["echo Hello World"]
+ timeout: 3600
+ when: on_success
+ allow_failure: false
+ services: []
diff --git a/spec/requests/api/ci/runner/yamls/image-executor_opts-platform.yml b/spec/requests/api/ci/runner/yamls/image-executor_opts-platform.yml
new file mode 100644
index 00000000000..62e301f2e9a
--- /dev/null
+++ b/spec/requests/api/ci/runner/yamls/image-executor_opts-platform.yml
@@ -0,0 +1,25 @@
+gitlab_ci:
+ rspec:
+ image:
+ name: alpine:latest
+ docker:
+ platform: amd64
+ script: echo Hello World
+
+request_response:
+ image:
+ name: alpine:latest
+ entrypoint: null
+ executor_opts:
+ docker:
+ platform: amd64
+ ports: []
+ pull_policy: null
+ steps:
+ - name: script
+ script: ["echo Hello World"]
+ timeout: 3600
+ when: on_success
+ allow_failure: false
+ services: []
+
diff --git a/spec/requests/api/ci/runner/yamls/service-basic.yml b/spec/requests/api/ci/runner/yamls/service-basic.yml
new file mode 100644
index 00000000000..5438837c496
--- /dev/null
+++ b/spec/requests/api/ci/runner/yamls/service-basic.yml
@@ -0,0 +1,23 @@
+gitlab_ci:
+ rspec:
+ services:
+ - docker:dind
+ script: echo Hello World
+
+request_response:
+ image: null
+ steps:
+ - name: script
+ script: ["echo Hello World"]
+ timeout: 3600
+ when: on_success
+ allow_failure: false
+ services:
+ - name: docker:dind
+ alias: null
+ command: null
+ entrypoint: null
+ executor_opts: {}
+ ports: []
+ pull_policy: null
+ variables: []
diff --git a/spec/requests/api/ci/runner/yamls/service-executor_opts-platform.yml b/spec/requests/api/ci/runner/yamls/service-executor_opts-platform.yml
new file mode 100644
index 00000000000..6483d749c45
--- /dev/null
+++ b/spec/requests/api/ci/runner/yamls/service-executor_opts-platform.yml
@@ -0,0 +1,27 @@
+gitlab_ci:
+ rspec:
+ services:
+ - name: docker:dind
+ docker:
+ platform: amd64
+ script: echo Hello World
+
+request_response:
+ image: null
+ steps:
+ - name: script
+ script: ["echo Hello World"]
+ timeout: 3600
+ when: on_success
+ allow_failure: false
+ services:
+ - name: docker:dind
+ alias: null
+ command: null
+ entrypoint: null
+ executor_opts:
+ docker:
+ platform: amd64
+ ports: []
+ pull_policy: null
+ variables: []
diff --git a/spec/requests/api/ci/runner/yamls/service-variables.yml b/spec/requests/api/ci/runner/yamls/service-variables.yml
new file mode 100644
index 00000000000..c8e4dde674b
--- /dev/null
+++ b/spec/requests/api/ci/runner/yamls/service-variables.yml
@@ -0,0 +1,30 @@
+gitlab_ci:
+ rspec:
+ services:
+ - name: docker:dind
+ variables:
+ DOCKER_HOST: tcp://docker:2375
+ DOCKER_DRIVER: overlay2
+ script: echo Hello World
+
+request_response:
+ image: null
+ steps:
+ - name: script
+ script: ["echo Hello World"]
+ timeout: 3600
+ when: on_success
+ allow_failure: false
+ services:
+ - name: docker:dind
+ alias: null
+ command: null
+ entrypoint: null
+ executor_opts: {}
+ ports: []
+ pull_policy: null
+ variables:
+ - key: DOCKER_HOST
+ value: tcp://docker:2375
+ - key: DOCKER_DRIVER
+ value: overlay2
diff --git a/spec/requests/api/ci/runners_reset_registration_token_spec.rb b/spec/requests/api/ci/runners_reset_registration_token_spec.rb
index 98edde93e95..0b6a6abf419 100644
--- a/spec/requests/api/ci/runners_reset_registration_token_spec.rb
+++ b/spec/requests/api/ci/runners_reset_registration_token_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do
+RSpec.describe API::Ci::Runners, feature_category: :fleet_visibility do
let_it_be(:admin_mode) { false }
subject { post api("#{prefix}/runners/reset_registration_token", user, admin_mode: admin_mode) }
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index ba80684e89e..187880e16a4 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :runner_fleet do
+RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_visibility do
let_it_be(:admin) { create(:user, :admin) }
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 6a112918288..4ec5d195ff8 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -44,6 +44,30 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
expect(response).to include_limited_pagination_headers
end
+
+ describe "commit trailers" do
+ it "doesn't include the commit trailers by default" do
+ get api(route, current_user), params: { page: 2 }
+
+ commit_with_trailers = json_response.find { |c| c["trailers"].any? }
+
+ expect(commit_with_trailers).to be_nil
+ expect(json_response.first["trailers"]).to eq({})
+ end
+
+ it "does include the commit trailers when specified in the params" do
+ # Test repo commits with trailers are further down the list, so use a
+ # higher page number.
+ get api(route, current_user), params: { page: 2, trailers: true }
+
+ commit_with_trailers = json_response.find { |c| c["trailers"].any? }
+
+ expect(commit_with_trailers["trailers"]).to be_a(Hash)
+ expect(commit_with_trailers["extended_trailers"]).to be_a(Hash)
+ expect(commit_with_trailers["trailers"].size).to be > 0
+ expect(commit_with_trailers["extended_trailers"].size).to be > 0
+ end
+ end
end
context 'when unauthenticated', 'and project is public' do
@@ -426,6 +450,10 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
expect(commit['trailers']).to eq(
'Signed-off-by' => 'Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>'
)
+
+ expect(commit['extended_trailers']).to eq(
+ 'Signed-off-by' => ['Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>']
+ )
end
end
end
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
index c0e36bf03bf..2f215cd5bd1 100644
--- a/spec/requests/api/deploy_tokens_spec.rb
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -395,6 +395,7 @@ RSpec.describe API::DeployTokens, :aggregate_failures, feature_category: :contin
expect(json_response['scopes']).to eq(['read_repository'])
expect(json_response['username']).to eq('Bar')
expect(json_response['expires_at'].to_time.to_i).to eq(expires_time.to_i)
+ expect(json_response['token']).to match(/gldt-[A-Za-z0-9_-]{20}/)
end
context 'with no optional params given' do
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index 41c5847e940..5a8e1649e75 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -161,24 +161,56 @@ RSpec.describe API::Deployments, feature_category: :continuous_delivery do
end
describe 'GET /projects/:id/deployments/:deployment_id' do
- let(:project) { deployment.environment.project }
- let!(:deployment) { create(:deployment, :success) }
+ let_it_be(:deployment_with_bridge) { create(:deployment, :with_bridge, :success) }
+ let_it_be(:deployment_with_build) { create(:deployment, :success) }
context 'as a member of the project' do
- it 'returns the projects deployment' do
- get api("/projects/#{project.id}/deployments/#{deployment.id}", user)
+ shared_examples "returns project deployments" do
+ let(:project) { deployment.environment.project }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['sha']).to match /\A\h{40}\z/
- expect(json_response['id']).to eq(deployment.id)
+ it 'returns the expected response' do
+ get api("/projects/#{project.id}/deployments/#{deployment.id}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['sha']).to match /\A\h{40}\z/
+ expect(json_response['id']).to eq(deployment.id)
+ end
+ end
+
+ context 'when the deployable is a build' do
+ it_behaves_like 'returns project deployments' do
+ let!(:deployment) { deployment_with_build }
+ end
+ end
+
+ context 'when the deployable is a bridge' do
+ it_behaves_like 'returns project deployments' do
+ let!(:deployment) { deployment_with_bridge }
+ end
end
end
context 'as non member' do
- it 'returns a 404 status code' do
- get api("/projects/#{project.id}/deployments/#{deployment.id}", non_member)
+ shared_examples 'deployment will not be found' do
+ let(:project) { deployment.environment.project }
- expect(response).to have_gitlab_http_status(:not_found)
+ it 'returns a 404 status code' do
+ get api("/projects/#{project.id}/deployments/#{deployment.id}", non_member)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the deployable is a build' do
+ it_behaves_like 'deployment will not be found' do
+ let!(:deployment) { deployment_with_build }
+ end
+ end
+
+ context 'when the deployable is a bridge' do
+ it_behaves_like 'deployment will not be found' do
+ let!(:deployment) { deployment_with_bridge }
+ end
end
end
end
@@ -229,6 +261,22 @@ RSpec.describe API::Deployments, feature_category: :continuous_delivery do
expect(json_response['environment']['name']).to eq('production')
end
+ it 'errors when creating a deployment with an invalid ref', :aggregate_failures do
+ post(
+ api("/projects/#{project.id}/deployments", user),
+ params: {
+ environment: 'production',
+ sha: sha,
+ ref: 'doesnotexist',
+ tag: false,
+ status: 'success'
+ }
+ )
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq({ "ref" => ["The branch or tag does not exist"] })
+ end
+
it 'errors when creating a deployment with an invalid name' do
post(
api("/projects/#{project.id}/deployments", user),
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index 498e030da0b..aed97bcfe7c 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -374,32 +374,71 @@ RSpec.describe API::Environments, feature_category: :continuous_delivery do
end
describe 'GET /projects/:id/environments/:environment_id' do
+ let_it_be(:bridge_job) { create(:ci_bridge, :running, project: project, user: user) }
+ let_it_be(:build_job) { create(:ci_build, :running, project: project, user: user) }
+
context 'as member of the project' do
- it 'returns project environments' do
- create(:deployment, :success, project: project, environment: environment)
+ shared_examples "returns project environments" do
+ it 'returns expected response' do
+ create(
+ :deployment,
+ :success,
+ project: project,
+ environment: environment,
+ deployable: job
+ )
+
+ get api("/projects/#{project.id}/environments/#{environment.id}", user)
- get api("/projects/#{project.id}/environments/#{environment.id}", user)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/environment')
+ expect(json_response['last_deployment']).to be_present
+ end
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/environment')
- expect(json_response['last_deployment']).to be_present
+ context "when the deployable is a bridge" do
+ it_behaves_like "returns project environments" do
+ let(:job) { bridge_job }
+ end
+
+ # No test for Ci::Bridge JOB-TOKEN auth because it doesn't implement the `.token` method.
end
- it 'returns 200 HTTP status when using JOB-TOKEN auth' do
- job = create(:ci_build, :running, project: project, user: user)
+ context "when the deployable is a build" do
+ it_behaves_like "returns project environments" do
+ let(:job) { build_job }
+ end
- get api("/projects/#{project.id}/environments/#{environment.id}"),
- params: { job_token: job.token }
+ it 'returns 200 HTTP status when using JOB-TOKEN auth' do
+ get(
+ api("/projects/#{project.id}/environments/#{environment.id}"),
+ params: { job_token: build_job.token }
+ )
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
end
context 'as non member' do
- it 'returns a 404 status code' do
- get api("/projects/#{project.id}/environments/#{environment.id}", non_member)
+ shared_examples 'environment will not be found' do
+ it 'returns a 404 status code' do
+ get api("/projects/#{project.id}/environments/#{environment.id}", non_member)
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context "when the deployable is a bridge" do
+ it_behaves_like "environment will not be found" do
+ let(:job) { bridge_job }
+ end
+ end
+
+ context "when the deployable is a build" do
+ it_behaves_like "environment will not be found" do
+ let(:job) { build_job }
+ end
end
end
end
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index f884aaabb53..9da32e6cd37 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -3,13 +3,13 @@
require 'spec_helper'
RSpec.describe API::Events, feature_category: :user_profile do
- let(:user) { create(:user) }
- let(:non_member) { create(:user) }
- let(:private_project) { create(:project, :private, creator_id: user.id, namespace: user.namespace) }
- let(:closed_issue) { create(:closed_issue, project: private_project, author: user) }
- let!(:closed_issue_event) { create(:event, :closed, project: private_project, author: user, target: closed_issue, created_at: Date.new(2016, 12, 30)) }
- let(:closed_issue2) { create(:closed_issue, project: private_project, author: non_member) }
- let!(:closed_issue_event2) { create(:event, :closed, project: private_project, author: non_member, target: closed_issue2, created_at: Date.new(2016, 12, 30)) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:private_project) { create(:project, :private, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:closed_issue) { create(:closed_issue, project: private_project, author: user) }
+ let_it_be(:closed_issue_event) { create(:event, :closed, project: private_project, author: user, target: closed_issue, created_at: Date.new(2016, 12, 30)) }
+ let_it_be(:closed_issue2) { create(:closed_issue, project: private_project, author: non_member) }
+ let_it_be(:closed_issue_event2) { create(:event, :closed, project: private_project, author: non_member, target: closed_issue2, created_at: Date.new(2016, 12, 30)) }
describe 'GET /events' do
context 'when unauthenticated' do
diff --git a/spec/requests/api/graphql/abuse_report_spec.rb b/spec/requests/api/graphql/abuse_report_spec.rb
index f74b1fb4061..8ab0e92d838 100644
--- a/spec/requests/api/graphql/abuse_report_spec.rb
+++ b/spec/requests/api/graphql/abuse_report_spec.rb
@@ -25,11 +25,7 @@ RSpec.describe 'Querying an Abuse Report', feature_category: :insider_threat do
it 'returns all fields' do
expect(abuse_report_data).to include(
- 'id' => global_id,
- 'userPermissions' => {
- 'readAbuseReport' => true,
- 'createNote' => true
- }
+ 'id' => global_id
)
end
end
diff --git a/spec/requests/api/graphql/ci/catalog/resource_spec.rb b/spec/requests/api/graphql/ci/catalog/resource_spec.rb
index fce773f320b..9fe73e7ba45 100644
--- a/spec/requests/api/graphql/ci/catalog/resource_spec.rb
+++ b/spec/requests/api/graphql/ci/catalog/resource_spec.rb
@@ -15,11 +15,14 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
description: 'A simple component',
namespace: namespace,
star_count: 1,
- files: { 'README.md' => '[link](README.md)' }
+ files: {
+ 'README.md' => '[link](README.md)',
+ 'templates/secret-detection.yml' => "spec:\n inputs:\n website:\n---\nimage: alpine_1"
+ }
)
end
- let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+ let_it_be(:resource) { create(:ci_catalog_resource, :published, project: project) }
let(:query) do
<<~GQL
@@ -33,10 +36,12 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
subject(:post_query) { post_graphql(query, current_user: user) }
+ before_all do
+ namespace.add_developer(user)
+ end
+
context 'when the current user has permission to read the namespace catalog' do
it 'returns the resource with the expected data' do
- namespace.add_developer(user)
-
post_query
expect(graphql_data_at(:ciCatalogResource)).to match(
@@ -45,7 +50,6 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
icon: project.avatar_path,
webPath: "/#{project.full_path}",
starCount: project.star_count,
- forksCount: project.forks_count,
readmeHtml: a_string_including(
"#{project.full_path}/-/blob/#{project.default_branch}/README.md"
)
@@ -64,15 +68,94 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
end
end
- describe 'versions' do
- before_all do
- namespace.add_developer(user)
+ describe 'components' do
+ let(:query) do
+ <<~GQL
+ query {
+ ciCatalogResource(id: "#{resource.to_global_id}") {
+ id
+ versions {
+ nodes {
+ id
+ components {
+ nodes {
+ id
+ name
+ path
+ inputs {
+ name
+ default
+ required
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ GQL
end
- before do
- stub_licensed_features(ci_namespace_catalog: true)
+ context 'when the catalog resource has components' do
+ let_it_be(:inputs) do
+ {
+ website: nil,
+ environment: {
+ default: 'test'
+ },
+ tags: {
+ type: 'array'
+ }
+ }
+ end
+
+ let_it_be(:version) do
+ create(:release, :with_catalog_resource_version, project: project).catalog_resource_version
+ end
+
+ let_it_be(:components) do
+ create_list(:ci_catalog_resource_component, 2, version: version, inputs: inputs, path: 'templates/comp.yml')
+ end
+
+ it 'returns the resource with the component data' do
+ post_query
+
+ expect(graphql_data_at(:ciCatalogResource)).to match(a_graphql_entity_for(resource))
+
+ expect(graphql_data_at(:ciCatalogResource, :versions, :nodes, :components, :nodes)).to contain_exactly(
+ a_graphql_entity_for(
+ components.first,
+ name: components.first.name,
+ path: components.first.path,
+ inputs: [
+ a_graphql_entity_for(
+ name: 'tags',
+ default: nil,
+ required: true
+ ),
+ a_graphql_entity_for(
+ name: 'website',
+ default: nil,
+ required: true
+ ),
+ a_graphql_entity_for(
+ name: 'environment',
+ default: 'test',
+ required: false
+ )
+ ]
+ ),
+ a_graphql_entity_for(
+ components.last,
+ name: components.last.name,
+ path: components.last.path
+ )
+ )
+ end
end
+ end
+ describe 'versions' do
let(:query) do
<<~GQL
query {
@@ -82,6 +165,7 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
nodes {
id
tagName
+ tagPath
releasedAt
author {
id
@@ -99,11 +183,13 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
let_it_be(:author) { create(:user, name: 'author') }
let_it_be(:version1) do
- create(:release, project: project, released_at: '2023-01-01T00:00:00Z', author: author)
+ create(:release, :with_catalog_resource_version, project: project, released_at: '2023-01-01T00:00:00Z',
+ author: author).catalog_resource_version
end
let_it_be(:version2) do
- create(:release, project: project, released_at: '2023-02-01T00:00:00Z', author: author)
+ create(:release, :with_catalog_resource_version, project: project, released_at: '2023-02-01T00:00:00Z',
+ author: author).catalog_resource_version
end
it 'returns the resource with the versions data' do
@@ -116,13 +202,15 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
expect(graphql_data_at(:ciCatalogResource, :versions, :nodes)).to contain_exactly(
a_graphql_entity_for(
version1,
- tagName: version1.tag,
+ tagName: version1.name,
+ tagPath: project_tag_path(project, version1.name),
releasedAt: version1.released_at,
author: a_graphql_entity_for(author, :name)
),
a_graphql_entity_for(
version2,
- tagName: version2.tag,
+ tagName: version2.name,
+ tagPath: project_tag_path(project, version2.name),
releasedAt: version2.released_at,
author: a_graphql_entity_for(author, :name)
)
@@ -142,14 +230,6 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
end
describe 'latestVersion' do
- before_all do
- namespace.add_developer(user)
- end
-
- before do
- stub_licensed_features(ci_namespace_catalog: true)
- end
-
let(:query) do
<<~GQL
query {
@@ -158,6 +238,7 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
latestVersion {
id
tagName
+ tagPath
releasedAt
author {
id
@@ -174,12 +255,14 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
let_it_be(:author) { create(:user, name: 'author') }
let_it_be(:latest_version) do
- create(:release, project: project, released_at: '2023-02-01T00:00:00Z', author: author)
+ create(:release, :with_catalog_resource_version, project: project, released_at: '2023-02-01T00:00:00Z',
+ author: author).catalog_resource_version
end
before_all do
- # Previous version of the project
- create(:release, project: project, released_at: '2023-01-01T00:00:00Z', author: author)
+ # Previous version of the catalog resource
+ create(:release, :with_catalog_resource_version, project: project, released_at: '2023-01-01T00:00:00Z',
+ author: author)
end
it 'returns the resource with the latest version data' do
@@ -190,7 +273,8 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
resource,
latestVersion: a_graphql_entity_for(
latest_version,
- tagName: latest_version.tag,
+ tagName: latest_version.name,
+ tagPath: project_tag_path(project, latest_version.name),
releasedAt: latest_version.released_at,
author: a_graphql_entity_for(author, :name)
)
@@ -210,47 +294,7 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
end
end
- describe 'rootNamespace' do
- before_all do
- namespace.add_developer(user)
- end
-
- before do
- stub_licensed_features(ci_namespace_catalog: true)
- end
-
- let(:query) do
- <<~GQL
- query {
- ciCatalogResource(id: "#{resource.to_global_id}") {
- id
- rootNamespace {
- id
- name
- path
- }
- }
- }
- GQL
- end
-
- it 'returns the correct root namespace data' do
- post_query
-
- expect(graphql_data_at(:ciCatalogResource)).to match(
- a_graphql_entity_for(
- resource,
- rootNamespace: a_graphql_entity_for(namespace, :name, :path)
- )
- )
- end
- end
-
describe 'openIssuesCount' do
- before do
- stub_licensed_features(ci_namespace_catalog: true)
- end
-
context 'when open_issue_count is requested' do
let(:query) do
<<~GQL
@@ -266,8 +310,6 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
create(:issue, :opened, project: project)
create(:issue, :opened, project: project)
- namespace.add_developer(user)
-
post_query
expect(graphql_data_at(:ciCatalogResource)).to match(
@@ -279,8 +321,6 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
context 'when open_issue_count is zero' do
it 'returns zero' do
- namespace.add_developer(user)
-
post_query
expect(graphql_data_at(:ciCatalogResource)).to match(
@@ -294,10 +334,6 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
end
describe 'openMergeRequestsCount' do
- before do
- stub_licensed_features(ci_namespace_catalog: true)
- end
-
context 'when merge_requests_count is requested' do
let(:query) do
<<~GQL
@@ -312,8 +348,6 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
it 'returns the correct count' do
create(:merge_request, :opened, source_project: project)
- namespace.add_developer(user)
-
post_query
expect(graphql_data_at(:ciCatalogResource)).to match(
@@ -325,8 +359,6 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
context 'when open merge_requests_count is zero' do
it 'returns zero' do
- namespace.add_developer(user)
-
post_query
expect(graphql_data_at(:ciCatalogResource)).to match(
diff --git a/spec/requests/api/graphql/ci/catalog/resources_spec.rb b/spec/requests/api/graphql/ci/catalog/resources_spec.rb
index 7c955a1202c..49a3f3be1d7 100644
--- a/spec/requests/api/graphql/ci/catalog/resources_spec.rb
+++ b/spec/requests/api/graphql/ci/catalog/resources_spec.rb
@@ -29,8 +29,11 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
)
end
- let_it_be(:resource1) { create(:ci_catalog_resource, project: project1, latest_released_at: '2023-01-01T00:00:00Z') }
- let_it_be(:public_resource) { create(:ci_catalog_resource, project: public_project) }
+ let_it_be(:resource1) do
+ create(:ci_catalog_resource, :published, project: project1, latest_released_at: '2023-01-01T00:00:00Z')
+ end
+
+ let_it_be(:public_resource) { create(:ci_catalog_resource, :published, project: public_project) }
let(:query) do
<<~GQL
@@ -44,7 +47,6 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
webPath
latestReleasedAt
starCount
- forksCount
readmeHtml
}
}
@@ -58,11 +60,11 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
it do
ctx = { current_user: user }
- control_count = ActiveRecord::QueryRecorder.new do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
run_with_clean_state(query, context: ctx)
end
- create(:ci_catalog_resource, project: project2)
+ create(:ci_catalog_resource, :published, project: project2)
expect do
run_with_clean_state(query, context: ctx)
@@ -83,7 +85,6 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
icon: project1.avatar_path,
webPath: "/#{project1.full_path}",
starCount: project1.star_count,
- forksCount: project1.forks_count,
readmeHtml: a_string_including('Test</strong>'),
latestReleasedAt: resource1.latest_released_at
),
@@ -121,7 +122,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
end
it 'limits the request to 1 resource at a time' do
- create(:ci_catalog_resource, project: project2)
+ create(:ci_catalog_resource, :published, project: project2)
post_query
@@ -135,11 +136,13 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
let_it_be(:author2) { create(:user, name: 'author2') }
let_it_be(:latest_version1) do
- create(:release, project: project1, released_at: '2023-02-01T00:00:00Z', author: author1)
+ create(:release, :with_catalog_resource_version, project: project1, released_at: '2023-02-01T00:00:00Z',
+ author: author1).catalog_resource_version
end
let_it_be(:latest_version2) do
- create(:release, project: public_project, released_at: '2023-02-01T00:00:00Z', author: author2)
+ create(:release, :with_catalog_resource_version, project: public_project, released_at: '2023-02-01T00:00:00Z',
+ author: author2).catalog_resource_version
end
let(:query) do
@@ -167,9 +170,11 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
before_all do
namespace.add_developer(user)
- # Previous versions of the projects
- create(:release, project: project1, released_at: '2023-01-01T00:00:00Z', author: author1)
- create(:release, project: public_project, released_at: '2023-01-01T00:00:00Z', author: author2)
+ # Previous versions of the catalog resources
+ create(:release, :with_catalog_resource_version, project: project1, released_at: '2023-01-01T00:00:00Z',
+ author: author1)
+ create(:release, :with_catalog_resource_version, project: public_project, released_at: '2023-01-01T00:00:00Z',
+ author: author2)
end
it 'returns all resources with the latest version data' do
@@ -180,7 +185,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
resource1,
latestVersion: a_graphql_entity_for(
latest_version1,
- tagName: latest_version1.tag,
+ tagName: latest_version1.name,
releasedAt: latest_version1.released_at,
author: a_graphql_entity_for(author1, :name)
)
@@ -189,7 +194,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
public_resource,
latestVersion: a_graphql_entity_for(
latest_version2,
- tagName: latest_version2.tag,
+ tagName: latest_version2.name,
releasedAt: latest_version2.released_at,
author: a_graphql_entity_for(author2, :name)
)
@@ -197,43 +202,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
)
end
- # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/430350
- # it_behaves_like 'avoids N+1 queries'
- end
-
- describe 'rootNamespace' do
- before_all do
- namespace.add_developer(user)
- end
-
- let(:query) do
- <<~GQL
- query {
- ciCatalogResources {
- nodes {
- id
- rootNamespace {
- id
- name
- path
- }
- }
- }
- }
- GQL
- end
-
- it 'returns the correct root namespace data' do
- post_query
-
- expect(graphql_data_at(:ciCatalogResources, :nodes)).to contain_exactly(
- a_graphql_entity_for(
- resource1,
- rootNamespace: a_graphql_entity_for(namespace, :name, :path)
- ),
- a_graphql_entity_for(public_resource, rootNamespace: nil)
- )
- end
+ it_behaves_like 'avoids N+1 queries'
end
describe 'openIssuesCount' do
@@ -326,8 +295,8 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
end
it 'returns catalog resources with the expected data' do
- resource2 = create(:ci_catalog_resource, project: project2)
- _resource_in_another_namespace = create(:ci_catalog_resource)
+ resource2 = create(:ci_catalog_resource, :published, project: project2)
+ _resource_in_another_namespace = create(:ci_catalog_resource, :published)
post_query
@@ -338,7 +307,6 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
icon: project2.avatar_path,
webPath: "/#{project2.full_path}",
starCount: project2.star_count,
- forksCount: project2.forks_count,
readmeHtml: '',
latestReleasedAt: resource2.latest_released_at
)
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 6f1eb77fa9b..8262640b283 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Query.runner(id)', :freeze_time, feature_category: :runner_fleet do
+RSpec.describe 'Query.runner(id)', :freeze_time, feature_category: :fleet_visibility do
include GraphqlHelpers
using RSpec::Parameterized::TableSyntax
diff --git a/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb b/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb
index 76e2dda4ce2..8e3efb67ee5 100644
--- a/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe 'RunnerWebUrlEdge', feature_category: :runner_fleet do
+RSpec.describe 'RunnerWebUrlEdge', feature_category: :fleet_visibility do
include GraphqlHelpers
describe 'inside a Query.group' do
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
index 0e2712d742d..0fe14bef778 100644
--- a/spec/requests/api/graphql/ci/runners_spec.rb
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe 'Query.runners', feature_category: :runner_fleet do
+RSpec.describe 'Query.runners', feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:current_user) { create_default(:user, :admin) }
@@ -35,17 +35,19 @@ RSpec.describe 'Query.runners', feature_category: :runner_fleet do
end
context 'with filters' do
- shared_examples 'a working graphql query returning expected runner' do
+ shared_examples 'a working graphql query returning expected runners' do
it_behaves_like 'a working graphql query' do
before do
post_graphql(query, current_user: current_user)
end
end
- it 'returns expected runner' do
+ it 'returns expected runners' do
post_graphql(query, current_user: current_user)
- expect(runners_graphql_data['nodes']).to contain_exactly(a_graphql_entity_for(expected_runner))
+ expect(runners_graphql_data['nodes']).to contain_exactly(
+ *Array(expected_runners).map { |expected_runner| a_graphql_entity_for(expected_runner) }
+ )
end
it 'does not execute more queries per runner', :aggregate_failures do
@@ -95,24 +97,36 @@ RSpec.describe 'Query.runners', feature_category: :runner_fleet do
let(:runner_type) { 'INSTANCE_TYPE' }
let(:status) { 'ACTIVE' }
- let!(:expected_runner) { instance_runner }
+ let(:expected_runners) { instance_runner }
- it_behaves_like 'a working graphql query returning expected runner'
+ it_behaves_like 'a working graphql query returning expected runners'
end
context 'runner_type is PROJECT_TYPE and status is NEVER_CONTACTED' do
let(:runner_type) { 'PROJECT_TYPE' }
let(:status) { 'NEVER_CONTACTED' }
- let!(:expected_runner) { project_runner }
+ let(:expected_runners) { project_runner }
- it_behaves_like 'a working graphql query returning expected runner'
+ it_behaves_like 'a working graphql query returning expected runners'
end
end
context 'when filtered on version prefix' do
- let_it_be(:version_runner) { create(:ci_runner, :project, active: false, description: 'Runner with machine') }
- let_it_be(:version_runner_machine) { create(:ci_runner_machine, runner: version_runner, version: '15.11.0') }
+ let_it_be(:runner_15_10_1) { create_ci_runner(version: '15.10.1') }
+
+ let_it_be(:runner_15_11_0) { create_ci_runner(version: '15.11.0') }
+ let_it_be(:runner_15_11_1) { create_ci_runner(version: '15.11.1') }
+
+ let_it_be(:runner_16_1_0) { create_ci_runner(version: '16.1.0') }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ id
+ }
+ QUERY
+ end
let(:query) do
%(
@@ -124,12 +138,44 @@ RSpec.describe 'Query.runners', feature_category: :runner_fleet do
)
end
- context 'version_prefix is "15."' do
+ context 'when version_prefix is "15."' do
let(:version_prefix) { '15.' }
- let!(:expected_runner) { version_runner }
+ it_behaves_like 'a working graphql query returning expected runners' do
+ let(:expected_runners) { [runner_15_10_1, runner_15_11_0, runner_15_11_1] }
+ end
+ end
+
+ context 'when version_prefix is "15.11."' do
+ let(:version_prefix) { '15.11.' }
- it_behaves_like 'a working graphql query returning expected runner'
+ it_behaves_like 'a working graphql query returning expected runners' do
+ let(:expected_runners) { [runner_15_11_0, runner_15_11_1] }
+ end
+ end
+
+ context 'when version_prefix is "15.11.0"' do
+ let(:version_prefix) { '15.11.0' }
+
+ it_behaves_like 'a working graphql query returning expected runners' do
+ let(:expected_runners) { runner_15_11_0 }
+ end
+ end
+
+ context 'when version_prefix is not digits' do
+ let(:version_prefix) { 'a.b' }
+
+ it_behaves_like 'a working graphql query returning expected runners' do
+ let(:expected_runners) do
+ [instance_runner, project_runner, runner_15_10_1, runner_15_11_0, runner_15_11_1, runner_16_1_0]
+ end
+ end
+ end
+
+ def create_ci_runner(args = {}, version:)
+ create(:ci_runner, :project, **args).tap do |runner|
+ create(:ci_runner_machine, runner: runner, version: version)
+ end
end
end
end
diff --git a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
index 20277c7e27b..2acdd509355 100644
--- a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
+++ b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
@@ -11,11 +11,12 @@ RSpec.describe 'container repository details', feature_category: :container_regi
let_it_be_with_reload(:project) { create(:project) }
let_it_be_with_reload(:container_repository) { create(:container_repository, project: project) }
+ let(:excluded) { %w[pipeline size agentConfigurations iterations iterationCadences productAnalyticsState] }
let(:query) do
graphql_query_for(
'containerRepository',
{ id: container_repository_global_id },
- all_graphql_fields_for('ContainerRepositoryDetails', excluded: %w[pipeline size])
+ all_graphql_fields_for('ContainerRepositoryDetails', excluded: excluded, max_depth: 4)
)
end
diff --git a/spec/requests/api/graphql/custom_emoji_query_spec.rb b/spec/requests/api/graphql/custom_emoji_query_spec.rb
index 1858ea831dd..c89ad0002b4 100644
--- a/spec/requests/api/graphql/custom_emoji_query_spec.rb
+++ b/spec/requests/api/graphql/custom_emoji_query_spec.rb
@@ -35,14 +35,14 @@ RSpec.describe 'getting custom emoji within namespace', feature_category: :share
expect(graphql_data['group']['customEmoji']['nodes'].first['name']).to eq(custom_emoji.name)
end
- it 'returns nil custom emoji when the custom_emoji feature flag is disabled' do
+ it 'returns empty array when the custom_emoji feature flag is disabled' do
stub_feature_flags(custom_emoji: false)
post_graphql(custom_emoji_query(group), current_user: current_user)
expect(response).to have_gitlab_http_status(:ok)
expect(graphql_data['group']).to be_present
- expect(graphql_data['group']['customEmoji']).to be_nil
+ expect(graphql_data['group']['customEmoji']['nodes']).to eq([])
end
it 'returns nil group when unauthorised' do
diff --git a/spec/requests/api/graphql/group/issues_spec.rb b/spec/requests/api/graphql/group/issues_spec.rb
index 95aeed32558..1da6abf3cac 100644
--- a/spec/requests/api/graphql/group/issues_spec.rb
+++ b/spec/requests/api/graphql/group/issues_spec.rb
@@ -15,6 +15,8 @@ RSpec.describe 'getting an issue list for a group', feature_category: :team_plan
let_it_be(:issue2) { create(:issue, project: project2) }
let_it_be(:issue3) { create(:issue, project: project3) }
+ let_it_be(:group_level_issue) { create(:issue, :epic, :group_level, namespace: group1) }
+
let(:issue1_gid) { issue1.to_global_id.to_s }
let(:issue2_gid) { issue2.to_global_id.to_s }
let(:issues_data) { graphql_data['group']['issues']['edges'] }
@@ -142,6 +144,40 @@ RSpec.describe 'getting an issue list for a group', feature_category: :team_plan
end
end
+ context 'when querying epic types' do
+ let(:query) do
+ graphql_query_for(
+ 'group',
+ { 'fullPath' => group1.full_path },
+ "issues(types: [EPIC]) { #{fields} }"
+ )
+ end
+
+ before_all do
+ group1.add_developer(current_user)
+ end
+
+ it 'returns group-level epics' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ expect(issues_ids).to contain_exactly(group_level_issue.to_global_id.to_s)
+ end
+
+ context 'when namespace_level_work_items is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ end
+
+ it 'returns no epics' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ expect(issues_ids).to be_empty
+ end
+ end
+ end
+
def issues_ids
graphql_dig_at(issues_data, :node, :id)
end
diff --git a/spec/requests/api/graphql/group/work_item_state_counts_spec.rb b/spec/requests/api/graphql/group/work_item_state_counts_spec.rb
new file mode 100644
index 00000000000..2ae623c39f2
--- /dev/null
+++ b/spec/requests/api/graphql/group/work_item_state_counts_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'request_store'
+
+RSpec.describe 'getting Work Item counts by state', feature_category: :portfolio_management do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:work_item_opened1) { create(:work_item, namespace: group) }
+ let_it_be(:work_item_opened2) { create(:work_item, namespace: group, author: current_user) }
+ let_it_be(:work_item_closed1) { create(:work_item, :closed, namespace: group) }
+ let_it_be(:work_item_closed2) { create(:work_item, :closed, namespace: group) }
+
+ let(:params) { {} }
+
+ subject(:query_counts) { post_graphql(query, current_user: current_user) }
+
+ context 'with work items count data' do
+ let(:work_item_counts) { graphql_data.dig('group', 'workItemStateCounts') }
+
+ context 'with group permissions' do
+ before_all do
+ group.add_developer(current_user)
+ end
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ query_counts
+ end
+ end
+
+ it 'returns the correct counts for each state' do
+ query_counts
+
+ expect(work_item_counts).to eq(
+ 'all' => 4,
+ 'opened' => 2,
+ 'closed' => 2
+ )
+ end
+
+ context 'when filters are provided' do
+ context 'when filtering by author username' do
+ let(:params) { { 'authorUsername' => current_user.username } }
+
+ it 'returns the correct counts for each state' do
+ query_counts
+
+ expect(work_item_counts).to eq(
+ 'all' => 1,
+ 'opened' => 1,
+ 'closed' => 0
+ )
+ end
+ end
+
+ context 'when filtering by search' do
+ let(:params) { { search: 'foo', in: [:TITLE] } }
+
+ it 'returns an error for filters that are not supported' do
+ query_counts
+
+ expect(graphql_errors).to contain_exactly(
+ hash_including('message' => 'Searching is not available for work items at the namespace level yet')
+ )
+ end
+ end
+ end
+
+ context 'when the namespace_level_work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ end
+
+ it 'does not return work item counts' do
+ query_counts
+
+ expect_graphql_errors_to_be_empty
+ expect(work_item_counts).to be_nil
+ end
+ end
+ end
+
+ context 'without group permissions' do
+ it 'does not return work item counts' do
+ query_counts
+
+ expect_graphql_errors_to_be_empty
+ expect(work_item_counts).to be_nil
+ end
+ end
+ end
+
+ def query(args: params)
+ fields = <<~QUERY
+ #{all_graphql_fields_for('WorkItemStateCountsType'.classify)}
+ QUERY
+
+ graphql_query_for(
+ 'group',
+ { 'fullPath' => group.full_path },
+ query_graphql_field('workItemStateCounts', args, fields)
+ )
+ end
+end
diff --git a/spec/requests/api/graphql/group/work_item_types_spec.rb b/spec/requests/api/graphql/group/work_item_types_spec.rb
index 791c0fb9524..fbebcdad389 100644
--- a/spec/requests/api/graphql/group/work_item_types_spec.rb
+++ b/spec/requests/api/graphql/group/work_item_types_spec.rb
@@ -5,56 +5,19 @@ require 'spec_helper'
RSpec.describe 'getting a list of work item types for a group', feature_category: :team_planning do
include GraphqlHelpers
- let_it_be(:developer) { create(:user) }
let_it_be(:group) { create(:group, :private) }
+ let_it_be(:developer) { create(:user).tap { |u| group.add_developer(u) } }
- before_all do
- group.add_developer(developer)
- end
-
- let(:current_user) { developer }
-
- let(:fields) do
- <<~GRAPHQL
- workItemTypes{
- nodes { id name iconName }
- }
- GRAPHQL
- end
-
- let(:query) do
- graphql_query_for(
- 'group',
- { 'fullPath' => group.full_path },
- fields
- )
- end
-
- context 'when user has access to the group' do
- before do
- post_graphql(query, current_user: current_user)
- end
+ it_behaves_like 'graphql work item type list request spec' do
+ let(:current_user) { developer }
+ let(:parent_key) { :group }
- it_behaves_like 'a working graphql query'
-
- it 'returns all default work item types' do
- expect(graphql_data.dig('group', 'workItemTypes', 'nodes')).to match_array(
- WorkItems::Type.default.map do |type|
- hash_including('id' => type.to_global_id.to_s, 'name' => type.name, 'iconName' => type.icon_name)
- end
+ let(:query) do
+ graphql_query_for(
+ 'group',
+ { 'fullPath' => group.full_path },
+ query_nodes('WorkItemTypes', work_item_type_fields)
)
end
end
-
- context "when user doesn't have access to the group" do
- let(:current_user) { create(:user) }
-
- before do
- post_graphql(query, current_user: current_user)
- end
-
- it 'does not return the group' do
- expect(graphql_data).to eq('group' => nil)
- end
- end
end
diff --git a/spec/requests/api/graphql/milestone_spec.rb b/spec/requests/api/graphql/milestone_spec.rb
index 2cea9fd0408..0dc2eabc3e1 100644
--- a/spec/requests/api/graphql/milestone_spec.rb
+++ b/spec/requests/api/graphql/milestone_spec.rb
@@ -151,4 +151,18 @@ RSpec.describe 'Querying a Milestone', feature_category: :team_planning do
end
end
end
+
+ context 'for common GraphQL/REST' do
+ it_behaves_like 'group milestones including ancestors and descendants'
+
+ def query_group_milestone_ids(params)
+ query = graphql_query_for('group', { 'fullPath' => group.full_path },
+ query_graphql_field('milestones', params, query_graphql_path([:nodes], :id))
+ )
+
+ post_graphql(query, current_user: current_user)
+
+ graphql_data_at(:group, :milestones, :nodes).pluck('id').map { |gid| GlobalID.parse(gid).model_id.to_i }
+ end
+ end
end
diff --git a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
index 316b0f3755d..808dcefb84d 100644
--- a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
+++ b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues, feature_cate
context 'when the user is an admin' do
let(:current_user) { admin }
- context 'valid request' do
+ context 'when valid request' do
around do |example|
Sidekiq::Queue.new(queue).clear
Sidekiq::Testing.disable!(&example)
@@ -40,7 +40,7 @@ RSpec.describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues, feature_cate
'args' => args,
'meta.user' => user.username
)
- raise 'Not enqueued!' if Sidekiq::Queue.new(queue).size.zero?
+ raise 'Not enqueued!' if Sidekiq::Queue.new(queue).size.zero? # rubocop:disable Style/ZeroLengthPredicate -- Sidekiq::Queue doesn't implement #blank? or #empty?
end
it 'returns info about the deleted jobs' do
diff --git a/spec/requests/api/graphql/mutations/branch_rules/update_spec.rb b/spec/requests/api/graphql/mutations/branch_rules/update_spec.rb
new file mode 100644
index 00000000000..14874bdfaa8
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/branch_rules/update_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'BranchRuleUpdate', feature_category: :source_code_management do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let!(:branch_rule_1) { create(:protected_branch, project: project, name: name_1) }
+ let!(:branch_rule_2) { create(:protected_branch, project: project, name: name_2) }
+ let(:name_1) { "name_1" }
+ let(:name_2) { "name_2" }
+ let(:new_name) { "new name" }
+ let(:id) { branch_rule_1.to_global_id }
+ let(:project_path) { project.full_path }
+ let(:name) { new_name }
+ let(:params) do
+ {
+ id: id,
+ project_path: project_path,
+ name: name
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:branch_rule_update, params) }
+
+ subject(:post_mutation) { post_graphql_mutation(mutation, current_user: user) }
+
+ def mutation_response
+ graphql_mutation_response(:branch_rule_update)
+ end
+
+ context 'when the user does not have permission' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ it 'does not update the branch rule' do
+ expect { post_mutation }.not_to change { branch_rule_1 }
+ end
+ end
+
+ context 'when the user can update a branch rules' do
+ let(:current_user) { user }
+
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ it 'updates the protected branch' do
+ post_mutation
+
+ expect(branch_rule_1.reload.name).to eq(new_name)
+ end
+
+ it 'returns the updated branch rule' do
+ post_mutation
+
+ expect(mutation_response).to have_key('branchRule')
+ expect(mutation_response['branchRule']['name']).to eq(new_name)
+ expect(mutation_response['errors']).to be_empty
+ end
+
+ context 'when name already exists for the project' do
+ let(:params) do
+ {
+ id: id,
+ project_path: project_path,
+ name: name_2
+ }
+ end
+
+ it 'returns an error' do
+ post_mutation
+
+ expect(mutation_response['errors'].first).to eq('Name has already been taken')
+ end
+ end
+
+ context 'when the protected branch cannot be found' do
+ let(:id) { "gid://gitlab/ProtectedBranch/#{non_existing_record_id}" }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
+ end
+
+ context 'when the project cannot be found' do
+ let(:project_path) { 'not a project path' }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/catalog/resources/destroy_spec.rb b/spec/requests/api/graphql/mutations/ci/catalog/resources/destroy_spec.rb
new file mode 100644
index 00000000000..3b278f973b7
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/catalog/resources/destroy_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CatalogResourceDestroy', feature_category: :pipeline_composition do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, :catalog_resource_with_components) }
+ let_it_be(:catalog_resource) { create(:ci_catalog_resource, project: project) }
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path
+ }
+ graphql_mutation(:catalog_resources_destroy, variables,
+ <<-QL.strip_heredoc
+ errors
+ QL
+ )
+ end
+
+ context 'when unauthorized' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when authorized' do
+ before do
+ catalog_resource.project.add_owner(current_user)
+ end
+
+ it 'destroys the catalog resource' do
+ expect(project.catalog_resource).to eq(catalog_resource)
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(project.reload.catalog_resource).to be_nil
+ expect_graphql_errors_to_be_empty
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/catalog/unpublish_spec.rb b/spec/requests/api/graphql/mutations/ci/catalog/unpublish_spec.rb
deleted file mode 100644
index 07465777263..00000000000
--- a/spec/requests/api/graphql/mutations/ci/catalog/unpublish_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'CatalogResourceUnpublish', feature_category: :pipeline_composition do
- include GraphqlHelpers
-
- let_it_be(:current_user) { create(:user) }
- let_it_be_with_reload(:resource) { create(:ci_catalog_resource) }
-
- let(:mutation) do
- graphql_mutation(
- :catalog_resource_unpublish,
- id: resource.to_gid.to_s
- )
- end
-
- subject(:post_query) { post_graphql_mutation(mutation, current_user: current_user) }
-
- context 'when unauthorized' do
- it_behaves_like 'a mutation that returns a top-level access error'
- end
-
- context 'when authorized' do
- before_all do
- resource.project.add_owner(current_user)
- end
-
- context 'when the catalog resource is in published state' do
- it 'updates the state to draft' do
- resource.update!(state: :published)
- expect(resource.state).to eq('published')
-
- post_query
-
- expect(resource.reload.state).to eq('draft')
- expect_graphql_errors_to_be_empty
- end
- end
-
- context 'when the catalog resource is already in draft state' do
- it 'leaves the state as draft' do
- expect(resource.state).to eq('draft')
-
- post_query
-
- expect(resource.reload.state).to eq('draft')
- expect_graphql_errors_to_be_empty
- end
- end
- end
-end
diff --git a/spec/requests/api/graphql/mutations/ci/runner/create_spec.rb b/spec/requests/api/graphql/mutations/ci/runner/create_spec.rb
index b697b9f73b7..567ef12df2b 100644
--- a/spec/requests/api/graphql/mutations/ci/runner/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/runner/create_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'RunnerCreate', feature_category: :runner_fleet do
+RSpec.describe 'RunnerCreate', feature_category: :fleet_visibility do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb b/spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb
index 752242c3ab3..ef752448966 100644
--- a/spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'RunnersRegistrationTokenReset', feature_category: :runner_fleet do
+RSpec.describe 'RunnersRegistrationTokenReset', feature_category: :fleet_visibility do
include GraphqlHelpers
let(:mutation) { graphql_mutation(:runners_registration_token_reset, input) }
diff --git a/spec/requests/api/graphql/mutations/container_registry/protection/rule/create_spec.rb b/spec/requests/api/graphql/mutations/container_registry/protection/rule/create_spec.rb
index 0c708c3dc41..71b8c99c1c0 100644
--- a/spec/requests/api/graphql/mutations/container_registry/protection/rule/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/container_registry/protection/rule/create_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
let(:kwargs) do
{
project_path: project.full_path,
- container_path_pattern: container_registry_protection_rule_attributes.container_path_pattern,
+ repository_path_pattern: container_registry_protection_rule_attributes.repository_path_pattern,
push_protected_up_to_access_level: 'MAINTAINER',
delete_protected_up_to_access_level: 'MAINTAINER'
}
@@ -26,7 +26,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
<<~QUERY
containerRegistryProtectionRule {
id
- containerPathPattern
+ repositoryPathPattern
}
clientMutationId
errors
@@ -48,7 +48,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
'errors' => be_blank,
'containerRegistryProtectionRule' => {
'id' => be_present,
- 'containerPathPattern' => kwargs[:container_path_pattern]
+ 'repositoryPathPattern' => kwargs[:repository_path_pattern]
}
)
end
@@ -57,7 +57,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
expect { subject }.to change { ::ContainerRegistry::Protection::Rule.count }.by(1)
expect(::ContainerRegistry::Protection::Rule.where(project: project,
- container_path_pattern: kwargs[:container_path_pattern])).to exist
+ repository_path_pattern: kwargs[:repository_path_pattern])).to exist
end
end
@@ -84,9 +84,9 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
}
end
- context 'with invalid input field `containerPathPattern`' do
+ context 'with invalid input field `repositoryPathPattern`' do
let(:kwargs) do
- super().merge(container_path_pattern: '')
+ super().merge(repository_path_pattern: '')
end
it_behaves_like 'an erroneous response'
@@ -95,7 +95,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
it {
subject.tap do
- expect(mutation_response['errors']).to eq ["Container path pattern can't be blank"]
+ expect(mutation_response['errors']).to eq ["Repository path pattern can't be blank"]
end
}
end
@@ -108,9 +108,9 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
context 'when container name pattern is slightly different' do
let(:kwargs) do
- # The field `container_path_pattern` is unique; this is why we change the value in a minimum way
+ # The field `repository_path_pattern` is unique; this is why we change the value in a minimum way
super().merge(
- container_path_pattern: "#{existing_container_registry_protection_rule.container_path_pattern}-unique"
+ repository_path_pattern: "#{existing_container_registry_protection_rule.repository_path_pattern}-unique"
)
end
@@ -121,9 +121,9 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
end
end
- context 'when field `container_path_pattern` is taken' do
+ context 'when field `repository_path_pattern` is taken' do
let(:kwargs) do
- super().merge(container_path_pattern: existing_container_registry_protection_rule.container_path_pattern,
+ super().merge(repository_path_pattern: existing_container_registry_protection_rule.repository_path_pattern,
push_protected_up_to_access_level: 'MAINTAINER')
end
@@ -134,12 +134,12 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
it 'returns without error' do
subject
- expect(mutation_response['errors']).to eq ['Container path pattern has already been taken']
+ expect(mutation_response['errors']).to eq ['Repository path pattern has already been taken']
end
it 'does not create new container protection rules' do
expect(::ContainerRegistry::Protection::Rule.where(project: project,
- container_path_pattern: kwargs[:container_path_pattern],
+ repository_path_pattern: kwargs[:repository_path_pattern],
push_protected_up_to_access_level: Gitlab::Access::MAINTAINER)).not_to exist
end
end
diff --git a/spec/requests/api/graphql/mutations/container_registry/protection/rule/delete_spec.rb b/spec/requests/api/graphql/mutations/container_registry/protection/rule/delete_spec.rb
new file mode 100644
index 00000000000..dd661c302ff
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/container_registry/protection/rule/delete_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Deleting a container registry protection rule', :aggregate_failures, feature_category: :container_registry do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be_with_refind(:container_protection_rule) do
+ create(:container_registry_protection_rule, project: project)
+ end
+
+ let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
+
+ let(:mutation) { graphql_mutation(:delete_container_registry_protection_rule, input) }
+ let(:mutation_response) { graphql_mutation_response(:delete_container_registry_protection_rule) }
+ let(:input) { { id: container_protection_rule.to_global_id } }
+
+ subject(:post_graphql_mutation_delete_container_registry_protection_rule) do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ shared_examples 'an erroneous response' do
+ it { post_graphql_mutation_delete_container_registry_protection_rule.tap { expect(mutation_response).to be_blank } }
+
+ it do
+ expect { post_graphql_mutation_delete_container_registry_protection_rule }
+ .not_to change { ::ContainerRegistry::Protection::Rule.count }
+ end
+ end
+
+ it_behaves_like 'a working GraphQL mutation'
+
+ it 'responds with deleted container registry protection rule' do
+ expect { post_graphql_mutation_delete_container_registry_protection_rule }
+ .to change { ::ContainerRegistry::Protection::Rule.count }.from(1).to(0)
+
+ expect_graphql_errors_to_be_empty
+
+ expect(mutation_response).to include(
+ 'errors' => be_blank,
+ 'containerRegistryProtectionRule' => {
+ 'id' => container_protection_rule.to_global_id.to_s,
+ 'repositoryPathPattern' => container_protection_rule.repository_path_pattern,
+ 'deleteProtectedUpToAccessLevel' => container_protection_rule.delete_protected_up_to_access_level.upcase,
+ 'pushProtectedUpToAccessLevel' => container_protection_rule.push_protected_up_to_access_level.upcase
+ }
+ )
+ end
+
+ context 'with existing container registry protection rule belonging to other project' do
+ let_it_be(:container_protection_rule) do
+ create(:container_registry_protection_rule, repository_path_pattern: 'protection_rule_other_project')
+ end
+
+ it_behaves_like 'an erroneous response'
+
+ it { is_expected.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
+ end
+
+ context 'with deleted container registry protection rule' do
+ let!(:container_protection_rule) do
+ create(:container_registry_protection_rule, project: project,
+ repository_path_pattern: 'protection_rule_deleted').destroy!
+ end
+
+ it_behaves_like 'an erroneous response'
+
+ it { is_expected.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
+ end
+
+ context 'when current_user does not have permission' do
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ let_it_be(:anonymous) { create(:user) }
+
+ where(:current_user) do
+ [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
+ end
+
+ with_them do
+ it_behaves_like 'an erroneous response'
+
+ it { is_expected.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
+ end
+ end
+
+ context "when feature flag ':container_registry_protected_containers' disabled" do
+ before do
+ stub_feature_flags(container_registry_protected_containers: false)
+ end
+
+ it_behaves_like 'an erroneous response'
+
+ it do
+ post_graphql_mutation_delete_container_registry_protection_rule
+
+ expect_graphql_errors_to_include(/'container_registry_protected_containers' feature flag is disabled/)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/container_registry/protection/rule/update_spec.rb b/spec/requests/api/graphql/mutations/container_registry/protection/rule/update_spec.rb
new file mode 100644
index 00000000000..cd2c8b9f0a2
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/container_registry/protection/rule/update_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Updating the container registry protection rule', :aggregate_failures, feature_category: :container_registry do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:container_registry_protection_rule) do
+ create(:container_registry_protection_rule, project: project, push_protected_up_to_access_level: :developer)
+ end
+
+ let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
+
+ let(:container_registry_protection_rule_attributes) do
+ build_stubbed(:container_registry_protection_rule, project: project)
+ end
+
+ let(:mutation) do
+ graphql_mutation(:update_container_registry_protection_rule, input,
+ <<~QUERY
+ containerRegistryProtectionRule {
+ repositoryPathPattern
+ deleteProtectedUpToAccessLevel
+ pushProtectedUpToAccessLevel
+ }
+ clientMutationId
+ errors
+ QUERY
+ )
+ end
+
+ let(:input) do
+ {
+ id: container_registry_protection_rule.to_global_id,
+ repository_path_pattern: "#{container_registry_protection_rule.repository_path_pattern}-updated",
+ delete_protected_up_to_access_level: 'OWNER',
+ push_protected_up_to_access_level: 'MAINTAINER'
+ }
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:update_container_registry_protection_rule) }
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ shared_examples 'a successful response' do
+ it { subject.tap { expect_graphql_errors_to_be_empty } }
+
+ it 'returns the updated container registry protection rule' do
+ subject
+
+ expect(mutation_response).to include(
+ 'containerRegistryProtectionRule' => {
+ 'repositoryPathPattern' => input[:repository_path_pattern],
+ 'deleteProtectedUpToAccessLevel' => input[:delete_protected_up_to_access_level],
+ 'pushProtectedUpToAccessLevel' => input[:push_protected_up_to_access_level]
+ }
+ )
+ end
+
+ it do
+ subject.tap do
+ expect(container_registry_protection_rule.reload).to have_attributes(
+ repository_path_pattern: input[:repository_path_pattern],
+ push_protected_up_to_access_level: input[:push_protected_up_to_access_level].downcase
+ )
+ end
+ end
+ end
+
+ shared_examples 'an erroneous response' do
+ it { subject.tap { expect(mutation_response).to be_blank } }
+ it { expect { subject }.not_to change { container_registry_protection_rule.reload.updated_at } }
+ end
+
+ it_behaves_like 'a successful response'
+
+ context 'with other existing container registry protection rule with same repository_path_pattern' do
+ let_it_be_with_reload(:other_existing_container_registry_protection_rule) do
+ create(:container_registry_protection_rule, project: project,
+ repository_path_pattern: "#{container_registry_protection_rule.repository_path_pattern}-other")
+ end
+
+ let(:input) do
+ super().merge(repository_path_pattern: other_existing_container_registry_protection_rule.repository_path_pattern)
+ end
+
+ it { is_expected.tap { expect_graphql_errors_to_be_empty } }
+
+ it 'returns a blank container registry protection rule' do
+ is_expected.tap { expect(mutation_response['containerRegistryProtectionRule']).to be_blank }
+ end
+
+ it 'includes error message in response' do
+ is_expected.tap { expect(mutation_response['errors']).to eq ['Repository path pattern has already been taken'] }
+ end
+ end
+
+ context 'with invalid input param `pushProtectedUpToAccessLevel`' do
+ let(:input) { super().merge(push_protected_up_to_access_level: nil) }
+
+ it_behaves_like 'an erroneous response'
+
+ it { is_expected.tap { expect_graphql_errors_to_include(/pushProtectedUpToAccessLevel can't be blank/) } }
+ end
+
+ context 'with invalid input param `repositoryPathPattern`' do
+ let(:input) { super().merge(repository_path_pattern: '') }
+
+ it_behaves_like 'an erroneous response'
+
+ it { is_expected.tap { expect_graphql_errors_to_include(/repositoryPathPattern can't be blank/) } }
+ end
+
+ context 'when current_user does not have permission' do
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ let_it_be(:anonymous) { create(:user) }
+
+ where(:current_user) do
+ [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
+ end
+
+ with_them do
+ it { is_expected.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
+ end
+ end
+
+ context "when feature flag ':container_registry_protected_containers' disabled" do
+ before do
+ stub_feature_flags(container_registry_protected_containers: false)
+ end
+
+ it_behaves_like 'an erroneous response'
+
+ it 'returns error of disabled feature flag' do
+ is_expected.tap do
+ expect_graphql_errors_to_include(/'container_registry_protected_containers' feature flag is disabled/)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
index cb7bac771b3..1bd239ecd87 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled, featur
context 'when passing append as true' do
let(:mode) { Types::MutationOperationModeEnum.enum[:append] }
let(:input) { { assignee_usernames: [assignee2.username], operation_mode: mode } }
- let(:db_query_limit) { 23 }
+ let(:db_query_limit) { 25 }
before do
# In CE, APPEND is a NOOP as you can't have multiple assignees
@@ -147,7 +147,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled, featur
end
context 'when passing remove as true' do
- let(:db_query_limit) { 31 }
+ let(:db_query_limit) { 33 }
let(:mode) { Types::MutationOperationModeEnum.enum[:remove] }
let(:input) { { assignee_usernames: [assignee.username], operation_mode: mode } }
let(:expected_result) { [] }
diff --git a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
index 738dc3078e7..05c1a2d96d9 100644
--- a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -22,7 +22,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
npm_package_requests_forwarding: true,
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
- lock_pypi_package_requests_forwarding: true
+ lock_pypi_package_requests_forwarding: true,
+ nuget_symbol_server_enabled: true
}
end
@@ -42,6 +43,7 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
lockNpmPackageRequestsForwarding
pypiPackageRequestsForwarding
lockPypiPackageRequestsForwarding
+ nugetSymbolServerEnabled
}
errors
QL
@@ -70,6 +72,7 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
expect(package_settings_response['lockPypiPackageRequestsForwarding']).to eq(params[:lock_pypi_package_requests_forwarding])
expect(package_settings_response['npmPackageRequestsForwarding']).to eq(params[:npm_package_requests_forwarding])
expect(package_settings_response['lockNpmPackageRequestsForwarding']).to eq(params[:lock_npm_package_requests_forwarding])
+ expect(package_settings_response['nugetSymbolServerEnabled']).to eq(params[:nuget_symbol_server_enabled])
end
end
@@ -111,7 +114,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
npm_package_requests_forwarding: nil,
lock_npm_package_requests_forwarding: false,
pypi_package_requests_forwarding: nil,
- lock_pypi_package_requests_forwarding: false
+ lock_pypi_package_requests_forwarding: false,
+ nuget_symbol_server_enabled: false
}, to: {
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'foo-.*',
@@ -124,7 +128,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
npm_package_requests_forwarding: true,
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
- lock_pypi_package_requests_forwarding: true
+ lock_pypi_package_requests_forwarding: true,
+ nuget_symbol_server_enabled: true
}
it_behaves_like 'returning a success'
diff --git a/spec/requests/api/graphql/mutations/organizations/create_spec.rb b/spec/requests/api/graphql/mutations/organizations/create_spec.rb
index ac6b04104ba..8ab80685822 100644
--- a/spec/requests/api/graphql/mutations/organizations/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/organizations/create_spec.rb
@@ -4,20 +4,24 @@ require 'spec_helper'
RSpec.describe Mutations::Organizations::Create, feature_category: :cell do
include GraphqlHelpers
+ include WorkhorseHelpers
let_it_be(:user) { create(:user) }
let(:mutation) { graphql_mutation(:organization_create, params) }
let(:name) { 'Name' }
let(:path) { 'path' }
+ let(:description) { nil }
+ let(:avatar) { fixture_file_upload("spec/fixtures/dk.png") }
let(:params) do
{
name: name,
- path: path
+ path: path,
+ avatar: avatar
}
end
- subject(:create_organization) { post_graphql_mutation(mutation, current_user: current_user) }
+ subject(:create_organization) { post_graphql_mutation_with_uploads(mutation, current_user: current_user) }
it { expect(described_class).to require_graphql_authorizations(:create_organization) }
@@ -27,6 +31,7 @@ RSpec.describe Mutations::Organizations::Create, feature_category: :cell do
context 'when the user does not have permission' do
let(:current_user) { nil }
+ let(:avatar) { nil }
it_behaves_like 'a mutation that returns a top-level access error'
@@ -48,17 +53,35 @@ RSpec.describe Mutations::Organizations::Create, feature_category: :cell do
end
end
- it 'creates an organization' do
- expect { create_organization }.to change { Organizations::Organization.count }.by(1)
+ shared_examples 'creating an organization' do
+ it 'creates an organization' do
+ expect { create_organization }.to change { Organizations::Organization.count }.by(1)
+ end
+
+ it 'returns the new organization' do
+ create_organization
+
+ expect(graphql_data_at(:organization_create, :organization)).to match a_hash_including(
+ 'name' => name,
+ 'path' => path,
+ 'description' => description
+ )
+ end
end
- it 'returns the new organization' do
- create_organization
+ context 'with description' do
+ let(:description) { 'Organization description' }
+ let(:params) do
+ {
+ name: name,
+ path: path,
+ description: description
+ }
+ end
- expect(graphql_data_at(:organization_create, :organization)).to match a_hash_including(
- 'name' => name,
- 'path' => path
- )
+ include_examples 'creating an organization'
end
+
+ include_examples 'creating an organization'
end
end
diff --git a/spec/requests/api/graphql/mutations/organizations/update_spec.rb b/spec/requests/api/graphql/mutations/organizations/update_spec.rb
new file mode 100644
index 00000000000..4e819c280d0
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/organizations/update_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Organizations::Update, feature_category: :cell do
+ include GraphqlHelpers
+ include WorkhorseHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:organization) do
+ create(:organization) { |org| create(:organization_user, organization: org, user: user) }
+ end
+
+ let(:mutation) { graphql_mutation(:organization_update, params) }
+ let(:name) { 'Name' }
+ let(:path) { 'path' }
+ let(:description) { 'org-description' }
+ let(:avatar) { nil }
+ let(:params) do
+ {
+ id: organization.to_global_id.to_s,
+ name: name,
+ path: path,
+ description: description,
+ avatar: avatar
+ }
+ end
+
+ subject(:update_organization) { post_graphql_mutation_with_uploads(mutation, current_user: current_user) }
+
+ it { expect(described_class).to require_graphql_authorizations(:admin_organization) }
+
+ def mutation_response
+ graphql_mutation_response(:organization_update)
+ end
+
+ context 'when the user does not have permission' do
+ let(:current_user) { nil }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not update the organization' do
+ initial_name = organization.name
+ initial_path = organization.path
+
+ update_organization
+ organization.reset
+
+ expect(organization.name).to eq(initial_name)
+ expect(organization.path).to eq(initial_path)
+ end
+ end
+
+ context 'when the user has permission' do
+ let(:current_user) { user }
+
+ context 'when the params are invalid' do
+ let(:name) { '' }
+
+ it 'returns the validation error' do
+ update_organization
+
+ expect(mutation_response).to include('errors' => ["Name can't be blank"])
+ end
+ end
+
+ context 'when single attribute is updated' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(attribute: %w[name path description])
+
+ with_them do
+ let(:value) { "new-#{attribute}" }
+ let(:attribute_hash) { { attribute => value } }
+ let(:params) { { id: organization.to_global_id.to_s }.merge(attribute_hash) }
+
+ it 'updates the given field' do
+ update_organization
+
+ expect(graphql_data_at(:organization_update, :organization)).to match a_hash_including(attribute_hash)
+ expect(mutation_response['errors']).to be_empty
+ end
+ end
+ end
+
+ it 'returns the updated organization' do
+ update_organization
+
+ expect(graphql_data_at(:organization_update, :organization)).to match a_hash_including(
+ 'name' => name,
+ 'path' => path,
+ 'description' => description
+ )
+ expect(mutation_response['errors']).to be_empty
+ end
+
+ context 'with a new avatar' do
+ let(:filename) { 'spec/fixtures/dk.png' }
+ let(:avatar) { fixture_file_upload(filename) }
+
+ it 'returns the updated organization' do
+ update_organization
+
+ expect(
+ graphql_data_at(:organization_update, :organization)
+ ).to(
+ match(
+ a_hash_including(
+ 'name' => name,
+ 'path' => path,
+ 'description' => description
+ )
+ )
+ )
+ expect(File.basename(organization.reload.avatar.file.file)).to eq(File.basename(filename))
+ expect(mutation_response['errors']).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb b/spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb
index d0980a2b43d..084958be1fb 100644
--- a/spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb
@@ -38,6 +38,26 @@ RSpec.describe 'Destroying multiple packages', feature_category: :package_regist
end
it_behaves_like 'returning response status', :success
+
+ context 'when npm package' do
+ let_it_be_with_reload(:packages1) { create_list(:npm_package, 3, project: project1, name: 'test-package-1') }
+ let_it_be_with_reload(:packages2) { create_list(:npm_package, 2, project: project2, name: 'test-package-2') }
+
+ it 'enqueues the worker to sync a metadata cache' do
+ arguments = []
+
+ expect(Packages::Npm::CreateMetadataCacheWorker)
+ .to receive(:bulk_perform_async_with_contexts).and_wrap_original do |original_method, *args|
+ packages = args.first
+ arguments = packages.map(&args.second[:arguments_proc]).uniq
+ original_method.call(*args)
+ end
+
+ mutation_request
+
+ expect(arguments).to contain_exactly([project1.id, 'test-package-1'], [project2.id, 'test-package-2'])
+ end
+ end
end
shared_examples 'denying the mutation request' do
diff --git a/spec/requests/api/graphql/mutations/packages/destroy_spec.rb b/spec/requests/api/graphql/mutations/packages/destroy_spec.rb
index 86167e7116f..6e0e5bd8aae 100644
--- a/spec/requests/api/graphql/mutations/packages/destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/packages/destroy_spec.rb
@@ -35,6 +35,17 @@ RSpec.describe 'Destroying a package', feature_category: :package_registry do
.to change { ::Packages::Package.pending_destruction.count }.by(1)
end
+ context 'when npm package' do
+ let_it_be_with_reload(:package) { create(:npm_package) }
+
+ it 'enqueues the worker to sync a metadata cache' do
+ expect(Packages::Npm::CreateMetadataCacheWorker)
+ .to receive(:perform_async).with(project.id, package.name)
+
+ mutation_request
+ end
+ end
+
it_behaves_like 'returning response status', :success
end
diff --git a/spec/requests/api/graphql/mutations/packages/protection/rule/delete_spec.rb b/spec/requests/api/graphql/mutations/packages/protection/rule/delete_spec.rb
index 1d94d520674..6c300f8ce57 100644
--- a/spec/requests/api/graphql/mutations/packages/protection/rule/delete_spec.rb
+++ b/spec/requests/api/graphql/mutations/packages/protection/rule/delete_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Deleting a package protection rule', :aggregate_failures, featur
subject { post_graphql_mutation(mutation, current_user: current_user) }
- shared_examples 'an erroneous reponse' do
+ shared_examples 'an erroneous response' do
it { subject.tap { expect(mutation_response).to be_blank } }
it { expect { subject }.not_to change { ::Packages::Protection::Rule.count } }
end
@@ -44,7 +44,7 @@ RSpec.describe 'Deleting a package protection rule', :aggregate_failures, featur
create(:package_protection_rule, package_name_pattern: 'protection_rule_other_project')
end
- it_behaves_like 'an erroneous reponse'
+ it_behaves_like 'an erroneous response'
it { subject.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
end
@@ -54,7 +54,7 @@ RSpec.describe 'Deleting a package protection rule', :aggregate_failures, featur
create(:package_protection_rule, project: project, package_name_pattern: 'protection_rule_deleted').destroy!
end
- it_behaves_like 'an erroneous reponse'
+ it_behaves_like 'an erroneous response'
it { subject.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
end
@@ -70,7 +70,7 @@ RSpec.describe 'Deleting a package protection rule', :aggregate_failures, featur
end
with_them do
- it_behaves_like 'an erroneous reponse'
+ it_behaves_like 'an erroneous response'
it { subject.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
end
@@ -81,7 +81,7 @@ RSpec.describe 'Deleting a package protection rule', :aggregate_failures, featur
stub_feature_flags(packages_protected_packages: false)
end
- it_behaves_like 'an erroneous reponse'
+ it_behaves_like 'an erroneous response'
it { subject.tap { expect_graphql_errors_to_include(/'packages_protected_packages' feature flag is disabled/) } }
end
diff --git a/spec/requests/api/graphql/mutations/packages/protection/rule/update_spec.rb b/spec/requests/api/graphql/mutations/packages/protection/rule/update_spec.rb
new file mode 100644
index 00000000000..efc919062d6
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/packages/protection/rule/update_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Updating the packages protection rule', :aggregate_failures, feature_category: :package_registry do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:package_protection_rule) do
+ create(:package_protection_rule, project: project, push_protected_up_to_access_level: :developer)
+ end
+
+ let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
+
+ let(:package_protection_rule_attributes) { build_stubbed(:package_protection_rule, project: project) }
+
+ let(:mutation) do
+ graphql_mutation(:update_packages_protection_rule, input,
+ <<~QUERY
+ packageProtectionRule {
+ packageNamePattern
+ pushProtectedUpToAccessLevel
+ }
+ clientMutationId
+ errors
+ QUERY
+ )
+ end
+
+ let(:input) do
+ {
+ id: package_protection_rule.to_global_id,
+ package_name_pattern: "#{package_protection_rule.package_name_pattern}-updated",
+ push_protected_up_to_access_level: 'MAINTAINER'
+ }
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:update_packages_protection_rule) }
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ shared_examples 'a successful response' do
+ it { subject.tap { expect_graphql_errors_to_be_empty } }
+
+ it 'returns the updated package protection rule' do
+ subject
+
+ expect(mutation_response).to include(
+ 'packageProtectionRule' => {
+ 'packageNamePattern' => input[:package_name_pattern],
+ 'pushProtectedUpToAccessLevel' => input[:push_protected_up_to_access_level]
+ }
+ )
+ end
+
+ it do
+ subject.tap do
+ expect(package_protection_rule.reload).to have_attributes(
+ package_name_pattern: input[:package_name_pattern],
+ push_protected_up_to_access_level: input[:push_protected_up_to_access_level].downcase
+ )
+ end
+ end
+ end
+
+ shared_examples 'an erroneous response' do
+ it { subject.tap { expect(mutation_response).to be_blank } }
+ it { expect { subject }.not_to change { package_protection_rule.reload.updated_at } }
+ end
+
+ it_behaves_like 'a successful response'
+
+ context 'with other existing package protection rule with same package_name_pattern' do
+ let_it_be_with_reload(:other_existing_package_protection_rule) do
+ create(:package_protection_rule, project: project,
+ package_name_pattern: "#{package_protection_rule.package_name_pattern}-other")
+ end
+
+ let(:input) { super().merge(package_name_pattern: other_existing_package_protection_rule.package_name_pattern) }
+
+ it { is_expected.tap { expect_graphql_errors_to_be_empty } }
+
+ it 'returns a blank package protection rule' do
+ is_expected.tap { expect(mutation_response['packageProtectionRule']).to be_blank }
+ end
+
+ it 'includes error message in response' do
+ is_expected.tap { expect(mutation_response['errors']).to eq ['Package name pattern has already been taken'] }
+ end
+ end
+
+ context 'with invalid input param `pushProtectedUpToAccessLevel`' do
+ let(:input) { super().merge(push_protected_up_to_access_level: nil) }
+
+ it_behaves_like 'an erroneous response'
+
+ it { is_expected.tap { expect_graphql_errors_to_include(/pushProtectedUpToAccessLevel can't be blank/) } }
+ end
+
+ context 'with invalid input param `packageNamePattern`' do
+ let(:input) { super().merge(package_name_pattern: '') }
+
+ it_behaves_like 'an erroneous response'
+
+ it { is_expected.tap { expect_graphql_errors_to_include(/packageNamePattern can't be blank/) } }
+ end
+
+ context 'when current_user does not have permission' do
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ let_it_be(:anonymous) { create(:user) }
+
+ where(:current_user) do
+ [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
+ end
+
+ with_them do
+ it { is_expected.tap { expect_graphql_errors_to_include(/you don't have permission to perform this action/) } }
+ end
+ end
+
+ context "when feature flag ':packages_protected_packages' disabled" do
+ before do
+ stub_feature_flags(packages_protected_packages: false)
+ end
+
+ it_behaves_like 'an erroneous response'
+
+ it 'returns error of disabled feature flag' do
+ is_expected.tap { expect_graphql_errors_to_include(/'packages_protected_packages' feature flag is disabled/) }
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/user_preferences/update_spec.rb b/spec/requests/api/graphql/mutations/user_preferences/update_spec.rb
index 65b8083c74f..b1cd3259eeb 100644
--- a/spec/requests/api/graphql/mutations/user_preferences/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/user_preferences/update_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe Mutations::UserPreferences::Update, feature_category: :user_profi
let(:input) do
{
'issuesSort' => sort_value,
- 'visibilityPipelineIdType' => 'IID'
+ 'visibilityPipelineIdType' => 'IID',
+ 'useWebIdeExtensionMarketplace' => true
}
end
@@ -26,19 +27,26 @@ RSpec.describe Mutations::UserPreferences::Update, feature_category: :user_profi
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['userPreferences']['issuesSort']).to eq(sort_value)
expect(mutation_response['userPreferences']['visibilityPipelineIdType']).to eq('IID')
+ expect(mutation_response['userPreferences']['useWebIdeExtensionMarketplace']).to eq(true)
expect(current_user.user_preference.persisted?).to eq(true)
expect(current_user.user_preference.issues_sort).to eq(Types::IssueSortEnum.values[sort_value].value.to_s)
expect(current_user.user_preference.visibility_pipeline_id_type).to eq('iid')
+ expect(current_user.user_preference.use_web_ide_extension_marketplace).to eq(true)
end
end
context 'when user has existing preference' do
- before do
- current_user.create_user_preference!(
+ let(:init_user_preference) do
+ {
issues_sort: Types::IssueSortEnum.values['TITLE_DESC'].value,
- visibility_pipeline_id_type: 'id'
- )
+ visibility_pipeline_id_type: 'id',
+ use_web_ide_extension_marketplace: true
+ }
+ end
+
+ before do
+ current_user.create_user_preference!(init_user_preference)
end
it 'updates the existing value' do
@@ -53,5 +61,29 @@ RSpec.describe Mutations::UserPreferences::Update, feature_category: :user_profi
expect(current_user.user_preference.issues_sort).to eq(Types::IssueSortEnum.values[sort_value].value.to_s)
expect(current_user.user_preference.visibility_pipeline_id_type).to eq('iid')
end
+
+ context 'when input has nil attributes' do
+ let(:input) do
+ {
+ 'issuesSort' => nil,
+ 'visibilityPipelineIdType' => nil,
+ 'useWebIdeExtensionMarketplace' => nil
+ }
+ end
+
+ it 'updates only nullable attributes' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ current_user.user_preference.reload
+
+ expect(current_user.user_preference).to have_attributes({
+ # These are nullable and are expected to change
+ issues_sort: nil,
+ # These should not have changed
+ visibility_pipeline_id_type: init_user_preference[:visibility_pipeline_id_type],
+ use_web_ide_extension_marketplace: init_user_preference[:use_web_ide_extension_marketplace]
+ })
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb b/spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb
deleted file mode 100644
index b1828de046f..00000000000
--- a/spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe "Delete a task in a work item's description", feature_category: :team_planning do
- include GraphqlHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
- let_it_be(:task) { create(:work_item, :task, project: project, author: developer) }
- let_it_be(:work_item, refind: true) do
- create(:work_item, project: project, description: "- [ ] #{task.to_reference}+", lock_version: 3)
- end
-
- before_all do
- create(:issue_link, source_id: work_item.id, target_id: task.id)
- end
-
- let(:lock_version) { work_item.lock_version }
- let(:input) do
- {
- 'id' => work_item.to_global_id.to_s,
- 'lockVersion' => lock_version,
- 'taskData' => {
- 'id' => task.to_global_id.to_s,
- 'lineNumberStart' => 1,
- 'lineNumberEnd' => 1
- }
- }
- end
-
- let(:mutation) { graphql_mutation(:workItemDeleteTask, input) }
- let(:mutation_response) { graphql_mutation_response(:work_item_delete_task) }
-
- context 'the user is not allowed to update a work item' do
- let(:current_user) { create(:user) }
-
- it_behaves_like 'a mutation that returns a top-level access error'
- end
-
- context 'when user can update the description but not delete the task' do
- let(:current_user) { create(:user).tap { |u| project.add_developer(u) } }
-
- it_behaves_like 'a mutation that returns a top-level access error'
- end
-
- context 'when user has permissions to remove a task' do
- let(:current_user) { developer }
-
- it 'removes the task from the work item' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- work_item.reload
- end.to change(WorkItem, :count).by(-1).and(
- change(IssueLink, :count).by(-1)
- ).and(
- change(work_item, :description).from("- [ ] #{task.to_reference}+").to("- [ ] #{task.title}")
- )
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s)
- end
-
- context 'when removing the task fails' do
- let(:lock_version) { 2 }
-
- it 'makes no changes to the DB and returns an error message' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- work_item.reload
- end.to not_change(WorkItem, :count).and(
- not_change(work_item, :description)
- )
-
- expect(mutation_response['errors']).to contain_exactly('Stale work item. Check lock version')
- end
- end
- end
-end
diff --git a/spec/requests/api/graphql/organizations/organization_query_spec.rb b/spec/requests/api/graphql/organizations/organization_query_spec.rb
index c243e0613ad..c485e3b170d 100644
--- a/spec/requests/api/graphql/organizations/organization_query_spec.rb
+++ b/spec/requests/api/graphql/organizations/organization_query_spec.rb
@@ -23,7 +23,8 @@ RSpec.describe 'getting organization information', feature_category: :cell do
let_it_be(:organization_user) { create(:organization_user) }
let_it_be(:organization) { organization_user.organization }
let_it_be(:user) { organization_user.user }
- let_it_be(:public_group) { create(:group, name: 'public-group', organization: organization) }
+ let_it_be(:parent_group) { create(:group, name: 'parent-group', organization: organization) }
+ let_it_be(:public_group) { create(:group, name: 'public-group', parent: parent_group, organization: organization) }
let_it_be(:other_group) { create(:group, name: 'other-group', organization: organization) }
let_it_be(:outside_organization_group) { create(:group) }
@@ -74,6 +75,12 @@ RSpec.describe 'getting organization information', feature_category: :cell do
end
end
+ it 'does not return ancestors of authorized groups' do
+ request_organization
+
+ expect(groups.pluck('id')).not_to include(parent_group.to_global_id.to_s)
+ end
+
context 'when requesting organization user' do
let(:organization_fields) do
<<~FIELDS
diff --git a/spec/requests/api/graphql/project/alert_management/integrations_spec.rb b/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
index e48db541e1f..c4d3a217027 100644
--- a/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe 'getting Alert Management Integrations', feature_category: :incid
'name' => 'Prometheus',
'active' => prometheus_integration.manual_configuration?,
'token' => project_alerting_setting.token,
- 'url' => "http://localhost/#{project.full_path}/prometheus/alerts/notify.json",
+ 'url' => "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/prometheus/alerts/notify.json",
'apiUrl' => prometheus_integration.api_url
)
]
diff --git a/spec/requests/api/graphql/project/cluster_agents_spec.rb b/spec/requests/api/graphql/project/cluster_agents_spec.rb
index 181f21001ea..104f4f41cba 100644
--- a/spec/requests/api/graphql/project/cluster_agents_spec.rb
+++ b/spec/requests/api/graphql/project/cluster_agents_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'Project.cluster_agents', feature_category: :deployment_managemen
end
before do
- allow(Gitlab::Kas::Client).to receive(:new).and_return(double(get_connected_agents: []))
+ allow(Gitlab::Kas::Client).to receive(:new).and_return(double(get_connected_agents_by_agent_ids: []))
end
it 'can retrieve cluster agents' do
@@ -87,7 +87,7 @@ RSpec.describe 'Project.cluster_agents', feature_category: :deployment_managemen
let(:cluster_agents_fields) { [:id, query_nodes(:connections, [:connection_id, :connected_at, metadata_fields])] }
before do
- allow(Gitlab::Kas::Client).to receive(:new).and_return(double(get_connected_agents: [connected_agent]))
+ allow(Gitlab::Kas::Client).to receive(:new).and_return(double(get_connected_agents_by_agent_ids: [connected_agent]))
end
it 'can retrieve connections and agent metadata' do
diff --git a/spec/requests/api/graphql/project/value_streams_spec.rb b/spec/requests/api/graphql/project/value_streams_spec.rb
new file mode 100644
index 00000000000..01e937c1e47
--- /dev/null
+++ b/spec/requests/api/graphql/project/value_streams_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project.value_streams', feature_category: :value_stream_management do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:query) do
+ <<~QUERY
+ query($fullPath: ID!) {
+ project(fullPath: $fullPath) {
+ valueStreams {
+ nodes {
+ name
+ stages {
+ name
+ startEventIdentifier
+ endEventIdentifier
+ }
+ }
+ }
+ }
+ }
+ QUERY
+ end
+
+ context 'when user has permissions to read value streams' do
+ let(:expected_value_stream) do
+ {
+ 'project' => {
+ 'valueStreams' => {
+ 'nodes' => [
+ {
+ 'name' => 'default',
+ 'stages' => expected_stages
+ }
+ ]
+ }
+ }
+ }
+ end
+
+ let(:expected_stages) do
+ [
+ {
+ 'name' => 'issue',
+ 'startEventIdentifier' => 'ISSUE_CREATED',
+ 'endEventIdentifier' => 'ISSUE_STAGE_END'
+ },
+ {
+ 'name' => 'plan',
+ 'startEventIdentifier' => 'PLAN_STAGE_START',
+ 'endEventIdentifier' => 'ISSUE_FIRST_MENTIONED_IN_COMMIT'
+ },
+ {
+ 'name' => 'code',
+ 'startEventIdentifier' => 'CODE_STAGE_START',
+ 'endEventIdentifier' => 'MERGE_REQUEST_CREATED'
+ },
+ {
+ 'name' => 'test',
+ 'startEventIdentifier' => 'MERGE_REQUEST_LAST_BUILD_STARTED',
+ 'endEventIdentifier' => 'MERGE_REQUEST_LAST_BUILD_FINISHED'
+ },
+ {
+ 'name' => 'review',
+ 'startEventIdentifier' => 'MERGE_REQUEST_CREATED',
+ 'endEventIdentifier' => 'MERGE_REQUEST_MERGED'
+ },
+ {
+ 'name' => 'staging',
+ 'startEventIdentifier' => 'MERGE_REQUEST_MERGED',
+ 'endEventIdentifier' => 'MERGE_REQUEST_FIRST_DEPLOYED_TO_PRODUCTION'
+ }
+ ]
+ end
+
+ before_all do
+ project.add_guest(user)
+ end
+
+ before do
+ post_graphql(query, current_user: user, variables: { fullPath: project.full_path })
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns only `default` value stream' do
+ expect(graphql_data).to eq(expected_value_stream)
+ end
+ end
+
+ context 'when user does not have permission to read value streams' do
+ before do
+ post_graphql(query, current_user: user, variables: { fullPath: project.full_path })
+ end
+
+ it 'returns nil' do
+ expect(graphql_data_at(:project, :valueStreams)).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/work_item_state_counts_spec.rb b/spec/requests/api/graphql/project/work_item_state_counts_spec.rb
new file mode 100644
index 00000000000..d13204a36b7
--- /dev/null
+++ b/spec/requests/api/graphql/project/work_item_state_counts_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting Work Item counts by state', feature_category: :portfolio_management do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, :repository, :private, group: group) }
+ let_it_be(:work_item_opened1) { create(:work_item, project: project, title: 'Foo') }
+ let_it_be(:work_item_opened2) { create(:work_item, project: project, author: current_user) }
+ let_it_be(:work_item_closed) { create(:work_item, :closed, project: project, description: 'Bar') }
+
+ let(:params) { {} }
+
+ subject(:query_counts) { post_graphql(query, current_user: current_user) }
+
+ context 'with work items count data' do
+ let(:work_item_counts) { graphql_data.dig('project', 'workItemStateCounts') }
+
+ context 'with project permissions' do
+ before_all do
+ group.add_developer(current_user)
+ end
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ query_counts
+ end
+ end
+
+ it 'returns the correct counts for each state' do
+ query_counts
+
+ expect(work_item_counts).to eq(
+ 'all' => 3,
+ 'opened' => 2,
+ 'closed' => 1
+ )
+ end
+
+ context 'when other work items are present in the group' do
+ it 'only returns counts for work items in the current project' do
+ other_project = create(:project, :repository, group: group)
+ create(:work_item, project: other_project)
+ query_counts
+
+ expect(work_item_counts).to eq(
+ 'all' => 3,
+ 'opened' => 2,
+ 'closed' => 1
+ )
+ end
+ end
+
+ context 'when filters are provided' do
+ context 'when filtering by author username' do
+ let(:params) { { 'authorUsername' => current_user.username } }
+
+ it 'returns the correct counts for each status' do
+ query_counts
+
+ expect(work_item_counts).to eq(
+ 'all' => 1,
+ 'opened' => 1,
+ 'closed' => 0
+ )
+ end
+ end
+
+ context 'when searching in title' do
+ let(:params) { { search: 'Foo', in: [:TITLE] } }
+
+ it 'returns the correct counts for each status' do
+ query_counts
+
+ expect(work_item_counts).to eq(
+ 'all' => 1,
+ 'opened' => 1,
+ 'closed' => 0
+ )
+ end
+ end
+
+ context 'when searching in description' do
+ let(:params) { { search: 'Bar', in: [:DESCRIPTION] } }
+
+ it 'returns the correct counts for each status' do
+ query_counts
+
+ expect(work_item_counts).to eq(
+ 'all' => 1,
+ 'opened' => 0,
+ 'closed' => 1
+ )
+ end
+ end
+ end
+ end
+
+ context 'without project permissions' do
+ it 'does not return work item counts' do
+ query_counts
+
+ expect_graphql_errors_to_be_empty
+ expect(work_item_counts).to be_nil
+ end
+ end
+ end
+
+ def query(args: params)
+ fields = <<~QUERY
+ #{all_graphql_fields_for('WorkItemStateCountsType'.classify)}
+ QUERY
+
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('workItemStateCounts', args, fields)
+ )
+ end
+end
diff --git a/spec/requests/api/graphql/project/work_item_types_spec.rb b/spec/requests/api/graphql/project/work_item_types_spec.rb
index c31a260c4b8..086db983760 100644
--- a/spec/requests/api/graphql/project/work_item_types_spec.rb
+++ b/spec/requests/api/graphql/project/work_item_types_spec.rb
@@ -5,56 +5,19 @@ require 'spec_helper'
RSpec.describe 'getting a list of work item types for a project', feature_category: :team_planning do
include GraphqlHelpers
- let_it_be(:developer) { create(:user) }
let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
- before_all do
- project.add_developer(developer)
- end
-
- let(:current_user) { developer }
-
- let(:fields) do
- <<~GRAPHQL
- workItemTypes{
- nodes { id name iconName }
- }
- GRAPHQL
- end
-
- let(:query) do
- graphql_query_for(
- 'project',
- { 'fullPath' => project.full_path },
- fields
- )
- end
-
- context 'when user has access to the project' do
- before do
- post_graphql(query, current_user: current_user)
- end
+ it_behaves_like 'graphql work item type list request spec' do
+ let(:current_user) { developer }
+ let(:parent_key) { :project }
- it_behaves_like 'a working graphql query'
-
- it 'returns all default work item types' do
- expect(graphql_data.dig('project', 'workItemTypes', 'nodes')).to match_array(
- WorkItems::Type.default.map do |type|
- hash_including('id' => type.to_global_id.to_s, 'name' => type.name, 'iconName' => type.icon_name)
- end
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_nodes('WorkItemTypes', work_item_type_fields)
)
end
end
-
- context "when user doesn't have access to the project" do
- let(:current_user) { create(:user) }
-
- before do
- post_graphql(query, current_user: current_user)
- end
-
- it 'does not return the project' do
- expect(graphql_data).to eq('project' => nil)
- end
- end
end
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index 36a27abd982..fe77b7ae736 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -104,6 +104,18 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
end
end
+ context 'when querying work item type information' do
+ include_context 'with work item types request context'
+
+ let(:work_item_fields) { "workItemType { #{work_item_type_fields} }" }
+
+ it 'returns work item type information' do
+ expect(work_item_data['workItemType']).to match(
+ expected_work_item_type_response(work_item.work_item_type).first
+ )
+ end
+ end
+
context 'when querying widgets' do
describe 'description widget' do
let(:work_item_fields) do
diff --git a/spec/requests/api/graphql/work_items_by_reference_spec.rb b/spec/requests/api/graphql/work_items_by_reference_spec.rb
new file mode 100644
index 00000000000..ad2303a81e7
--- /dev/null
+++ b/spec/requests/api/graphql/work_items_by_reference_spec.rb
@@ -0,0 +1,130 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'find work items by reference', feature_category: :portfolio_management do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:group2) { create(:group, :public) }
+ let_it_be(:project2) { create(:project, :repository, :public, group: group2) }
+ let_it_be(:private_project2) { create(:project, :repository, :private, group: group2) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project2) }
+ let_it_be(:private_work_item) { create(:work_item, :task, project: private_project2) }
+
+ let(:references) { [work_item.to_reference(full: true), private_work_item.to_reference(full: true)] }
+
+ shared_examples 'response with matching work items' do
+ it 'returns accessible work item' do
+ post_graphql(query, current_user: current_user)
+
+ expected_items = items.map { |item| a_graphql_entity_for(item) }
+ expect(graphql_data_at('workItemsByReference', 'nodes')).to match(expected_items)
+ end
+ end
+
+ context 'when user has access only to public work items' do
+ it_behaves_like 'a working graphql query that returns data' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ it_behaves_like 'response with matching work items' do
+ let(:items) { [work_item] }
+ end
+
+ it 'avoids N+1 queries', :use_sql_query_cache do
+ post_graphql(query, current_user: current_user) # warm up
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post_graphql(query, current_user: current_user)
+ end
+ expect(graphql_data_at('workItemsByReference', 'nodes').size).to eq(1)
+
+ extra_work_items = create_list(:work_item, 2, :task, project: project2)
+ refs = references + extra_work_items.map { |item| item.to_reference(full: true) }
+
+ expect do
+ post_graphql(query(refs: refs), current_user: current_user)
+ end.not_to exceed_all_query_limit(control_count)
+ expect(graphql_data_at('workItemsByReference', 'nodes').size).to eq(3)
+ end
+ end
+
+ context 'when user has access to work items in private project' do
+ before_all do
+ private_project2.add_guest(current_user)
+ end
+
+ it_behaves_like 'response with matching work items' do
+ let(:items) { [private_work_item, work_item] }
+ end
+ end
+
+ context 'when refs includes links' do
+ let_it_be(:work_item_with_url) { create(:work_item, :task, project: project2) }
+ let(:references) { [work_item.to_reference(full: true), Gitlab::UrlBuilder.build(work_item_with_url)] }
+
+ it_behaves_like 'response with matching work items' do
+ let(:items) { [work_item_with_url, work_item] }
+ end
+ end
+
+ context 'when refs includes a short reference present in the context project' do
+ let_it_be(:same_project_work_item) { create(:work_item, :task, project: project) }
+ let(:references) { ["##{same_project_work_item.iid}"] }
+
+ it_behaves_like 'response with matching work items' do
+ let(:items) { [same_project_work_item] }
+ end
+ end
+
+ context 'when user cannot access context namespace' do
+ it 'returns error' do
+ post_graphql(query(namespace_path: private_project2.full_path), current_user: current_user)
+
+ expect(graphql_data_at('workItemsByReference')).to be_nil
+ expect(graphql_errors).to contain_exactly(a_hash_including(
+ 'message' => a_string_including("you don't have permission to perform this action"),
+ 'path' => %w[workItemsByReference]
+ ))
+ end
+ end
+
+ context 'when the context is a group' do
+ it 'returns empty result' do
+ group2.add_guest(current_user)
+ post_graphql(query(namespace_path: group2.full_path), current_user: current_user)
+
+ expect_graphql_errors_to_be_empty
+ expect(graphql_data_at('workItemsByReference', 'nodes')).to be_empty
+ end
+ end
+
+ context 'when there are more than the max allowed references' do
+ let(:references_limit) { ::Resolvers::WorkItemReferencesResolver::REFERENCES_LIMIT }
+ let(:references) { (0..references_limit).map { |n| "##{n}" } }
+ let(:error_msg) do
+ "Number of references exceeds the limit. " \
+ "Please provide no more than #{references_limit} references at the same time."
+ end
+
+ it 'returns an error message' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_include(error_msg)
+ end
+ end
+
+ def query(namespace_path: project.full_path, refs: references)
+ fields = <<~GRAPHQL
+ nodes {
+ #{all_graphql_fields_for('WorkItem', max_depth: 2)}
+ }
+ GRAPHQL
+
+ graphql_query_for('workItemsByReference', { contextNamespacePath: namespace_path, refs: refs }, fields)
+ end
+end
diff --git a/spec/requests/api/group_export_spec.rb b/spec/requests/api/group_export_spec.rb
index d0f7c000544..c48ade1cb8b 100644
--- a/spec/requests/api/group_export_spec.rb
+++ b/spec/requests/api/group_export_spec.rb
@@ -311,6 +311,8 @@ RSpec.describe API::GroupExport, feature_category: :importers do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('relation')).to contain_exactly('labels', 'milestones', 'badges')
expect(json_response.pluck('status')).to contain_exactly(-1, 0, 1)
+ expect(json_response.pluck('batched')).to all(eq(false))
+ expect(json_response.pluck('batches_count')).to all(eq(0))
end
context 'when relation is specified' do
@@ -322,6 +324,36 @@ RSpec.describe API::GroupExport, feature_category: :importers do
expect(json_response['status']).to eq(0)
end
end
+
+ context 'when there is a batched export' do
+ let_it_be(:batched_export) do
+ create(:bulk_import_export, :started, :batched, group: group, relation: 'boards', batches_count: 1)
+ end
+
+ let_it_be(:batch) { create(:bulk_import_export_batch, objects_count: 5, export: batched_export) }
+
+ it 'returns a list of batched relation export statuses' do
+ get api(status_path, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to include(
+ hash_including(
+ 'relation' => batched_export.relation,
+ 'batched' => true,
+ 'batches_count' => 1,
+ 'batches' => contain_exactly(
+ {
+ 'batch_number' => 1,
+ 'error' => nil,
+ 'objects_count' => batch.objects_count,
+ 'status' => batch.status,
+ 'updated_at' => batch.updated_at.as_json
+ }
+ )
+ )
+ )
+ end
+ end
end
context 'when bulk import is disabled' do
diff --git a/spec/requests/api/group_milestones_spec.rb b/spec/requests/api/group_milestones_spec.rb
index 2f05b0fcf21..82a4311f7d0 100644
--- a/spec/requests/api/group_milestones_spec.rb
+++ b/spec/requests/api/group_milestones_spec.rb
@@ -30,75 +30,103 @@ RSpec.describe API::GroupMilestones, feature_category: :team_planning do
it_behaves_like 'group and project milestones', "/groups/:id/milestones"
describe 'GET /groups/:id/milestones' do
- let_it_be(:ancestor_group) { create(:group, :private) }
- let_it_be(:ancestor_group_milestone) { create(:milestone, group: ancestor_group, updated_at: 2.days.ago) }
+ context 'for REST only' do
+ let_it_be(:ancestor_group) { create(:group, :private) }
+ let_it_be(:ancestor_group_milestone) { create(:milestone, group: ancestor_group, updated_at: 2.days.ago) }
- before_all do
- group.update!(parent: ancestor_group)
- end
+ before_all do
+ group.update!(parent: ancestor_group)
+ end
- context 'when include_parent_milestones is true' do
- let(:params) { { include_parent_milestones: true } }
+ context 'when include_ancestors is true' do
+ let(:params) { { include_ancestors: true } }
- context 'when user has access to ancestor groups' do
- let(:milestones) { [ancestor_group_milestone, milestone, closed_milestone] }
+ context 'when user has access to ancestor groups' do
+ let(:milestones) { [ancestor_group_milestone, milestone, closed_milestone] }
- before do
- ancestor_group.add_guest(user)
- group.add_guest(user)
- end
+ before do
+ ancestor_group.add_guest(user)
+ group.add_guest(user)
+ end
- it_behaves_like 'listing all milestones'
+ it_behaves_like 'listing all milestones'
- context 'when iids param is present' do
- let(:params) { { include_parent_milestones: true, iids: [milestone.iid] } }
+ context 'when deprecated include_parent_milestones is true' do
+ let(:params) { { include_parent_milestones: true } }
- it_behaves_like 'listing all milestones'
- end
+ it_behaves_like 'listing all milestones'
+ end
- context 'when updated_before param is present' do
- let(:params) { { updated_before: 1.day.ago.iso8601, include_parent_milestones: true } }
+ context 'when both include_parent_milestones and include_ancestors are specified' do
+ let(:params) { { include_ancestors: true, include_parent_milestones: true } }
- it_behaves_like 'listing all milestones' do
- let(:milestones) { [ancestor_group_milestone, milestone] }
+ it 'returns 400' do
+ get api(route, user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when iids param is present' do
+ let(:params) { { include_ancestors: true, iids: [milestone.iid] } }
+
+ it_behaves_like 'listing all milestones'
+ end
+
+ context 'when updated_before param is present' do
+ let(:params) { { updated_before: 1.day.ago.iso8601, include_ancestors: true } }
+
+ it_behaves_like 'listing all milestones' do
+ let(:milestones) { [ancestor_group_milestone, milestone] }
+ end
+ end
+
+ context 'when updated_after param is present' do
+ let(:params) { { updated_after: 1.day.ago.iso8601, include_ancestors: true } }
+
+ it_behaves_like 'listing all milestones' do
+ let(:milestones) { [closed_milestone] }
+ end
end
end
- context 'when updated_after param is present' do
- let(:params) { { updated_after: 1.day.ago.iso8601, include_parent_milestones: true } }
+ context 'when user has no access to ancestor groups' do
+ let(:user) { create(:user) }
+
+ before do
+ group.add_guest(user)
+ end
it_behaves_like 'listing all milestones' do
- let(:milestones) { [closed_milestone] }
+ let(:milestones) { [milestone, closed_milestone] }
end
end
end
- context 'when user has no access to ancestor groups' do
- let(:user) { create(:user) }
-
- before do
- group.add_guest(user)
- end
+ context 'when updated_before param is present' do
+ let(:params) { { updated_before: 1.day.ago.iso8601 } }
it_behaves_like 'listing all milestones' do
- let(:milestones) { [milestone, closed_milestone] }
+ let(:milestones) { [milestone] }
end
end
- end
- context 'when updated_before param is present' do
- let(:params) { { updated_before: 1.day.ago.iso8601 } }
+ context 'when updated_after param is present' do
+ let(:params) { { updated_after: 1.day.ago.iso8601 } }
- it_behaves_like 'listing all milestones' do
- let(:milestones) { [milestone] }
+ it_behaves_like 'listing all milestones' do
+ let(:milestones) { [closed_milestone] }
+ end
end
end
- context 'when updated_after param is present' do
- let(:params) { { updated_after: 1.day.ago.iso8601 } }
+ context 'for common GraphQL/REST' do
+ it_behaves_like 'group milestones including ancestors and descendants'
+
+ def query_group_milestone_ids(params)
+ get api(route, current_user), params: params
- it_behaves_like 'listing all milestones' do
- let(:milestones) { [closed_milestone] }
+ json_response.pluck('id')
end
end
end
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index 9a42b11dc76..f555f39ff74 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -5,8 +5,7 @@ require 'spec_helper'
RSpec.describe API::ImportGithub, feature_category: :importers do
let(:token) { "asdasd12345" }
let(:provider) { :github }
- let(:access_params) { { github_access_token: token, additional_access_tokens: additional_access_tokens } }
- let(:additional_access_tokens) { nil }
+ let(:access_params) { { github_access_token: token } }
let(:provider_username) { user.username }
let(:provider_user) { double('provider', login: provider_username).as_null_object }
let(:provider_repo) do
@@ -134,28 +133,6 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
expect(response).to have_gitlab_http_status(:bad_request)
end
end
-
- context 'when additional access tokens are provided' do
- let(:additional_access_tokens) { 'token1,token2' }
-
- it 'returns 201' do
- expected_access_params = { github_access_token: token, additional_access_tokens: %w[token1 token2] }
-
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new)
- .with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **expected_access_params)
- .and_return(double(execute: project))
-
- post api("/import/github", user), params: {
- target_namespace: user.namespace_path,
- personal_access_token: token,
- repo_id: non_existing_record_id,
- additional_access_tokens: 'token1,token2'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- end
- end
end
describe "POST /import/github/cancel" do
diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb
index d8ac9d5abf7..4696be07045 100644
--- a/spec/requests/api/integrations_spec.rb
+++ b/spec/requests/api/integrations_spec.rb
@@ -46,10 +46,9 @@ RSpec.describe API::Integrations, feature_category: :integrations do
where(:integration) do
# The Project Integrations API supports all integrations except:
# - The GitLab Slack Application integration, as it must be installed via the UI.
- # - Shimo and ZenTao integrations, as new integrations are blocked from being created.
+ # - ZenTao integration, as new integration is blocked from being created.
unavailable_integration_names = [
Integrations::GitlabSlackApplication.to_param,
- Integrations::Shimo.to_param,
Integrations::Zentao.to_param
]
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 551ed0babf1..5ef041881b9 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -27,18 +27,6 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
-
- context 'kubernetes_agent_internal_api feature flag disabled' do
- before do
- stub_feature_flags(kubernetes_agent_internal_api: false)
- end
-
- it 'returns 404' do
- send_request
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
end
shared_examples 'agent authentication' do
@@ -134,15 +122,17 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
k8s_api_proxy_requests_via_user_access: 44,
k8s_api_proxy_requests_via_pat_access: 45
}
+ users = create_list(:user, 3)
+ user_ids = users.map(&:id) << users[0].id
unique_counters = {
- agent_users_using_ci_tunnel: [10, 999, 777, 10],
- k8s_api_proxy_requests_unique_users_via_ci_access: [10, 999, 777, 10],
- k8s_api_proxy_requests_unique_agents_via_ci_access: [10, 999, 777, 10],
- k8s_api_proxy_requests_unique_users_via_user_access: [10, 999, 777, 10],
- k8s_api_proxy_requests_unique_agents_via_user_access: [10, 999, 777, 10],
- k8s_api_proxy_requests_unique_users_via_pat_access: [10, 999, 777, 10],
- k8s_api_proxy_requests_unique_agents_via_pat_access: [10, 999, 777, 10],
- flux_git_push_notified_unique_projects: [10, 999, 777, 10]
+ agent_users_using_ci_tunnel: user_ids,
+ k8s_api_proxy_requests_unique_users_via_ci_access: user_ids,
+ k8s_api_proxy_requests_unique_agents_via_ci_access: user_ids,
+ k8s_api_proxy_requests_unique_users_via_user_access: user_ids,
+ k8s_api_proxy_requests_unique_agents_via_user_access: user_ids,
+ k8s_api_proxy_requests_unique_users_via_pat_access: user_ids,
+ k8s_api_proxy_requests_unique_agents_via_pat_access: user_ids,
+ flux_git_push_notified_unique_projects: user_ids
}
expected_counters = {
kubernetes_agent_gitops_sync: request_count * counters[:gitops_sync],
@@ -172,6 +162,87 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
end
end
+ describe 'POST /internal/kubernetes/agent_events', :clean_gitlab_redis_shared_state do
+ def send_request(headers: {}, params: {})
+ post api('/internal/kubernetes/agent_events'), params: params, headers: headers.reverse_merge(jwt_auth_headers)
+ end
+
+ include_examples 'authorization'
+ include_examples 'error handling'
+
+ context 'is authenticated for an agent' do
+ let!(:agent_token) { create(:cluster_agent_token) }
+
+ context 'when events are valid' do
+ let(:request_count) { 2 }
+ let(:users) { create_list(:user, 3).index_by(&:id) }
+ let(:projects) { create_list(:project, 3).index_by(&:id) }
+ let(:events) do
+ user_ids = users.keys
+ project_ids = projects.keys
+ event_data = Array.new(3) do |i|
+ { user_id: user_ids[i], project_id: project_ids[i] }
+ end
+ {
+ k8s_api_proxy_requests_unique_users_via_ci_access: event_data,
+ k8s_api_proxy_requests_unique_users_via_user_access: event_data,
+ k8s_api_proxy_requests_unique_users_via_pat_access: event_data
+ }
+ end
+
+ it 'tracks events and returns no_content', :aggregate_failures do
+ events.each do |event_name, event_list|
+ event_list.each do |event|
+ expect(Gitlab::InternalEvents).to receive(:track_event)
+ .with(event_name.to_s, user: users[event[:user_id]], project: projects[event[:project_id]])
+ .exactly(request_count).times
+ end
+ end
+
+ request_count.times do
+ send_request(params: { events: events })
+ end
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when events are empty' do
+ let(:events) do
+ {
+ k8s_api_proxy_requests_unique_users_via_ci_access: [],
+ k8s_api_proxy_requests_unique_users_via_user_access: [],
+ k8s_api_proxy_requests_unique_users_via_pat_access: []
+ }
+ end
+
+ it 'returns no_content for empty events' do
+ expect(Gitlab::InternalEvents).not_to receive(:track_event)
+ send_request(params: { events: events })
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when events have non-integer values' do
+ let(:events) do
+ {
+ k8s_api_proxy_requests_unique_users_via_ci_access: [
+ { user_id: 'string', project_id: 111 }
+ ]
+ }
+ end
+
+ it 'returns 400 for non-integer values' do
+ expect(Gitlab::InternalEvents).not_to receive(:track_event)
+ send_request(params: { events: events })
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+ end
+
describe 'POST /internal/kubernetes/agent_configuration' do
def send_request(headers: {}, params: {})
post api('/internal/kubernetes/agent_configuration'), params: params, headers: headers.reverse_merge(jwt_auth_headers)
@@ -254,8 +325,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
'agent_name' => agent.name,
'gitaly_info' => a_hash_including(
'address' => match(/\.socket$/),
- 'token' => 'secret',
- 'features' => Feature::Gitaly.server_feature_flags
+ 'token' => 'secret'
),
'gitaly_repository' => a_hash_including(
'storage_name' => project.repository_storage,
@@ -297,8 +367,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
'project_id' => project.id,
'gitaly_info' => a_hash_including(
'address' => match(/\.socket$/),
- 'token' => 'secret',
- 'features' => Feature::Gitaly.server_feature_flags
+ 'token' => 'secret'
),
'gitaly_repository' => a_hash_including(
'storage_name' => project.repository_storage,
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index ed71089c5a9..44d5d61ffd2 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
let_it_be(:milestone) { create(:milestone, title: '1.0.0', project: project) }
let_it_be(:empty_milestone) { create(:milestone, title: '2.0.0', project: project) }
- let_it_be(:task) { create(:issue, :task, author: user, project: project) }
+ let_it_be(:objective) { create(:issue, :objective, author: user, project: project) }
let_it_be(:closed_issue) do
create :closed_issue,
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 578a4821b5e..2110e4a077d 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
project.add_developer(user)
end
- shared_examples 'handling groups and subgroups for' do |shared_example_name, visibilities: { public: :redirect }|
+ shared_examples 'handling groups and subgroups for' do |shared_example_name, shared_example_args = {}, visibilities: { public: :redirect }|
context 'within a group' do
visibilities.each do |visibility, not_found_response|
context "that is #{visibility}" do
@@ -51,7 +51,7 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
group.update!(visibility_level: Gitlab::VisibilityLevel.level_value(visibility.to_s))
end
- it_behaves_like shared_example_name, not_found_response
+ it_behaves_like shared_example_name, not_found_response, shared_example_args
end
end
end
@@ -70,7 +70,7 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
group.update!(visibility_level: Gitlab::VisibilityLevel.level_value(visibility.to_s))
end
- it_behaves_like shared_example_name, not_found_response
+ it_behaves_like shared_example_name, not_found_response, shared_example_args
end
end
end
@@ -621,7 +621,15 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
it_behaves_like 'rejecting request with invalid params'
- it_behaves_like 'handling groups and subgroups for', 'getting a file for a group', visibilities: { internal: :unauthorized, public: :redirect }
+ it_behaves_like 'handling groups and subgroups for', 'getting a file for a group', visibilities: { internal: :unauthorized, public: :unauthorized }
+
+ context 'when the FF maven_remove_permissions_check_from_finder disabled' do
+ before do
+ stub_feature_flags(maven_remove_permissions_check_from_finder: false)
+ end
+
+ it_behaves_like 'handling groups and subgroups for', 'getting a file for a group', visibilities: { internal: :unauthorized, public: :redirect }
+ end
end
context 'private project' do
@@ -631,7 +639,7 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
subject { download_file_with_token(file_name: package_file.file_name) }
- shared_examples 'getting a file for a group' do |not_found_response|
+ shared_examples 'getting a file for a group' do |not_found_response, download_denied_status: :forbidden|
it_behaves_like 'tracking the file download event'
it_behaves_like 'bumping the package last downloaded at field'
it_behaves_like 'successfully returning the file'
@@ -641,7 +649,7 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
subject
- expect(response).to have_gitlab_http_status(:redirect)
+ expect(response).to have_gitlab_http_status(download_denied_status)
end
it 'denies download when no private token' do
@@ -682,7 +690,43 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
end
end
- it_behaves_like 'handling groups and subgroups for', 'getting a file for a group', visibilities: { private: :unauthorized, internal: :unauthorized, public: :redirect }
+ context 'with the duplicate packages in the two projects' do
+ let_it_be(:recent_project) { create(:project, :private, namespace: group) }
+
+ let!(:package_dup) { create(:maven_package, project: recent_project, name: package.name, version: package.version) }
+
+ before do
+ group.add_guest(user)
+ project.add_developer(user)
+ end
+
+ context 'when user does not have enough permission for the recent project' do
+ it 'tries to download the recent package' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when the FF maven_remove_permissions_check_from_finder disabled' do
+ before do
+ stub_feature_flags(maven_remove_permissions_check_from_finder: false)
+ end
+
+ it_behaves_like 'bumping the package last downloaded at field'
+ it_behaves_like 'successfully returning the file'
+ end
+ end
+
+ it_behaves_like 'handling groups and subgroups for', 'getting a file for a group', visibilities: { private: :unauthorized, internal: :unauthorized, public: :unauthorized }
+
+ context 'when the FF maven_remove_permissions_check_from_finder disabled' do
+ before do
+ stub_feature_flags(maven_remove_permissions_check_from_finder: false)
+ end
+
+ it_behaves_like 'handling groups and subgroups for', 'getting a file for a group', { download_denied_status: :redirect }, visibilities: { private: :unauthorized, internal: :unauthorized, public: :redirect }
+ end
context 'with a reporter from a subgroup accessing the root group' do
let_it_be(:root_group) { create(:group, :private) }
diff --git a/spec/requests/api/ml/mlflow/model_versions_spec.rb b/spec/requests/api/ml/mlflow/model_versions_spec.rb
index f59888ec70f..e62bccf1507 100644
--- a/spec/requests/api/ml/mlflow/model_versions_spec.rb
+++ b/spec/requests/api/ml/mlflow/model_versions_spec.rb
@@ -35,9 +35,9 @@ RSpec.describe API::Ml::Mlflow::ModelVersions, feature_category: :mlops do
response
end
- describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/model_versions/get' do
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/model-versions/get' do
let(:route) do
- "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/model_versions/get?name=#{name}&version=#{version}"
+ "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/model-versions/get?name=#{name}&version=#{version}"
end
it 'returns the model version', :aggregate_failures do
@@ -51,7 +51,7 @@ RSpec.describe API::Ml::Mlflow::ModelVersions, feature_category: :mlops do
context 'when has access' do
context 'and model name in incorrect' do
let(:route) do
- "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/model_versions/get?name=--&version=#{version}"
+ "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/model-versions/get?name=--&version=#{version}"
end
it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
@@ -59,7 +59,7 @@ RSpec.describe API::Ml::Mlflow::ModelVersions, feature_category: :mlops do
context 'and version in incorrect' do
let(:route) do
- "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/model_versions/get?name=#{name}&version=--"
+ "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/model-versions/get?name=#{name}&version=--"
end
it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
@@ -79,8 +79,95 @@ RSpec.describe API::Ml::Mlflow::ModelVersions, feature_category: :mlops do
end
end
- it_behaves_like 'MLflow|shared model registry error cases'
- it_behaves_like 'MLflow|Requires read_api scope'
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read-only model registry resource'
+ end
+ end
+
+ describe 'UPDATE /projects/:id/ml/mlflow/api/2.0/mlflow/model-versions/update' do
+ let(:params) { { name: name, version: version, description: 'description-text' } }
+ let(:request) { patch api(route), params: params, headers: headers }
+
+ let(:route) do
+ "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/model-versions/update"
+ end
+
+ it 'returns the model version', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:ok)
+ expect(json_response['model_version']).not_to be_nil
+ expect(json_response['model_version']['name']).to eq(name)
+ expect(json_response['model_version']['version']).to eq(version)
+ end
+
+ describe 'Error States' do
+ context 'when has access' do
+ context 'and model name in incorrect' do
+ let(:params) { { name: 'invalid-name', version: version, description: 'description-text' } }
+
+ it 'throws error 400' do
+ is_expected.to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'and version in incorrect' do
+ let(:params) { { name: name, version: 'invalid-version', description: 'description-text' } }
+
+ it 'throws error 400' do
+ is_expected.to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when user lacks write_model_registry rights' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(current_user, :write_model_registry, project)
+ .and_return(false)
+ end
+
+ it "is Not Found" do
+ is_expected.to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read/write model registry resource'
+ end
+ end
+
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/model_versions/create' do
+ let(:model_name) { model.name }
+ let(:route) do
+ "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/model-versions/create"
+ end
+
+ let(:params) { { name: model_name, description: 'description-text' } }
+ let(:request) { post api(route), params: params, headers: headers }
+
+ it 'returns the model', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:ok)
+ is_expected.to match_response_schema('ml/get_model_version')
+ end
+
+ it 'increments the version if a model version already exists' do
+ create(:ml_model_versions, model: model, version: '1.0.0')
+
+ is_expected.to have_gitlab_http_status(:ok)
+ expect(json_response["model_version"]["version"]).to eq('2.0.0')
+ end
+
+ describe 'Error States' do
+ context 'when has access' do
+ context 'and model does not exist' do
+ let(:model_name) { 'foo' }
+
+ it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
+ end
+ end
+
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read/write model registry resource'
end
end
end
diff --git a/spec/requests/api/ml/mlflow/registered_models_spec.rb b/spec/requests/api/ml/mlflow/registered_models_spec.rb
index cd8b0a53ef3..09cf765b0b3 100644
--- a/spec/requests/api/ml/mlflow/registered_models_spec.rb
+++ b/spec/requests/api/ml/mlflow/registered_models_spec.rb
@@ -56,8 +56,8 @@ RSpec.describe API::Ml::Mlflow::RegisteredModels, feature_category: :mlops do
end
end
- it_behaves_like 'MLflow|shared model registry error cases'
- it_behaves_like 'MLflow|Requires read_api scope'
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read-only model registry resource'
end
end
@@ -78,7 +78,7 @@ RSpec.describe API::Ml::Mlflow::RegisteredModels, feature_category: :mlops do
context 'when the model name is not passed' do
let(:params) { {} }
- it_behaves_like 'MLflow|Bad Request'
+ it_behaves_like 'MLflow|an invalid request'
end
context 'when the model name already exists' do
@@ -127,8 +127,8 @@ RSpec.describe API::Ml::Mlflow::RegisteredModels, feature_category: :mlops do
end
end
- it_behaves_like 'MLflow|shared model registry error cases'
- it_behaves_like 'MLflow|Requires api scope and write permission'
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read/write model registry resource'
end
end
@@ -160,8 +160,8 @@ RSpec.describe API::Ml::Mlflow::RegisteredModels, feature_category: :mlops do
end
end
- it_behaves_like 'MLflow|shared model registry error cases'
- it_behaves_like 'MLflow|Requires api scope and write permission'
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read/write model registry resource'
end
end
@@ -196,8 +196,88 @@ RSpec.describe API::Ml::Mlflow::RegisteredModels, feature_category: :mlops do
end
end
- it_behaves_like 'MLflow|shared model registry error cases'
- it_behaves_like 'MLflow|Requires read_api scope'
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read-only model registry resource'
+ end
+ end
+
+ describe 'DELETE /projects/:id/ml/mlflow/api/2.0/mlflow/registered-models/delete' do
+ let(:model_name) { model.name }
+ let(:params) { { name: model_name } }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/delete" }
+ let(:request) { delete api(route), params: params, headers: headers }
+
+ it 'returns a success response', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({})
+ end
+
+ describe 'Error States' do
+ context 'when destroy fails' do
+ it 'returns an error' do
+ allow(Ml::DestroyModelService).to receive_message_chain(:new, :execute).and_return(false)
+
+ is_expected.to have_gitlab_http_status(:bad_request)
+ expect(json_response["message"]).to eq("Model could not be deleted")
+ end
+ end
+
+ context 'when has access' do
+ context 'and model does not exist' do
+ let(:model_name) { 'foo' }
+
+ it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
+ end
+
+ context 'and name is not passed' do
+ let(:params) { {} }
+
+ it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
+ end
+ end
+
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read/write model registry resource'
+ end
+ end
+
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/registered-models/search' do
+ let_it_be(:model2) do
+ create(:ml_models, :with_metadata, project: project)
+ end
+
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search" }
+
+ it 'returns all the models', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:ok)
+ is_expected.to match_response_schema('ml/list_models')
+ expect(json_response["registered_models"].count).to be(2)
+ end
+
+ context "with a valid filter supplied" do
+ let(:filter) { "name='#{model2.name}'" }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search?filter=#{filter}" }
+
+ it 'returns only the models for the given filter' do
+ is_expected.to have_gitlab_http_status(:ok)
+ expect(json_response["registered_models"].count).to be(1)
+ end
+ end
+
+ context "with an invalid filter supplied" do
+ let(:filter) { "description='foo'" }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search?filter=#{filter}" }
+
+ it 'returns an error' do
+ is_expected.to have_gitlab_http_status(:bad_request)
+
+ expect(json_response).to include({ 'error_code' => 'INVALID_PARAMETER_VALUE' })
+ end
+ end
+
+ describe 'Error States' do
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read-only model registry resource'
end
end
end
diff --git a/spec/requests/api/ml_model_packages_spec.rb b/spec/requests/api/ml_model_packages_spec.rb
index 3166298b430..894127cac78 100644
--- a/spec/requests/api/ml_model_packages_spec.rb
+++ b/spec/requests/api/ml_model_packages_spec.rb
@@ -16,6 +16,8 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
let_it_be(:another_project, reload: true) { create(:project) }
+ let_it_be(:model) { create(:ml_models, user: project.owner, project: project) }
+ let_it_be(:model_version) { create(:ml_model_versions, :with_package, model: model, version: '0.1.0') }
let_it_be(:tokens) do
{
@@ -70,10 +72,6 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
:private | :guest | false | :job_token | true | :not_found
:private | :developer | false | :job_token | false | :unauthorized
:private | :guest | false | :job_token | false | :unauthorized
- :public | :developer | true | :deploy_token | true | :success
- :public | :developer | true | :deploy_token | false | :unauthorized
- :private | :developer | true | :deploy_token | true | :success
- :private | :developer | true | :deploy_token | false | :unauthorized
end
# :visibility, :user_role, :member, :token_type, :valid_token, :expected_status
@@ -112,10 +110,6 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
:private | :guest | false | :job_token | true | :not_found
:private | :developer | false | :job_token | false | :unauthorized
:private | :guest | false | :job_token | false | :unauthorized
- :public | :developer | true | :deploy_token | true | :success
- :public | :developer | true | :deploy_token | false | :unauthorized
- :private | :developer | true | :deploy_token | true | :success
- :private | :developer | true | :deploy_token | false | :unauthorized
end
# rubocop:enable Metrics/AbcSize
end
@@ -128,14 +122,15 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
include_context 'ml model authorize permissions table'
let(:token) { tokens[:personal_access_token] }
- let(:user_headers) { { 'HTTP_AUTHORIZATION' => token } }
+ let(:user_headers) { { 'Authorization' => "Bearer #{token}" } }
let(:headers) { user_headers.merge(workhorse_headers) }
let(:request) { authorize_upload_file(headers) }
- let(:model_name) { 'my_package' }
+ let(:model_name) { model_version.name }
+ let(:version) { model_version.version }
let(:file_name) { 'myfile.tar.gz' }
subject(:api_response) do
- url = "/projects/#{project.id}/packages/ml_models/#{model_name}/0.0.1/#{file_name}/authorize"
+ url = "/projects/#{project.id}/packages/ml_models/#{model_name}/#{version}/#{file_name}/authorize"
put api(url), headers: headers
@@ -149,7 +144,7 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
with_them do
let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
- let(:user_headers) { user_role == :anonymous ? {} : { 'HTTP_AUTHORIZATION' => token } }
+ let(:user_headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
before do
project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility.to_s))
@@ -183,15 +178,16 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
let_it_be(:file_name) { 'model.md5' }
let(:token) { tokens[:personal_access_token] }
- let(:user_headers) { { 'HTTP_AUTHORIZATION' => token } }
+ let(:user_headers) { { 'Authorization' => "Bearer #{token}" } }
let(:headers) { user_headers.merge(workhorse_headers) }
let(:params) { { file: temp_file(file_name) } }
let(:file_key) { :file }
let(:send_rewritten_field) { true }
- let(:model_name) { 'my_package' }
+ let(:model_name) { model_version.name }
+ let(:version) { model_version.version }
subject(:api_response) do
- url = "/projects/#{project.id}/packages/ml_models/#{model_name}/0.0.1/#{file_name}"
+ url = "/projects/#{project.id}/packages/ml_models/#{model_name}/#{version}/#{file_name}"
workhorse_finalize(
api(url),
@@ -219,7 +215,7 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
with_them do
let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
- let(:user_headers) { user_role == :anonymous ? {} : { 'HTTP_AUTHORIZATION' => token } }
+ let(:user_headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
before do
project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility.to_s))
@@ -233,25 +229,27 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
end
it_behaves_like 'Endpoint not found if read_model_registry not available'
+ it_behaves_like 'Endpoint not found if write_model_registry not available'
+ it_behaves_like 'Not found when model version does not exist'
end
end
describe 'GET /api/v4/projects/:project_id/packages/ml_models/:model_name/:model_version/:file_name' do
include_context 'ml model authorize permissions table'
- let_it_be(:package) { create(:ml_model_package, project: project, name: 'model', version: '0.0.1') }
+ let_it_be(:package) { model_version.package }
let_it_be(:package_file) { create(:package_file, :generic, package: package, file_name: 'model.md5') }
- let(:model_name) { package.name }
- let(:model_version) { package.version }
+ let(:model_name) { model_version.name }
+ let(:version) { model_version.version }
let(:file_name) { package_file.file_name }
let(:token) { tokens[:personal_access_token] }
- let(:user_headers) { { 'HTTP_AUTHORIZATION' => token } }
+ let(:user_headers) { { 'Authorization' => "Bearer #{token}" } }
let(:headers) { user_headers.merge(workhorse_headers) }
subject(:api_response) do
- url = "/projects/#{project.id}/packages/ml_models/#{model_name}/#{model_version}/#{file_name}"
+ url = "/projects/#{project.id}/packages/ml_models/#{model_name}/#{version}/#{file_name}"
get api(url), headers: headers
@@ -265,7 +263,7 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
with_them do
let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
- let(:user_headers) { user_role == :anonymous ? {} : { 'HTTP_AUTHORIZATION' => token } }
+ let(:user_headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
before do
project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility.to_s))
diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb
index b5f38698857..17cb5cf893e 100644
--- a/spec/requests/api/npm_project_packages_spec.rb
+++ b/spec/requests/api/npm_project_packages_spec.rb
@@ -54,14 +54,6 @@ RSpec.describe API::NpmProjectPackages, feature_category: :package_registry do
it_behaves_like 'does not enqueue a worker to sync a metadata cache'
- context 'when npm_metadata_cache disabled' do
- before do
- stub_feature_flags(npm_metadata_cache: false)
- end
-
- it_behaves_like 'generates metadata response "on-the-fly"'
- end
-
context 'when metadata cache file does not exist' do
before do
FileUtils.rm_rf(npm_metadata_cache.file.path)
diff --git a/spec/requests/api/nuget_group_packages_spec.rb b/spec/requests/api/nuget_group_packages_spec.rb
index 92eb869b871..4a763b3bbda 100644
--- a/spec/requests/api/nuget_group_packages_spec.rb
+++ b/spec/requests/api/nuget_group_packages_spec.rb
@@ -188,6 +188,14 @@ RSpec.describe API::NugetGroupPackages, feature_category: :package_registry do
end
end
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/token/*token/symbolfiles/*file_name/*signature/*file_name' do
+ it_behaves_like 'nuget symbol file endpoint' do
+ let(:url) do
+ "/groups/#{target.id}/-/packages/nuget/symbolfiles/#{filename}/#{signature}/#{filename}"
+ end
+ end
+ end
+
def update_visibility_to(visibility)
project.update!(visibility_level: visibility)
subgroup.update!(visibility_level: visibility)
diff --git a/spec/requests/api/nuget_project_packages_spec.rb b/spec/requests/api/nuget_project_packages_spec.rb
index a116be84b3e..8252fc1c4cd 100644
--- a/spec/requests/api/nuget_project_packages_spec.rb
+++ b/spec/requests/api/nuget_project_packages_spec.rb
@@ -419,6 +419,12 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
end
end
+ describe 'GET /api/v4/projects/:id/packages/nuget/symbolfiles/*file_name/*signature/*file_name' do
+ it_behaves_like 'nuget symbol file endpoint' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/symbolfiles/#{filename}/#{signature}/#{filename}" }
+ end
+ end
+
def update_visibility_to(visibility)
project.update!(visibility_level: visibility)
end
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 165ea7bf66e..bab5bd2b6ac 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -151,6 +151,7 @@ project_setting:
- legacy_open_source_license_available
- prevent_merge_without_jira_issue
- only_allow_merge_if_all_status_checks_passed
+ - allow_merge_without_pipeline
- warn_about_potentially_unwanted_characters
- previous_default_branch
- project_id
@@ -195,5 +196,6 @@ build_service_desk_setting: # service_desk_setting
- encrypted_custom_email_smtp_password_iv
- custom_email_smtp_password
- add_external_participants_from_cc
+ - reopen_issue_on_external_participant_note
remapped_attributes:
project_key: service_desk_address
diff --git a/spec/requests/api/project_events_spec.rb b/spec/requests/api/project_events_spec.rb
index f904cd8fd6c..52a6093c4c8 100644
--- a/spec/requests/api/project_events_spec.rb
+++ b/spec/requests/api/project_events_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe API::ProjectEvents, feature_category: :user_profile do
- let(:user) { create(:user) }
- let(:non_member) { create(:user) }
- let(:private_project) { create(:project, :private, creator_id: user.id, namespace: user.namespace) }
- let(:closed_issue) { create(:closed_issue, project: private_project, author: user) }
- let!(:closed_issue_event) { create(:event, project: private_project, author: user, target: closed_issue, action: :closed, created_at: Date.new(2016, 12, 30)) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:private_project) { create(:project, :private, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:closed_issue) { create(:closed_issue, project: private_project, author: user) }
+ let_it_be(:closed_issue_event) { create(:closed_issue_event, project: private_project, author: user, target: closed_issue, created_at: Date.new(2016, 12, 30)) }
describe 'GET /projects/:id/events' do
context 'when unauthenticated ' do
@@ -27,11 +27,11 @@ RSpec.describe API::ProjectEvents, feature_category: :user_profile do
end
context 'with inaccessible events' do
- let(:public_project) { create(:project, :public, creator_id: user.id, namespace: user.namespace) }
- let(:confidential_issue) { create(:closed_issue, confidential: true, project: public_project, author: user) }
- let!(:confidential_event) { create(:event, project: public_project, author: user, target: confidential_issue, action: :closed) }
- let(:public_issue) { create(:closed_issue, project: public_project, author: user) }
- let!(:public_event) { create(:event, project: public_project, author: user, target: public_issue, action: :closed) }
+ let_it_be(:public_project) { create(:project, :public, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:confidential_issue) { create(:closed_issue, :confidential, project: public_project, author: user) }
+ let_it_be(:confidential_event) { create(:closed_issue_event, project: public_project, author: user, target: confidential_issue) }
+ let_it_be(:public_issue) { create(:closed_issue, project: public_project, author: user) }
+ let_it_be(:public_event) { create(:closed_issue_event, project: public_project, author: user, target: public_issue) }
it 'returns only accessible events' do
get api("/projects/#{public_project.id}/events", non_member)
@@ -124,23 +124,34 @@ RSpec.describe API::ProjectEvents, feature_category: :user_profile do
end
context 'when exists some events' do
- let(:merge_request1) { create(:merge_request, :closed, author: user, assignees: [user], source_project: private_project, title: 'Test') }
- let(:merge_request2) { create(:merge_request, :closed, author: user, assignees: [user], source_project: private_project, title: 'Test') }
+ let_it_be(:merge_request1) { create(:closed_merge_request, author: user, assignees: [user], source_project: private_project) }
+ let_it_be(:merge_request2) { create(:closed_merge_request, author: user, assignees: [user], source_project: private_project) }
+
+ let_it_be(:token) { create(:personal_access_token, user: user) }
before do
create_event(merge_request1)
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
- get api("/projects/#{private_project.id}/events", user), params: { target_type: :merge_request }
- end.count
+ # Warmup, e.g. users#last_activity_on
+ get api("/projects/#{private_project.id}/events", personal_access_token: token), params: { target_type: :merge_request }
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get api("/projects/#{private_project.id}/events", personal_access_token: token), params: { target_type: :merge_request }
+ end
create_event(merge_request2)
expect do
- get api("/projects/#{private_project.id}/events", user), params: { target_type: :merge_request }
- end.not_to exceed_all_query_limit(control_count)
+ get api("/projects/#{private_project.id}/events", personal_access_token: token), params: { target_type: :merge_request }
+ end.to issue_same_number_of_queries_as(control).with_threshold(1)
+ # The extra threshold is because we need to fetch `project` for the 2nd
+ # event. This is because in `app/policies/issuable_policy.rb`, we fetch
+ # the `project` for the `target` for the `event`. It is non-trivial to
+ # re-use the original `project` object from `lib/api/project_events.rb`
+ #
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/432823
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index 22729e068da..6d5591d7500 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -688,6 +688,8 @@ RSpec.describe API::ProjectExport, :aggregate_failures, :clean_gitlab_redis_cach
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('relation')).to contain_exactly('labels', 'milestones', 'project_badges')
expect(json_response.pluck('status')).to contain_exactly(-1, 0, 1)
+ expect(json_response.pluck('batched')).to all(eq(false))
+ expect(json_response.pluck('batches_count')).to all(eq(0))
end
context 'when relation is specified' do
@@ -699,6 +701,36 @@ RSpec.describe API::ProjectExport, :aggregate_failures, :clean_gitlab_redis_cach
expect(json_response['status']).to eq(0)
end
end
+
+ context 'when there is a batched export' do
+ let_it_be(:batched_export) do
+ create(:bulk_import_export, :started, :batched, project: project, relation: 'issues', batches_count: 1)
+ end
+
+ let_it_be(:batch) { create(:bulk_import_export_batch, objects_count: 5, export: batched_export) }
+
+ it 'returns a list of batched relation export statuses' do
+ get api(status_path, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to include(
+ hash_including(
+ 'relation' => batched_export.relation,
+ 'batched' => true,
+ 'batches_count' => 1,
+ 'batches' => contain_exactly(
+ {
+ 'batch_number' => 1,
+ 'error' => nil,
+ 'objects_count' => batch.objects_count,
+ 'status' => batch.status,
+ 'updated_at' => batch.updated_at.as_json
+ }
+ )
+ )
+ )
+ end
+ end
end
context 'with bulk_import is disabled' do
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index 978ac28ef73..e4b579b96cc 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -40,11 +40,27 @@ RSpec.describe API::ProjectMilestones, feature_category: :team_planning do
it_behaves_like 'listing all milestones'
- context 'when include_parent_milestones is true' do
+ context 'when include_ancestors is true' do
+ let(:params) { { include_ancestors: true } }
+
+ it_behaves_like 'listing all milestones'
+ end
+
+ context 'when deprecated include_parent_milestones is true' do
let(:params) { { include_parent_milestones: true } }
it_behaves_like 'listing all milestones'
end
+
+ context 'when both include_parent_milestones and include_ancestors are specified' do
+ let(:params) { { include_ancestors: true, include_parent_milestones: true } }
+
+ it 'returns 400' do
+ get api(route, user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
context 'when project parent is a group' do
@@ -52,14 +68,14 @@ RSpec.describe API::ProjectMilestones, feature_category: :team_planning do
project.update!(namespace: group)
end
- context 'when include_parent_milestones is true' do
- let(:params) { { include_parent_milestones: true } }
+ context 'when include_ancestors is true' do
+ let(:params) { { include_ancestors: true } }
let(:milestones) { [group_milestone, ancestor_group_milestone, milestone, closed_milestone] }
it_behaves_like 'listing all milestones'
context 'when iids param is present' do
- let(:params) { { include_parent_milestones: true, iids: [group_milestone.iid] } }
+ let(:params) { { include_ancestors: true, iids: [group_milestone.iid] } }
it_behaves_like 'listing all milestones'
end
@@ -75,7 +91,7 @@ RSpec.describe API::ProjectMilestones, feature_category: :team_planning do
end
context 'when updated_before param is present' do
- let(:params) { { updated_before: 12.hours.ago.iso8601, include_parent_milestones: true } }
+ let(:params) { { updated_before: 12.hours.ago.iso8601, include_ancestors: true } }
it_behaves_like 'listing all milestones' do
let(:milestones) { [group_milestone, ancestor_group_milestone, milestone] }
@@ -83,7 +99,7 @@ RSpec.describe API::ProjectMilestones, feature_category: :team_planning do
end
context 'when updated_after param is present' do
- let(:params) { { updated_after: 2.days.ago.iso8601, include_parent_milestones: true } }
+ let(:params) { { updated_after: 2.days.ago.iso8601, include_ancestors: true } }
it_behaves_like 'listing all milestones' do
let(:milestones) { [ancestor_group_milestone, closed_milestone] }
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index 1987d70633b..920fbe5f174 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe API::ProjectTemplates, feature_category: :source_code_management
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/template_list')
- expect(json_response.map { |t| t['key'] }).to match_array(%w[bug feature_proposal template_test])
+ expect(json_response.map { |t| t['key'] }).to match_array(%w[bug feature_proposal template_test (test)])
end
it 'returns merge request templates' do
@@ -78,7 +78,7 @@ RSpec.describe API::ProjectTemplates, feature_category: :source_code_management
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/template_list')
- expect(json_response.map { |t| t['key'] }).to match_array(%w[bug feature_proposal template_test])
+ expect(json_response.map { |t| t['key'] }).to match_array(%w[bug feature_proposal template_test (test)])
end
it 'returns 400 for an unknown template type' do
@@ -171,6 +171,17 @@ RSpec.describe API::ProjectTemplates, feature_category: :source_code_management
expect(json_response['content']).to eq('something valid')
end
+ context 'when issue template uses parentheses' do
+ it 'returns a specific issue template' do
+ get api("/projects/#{private_project.id}/templates/issues/(test)", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/template')
+ expect(json_response['name']).to eq('(test)')
+ expect(json_response['content']).to eq('parentheses')
+ end
+ end
+
it 'returns a specific merge request template' do
get api("/projects/#{public_project.id}/templates/merge_requests/feature_proposal")
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index e9319d514aa..b8e029385e3 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1313,6 +1313,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
attrs[:merge_requests_access_level] = 'disabled'
attrs[:issues_access_level] = 'disabled'
attrs[:model_experiments_access_level] = 'disabled'
+ attrs[:model_registry_access_level] = 'disabled'
end
post api(path, user), params: project
@@ -1323,7 +1324,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
next if %i[
has_external_issue_tracker has_external_wiki issues_enabled merge_requests_enabled wiki_enabled storage_version
container_registry_access_level releases_access_level environments_access_level feature_flags_access_level
- infrastructure_access_level monitor_access_level model_experiments_access_level
+ infrastructure_access_level monitor_access_level model_experiments_access_level model_registry_access_level
].include?(k)
expect(json_response[k.to_s]).to eq(v)
@@ -2852,16 +2853,6 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(response).to have_gitlab_http_status(:not_found)
end
end
-
- context 'when api_redirect_moved_projects is disabled' do
- it 'returns a 404 error' do
- stub_feature_flags(api_redirect_moved_projects: false)
-
- perform_request
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
end
it 'returns a 404 error if not found' do
@@ -3667,11 +3658,15 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(json_response['error']).to eq 'group_access does not have a valid value'
end
- it "returns a 400 error when the project-group share is created with an OWNER access level" do
- post api(path, user), params: { group_id: group.id, group_access: Gitlab::Access::OWNER }
+ it 'returns a 403 when a maintainer tries to create a link with OWNER access' do
+ user = create(:user)
+ project.add_maintainer(user)
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq 'group_access does not have a valid value'
+ expect do
+ post api(path, user), params: { group_id: group.id, group_access: Gitlab::Access::OWNER }
+ end.to not_change { project.reload.project_group_links.count }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "returns a 409 error when link is not saved" do
@@ -3700,11 +3695,12 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
context 'for a valid group' do
let_it_be(:group) { create(:group, :private) }
let_it_be(:group_user) { create(:user) }
+ let(:group_access) { Gitlab::Access::DEVELOPER }
before do
group.add_developer(group_user)
- create(:project_group_link, group: group, project: project)
+ create(:project_group_link, group: group, project: project, group_access: group_access)
end
it 'returns 204 when deleting a group share' do
@@ -3735,6 +3731,21 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq '404 Not Found'
end
+
+ context 'when a MAINTAINER tries to destroy a link with OWNER access' do
+ let(:group_access) { Gitlab::Access::OWNER }
+
+ it 'returns 403' do
+ user = create(:user)
+ project.add_maintainer(user)
+
+ expect do
+ delete api("/projects/#{project.id}/share/#{group.id}", user)
+ end.to not_change { project.reload.project_group_links.count }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
end
it 'returns a 400 when group id is not an integer' do
@@ -3940,7 +3951,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(Project.find_by(path: project[:path]).analytics_access_level).to eq(ProjectFeature::PRIVATE)
end
- %i[releases_access_level environments_access_level feature_flags_access_level infrastructure_access_level monitor_access_level model_experiments_access_level].each do |field|
+ %i[releases_access_level environments_access_level feature_flags_access_level infrastructure_access_level monitor_access_level model_experiments_access_level model_registry_access_level].each do |field|
it "sets #{field}" do
put api(path, user), params: { field => 'private' }
@@ -4465,7 +4476,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
end
it 'returns 200 when repository storage has changed' do
- stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
+ stub_storage_settings('test_second_storage' => {})
expect do
Sidekiq::Testing.fake! do
diff --git a/spec/requests/api/remote_mirrors_spec.rb b/spec/requests/api/remote_mirrors_spec.rb
index 3da1760e319..7d37d73a361 100644
--- a/spec/requests/api/remote_mirrors_spec.rb
+++ b/spec/requests/api/remote_mirrors_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe API::RemoteMirrors, feature_category: :source_code_management do
let(:route) { "/projects/#{project.id}/remote_mirrors" }
shared_examples 'creates a remote mirror' do
- it 'creates a remote mirror and returns reponse' do
+ it 'creates a remote mirror and returns response' do
project.add_maintainer(user)
post api(route, user), params: params
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 5656fda7684..4e24689c17a 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -8,6 +8,12 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
let_it_be(:admin) { create(:admin) }
describe "GET /application/settings" do
+ before do
+ # Testing config file config/gitlab.yml becomes SSOT for this API
+ # see https://gitlab.com/gitlab-org/gitlab/-/issues/426091#note_1675160909
+ stub_storage_settings({ 'default' => {}, 'custom' => {} })
+ end
+
it "returns application settings" do
get api("/application/settings", admin)
@@ -15,7 +21,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response).to be_an Hash
expect(json_response['default_projects_limit']).to eq(42)
expect(json_response['password_authentication_enabled_for_web']).to be_truthy
- expect(json_response['repository_storages_weighted']).to eq({ 'default' => 100 })
+ expect(json_response['repository_storages_weighted']).to eq({ 'default' => 100, 'custom' => 0 })
expect(json_response['password_authentication_enabled']).to be_truthy
expect(json_response['plantuml_enabled']).to be_falsey
expect(json_response['plantuml_url']).to be_nil
@@ -87,6 +93,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['default_branch_protection_defaults']).to be_kind_of(Hash)
expect(json_response['max_login_attempts']).to be_nil
expect(json_response['failed_login_attempts_unlock_period_in_minutes']).to be_nil
+ expect(json_response['bulk_import_concurrent_pipeline_batch_limit']).to eq(25)
end
end
@@ -109,7 +116,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
}
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['repository_storages_weighted']).to eq({ 'custom' => 75 })
+ expect(json_response['repository_storages_weighted']).to eq({ 'default' => 0, 'custom' => 75 })
end
context "repository_storages_weighted value is outside a 0-100 range" do
@@ -131,7 +138,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
default_projects_limit: 3,
default_project_creation: 2,
password_authentication_enabled_for_web: false,
- repository_storages_weighted: { 'custom' => 100 },
+ repository_storages_weighted: { 'default' => 100, 'custom' => 0 },
plantuml_enabled: true,
plantuml_url: 'http://plantuml.example.com',
diagramsnet_enabled: false,
@@ -196,6 +203,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
jira_connect_proxy_url: 'http://example.com',
bulk_import_enabled: false,
bulk_import_max_download_file_size: 1,
+ bulk_import_concurrent_pipeline_batch_limit: 2,
allow_runner_registration_token: true,
user_defaults_to_private_profile: true,
default_syntax_highlighting_theme: 2,
@@ -205,7 +213,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
allow_account_deletion: false,
gitlab_shell_operation_limit: 500,
namespace_aggregation_schedule_lease_duration_in_seconds: 400,
- max_import_remote_file_size: 2
+ max_import_remote_file_size: 2,
+ security_txt_content: nil
}
expect(response).to have_gitlab_http_status(:ok)
@@ -213,7 +222,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['default_projects_limit']).to eq(3)
expect(json_response['default_project_creation']).to eq(::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS)
expect(json_response['password_authentication_enabled_for_web']).to be_falsey
- expect(json_response['repository_storages_weighted']).to eq({ 'custom' => 100 })
+ expect(json_response['repository_storages_weighted']).to eq({ 'default' => 100, 'custom' => 0 })
expect(json_response['plantuml_enabled']).to be_truthy
expect(json_response['plantuml_url']).to eq('http://plantuml.example.com')
expect(json_response['diagramsnet_enabled']).to be_falsey
@@ -288,6 +297,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['namespace_aggregation_schedule_lease_duration_in_seconds']).to be(400)
expect(json_response['max_import_remote_file_size']).to be(2)
expect(json_response['bulk_import_max_download_file_size']).to be(1)
+ expect(json_response['security_txt_content']).to be(nil)
+ expect(json_response['bulk_import_concurrent_pipeline_batch_limit']).to be(2)
end
end
@@ -1062,5 +1073,19 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['failed_login_attempts_unlock_period_in_minutes']).to eq(30)
end
end
+
+ context 'security txt settings' do
+ let(:content) { "Contact: foo@acme.com" }
+
+ it 'updates the settings' do
+ put(
+ api("/application/settings", admin),
+ params: { security_txt_content: content }
+ )
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['security_txt_content']).to eq(content)
+ end
+ end
end
end
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 0b97bb5c443..b43f98e5323 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -115,8 +115,8 @@ RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, featu
public_snippet.id,
public_snippet_other.id)
expect(json_response.map { |snippet| snippet['web_url'] }).to contain_exactly(
- "http://localhost/-/snippets/#{public_snippet.id}",
- "http://localhost/-/snippets/#{public_snippet_other.id}")
+ "http://#{Gitlab.config.gitlab.host}/-/snippets/#{public_snippet.id}",
+ "http://#{Gitlab.config.gitlab.host}/-/snippets/#{public_snippet_other.id}")
expect(json_response[0]['files'].first).to eq snippet_blob_file(public_snippet_other.blobs.first)
expect(json_response[1]['files'].first).to eq snippet_blob_file(public_snippet.blobs.first)
end
diff --git a/spec/requests/api/terraform/modules/v1/packages_spec.rb b/spec/requests/api/terraform/modules/v1/packages_spec.rb
index f479ca25f3c..949acdb17e1 100644
--- a/spec/requests/api/terraform/modules/v1/packages_spec.rb
+++ b/spec/requests/api/terraform/modules/v1/packages_spec.rb
@@ -3,31 +3,9 @@
require 'spec_helper'
RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package_registry do
- include PackagesManagerApiSpecHelpers
- include WorkhorseHelpers
+ include_context 'for terraform modules api setup'
using RSpec::Parameterized::TableSyntax
- let_it_be_with_reload(:group) { create(:group) }
- let_it_be_with_reload(:project) { create(:project, namespace: group) }
- let_it_be(:package) { create(:terraform_module_package, project: project) }
- let_it_be(:personal_access_token) { create(:personal_access_token) }
- let_it_be(:user) { personal_access_token.user }
- let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
- let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
- let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
-
- let(:headers) { {} }
- let(:token) { tokens[token_type] }
-
- let(:tokens) do
- {
- personal_access_token: personal_access_token.token,
- deploy_token: deploy_token.token,
- job_token: job.token,
- invalid: 'invalid-token123'
- }
- end
-
describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/versions' do
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/versions") }
let(:headers) { { 'Authorization' => "Bearer #{tokens[:job_token]}" } }
@@ -456,198 +434,4 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
end
end
end
-
- describe 'PUT /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version/file/authorize' do
- include_context 'workhorse headers'
-
- let(:url) { api("/projects/#{project.id}/packages/terraform/modules/mymodule/mysystem/1.0.0/file/authorize") }
- let(:headers) { {} }
-
- subject { put(url, headers: headers) }
-
- context 'with valid project' do
- where(:visibility, :user_role, :member, :token_header, :token_type, :shared_examples_name, :expected_status) do
- :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module workhorse authorization' | :success
- :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
- :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
- :public | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module workhorse authorization' | :success
- :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
- :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
- :private | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module workhorse authorization' | :success
- :public | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
- :public | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
- :public | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module workhorse authorization' | :success
- :private | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
- :private | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
- :private | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module workhorse authorization' | :success
- :public | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module workhorse authorization' | :success
- :private | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- end
-
- with_them do
- let(:headers) { user_headers.merge(workhorse_headers) }
- let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
-
- before do
- project.update!(visibility: visibility.to_s)
- end
-
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
- end
- end
-
- describe 'PUT /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version/file' do
- include_context 'workhorse headers'
-
- let_it_be(:file_name) { 'module-system-v1.0.0.tgz' }
-
- let(:url) { "/projects/#{project.id}/packages/terraform/modules/mymodule/mysystem/1.0.0/file" }
- let(:headers) { {} }
- let(:params) { { file: temp_file(file_name) } }
- let(:file_key) { :file }
- let(:send_rewritten_field) { true }
-
- subject do
- workhorse_finalize(
- api(url),
- method: :put,
- file_key: file_key,
- params: params,
- headers: headers,
- send_rewritten_field: send_rewritten_field
- )
- end
-
- context 'with valid project' do
- where(:visibility, :user_role, :member, :token_header, :token_type, :shared_examples_name, :expected_status) do
- :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module upload' | :created
- :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
- :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
- :public | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module upload' | :created
- :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
- :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
- :private | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module upload' | :created
- :public | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
- :public | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
- :public | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module upload' | :created
- :private | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
- :private | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
- :private | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module upload' | :created
- :public | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module upload' | :created
- :private | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
- end
-
- with_them do
- let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
- let(:headers) { user_headers.merge(workhorse_headers) }
- let(:snowplow_gitlab_standard_context) do
- { project: project, namespace: project.namespace, user: snowplow_user, property: 'i_package_terraform_module_user' }
- end
-
- let(:snowplow_user) do
- case token_type
- when :deploy_token
- deploy_token
- when :job_token
- job.user
- else
- user
- end
- end
-
- before do
- project.update!(visibility: visibility.to_s)
- end
-
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
-
- context 'failed package file save' do
- let(:user_headers) { { 'PRIVATE-TOKEN' => personal_access_token.token } }
- let(:headers) { user_headers.merge(workhorse_headers) }
-
- before do
- project.add_developer(user)
- end
-
- it 'does not create package record', :aggregate_failures do
- allow(Packages::CreatePackageFileService).to receive(:new).and_raise(StandardError)
-
- expect { subject }
- .to change { project.packages.count }.by(0)
- .and change { Packages::PackageFile.count }.by(0)
- expect(response).to have_gitlab_http_status(:error)
- end
-
- context 'with an existing package' do
- let_it_be_with_reload(:existing_package) { create(:terraform_module_package, name: 'mymodule/mysystem', version: '1.0.0', project: project) }
-
- it 'does not create a new package' do
- expect { subject }
- .to change { project.packages.count }.by(0)
- .and change { Packages::PackageFile.count }.by(0)
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- context 'marked as pending_destruction' do
- it 'does create a new package' do
- existing_package.pending_destruction!
-
- expect { subject }
- .to change { project.packages.count }.by(1)
- .and change { Packages::PackageFile.count }.by(1)
- expect(response).to have_gitlab_http_status(:created)
- end
- end
- end
- end
- end
- end
end
diff --git a/spec/requests/api/terraform/modules/v1/project_packages_spec.rb b/spec/requests/api/terraform/modules/v1/project_packages_spec.rb
new file mode 100644
index 00000000000..1f3b2283d59
--- /dev/null
+++ b/spec/requests/api/terraform/modules/v1/project_packages_spec.rb
@@ -0,0 +1,205 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Terraform::Modules::V1::ProjectPackages, feature_category: :package_registry do
+ include_context 'for terraform modules api setup'
+ using RSpec::Parameterized::TableSyntax
+
+ describe 'PUT /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version/file/authorize' do
+ include_context 'workhorse headers'
+
+ let(:url) { api("/projects/#{project.id}/packages/terraform/modules/mymodule/mysystem/1.0.0/file/authorize") }
+ let(:headers) { {} }
+
+ subject { put(url, headers: headers) }
+
+ context 'with valid project' do
+ where(:visibility, :user_role, :member, :token_header, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module workhorse authorization' | :success
+ :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module workhorse authorization' | :success
+ :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module workhorse authorization' | :success
+ :public | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module workhorse authorization' | :success
+ :private | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module workhorse authorization' | :success
+ :public | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module workhorse authorization' | :success
+ :private | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:headers) { user_headers.merge(workhorse_headers) }
+ let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
+
+ before do
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
+ describe 'PUT /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version/file' do
+ include_context 'workhorse headers'
+
+ let_it_be(:file_name) { 'module-system-v1.0.0.tgz' }
+
+ let(:url) { "/projects/#{project.id}/packages/terraform/modules/mymodule/mysystem/1.0.0/file" }
+ let(:headers) { {} }
+ let(:params) { { file: temp_file(file_name) } }
+ let(:file_key) { :file }
+ let(:send_rewritten_field) { true }
+
+ subject(:api_request) do
+ workhorse_finalize(
+ api(url),
+ method: :put,
+ file_key: file_key,
+ params: params,
+ headers: headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+
+ context 'with valid project' do
+ where(:visibility, :user_role, :member, :token_header, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module upload' | :created
+ :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module upload' | :created
+ :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module upload' | :created
+ :public | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module upload' | :created
+ :private | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module upload' | :created
+ :public | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module upload' | :created
+ :private | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+ let(:snowplow_gitlab_standard_context) do
+ { project: project, namespace: project.namespace, user: snowplow_user,
+ property: 'i_package_terraform_module_user' }
+ end
+
+ let(:snowplow_user) do
+ case token_type
+ when :deploy_token
+ deploy_token
+ when :job_token
+ job.user
+ else
+ user
+ end
+ end
+
+ before do
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+
+ context 'when failed package file save' do
+ let(:user_headers) { { 'PRIVATE-TOKEN' => personal_access_token.token } }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'does not create package record', :aggregate_failures do
+ allow(Packages::CreatePackageFileService).to receive(:new).and_raise(StandardError)
+
+ expect { api_request }
+ .to change { project.packages.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(0)
+ expect(response).to have_gitlab_http_status(:error)
+ end
+
+ context 'with an existing package' do
+ let_it_be_with_reload(:existing_package) do
+ create(:terraform_module_package, name: 'mymodule/mysystem', version: '1.0.0', project: project)
+ end
+
+ it 'does not create a new package' do
+ expect { api_request }
+ .to change { project.packages.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(0)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'when marked as pending_destruction' do
+ it 'does create a new package' do
+ existing_package.pending_destruction!
+
+ expect { api_request }
+ .to change { project.packages.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/user_runners_spec.rb b/spec/requests/api/user_runners_spec.rb
index 0e40dcade19..412b2c48f3f 100644
--- a/spec/requests/api/user_runners_spec.rb
+++ b/spec/requests/api/user_runners_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::UserRunners, :aggregate_failures, feature_category: :runner_fleet do
+RSpec.describe API::UserRunners, :aggregate_failures, feature_category: :fleet_visibility do
let_it_be(:admin) { create(:admin) }
let_it_be(:user, reload: true) { create(:user, username: 'user.withdot') }
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index b73ae2d33eb..86c4e04ef71 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -182,6 +182,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
expect(json_response.first).not_to have_key('note')
expect(json_response.first).not_to have_key('namespace_id')
expect(json_response.first).not_to have_key('created_by')
+ expect(json_response.first).not_to have_key('email_reset_offered_at')
end
end
@@ -194,6 +195,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
expect(json_response.first).not_to have_key('note')
expect(json_response.first).not_to have_key('namespace_id')
expect(json_response.first).not_to have_key('created_by')
+ expect(json_response.first).not_to have_key('email_reset_offered_at')
end
end
@@ -203,6 +205,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
expect(response).to have_gitlab_http_status(:success)
expect(json_response.first).to have_key('note')
+ expect(json_response.first).to have_key('email_reset_offered_at')
expect(json_response.first['note']).to eq '2018-11-05 | 2FA removed | user requested | www.gitlab.com'
end
@@ -2966,6 +2969,39 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
end
+ describe "PUT /user/preferences" do
+ let(:path) { '/user/preferences' }
+
+ context "when unauthenticated" do
+ it "returns authentication error" do
+ put api(path)
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context "when authenticated" do
+ it "updates user preferences" do
+ user.user_preference.view_diffs_file_by_file = false
+ user.user_preference.show_whitespace_in_diffs = true
+ user.save!
+
+ put api(path, user), params: {
+ view_diffs_file_by_file: true,
+ show_whitespace_in_diffs: false
+ }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["view_diffs_file_by_file"]).to eq(true)
+ expect(json_response["show_whitespace_in_diffs"]).to eq(false)
+
+ user.reload
+
+ expect(json_response["view_diffs_file_by_file"]).to eq(user.view_diffs_file_by_file)
+ expect(json_response["show_whitespace_in_diffs"]).to eq(user.show_whitespace_in_diffs)
+ end
+ end
+ end
+
describe "GET /user/keys" do
subject(:request) { get api(path, user) }
diff --git a/spec/requests/application_controller_spec.rb b/spec/requests/application_controller_spec.rb
new file mode 100644
index 00000000000..52fdf6bc69e
--- /dev/null
+++ b/spec/requests/application_controller_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ApplicationController, type: :request, feature_category: :shared do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get root_path }
+ end
+end
diff --git a/spec/requests/chaos_controller_spec.rb b/spec/requests/chaos_controller_spec.rb
new file mode 100644
index 00000000000..d2ce618b041
--- /dev/null
+++ b/spec/requests/chaos_controller_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ChaosController, type: :request, feature_category: :tooling do
+ it_behaves_like 'Base action controller' do
+ before do
+ # Stub leak_mem so we don't actually leak memory for the base action controller tests.
+ allow(Gitlab::Chaos).to receive(:leak_mem).with(100, 30.seconds)
+ end
+
+ subject(:request) { get leakmem_chaos_path }
+ end
+end
diff --git a/spec/requests/clusters/agents/dashboard_controller_spec.rb b/spec/requests/clusters/agents/dashboard_controller_spec.rb
index c3c16d9b385..bc7c964d47b 100644
--- a/spec/requests/clusters/agents/dashboard_controller_spec.rb
+++ b/spec/requests/clusters/agents/dashboard_controller_spec.rb
@@ -3,13 +3,35 @@
require 'spec_helper'
RSpec.describe Clusters::Agents::DashboardController, feature_category: :deployment_management do
+ let(:user) { create(:user) }
+ let(:stub_ff) { true }
+
+ describe 'GET index' do
+ before do
+ allow(::Gitlab::Kas).to receive(:enabled?).and_return(true)
+ stub_feature_flags(k8s_dashboard: stub_ff)
+ sign_in(user)
+ get kubernetes_dashboard_index_path
+ end
+
+ it 'returns ok and renders view' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'with k8s_dashboard feature flag disabled' do
+ let(:stub_ff) { false }
+
+ it 'returns not found' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
describe 'GET show' do
let_it_be(:organization) { create(:group) }
let_it_be(:agent_management_project) { create(:project, group: organization) }
let_it_be(:agent) { create(:cluster_agent, project: agent_management_project) }
let_it_be(:deployment_project) { create(:project, group: organization) }
- let(:user) { create(:user) }
- let(:stub_ff) { true }
before do
allow(::Gitlab::Kas).to receive(:enabled?).and_return(true)
@@ -37,7 +59,7 @@ RSpec.describe Clusters::Agents::DashboardController, feature_category: :deploym
).to be_present
end
- it 'returns not found' do
+ it 'returns ok' do
expect(response).to have_gitlab_http_status(:ok)
end
diff --git a/spec/requests/concerns/membership_actions_shared_examples.rb b/spec/requests/concerns/membership_actions_shared_examples.rb
new file mode 100644
index 00000000000..6e0b0d5c0a3
--- /dev/null
+++ b/spec/requests/concerns/membership_actions_shared_examples.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'request_accessable' do
+ context 'when not signed in' do
+ it 'redirects to sign in page' do
+ request
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ end
+
+ it 'redirects back to group members page and displays the relevant notice' do
+ request
+
+ expect(response).to redirect_to(membershipable_path)
+ expect(flash[:notice]).to eq(_('Your request for access has been queued for review.'))
+ end
+
+ context 'when something goes wrong' do
+ before do
+ group_member = build(:group_member)
+ request_access_service = instance_double(Members::RequestAccessService)
+ allow(Members::RequestAccessService).to receive(:new).and_return(request_access_service)
+ allow(request_access_service).to receive(:execute).and_return(group_member)
+ allow(group_member).to receive_message_chain(:errors, :full_messages, :to_sentence).and_return('Error')
+ end
+
+ it 'redirects back to group members page and displays the relevant notice' do
+ request
+
+ expect(response).to redirect_to(membershipable_path)
+ expect(flash[:alert]).to eq(_('Your request for access could not be processed: Error'))
+ end
+ end
+
+ context 'when already a member' do
+ before do
+ membershipable.add_developer(user)
+ end
+
+ it 'redirects back to group members page and displays the relevant notice' do
+ request
+
+ expect(response).to redirect_to(membershipable_path)
+ expect(flash[:notice]).to eq(_('You already have access.'))
+ end
+ end
+
+ context 'when a pending access request exists' do
+ before do
+ membershipable.request_access(user)
+ end
+
+ it 'redirects back to group members page and displays the relevant notice' do
+ request
+
+ expect(response).to redirect_to(membershipable_path)
+ expect(flash[:notice]).to eq(_('You have already requested access.'))
+ end
+ end
+ end
+end
diff --git a/spec/requests/content_security_policy_spec.rb b/spec/requests/content_security_policy_spec.rb
deleted file mode 100644
index 3ce7e33d88a..00000000000
--- a/spec/requests/content_security_policy_spec.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# The AnonymousController doesn't support setting the CSP
-# This is why an arbitrary test request was chosen instead
-# of testing in application_controller_spec.
-RSpec.describe 'Content Security Policy', feature_category: :application_instrumentation do
- let(:snowplow_host) { 'snowplow.example.com' }
- let(:vite_origin) { "#{ViteRuby.instance.config.host}:#{ViteRuby.instance.config.port}" }
-
- shared_examples 'snowplow is not in the CSP' do
- it 'does not add the snowplow collector hostname to the CSP' do
- get explore_root_url
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers['Content-Security-Policy']).not_to include(snowplow_host)
- end
- end
-
- describe 'GET #explore' do
- context 'snowplow is enabled' do
- before do
- stub_application_setting(snowplow_enabled: true, snowplow_collector_hostname: snowplow_host)
- end
-
- it 'adds the snowplow collector hostname to the CSP' do
- get explore_root_url
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers['Content-Security-Policy']).to include(snowplow_host)
- end
- end
-
- context 'snowplow is enabled but host is not configured' do
- before do
- stub_application_setting(snowplow_enabled: true)
- end
-
- it_behaves_like 'snowplow is not in the CSP'
- end
-
- context 'snowplow is disabled' do
- before do
- stub_application_setting(snowplow_enabled: false, snowplow_collector_hostname: snowplow_host)
- end
-
- it_behaves_like 'snowplow is not in the CSP'
- end
-
- context 'when vite enabled during development',
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424334' do
- before do
- stub_rails_env('development')
- stub_feature_flags(vite: true)
-
- get explore_root_url
- end
-
- it 'adds vite csp' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers['Content-Security-Policy']).to include(vite_origin)
- end
- end
-
- context 'when vite disabled' do
- before do
- stub_feature_flags(vite: false)
-
- get explore_root_url
- end
-
- it "doesn't add vite csp" do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers['Content-Security-Policy']).not_to include(vite_origin)
- end
- end
- end
-end
diff --git a/spec/requests/explore/catalog_controller_spec.rb b/spec/requests/explore/catalog_controller_spec.rb
index 50a2240e040..e75b0bba5a6 100644
--- a/spec/requests/explore/catalog_controller_spec.rb
+++ b/spec/requests/explore/catalog_controller_spec.rb
@@ -3,8 +3,16 @@
require 'spec_helper'
RSpec.describe Explore::CatalogController, feature_category: :pipeline_composition do
+ let_it_be(:namespace) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: namespace) }
+ let_it_be(:catalog_resource) { create(:ci_catalog_resource, :published, project: project) }
+
let_it_be(:user) { create(:user) }
+ before_all do
+ catalog_resource.project.add_reporter(user)
+ end
+
before do
sign_in(user)
end
@@ -14,40 +22,48 @@ RSpec.describe Explore::CatalogController, feature_category: :pipeline_compositi
if action == :index
explore_catalog_index_path
else
- explore_catalog_path(id: 1)
+ explore_catalog_path(catalog_resource)
end
end
- context 'with FF `global_ci_catalog`' do
- before do
- stub_feature_flags(global_ci_catalog: true)
- end
-
- it 'responds with 200' do
- get path
+ it 'responds with 200' do
+ get path
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
+ end
- context 'without FF `global_ci_catalog`' do
- before do
- stub_feature_flags(global_ci_catalog: false)
- end
+ describe 'GET #show' do
+ it_behaves_like 'basic get requests', :show
- it 'responds with 404' do
- get path
+ context 'when rendering a draft catalog resource' do
+ it 'returns not found error' do
+ draft_catalog_resource = create(:ci_catalog_resource, state: :draft)
+
+ get explore_catalog_path(draft_catalog_resource)
expect(response).to have_gitlab_http_status(:not_found)
end
end
- end
- describe 'GET #show' do
- it_behaves_like 'basic get requests', :show
+ context 'when rendering a published catalog resource' do
+ it 'returns success response' do
+ get explore_catalog_path(catalog_resource)
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
end
describe 'GET #index' do
+ let(:subject) { get explore_catalog_index_path }
+
it_behaves_like 'basic get requests', :index
+
+ it_behaves_like 'internal event tracking' do
+ let(:namespace) { user.namespace }
+ let(:project) { nil }
+ let(:event) { 'unique_users_visiting_ci_catalog' }
+ end
end
end
diff --git a/spec/requests/external_redirect/external_redirect_controller_spec.rb b/spec/requests/external_redirect/external_redirect_controller_spec.rb
index 1b4294f5c4d..881acbd97ac 100644
--- a/spec/requests/external_redirect/external_redirect_controller_spec.rb
+++ b/spec/requests/external_redirect/external_redirect_controller_spec.rb
@@ -45,7 +45,10 @@ RSpec.describe "ExternalRedirect::ExternalRedirectController requests", feature_
[
["when url is bad", "url=javascript:alert(1)"],
["when url is empty", "url="],
- ["when url param is missing", ""]
+ ["when url param is missing", ""],
+ ["when url points to self", "url=http://www.example.com/-/external_redirect?url=#{external_url_encoded}"],
+ ["when url points to self encoded",
+ "url=http%3A%2F%2Fwww.example.com/-/external_redirect?url=#{external_url_encoded}"]
]
end
diff --git a/spec/requests/groups/group_members_controller_spec.rb b/spec/requests/groups/group_members_controller_spec.rb
new file mode 100644
index 00000000000..2147090ef51
--- /dev/null
+++ b/spec/requests/groups/group_members_controller_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_relative '../concerns/membership_actions_shared_examples'
+
+RSpec.describe Groups::GroupMembersController, feature_category: :groups_and_projects do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:membershipable) { create(:group, :public) }
+
+ let(:membershipable_path) { group_path(membershipable) }
+
+ describe 'GET /groups/*group_id/-/group_members/request_access' do
+ subject(:request) do
+ get request_access_group_group_members_path(group_id: membershipable)
+ end
+
+ it_behaves_like 'request_accessable'
+ end
+end
diff --git a/spec/requests/health_controller_spec.rb b/spec/requests/health_controller_spec.rb
index 639f6194af9..5fb2115aac3 100644
--- a/spec/requests/health_controller_spec.rb
+++ b/spec/requests/health_controller_spec.rb
@@ -73,7 +73,9 @@ RSpec.describe HealthController, feature_category: :database do
end
describe 'GET /-/readiness' do
- subject { get '/-/readiness', params: params, headers: headers }
+ subject(:request) { get readiness_path, params: params, headers: headers }
+
+ it_behaves_like 'Base action controller'
shared_context 'endpoint responding with readiness data' do
context 'when requesting instance-checks' do
@@ -219,7 +221,6 @@ RSpec.describe HealthController, feature_category: :database do
stub_remote_addr(whitelisted_ip)
end
- it_behaves_like 'endpoint not querying database'
it_behaves_like 'endpoint responding with readiness data'
context 'when requesting all checks' do
@@ -236,7 +237,6 @@ RSpec.describe HealthController, feature_category: :database do
stub_remote_addr(not_whitelisted_ip)
end
- it_behaves_like 'endpoint not querying database'
it_behaves_like 'endpoint not found'
end
@@ -273,7 +273,6 @@ RSpec.describe HealthController, feature_category: :database do
stub_remote_addr(whitelisted_ip)
end
- it_behaves_like 'endpoint not querying database'
it_behaves_like 'endpoint responding with liveness data'
end
@@ -282,7 +281,6 @@ RSpec.describe HealthController, feature_category: :database do
stub_remote_addr(not_whitelisted_ip)
end
- it_behaves_like 'endpoint not querying database'
it_behaves_like 'endpoint not found'
context 'accessed with valid token' do
diff --git a/spec/requests/ide_controller_spec.rb b/spec/requests/ide_controller_spec.rb
index 4131f1d26ec..20d890fadbf 100644
--- a/spec/requests/ide_controller_spec.rb
+++ b/spec/requests/ide_controller_spec.rb
@@ -17,8 +17,6 @@ RSpec.describe IdeController, feature_category: :web_ide do
let_it_be(:creator) { project.creator }
let_it_be(:other_user) { create(:user) }
- let_it_be(:top_nav_partial) { 'layouts/header/_default' }
-
let(:user) { creator }
before do
@@ -156,28 +154,70 @@ RSpec.describe IdeController, feature_category: :web_ide do
end
end
- # This indirectly tests that `minimal: true` was passed to the fullscreen layout
- describe 'layout' do
- where(:ff_state, :expect_top_nav) do
- false | true
- true | false
+ describe 'legacy Web IDE' do
+ before do
+ stub_feature_flags(vscode_web_ide: false)
end
- with_them do
- before do
- stub_feature_flags(vscode_web_ide: ff_state)
+ it 'uses application layout' do
+ subject
- subject
- end
+ expect(response).to render_template('layouts/application')
+ end
- it 'handles rendering top nav' do
- if expect_top_nav
- expect(response).to render_template(top_nav_partial)
- else
- expect(response).not_to render_template(top_nav_partial)
- end
- end
+ it 'does not create oauth application' do
+ expect(Doorkeeper::Application).not_to receive(:new)
+
+ subject
+
+ expect(web_ide_oauth_application).to be_nil
+ end
+ end
+
+ describe 'vscode IDE' do
+ before do
+ stub_feature_flags(vscode_web_ide: true)
+ end
+
+ it 'uses fullscreen layout' do
+ subject
+
+ expect(response).to render_template('layouts/fullscreen')
+ end
+ end
+
+ describe 'with web_ide_oauth flag off' do
+ before do
+ stub_feature_flags(web_ide_oauth: false)
end
+
+ it 'does not create oauth application' do
+ expect(Doorkeeper::Application).not_to receive(:new)
+
+ subject
+
+ expect(web_ide_oauth_application).to be_nil
+ end
+ end
+
+ it 'ensures web_ide_oauth_application' do
+ expect(Doorkeeper::Application).to receive(:new).and_call_original
+
+ subject
+
+ expect(web_ide_oauth_application).not_to be_nil
+ expect(web_ide_oauth_application[:name]).to eq('GitLab Web IDE')
+ end
+
+ it 'when web_ide_oauth_application already exists, does not create new one' do
+ existing_app = create(:oauth_application, owner_id: nil, owner_type: nil)
+
+ stub_application_setting({ web_ide_oauth_application: existing_app })
+ expect(Doorkeeper::Application).not_to receive(:new)
+
+ subject
+
+ expect(web_ide_oauth_application).to eq(existing_app)
end
end
@@ -201,4 +241,48 @@ RSpec.describe IdeController, feature_category: :web_ide do
end
end
end
+
+ describe '#oauth_redirect', :aggregate_failures do
+ subject(:oauth_redirect) { get '/-/ide/oauth_redirect' }
+
+ it 'with no web_ide_oauth_application, returns not_found' do
+ oauth_redirect
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ context 'with web_ide_oauth_application set' do
+ before do
+ stub_application_setting({
+ web_ide_oauth_application: create(:oauth_application, owner_id: nil, owner_type: nil)
+ })
+ end
+
+ it 'returns ok and renders view' do
+ oauth_redirect
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'with vscode_web_ide flag off, returns not_found' do
+ stub_feature_flags(vscode_web_ide: false)
+
+ oauth_redirect
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'with web_ide_oauth flag off, returns not_found' do
+ stub_feature_flags(web_ide_oauth: false)
+
+ oauth_redirect
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ def web_ide_oauth_application
+ ::Gitlab::CurrentSettings.current_application_settings.web_ide_oauth_application
+ end
end
diff --git a/spec/requests/jwks_controller_spec.rb b/spec/requests/jwks_controller_spec.rb
index f756c1758e4..3dc3ed68311 100644
--- a/spec/requests/jwks_controller_spec.rb
+++ b/spec/requests/jwks_controller_spec.rb
@@ -55,5 +55,26 @@ RSpec.describe JwksController, feature_category: :system_access do
end
end
end
+
+ it 'has cache control header' do
+ get jwks_url
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Cache-Control']).to include('max-age=86400', 'public', 'must-revalidate', 'no-transform')
+ end
+
+ context 'when cache_control_headers_for_openid_jwks feature flag is disabled' do
+ before do
+ stub_feature_flags(cache_control_headers_for_openid_jwks: false)
+ end
+
+ it 'does not have cache control header' do
+ get jwks_url
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Cache-Control']).not_to include('max-age=86400', 'public',
+ 'no-transform')
+ end
+ end
end
end
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 966cc2d6d4e..956c0e06cda 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe JwtController, feature_category: :system_access do
context 'project with enabled CI' do
subject! { get '/jwt/auth', params: parameters, headers: headers }
- it { expect(service_class).to have_received(:new).with(project, user, ActionController::Parameters.new(parameters.merge(auth_type: :build)).permit!) }
+ it { expect(service_class).to have_received(:new).with(project, user, ActionController::Parameters.new(parameters.merge(auth_type: :build, raw_token: build.token)).permit!) }
it_behaves_like 'user logging'
end
@@ -119,7 +119,7 @@ RSpec.describe JwtController, feature_category: :system_access do
.with(
nil,
nil,
- ActionController::Parameters.new(parameters.merge(deploy_token: deploy_token, auth_type: :deploy_token)).permit!
+ ActionController::Parameters.new(parameters.merge(deploy_token: deploy_token, auth_type: :deploy_token, raw_token: deploy_token.token)).permit!
)
end
@@ -144,7 +144,7 @@ RSpec.describe JwtController, feature_category: :system_access do
.with(
nil,
user,
- ActionController::Parameters.new(parameters.merge(auth_type: :personal_access_token)).permit!
+ ActionController::Parameters.new(parameters.merge(auth_type: :personal_access_token, raw_token: pat.token)).permit!
)
end
@@ -160,7 +160,7 @@ RSpec.describe JwtController, feature_category: :system_access do
subject! { get '/jwt/auth', params: parameters, headers: headers }
- it { expect(service_class).to have_received(:new).with(nil, user, ActionController::Parameters.new(parameters.merge(auth_type: :gitlab_or_ldap)).permit!) }
+ it { expect(service_class).to have_received(:new).with(nil, user, ActionController::Parameters.new(parameters.merge(auth_type: :gitlab_or_ldap, raw_token: user.password)).permit!) }
it_behaves_like 'rejecting a blocked user'
@@ -180,7 +180,7 @@ RSpec.describe JwtController, feature_category: :system_access do
ActionController::Parameters.new({ service: service_name, scopes: %w[scope1 scope2] }).permit!
end
- it { expect(service_class).to have_received(:new).with(nil, user, service_parameters.merge(auth_type: :gitlab_or_ldap)) }
+ it { expect(service_class).to have_received(:new).with(nil, user, service_parameters.merge(auth_type: :gitlab_or_ldap, raw_token: user.password)) }
it_behaves_like 'user logging'
end
@@ -197,7 +197,7 @@ RSpec.describe JwtController, feature_category: :system_access do
ActionController::Parameters.new({ service: service_name, scopes: %w[scope1 scope2] }).permit!
end
- it { expect(service_class).to have_received(:new).with(nil, user, service_parameters.merge(auth_type: :gitlab_or_ldap)) }
+ it { expect(service_class).to have_received(:new).with(nil, user, service_parameters.merge(auth_type: :gitlab_or_ldap, raw_token: user.password)) }
end
context 'when user has 2FA enabled' do
@@ -274,6 +274,8 @@ RSpec.describe JwtController, feature_category: :system_access do
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :private, group: group) }
+ let_it_be(:bot_user) { create(:user, :project_bot) }
+ let_it_be(:group_access_token) { create(:personal_access_token, :dependency_proxy_scopes, user: bot_user) }
let_it_be(:group_deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
let_it_be(:gdeploy_token) { create(:group_deploy_token, deploy_token: group_deploy_token, group: group) }
let_it_be(:project_deploy_token) { create(:deploy_token, :project, :dependency_proxy_scopes) }
@@ -313,6 +315,48 @@ RSpec.describe JwtController, feature_category: :system_access do
it_behaves_like 'with valid credentials'
end
+ context 'with group access token' do
+ let(:credential_user) { group_access_token.user.username }
+ let(:credential_password) { group_access_token.token }
+
+ context 'with the required scopes' do
+ it_behaves_like 'with valid credentials'
+ it_behaves_like 'a token that expires today'
+
+ context 'revoked' do
+ before do
+ group_access_token.update!(revoked: true)
+ end
+
+ it_behaves_like 'returning response status', :unauthorized
+ end
+
+ context 'expired' do
+ before do
+ group_access_token.update!(expires_at: Date.yesterday)
+ end
+
+ it_behaves_like 'returning response status', :unauthorized
+ end
+ end
+
+ context 'without the required scopes' do
+ before do
+ group_access_token.update!(scopes: [::Gitlab::Auth::READ_REPOSITORY_SCOPE])
+ end
+
+ it_behaves_like 'returning response status', :forbidden
+
+ context 'packages_dependency_proxy_containers_scope_check disabled' do
+ before do
+ stub_feature_flags(packages_dependency_proxy_containers_scope_check: false)
+ end
+
+ it_behaves_like 'with valid credentials'
+ end
+ end
+ end
+
context 'with group deploy token' do
let(:credential_user) { group_deploy_token.username }
let(:credential_password) { group_deploy_token.token }
diff --git a/spec/requests/legacy_routes_spec.rb b/spec/requests/legacy_routes_spec.rb
new file mode 100644
index 00000000000..537ad4054a1
--- /dev/null
+++ b/spec/requests/legacy_routes_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "Legacy routes", type: :request, feature_category: :system_access do
+ let(:user) { create(:user) }
+ let(:token) { create(:personal_access_token, user: user) }
+
+ before do
+ login_as(user)
+ end
+
+ it "/-/profile/audit_log" do
+ get "/-/profile/audit_log"
+ expect(response).to redirect_to('/-/user_settings/authentication_log')
+ end
+
+ it "/-/profile/active_sessions" do
+ get "/-/profile/active_sessions"
+ expect(response).to redirect_to('/-/user_settings/active_sessions')
+ end
+
+ it "/-/profile/personal_access_tokens" do
+ get "/-/profile/personal_access_tokens"
+ expect(response).to redirect_to('/-/user_settings/personal_access_tokens')
+
+ get "/-/profile/personal_access_tokens?name=GitLab+Dangerbot&scopes=api"
+ expect(response).to redirect_to('/-/user_settings/personal_access_tokens?name=GitLab+Dangerbot&scopes=api')
+ end
+
+ it "/-/profile/personal_access_tokens/:id/revoke" do
+ put "/-/profile/personal_access_tokens/#{token.id}/revoke"
+ expect(token.reload).to be_revoked
+ end
+
+ it "/-/profile/applications" do
+ get "/-/profile/applications"
+ expect(response).to redirect_to('/-/user_settings/applications')
+ end
+
+ it "/-/profile/password/new" do
+ get "/-/profile/password/new"
+ expect(response).to redirect_to('/-/user_settings/password/new')
+
+ get "/-/profile/password/new?abc=xyz"
+ expect(response).to redirect_to('/-/user_settings/password/new?abc=xyz')
+ end
+
+ it "/-/profile/password/edit" do
+ get "/-/profile/password/edit"
+ expect(response).to redirect_to('/-/user_settings/password/edit')
+
+ get "/-/profile/password/edit?abc=xyz"
+ expect(response).to redirect_to('/-/user_settings/password/edit?abc=xyz')
+ end
+end
diff --git a/spec/requests/metrics_controller_spec.rb b/spec/requests/metrics_controller_spec.rb
new file mode 100644
index 00000000000..ce96906e020
--- /dev/null
+++ b/spec/requests/metrics_controller_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MetricsController, type: :request, feature_category: :metrics do
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get metrics_path }
+ end
+end
diff --git a/spec/requests/oauth/authorizations_controller_spec.rb b/spec/requests/oauth/authorizations_controller_spec.rb
index 257f238d9ef..7887bf52542 100644
--- a/spec/requests/oauth/authorizations_controller_spec.rb
+++ b/spec/requests/oauth/authorizations_controller_spec.rb
@@ -20,6 +20,10 @@ RSpec.describe Oauth::AuthorizationsController, feature_category: :system_access
end
describe 'GET #new' do
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get oauth_authorization_path }
+ end
+
context 'when application redirect URI has a custom scheme' do
context 'when CSP is disabled' do
before do
diff --git a/spec/requests/organizations/organizations_controller_spec.rb b/spec/requests/organizations/organizations_controller_spec.rb
index 4bf527f49a8..bfd0603eb3d 100644
--- a/spec/requests/organizations/organizations_controller_spec.rb
+++ b/spec/requests/organizations/organizations_controller_spec.rb
@@ -11,13 +11,6 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
sign_in(user)
end
- context 'with no association to an organization' do
- let_it_be(:user) { create(:user) }
-
- it_behaves_like 'organization - successful response'
- it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
- end
-
context 'as as admin', :enable_admin_mode do
let_it_be(:user) { create(:admin) }
@@ -54,6 +47,40 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
it_behaves_like 'when the user is signed in'
end
+ shared_examples 'controller action that requires authentication by an organization user' do
+ it_behaves_like 'controller action that requires authentication'
+
+ context 'when the user is signed in' do
+ before do
+ sign_in(user)
+ end
+
+ context 'with no association to an organization' do
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'organization - not found response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
+ end
+ end
+ end
+
+ shared_examples 'controller action that requires authentication by any user' do
+ it_behaves_like 'controller action that requires authentication'
+
+ context 'when the user is signed in' do
+ before do
+ sign_in(user)
+ end
+
+ context 'with no association to an organization' do
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'organization - successful response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
+ end
+ end
+ end
+
shared_examples 'controller action that does not require authentication' do
context 'when the user is not logged in' do
it_behaves_like 'organization - successful response'
@@ -78,18 +105,18 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
describe 'GET #users' do
subject(:gitlab_request) { get users_organization_path(organization) }
- it_behaves_like 'controller action that does not require authentication'
+ it_behaves_like 'controller action that requires authentication by an organization user'
end
describe 'GET #new' do
subject(:gitlab_request) { get new_organization_path }
- it_behaves_like 'controller action that requires authentication'
+ it_behaves_like 'controller action that requires authentication by any user'
end
describe 'GET #index' do
subject(:gitlab_request) { get organizations_path }
- it_behaves_like 'controller action that requires authentication'
+ it_behaves_like 'controller action that requires authentication by any user'
end
end
diff --git a/spec/requests/organizations/settings_controller_spec.rb b/spec/requests/organizations/settings_controller_spec.rb
index 77048b04b0c..1d98e598159 100644
--- a/spec/requests/organizations/settings_controller_spec.rb
+++ b/spec/requests/organizations/settings_controller_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Organizations::SettingsController, feature_category: :cell do
create :organization_user, organization: organization, user: user
end
- it_behaves_like 'organization - not found response'
+ it_behaves_like 'organization - successful response'
it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
end
end
diff --git a/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb b/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb
new file mode 100644
index 00000000000..d571999feb0
--- /dev/null
+++ b/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Gcp::ArtifactRegistry::DockerImagesController, feature_category: :container_registry do
+ let_it_be(:project) { create(:project, :private) }
+
+ let(:user) { project.owner }
+ let(:gcp_project_id) { 'gcp_project_id' }
+ let(:gcp_location) { 'gcp_location' }
+ let(:gcp_ar_repository) { 'gcp_ar_repository' }
+ let(:gcp_wlif_url) { 'gcp_wlif_url' }
+
+ describe '#index' do
+ let(:service_response) { ServiceResponse.success(payload: dummy_client_payload) }
+ let(:service_double) do
+ instance_double('Integrations::GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService')
+ end
+
+ subject(:get_index_page) do
+ get(
+ project_gcp_artifact_registry_docker_images_path(
+ project,
+ gcp_project_id: gcp_project_id,
+ gcp_location: gcp_location,
+ gcp_ar_repository: gcp_ar_repository,
+ gcp_wlif_url: gcp_wlif_url
+ )
+ )
+ end
+
+ before do
+ allow_next_instance_of(Integrations::GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService) do |service|
+ allow(service).to receive(:execute).and_return(service_response)
+ end
+ end
+
+ shared_examples 'returning the error message' do |message|
+ it 'displays an error message' do
+ sign_in(user)
+
+ get_index_page
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response.body).to include(message)
+ end
+ end
+
+ context 'when on saas', :saas do
+ it 'returns the images' do
+ sign_in(user)
+
+ get_index_page
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response.body).to include('image@sha256:6a')
+ expect(response.body).to include('tag1')
+ expect(response.body).to include('tag2')
+ expect(response.body).to include('Prev')
+ expect(response.body).to include('Next')
+ end
+
+ context 'when the service returns an error response' do
+ let(:service_response) { ServiceResponse.error(message: 'boom') }
+
+ it_behaves_like 'returning the error message', 'boom'
+ end
+
+ %i[gcp_project_id gcp_location gcp_ar_repository gcp_wlif_url].each do |field|
+ context "when a gcp parameter #{field} is missing" do
+ let(field) { nil }
+
+ it 'redirects to setup page' do
+ sign_in(user)
+
+ get_index_page
+
+ expect(response).to redirect_to new_project_gcp_artifact_registry_setup_path(project)
+ end
+ end
+ end
+
+ context 'with the feature flag disabled' do
+ before do
+ stub_feature_flags(gcp_technical_demo: false)
+ end
+
+ it_behaves_like 'returning the error message', 'Feature flag disabled'
+ end
+
+ context 'with non private project' do
+ before do
+ allow_next_found_instance_of(Project) do |project|
+ allow(project).to receive(:private?).and_return(false)
+ end
+ end
+
+ it_behaves_like 'returning the error message', 'Can only run on private projects'
+ end
+
+ context 'with unauthorized user' do
+ let_it_be(:user) { create(:user) }
+
+ it 'returns success' do
+ sign_in(user)
+
+ get_index_page
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when not on saas' do
+ it_behaves_like 'returning the error message', "Can&#39;t run here"
+ end
+
+ def dummy_client_payload
+ {
+ images: [
+ {
+ built_at: '2023-11-30T23:23:11.980068941Z',
+ media_type: 'application/vnd.docker.distribution.manifest.v2+json',
+ name: 'projects/project/locations/location/repositories/repo/dockerImages/image@sha256:6a',
+ size_bytes: 2827903,
+ tags: %w[tag1 tag2],
+ updated_at: '2023-12-07T11:48:50.840751Z',
+ uploaded_at: '2023-12-07T11:48:47.598511Z',
+ uri: 'location.pkg.dev/project/repo/image@sha256:6a'
+ }
+ ],
+ next_page_token: 'next_page_token'
+ }
+ end
+ end
+end
diff --git a/spec/requests/projects/gcp/artifact_registry/setup_controller_spec.rb b/spec/requests/projects/gcp/artifact_registry/setup_controller_spec.rb
new file mode 100644
index 00000000000..20d7969a05f
--- /dev/null
+++ b/spec/requests/projects/gcp/artifact_registry/setup_controller_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Gcp::ArtifactRegistry::SetupController, feature_category: :container_registry do
+ let_it_be(:project) { create(:project, :private) }
+
+ let(:user) { project.owner }
+
+ describe '#new' do
+ subject(:get_setup_page) { get(new_project_gcp_artifact_registry_setup_path(project)) }
+
+ shared_examples 'returning the error message' do |message|
+ it 'displays an error message' do
+ sign_in(user)
+
+ get_setup_page
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response.body).to include(message)
+ end
+ end
+
+ context 'when on saas', :saas do
+ it 'returns the setup page' do
+ sign_in(user)
+
+ get_setup_page
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response.body).to include('Google Project ID')
+ expect(response.body).to include('Google Project Location')
+ expect(response.body).to include('Artifact Registry Repository Name')
+ expect(response.body).to include('Worflow Identity Federation url')
+ expect(response.body).to include('Setup')
+ end
+
+ context 'with the feature flag disabled' do
+ before do
+ stub_feature_flags(gcp_technical_demo: false)
+ end
+
+ it_behaves_like 'returning the error message', 'Feature flag disabled'
+ end
+
+ context 'with non private project' do
+ before do
+ allow_next_found_instance_of(Project) do |project|
+ allow(project).to receive(:private?).and_return(false)
+ end
+ end
+
+ it_behaves_like 'returning the error message', 'Can only run on private projects'
+ end
+
+ context 'with unauthorized user' do
+ let_it_be(:user) { create(:user) }
+
+ it 'returns success' do
+ sign_in(user)
+
+ get_setup_page
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when not on saas' do
+ it_behaves_like 'returning the error message', "Can&#39;t run here"
+ end
+ end
+end
diff --git a/spec/requests/projects/integrations/shimos_controller_spec.rb b/spec/requests/projects/integrations/shimos_controller_spec.rb
deleted file mode 100644
index bd7af0bb4ac..00000000000
--- a/spec/requests/projects/integrations/shimos_controller_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Projects::Integrations::ShimosController, feature_category: :integrations do
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user, developer_projects: [project]) }
- let_it_be(:shimo_integration) { create(:shimo_integration, project: project) }
-
- before do
- sign_in(user)
- end
-
- describe 'GET #show' do
- context 'when Shimo integration is inactive' do
- before do
- shimo_integration.update!(active: false)
- end
-
- it 'returns 404 status' do
- get project_integrations_shimo_path(project)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when Shimo integration is active' do
- it 'renders the "show" template' do
- get project_integrations_shimo_path(project)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:show)
- expect(response.body).to include shimo_integration.external_wiki_url
- end
- end
- end
-end
diff --git a/spec/requests/projects/merge_requests/content_spec.rb b/spec/requests/projects/merge_requests/content_spec.rb
index 54066756f3e..1ecad609416 100644
--- a/spec/requests/projects/merge_requests/content_spec.rb
+++ b/spec/requests/projects/merge_requests/content_spec.rb
@@ -29,13 +29,5 @@ RSpec.describe 'merge request content spec', feature_category: :code_review_work
describe 'GET cached_widget' do
it_behaves_like 'cached widget request'
-
- context 'with non_public_artifacts disabled' do
- before do
- stub_feature_flags(non_public_artifacts: false)
- end
-
- it_behaves_like 'cached widget request'
- end
end
end
diff --git a/spec/requests/projects/ml/candidates_controller_spec.rb b/spec/requests/projects/ml/candidates_controller_spec.rb
index 78f31be26d1..0263f2d79b5 100644
--- a/spec/requests/projects/ml/candidates_controller_spec.rb
+++ b/spec/requests/projects/ml/candidates_controller_spec.rb
@@ -51,13 +51,7 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do
end
describe 'GET show' do
- let(:can_read_build) { true }
-
before do
- allow(Ability).to receive(:allowed?)
- .with(user, :read_build, candidate.ci_build)
- .and_return(can_read_build)
-
show_candidate
end
@@ -74,20 +68,6 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do
expect { show_candidate }.not_to exceed_all_query_limit(control_count)
end
- context 'when user has permission to read the build' do
- it 'includes ci build info' do
- expect(assigns[:include_ci_info]).to eq(true)
- end
- end
-
- context 'when user has no permission to read the build' do
- let(:can_read_build) { false }
-
- it 'sets include_ci_job to false' do
- expect(assigns[:include_ci_info]).to eq(false)
- end
- end
-
it_behaves_like '404 if candidate does not exist'
it_behaves_like 'requires read_model_experiments'
end
diff --git a/spec/requests/projects/pipelines_controller_spec.rb b/spec/requests/projects/pipelines_controller_spec.rb
index 7bdb66755db..aa3fefdef14 100644
--- a/spec/requests/projects/pipelines_controller_spec.rb
+++ b/spec/requests/projects/pipelines_controller_spec.rb
@@ -75,6 +75,59 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
expect(response).to have_gitlab_http_status(:ok)
end
+ context 'when pipeline_stage_set_last_modified is disabled' do
+ before do
+ stub_feature_flags(pipeline_stage_set_last_modified: false)
+ end
+
+ it 'does not set Last-Modified' do
+ create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build')
+
+ request_build_stage
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Last-Modified']).to be_nil
+ expect(response.headers['Cache-Control']).to eq('max-age=0, private, must-revalidate')
+ end
+ end
+
+ context 'when pipeline_stage_set_last_modified is enabled' do
+ before do
+ stub_feature_flags(pipeline_stage_set_last_modified: true)
+ stage.statuses.update_all(updated_at: status_timestamp)
+ end
+
+ let(:last_modified) { DateTime.parse(response.headers['Last-Modified']).utc }
+ let(:cache_control) { response.headers['Cache-Control'] }
+ let(:expected_cache_control) { 'max-age=0, private, must-revalidate' }
+
+ context 'when status.updated_at is before stage.updated' do
+ let(:stage) { pipeline.stage('build') }
+ let(:status_timestamp) { stage.updated_at - 10.minutes }
+
+ it 'sets correct Last-Modified of stage.updated_at' do
+ request_build_stage
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(last_modified).to be_within(1.second).of stage.updated_at
+ expect(cache_control).to eq(expected_cache_control)
+ end
+ end
+
+ context 'when status.updated_at is after stage.updated' do
+ let(:stage) { pipeline.stage('build') }
+ let(:status_timestamp) { stage.updated_at + 10.minutes }
+
+ it 'sets correct Last-Modified of max(status.updated_at)' do
+ request_build_stage
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(last_modified).to be_within(1.second).of status_timestamp
+ expect(cache_control).to eq(expected_cache_control)
+ end
+ end
+ end
+
context 'with retried builds' do
it 'does not execute N+1 queries' do
create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build')
diff --git a/spec/requests/projects/project_members_controller_spec.rb b/spec/requests/projects/project_members_controller_spec.rb
new file mode 100644
index 00000000000..8ab6f521766
--- /dev/null
+++ b/spec/requests/projects/project_members_controller_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_relative '../concerns/membership_actions_shared_examples'
+
+RSpec.describe Projects::ProjectMembersController, feature_category: :groups_and_projects do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:membershipable) { create(:project, :public, namespace: create(:group, :public)) }
+
+ let(:membershipable_path) { project_path(membershipable) }
+
+ describe 'GET /*namespace_id/:project_id/-/project_members/request_access' do
+ subject(:request) do
+ get request_access_namespace_project_project_members_path(
+ namespace_id: membershipable.namespace,
+ project_id: membershipable
+ )
+ end
+
+ it_behaves_like 'request_accessable'
+ end
+end
diff --git a/spec/requests/projects/service_desk/custom_email_controller_spec.rb b/spec/requests/projects/service_desk/custom_email_controller_spec.rb
index 8ce238ab99c..8d1f61f3f63 100644
--- a/spec/requests/projects/service_desk/custom_email_controller_spec.rb
+++ b/spec/requests/projects/service_desk/custom_email_controller_spec.rb
@@ -74,16 +74,6 @@ RSpec.describe Projects::ServiceDesk::CustomEmailController, feature_category: :
end
end
- shared_examples 'a controller with disabled feature flag with status' do |status|
- context 'when feature flag service_desk_custom_email is disabled' do
- before do
- stub_feature_flags(service_desk_custom_email: false)
- end
-
- it_behaves_like 'a controller that responds with status', status
- end
- end
-
shared_examples 'a deletable resource' do
describe 'DELETE custom email' do
let(:perform_request) { delete custom_email_path }
@@ -98,9 +88,6 @@ RSpec.describe Projects::ServiceDesk::CustomEmailController, feature_category: :
sign_in(user)
end
- # because CustomEmailController check_feature_flag_enabled responds
- it_behaves_like 'a controller with disabled feature flag with status', :not_found
-
describe 'GET custom email' do
let(:perform_request) { get custom_email_path }
@@ -364,7 +351,6 @@ RSpec.describe Projects::ServiceDesk::CustomEmailController, feature_category: :
# because Projects::ApplicationController :authenticate_user! responds
# with redirect to login page
it_behaves_like 'a controller that responds with status', :found
- it_behaves_like 'a controller with disabled feature flag with status', :found
end
context 'with illegitimate user signed in' do
@@ -374,7 +360,5 @@ RSpec.describe Projects::ServiceDesk::CustomEmailController, feature_category: :
end
it_behaves_like 'a controller that responds with status', :not_found
- # because CustomEmailController check_feature_flag_enabled responds
- it_behaves_like 'a controller with disabled feature flag with status', :not_found
end
end
diff --git a/spec/requests/projects/service_desk_controller_spec.rb b/spec/requests/projects/service_desk_controller_spec.rb
index 7d881d8ea62..1a8104dd669 100644
--- a/spec/requests/projects/service_desk_controller_spec.rb
+++ b/spec/requests/projects/service_desk_controller_spec.rb
@@ -78,24 +78,25 @@ RSpec.describe Projects::ServiceDeskController, feature_category: :service_desk
expect(response).to have_gitlab_http_status(:ok)
end
- it 'sets issue_template_key' do
- put project_service_desk_path(project, format: :json), params: { issue_template_key: 'service_desk' }
+ it 'sets attributes', :aggregate_failures do
+ put project_service_desk_path(project, format: :json), params: {
+ issue_template_key: 'service_desk',
+ reopen_issue_on_external_participant_note: true,
+ add_external_participants_from_cc: true
+ }
settings = project.service_desk_setting
expect(settings).to be_present
- expect(settings.issue_template_key).to eq('service_desk')
- expect(json_response['template_file_missing']).to eq(false)
- expect(json_response['issue_template_key']).to eq('service_desk')
- end
-
- it 'sets add_external_participants_from_cc' do
- put project_service_desk_path(project, format: :json), params: { add_external_participants_from_cc: true }
- project.reset
-
- settings = project.service_desk_setting
- expect(settings).to be_present
- expect(settings.add_external_participants_from_cc).to eq(true)
- expect(json_response['add_external_participants_from_cc']).to eq(true)
+ expect(settings).to have_attributes(
+ issue_template_key: 'service_desk',
+ reopen_issue_on_external_participant_note: true,
+ add_external_participants_from_cc: true
+ )
+ expect(json_response).to include(
+ 'issue_template_key' => 'service_desk',
+ 'reopen_issue_on_external_participant_note' => true,
+ 'add_external_participants_from_cc' => true
+ )
end
it 'returns an error when update of service desk settings fails' do
diff --git a/spec/requests/projects/tags_controller_spec.rb b/spec/requests/projects/tags_controller_spec.rb
index c0b0b1728c2..97cc3a5a0df 100644
--- a/spec/requests/projects/tags_controller_spec.rb
+++ b/spec/requests/projects/tags_controller_spec.rb
@@ -24,4 +24,23 @@ RSpec.describe Projects::TagsController, feature_category: :source_code_manageme
end
end
end
+
+ describe '#show' do
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'with x509 signature' do
+ let(:tag_name) { 'v1.1.1' }
+
+ it 'displays a signature badge' do
+ get project_tags_path(project, id: tag_name)
+
+ expect(response.body).to include('Unverified')
+ end
+ end
+ end
end
diff --git a/spec/requests/registrations_controller_spec.rb b/spec/requests/registrations_controller_spec.rb
index 8b857046a4d..71f2f347f0d 100644
--- a/spec/requests/registrations_controller_spec.rb
+++ b/spec/requests/registrations_controller_spec.rb
@@ -6,7 +6,9 @@ RSpec.describe RegistrationsController, type: :request, feature_category: :syste
describe 'POST #create' do
let_it_be(:user_attrs) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
- subject(:create_user) { post user_registration_path, params: { user: user_attrs } }
+ subject(:request) { post user_registration_path, params: { user: user_attrs } }
+
+ it_behaves_like 'Base action controller'
context 'when email confirmation is required' do
before do
@@ -15,7 +17,7 @@ RSpec.describe RegistrationsController, type: :request, feature_category: :syste
end
it 'redirects to the `users_almost_there_path`', unless: Gitlab.ee? do
- create_user
+ request
expect(response).to redirect_to(users_almost_there_path(email: user_attrs[:email]))
end
diff --git a/spec/requests/runner_setup_controller_spec.rb b/spec/requests/runner_setup_controller_spec.rb
index 8d75b9e81b7..ae52bd71b3b 100644
--- a/spec/requests/runner_setup_controller_spec.rb
+++ b/spec/requests/runner_setup_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe RunnerSetupController, feature_category: :runner_fleet do
+RSpec.describe RunnerSetupController, feature_category: :fleet_visibility do
let(:user) { create(:user) }
before do
diff --git a/spec/requests/sessions_spec.rb b/spec/requests/sessions_spec.rb
index 3428e607305..337f358d394 100644
--- a/spec/requests/sessions_spec.rb
+++ b/spec/requests/sessions_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe 'Sessions', feature_category: :system_access do
let(:user) { create(:user) }
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get new_user_session_path }
+ end
+
context 'for authentication', :allow_forgery_protection do
it 'logout does not require a csrf token' do
login_as(user)
@@ -41,7 +45,7 @@ RSpec.describe 'Sessions', feature_category: :system_access do
post user_session_path(user: { login: user.username, password: user.password })
- expect(response).to redirect_to(activity_group_path(member.source))
+ expect(response).to redirect_to(group_path(member.source))
end
end
diff --git a/spec/requests/user_settings_spec.rb b/spec/requests/user_settings_spec.rb
new file mode 100644
index 00000000000..8298edc9ad0
--- /dev/null
+++ b/spec/requests/user_settings_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "UserSettings", type: :request, feature_category: :system_access do
+ let(:user) { create(:user) }
+
+ describe 'GET authentication_log' do
+ let(:auth_event) { create(:authentication_event, user: user) }
+
+ it 'tracks search event', :snowplow do
+ sign_in(user)
+
+ get '/-/user_settings/authentication_log'
+
+ expect_snowplow_event(
+ category: 'UserSettings::UserSettingsController',
+ action: 'search_audit_event',
+ user: user
+ )
+ end
+
+ it 'loads page correctly' do
+ sign_in(user)
+
+ get '/-/user_settings/authentication_log'
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+end
diff --git a/spec/requests/well_known_routing_spec.rb b/spec/requests/well_known_routing_spec.rb
deleted file mode 100644
index d4e77a06953..00000000000
--- a/spec/requests/well_known_routing_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'well-known URLs', feature_category: :system_access do
- describe '/.well-known/change-password' do
- it 'redirects to edit profile password path' do
- get('/.well-known/change-password')
-
- expect(response).to redirect_to(edit_profile_password_path)
- end
- end
-end
diff --git a/spec/requests/well_known_spec.rb b/spec/requests/well_known_spec.rb
new file mode 100644
index 00000000000..6236acac3ab
--- /dev/null
+++ b/spec/requests/well_known_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'well-known URLs', feature_category: :shared do
+ describe '/.well-known/change-password', feature_category: :system_access do
+ it 'redirects to edit profile password path' do
+ get('/.well-known/change-password')
+
+ expect(response).to redirect_to(edit_user_settings_password_path)
+ end
+ end
+
+ describe '/.well-known/security.txt', feature_category: :compliance_management do
+ let(:action) { get('/.well-known/security.txt') }
+
+ context 'for an authenticated user' do
+ before do
+ sign_in(create(:user))
+ end
+
+ it 'renders when a security txt is configured' do
+ stub_application_setting security_txt_content: 'HELLO'
+ action
+ expect(response.body).to eq('HELLO')
+ end
+
+ it 'returns a 404 when a security txt is blank' do
+ stub_application_setting security_txt_content: ''
+ action
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns a 404 when a security txt is nil' do
+ stub_application_setting security_txt_content: nil
+ action
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'for an unauthenticated user' do
+ it 'renders when a security txt is configured' do
+ stub_application_setting security_txt_content: 'HELLO'
+ action
+ expect(response.body).to eq('HELLO')
+ end
+
+ it 'redirects to sign in' do
+ stub_application_setting security_txt_content: ''
+ action
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+end
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 7c4f040266e..8b54bc443da 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -122,7 +122,6 @@ end
# profile_account GET /-/profile/account(.:format) profile#account
# profile_history GET /-/profile/history(.:format) profile#history
-# profile_password PUT /-/profile/password(.:format) profile#password_update
# profile_token GET /-/profile/token(.:format) profile#token
# profile GET /-/profile(.:format) profile#show
# profile_update PUT /-/profile/update(.:format) profile#update
@@ -131,10 +130,6 @@ RSpec.describe ProfilesController, "routing" do
expect(get("/-/profile/account")).to route_to('profiles/accounts#show')
end
- it "to #audit_log" do
- expect(get("/-/profile/audit_log")).to route_to('profiles#audit_log')
- end
-
it "to #reset_feed_token" do
expect(put("/-/profile/reset_feed_token")).to route_to('profiles#reset_feed_token')
end
@@ -397,3 +392,19 @@ RSpec.describe JwksController, "routing" do
expect(get('/-/jwks')).to route_to('jwks#index')
end
end
+
+# user_settings_authentication_log GET /-/user_settings/authentication_log(.:format) user_settings/user_settings#authentication_log
+
+RSpec.describe UserSettings::UserSettingsController, 'routing', feature_category: :system_access do
+ it 'to #authentication_log' do
+ expect(get('/-/user_settings/authentication_log')).to route_to('user_settings/user_settings#authentication_log')
+ end
+end
+
+# user_settings_active_sessions_log GET /-/user_settings_active_sessions_log(.:format) user_settings/active_sessions#index#
+
+RSpec.describe UserSettings::ActiveSessionsController, 'routing', feature_category: :system_access do
+ it 'to #index' do
+ expect(get('/-/user_settings/active_sessions')).to route_to('user_settings/active_sessions#index')
+ end
+end
diff --git a/spec/routing/uploads_routing_spec.rb b/spec/routing/uploads_routing_spec.rb
index 198cda59357..63840b4c30b 100644
--- a/spec/routing/uploads_routing_spec.rb
+++ b/spec/routing/uploads_routing_spec.rb
@@ -3,37 +3,84 @@
require 'spec_helper'
RSpec.describe 'Uploads', 'routing' do
- it 'allows creating uploads for personal snippets' do
- expect(post('/uploads/personal_snippet?id=1')).to route_to(
- controller: 'uploads',
- action: 'create',
- model: 'personal_snippet',
- id: '1'
- )
+ context 'for personal snippets' do
+ it 'allows creating uploads for personal snippets' do
+ expect(post('/uploads/personal_snippet?id=1')).to route_to(
+ controller: 'uploads',
+ action: 'create',
+ model: 'personal_snippet',
+ id: '1'
+ )
+ end
+ end
+
+ context 'for users' do
+ it 'allows creating uploads for users' do
+ expect(post('/uploads/user?id=1')).to route_to(
+ controller: 'uploads',
+ action: 'create',
+ model: 'user',
+ id: '1'
+ )
+ end
end
- it 'allows creating uploads for users' do
- expect(post('/uploads/user?id=1')).to route_to(
- controller: 'uploads',
- action: 'create',
- model: 'user',
- id: '1'
- )
+ context 'for abuse reports' do
+ it 'allows fetching uploaded files for abuse reports' do
+ expect(get('/uploads/-/system/abuse_report/1/secret/test.png')).to route_to(
+ controller: 'uploads',
+ action: 'show',
+ model: 'abuse_report',
+ id: '1',
+ secret: 'secret',
+ filename: 'test.png'
+ )
+ end
+
+ it 'allows creating uploads for abuse reports' do
+ expect(post('/uploads/abuse_report?id=1')).to route_to(
+ controller: 'uploads',
+ action: 'create',
+ model: 'abuse_report',
+ id: '1'
+ )
+ end
+
+ it 'allows authorizing uploads for abuse reports' do
+ expect(post('/uploads/abuse_report/authorize')).to route_to(
+ controller: 'uploads',
+ action: 'authorize',
+ model: 'abuse_report'
+ )
+ end
+
+ it 'allows fetching abuse report screenshots' do
+ expect(get('/uploads/-/system/abuse_report/screenshot/1/test.jpg')).to route_to(
+ controller: 'uploads',
+ action: 'show',
+ model: 'abuse_report',
+ id: '1',
+ filename: 'test.jpg',
+ mounted_as: 'screenshot'
+ )
+ end
end
- it 'allows fetching alert metric metric images' do
- expect(get('/uploads/-/system/alert_management_metric_image/file/1/test.jpg')).to route_to(
- controller: 'uploads',
- action: 'show',
- model: 'alert_management_metric_image',
- id: '1',
- filename: 'test.jpg',
- mounted_as: 'file'
- )
+ context 'for alert management' do
+ it 'allows fetching alert metric metric images' do
+ expect(get('/uploads/-/system/alert_management_metric_image/file/1/test.jpg')).to route_to(
+ controller: 'uploads',
+ action: 'show',
+ model: 'alert_management_metric_image',
+ id: '1',
+ filename: 'test.jpg',
+ mounted_as: 'file'
+ )
+ end
end
it 'does not allow creating uploads for other models' do
- unroutable_models = UploadsController::MODEL_CLASSES.keys.compact - %w[personal_snippet user]
+ unroutable_models = UploadsController::MODEL_CLASSES.keys.compact - %w[personal_snippet user abuse_report]
unroutable_models.each do |model|
expect(post("/uploads/#{model}?id=1")).not_to be_routable
diff --git a/spec/rubocop/cop/background_migration/dictionary_file_spec.rb b/spec/rubocop/cop/background_migration/dictionary_file_spec.rb
index 7becf9c09a4..9b4adc87f78 100644
--- a/spec/rubocop/cop/background_migration/dictionary_file_spec.rb
+++ b/spec/rubocop/cop/background_migration/dictionary_file_spec.rb
@@ -134,16 +134,20 @@ RSpec.describe RuboCop::Cop::BackgroundMigration::DictionaryFile, feature_catego
end
context 'with dictionary file' do
- let(:introduced_by_url) { 'https://test_url' }
+ let(:introduced_by_url) { 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132639' }
let(:finalize_after) { '20230507160251' }
+ let(:milestone) { '16.1' }
before do
+ allow(File).to receive(:exist?).and_call_original
allow(File).to receive(:exist?).with(dictionary_file_path).and_return(true)
-
- allow_next_instance_of(RuboCop::BatchedBackgroundMigrationsDictionary) do |dictionary|
- allow(dictionary).to receive(:finalize_after).and_return(finalize_after)
- allow(dictionary).to receive(:introduced_by_url).and_return(introduced_by_url)
- end
+ allow(::RuboCop::BatchedBackgroundMigrationsDictionary).to receive(:dictionary_data).and_return({
+ '20231118100907' => {
+ finalize_after: finalize_after,
+ introduced_by_url: introduced_by_url,
+ milestone: milestone
+ }
+ })
end
context 'without introduced_by_url' do
@@ -158,6 +162,50 @@ RSpec.describe RuboCop::Cop::BackgroundMigration::DictionaryFile, feature_catego
end
end
+ context 'when the `introduced_by_url` is not correct' do
+ let(:introduced_by_url) { 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132639/invalid' }
+
+ it 'throws offense on having a correct url' do
+ expect_offense(<<~RUBY)
+ class QueueMyMigration < Gitlab::Database::Migration[2.1]
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{format('Invalid `introduced_by_url` url for the dictionary. Please use the following format: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/XXX')}
+ def up
+ queue_batched_background_migration(
+ 'MyMigration',
+ :users,
+ :id
+ )
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'without milestone' do
+ it_behaves_like 'migration with missing dictionary keys offense', :milestone do
+ let(:milestone) { nil }
+ end
+ end
+
+ context 'when milestone is a number' do
+ let(:milestone) { 16.1 }
+
+ it 'throws offense on having an invalid milestone' do
+ expect_offense(<<~RUBY)
+ class QueueMyMigration < Gitlab::Database::Migration[2.1]
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{format('Invalid `milestone` for the dictionary. It must be a string. Please ensure it is quoted.')}
+ def up
+ queue_batched_background_migration(
+ 'MyMigration',
+ :users,
+ :id
+ )
+ end
+ end
+ RUBY
+ end
+ end
+
context 'with required dictionary keys' do
it 'does not throw offense with appropriate dictionary file' do
expect_no_offenses(<<~RUBY)
diff --git a/spec/rubocop/cop/database/avoid_using_pluck_without_limit_spec.rb b/spec/rubocop/cop/database/avoid_using_pluck_without_limit_spec.rb
new file mode 100644
index 00000000000..801cf449726
--- /dev/null
+++ b/spec/rubocop/cop/database/avoid_using_pluck_without_limit_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/database/avoid_using_pluck_without_limit'
+
+RSpec.describe RuboCop::Cop::Database::AvoidUsingPluckWithoutLimit, feature_category: :database do
+ context 'when using pluck without a limit' do
+ it 'flags the use of pluck as a model scope' do
+ expect_offense(<<~RUBY)
+ class MyModel < ApplicationRecord
+ scope :all_users, -> { where(user_id: User.pluck(:id)) }
+ ^^^^^ #{described_class::MSG}
+ end
+ RUBY
+ end
+
+ it 'flags the use of pluck as a regular method' do
+ expect_offense(<<~RUBY)
+ class MyModel < ApplicationRecord
+ def all
+ self.pluck(:id)
+ ^^^^^ #{described_class::MSG}
+ end
+ end
+ RUBY
+ end
+
+ it 'flags the use of pluck inside where' do
+ expect_offense(<<~RUBY)
+ class MyModel < ApplicationRecord
+ def all_projects
+ Project.where(id: self.pluck(:id))
+ ^^^^^ #{described_class::MSG}
+ end
+ end
+ RUBY
+ end
+
+ it 'flags the use of pluck inside a model class method' do
+ allow(cop).to receive(:in_model?).and_return(true)
+
+ expect_offense(<<~RUBY)
+ class MyClass < Model
+ def all_users
+ User.where(id: self.pluck(:id))
+ ^^^^^ #{described_class::MSG}
+ end
+ end
+ RUBY
+ end
+
+ it 'flags the use of pluck inside a finder' do
+ allow(cop).to receive(:in_finder?).and_return(true)
+
+ expect_offense(<<~RUBY)
+ class MyFinder
+ def find(path)
+ Project.where(path: path).pluck(:id)
+ ^^^^^ #{described_class::MSG}
+ end
+ end
+ RUBY
+ end
+
+ it 'flags the use of pluck inside a service' do
+ allow(cop).to receive(:in_service_class?).and_return(true)
+
+ expect_offense(<<~RUBY)
+ class MyService
+ def delete_all(project)
+ delete(project.for_scan_result_policy_read(scan_result_policy_reads.pluck(:id)))
+ ^^^^^ #{described_class::MSG}
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'when using pluck with a limit' do
+ it 'does not flags the use of pluck as a model scope' do
+ expect_no_offenses(<<~RUBY)
+ class MyModel < ApplicationRecord
+ scope :all_users, ->(limit) { where(user_id: User.limit(limit).pluck(:id)) }
+ end
+ RUBY
+ end
+
+ it 'does not flags the use of pluck as a regular method' do
+ expect_no_offenses(<<~RUBY)
+ class MyModel < ApplicationRecord
+ def all(limit)
+ self.limit(limit).pluck(:id)
+ end
+ end
+ RUBY
+ end
+
+ it 'does not flags the use of pluck inside where' do
+ expect_no_offenses(<<~RUBY)
+ class MyModel < ApplicationRecord
+ def all_projects(limit)
+ Project.where(id: self.limit(limit).pluck(:id))
+ end
+ end
+ RUBY
+ end
+
+ it 'does not flags the use of pluck inside a model class method' do
+ allow(cop).to receive(:in_model?).and_return(true)
+
+ expect_no_offenses(<<~RUBY)
+ class MyClass < Model
+ def all_users
+ User.where(id: self.limit(100).pluck(:id))
+ end
+ end
+ RUBY
+ end
+
+ it 'does not flags the use of pluck inside a finder' do
+ allow(cop).to receive(:in_finder?).and_return(true)
+
+ expect_no_offenses(<<~RUBY)
+ class MyFinder
+ def find(path)
+ Project.where(path: path).limit(100).pluck(:id)
+ end
+ end
+ RUBY
+ end
+
+ it 'flags the use of pluck inside a service' do
+ allow(cop).to receive(:in_service_class?).and_return(true)
+
+ expect_no_offenses(<<~RUBY)
+ class MyService
+ def delete_all(project)
+ delete(project.for_scan_result_policy_read(scan_result_policy_reads.limit(100).pluck(:id)))
+ end
+ end
+ RUBY
+ end
+ end
+end
diff --git a/spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb b/spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb
index 2dba6194d44..b3864c5495f 100644
--- a/spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb
+++ b/spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe RuboCop::Cop::Gitlab::AvoidGitlabInstanceChecks, feature_category
::Gitlab.com?
Gitlab::CurrentSettings.should_check_namespace_plan?
::Gitlab::CurrentSettings.should_check_namespace_plan?
+ Gitlab::Saas.enabled?
+ ::Gitlab::Saas.enabled?
]
end
diff --git a/spec/rubocop/cop/migration/versioned_migration_class_spec.rb b/spec/rubocop/cop/migration/versioned_migration_class_spec.rb
index b92d9d21498..89657fbfa91 100644
--- a/spec/rubocop/cop/migration/versioned_migration_class_spec.rb
+++ b/spec/rubocop/cop/migration/versioned_migration_class_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe RuboCop::Cop::Migration::VersionedMigrationClass, feature_categor
it 'adds an offence if inheriting from ActiveRecord::Migration' do
expect_offense(<<~RUBY)
class MyMigration < ActiveRecord::Migration[6.1]
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't inherit from ActiveRecord::Migration or old versions of Gitlab::Database::Migration. Use Gitlab::Database::Migration[2.1] instead. See https://docs.gitlab.com/ee/development/migration_style_guide.html#migration-helpers-and-versioning.
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't inherit from ActiveRecord::Migration or old versions of Gitlab::Database::Migration. Use Gitlab::Database::Migration[#{described_class::CURRENT_MIGRATION_VERSION}] instead. See https://docs.gitlab.com/ee/development/migration_style_guide.html#migration-helpers-and-versioning.
end
RUBY
end
@@ -57,23 +57,23 @@ RSpec.describe RuboCop::Cop::Migration::VersionedMigrationClass, feature_categor
it 'adds an offence if inheriting from old version of Gitlab::Database::Migration' do
expect_offense(<<~RUBY)
class MyMigration < Gitlab::Database::Migration[2.0]
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't inherit from ActiveRecord::Migration or old versions of Gitlab::Database::Migration. Use Gitlab::Database::Migration[2.1] instead. See https://docs.gitlab.com/ee/development/migration_style_guide.html#migration-helpers-and-versioning.
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't inherit from ActiveRecord::Migration or old versions of Gitlab::Database::Migration. Use Gitlab::Database::Migration[#{described_class::CURRENT_MIGRATION_VERSION}] instead. See https://docs.gitlab.com/ee/development/migration_style_guide.html#migration-helpers-and-versioning.
end
RUBY
end
it 'adds an offence if including Gitlab::Database::MigrationHelpers directly' do
expect_offense(<<~RUBY)
- class MyMigration < Gitlab::Database::Migration[2.1]
+ class MyMigration < Gitlab::Database::Migration[#{described_class::CURRENT_MIGRATION_VERSION}]
include Gitlab::Database::MigrationHelpers
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't include migration helper modules directly. Inherit from Gitlab::Database::Migration[2.1] instead. See https://docs.gitlab.com/ee/development/migration_style_guide.html#migration-helpers-and-versioning.
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't include migration helper modules directly. Inherit from Gitlab::Database::Migration[#{described_class::CURRENT_MIGRATION_VERSION}] instead. See https://docs.gitlab.com/ee/development/migration_style_guide.html#migration-helpers-and-versioning.
end
RUBY
end
it 'excludes ActiveRecord classes defined inside the migration' do
expect_no_offenses(<<~RUBY)
- class TestMigration < Gitlab::Database::Migration[2.1]
+ class TestMigration < Gitlab::Database::Migration[#{described_class::CURRENT_MIGRATION_VERSION}]
class TestModel < ApplicationRecord
end
@@ -85,7 +85,7 @@ RSpec.describe RuboCop::Cop::Migration::VersionedMigrationClass, feature_categor
it 'excludes parentless classes defined inside the migration' do
expect_no_offenses(<<~RUBY)
- class TestMigration < Gitlab::Database::Migration[2.1]
+ class TestMigration < Gitlab::Database::Migration[#{described_class::CURRENT_MIGRATION_VERSION}]
class TestClass
end
end
diff --git a/spec/rubocop_spec_helper.rb b/spec/rubocop_spec_helper.rb
index 2f1dc2843be..297244073ed 100644
--- a/spec/rubocop_spec_helper.rb
+++ b/spec/rubocop_spec_helper.rb
@@ -7,8 +7,8 @@ require 'fast_spec_helper'
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47008
require 'rubocop'
require 'rubocop/rspec/shared_contexts/default_rspec_language_config_context'
+require 'gitlab/rspec/next_instance_of'
-require_relative 'support/helpers/next_instance_of'
require_relative 'rubocop/support_workaround'
RSpec.configure do |config|
diff --git a/spec/scripts/duo_chat/reporter_spec.rb b/spec/scripts/duo_chat/reporter_spec.rb
new file mode 100644
index 00000000000..836c41273e8
--- /dev/null
+++ b/spec/scripts/duo_chat/reporter_spec.rb
@@ -0,0 +1,270 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'gitlab'
+require 'json'
+require_relative '../../../scripts/duo_chat/reporter'
+
+RSpec.describe Reporter, feature_category: :ai_abstraction_layer do
+ subject(:reporter) { described_class.new }
+
+ describe '#run', :freeze_time do
+ let(:ci_commit_sha) { 'commitsha' }
+ let(:ci_pipeline_url) { 'https://gitlab.com/pipeline/url' }
+ let(:client) { double }
+
+ before do
+ stub_env('CI_COMMIT_SHA', ci_commit_sha)
+ stub_env('CI_PIPELINE_URL', ci_pipeline_url)
+ stub_env('CI_COMMIT_BRANCH', ci_commit_branch)
+ stub_env('CI_DEFAULT_BRANCH', ci_default_branch)
+
+ allow(Gitlab).to receive(:client).and_return(client)
+ end
+
+ context 'when the CI pipeline is running with the commit in `master` branch' do
+ let(:ci_commit_branch) { 'master' }
+ let(:ci_default_branch) { 'master' }
+ let(:snippet_web_url) { 'https://gitlab.com/snippet/url' }
+ let(:issue_web_url) { 'https://gitlab.com/issue/url' }
+
+ let(:mock_data) do
+ [
+ {
+ "question" => "question1",
+ "resource" => "resource",
+ "answer" => "answer1",
+ "tools_used" => ["foobar tool"],
+ "evaluations" => [
+ { "model" => "claude-2", "response" => "Grade: CORRECT" },
+ { "model" => "text-bison", "response" => "Grade: CORRECT" }
+ ]
+ }
+ ]
+ end
+
+ before do
+ allow(reporter).to receive(:report_data).and_return(mock_data)
+ end
+
+ it 'uploads snippet, creates a report issue and updates the tracking issue' do
+ # Uploads the test data as a snippet along with commit sha and pipeline url
+ snippet = double(web_url: snippet_web_url) # rubocop: disable RSpec/VerifiedDoubles -- an internal detail of Gitlab gem.
+ snippet_content = ::JSON.pretty_generate({
+ commit: ci_commit_sha,
+ pipeline_url: ci_pipeline_url,
+ data: mock_data
+ })
+
+ expect(client).to receive(:create_snippet).with(
+ described_class::QA_EVALUATION_PROJECT_ID,
+ {
+ title: Time.now.utc.to_s,
+ files: [{ file_path: "#{Time.now.utc.to_i}.json", content: snippet_content }],
+ visibility: 'private'
+ }
+ ).and_return(snippet)
+
+ # Create a new issue for the report
+ issue_title = "Report #{Time.now.utc}"
+ issue = double(web_url: issue_web_url) # rubocop: disable RSpec/VerifiedDoubles -- an internal detail of Gitlab gem.
+
+ expect(client).to receive(:create_issue).with(
+ described_class::QA_EVALUATION_PROJECT_ID,
+ issue_title,
+ { description: reporter.markdown_report }
+ ).and_return(issue)
+
+ # Updates the tracking issue by adding a row that links to the snippet and the issue just created.
+ aggregated_report_issue = double(description: "") # rubocop: disable RSpec/VerifiedDoubles -- an internal detail of Gitlab gem.
+ allow(client).to receive(:issue).with(
+ described_class::QA_EVALUATION_PROJECT_ID,
+ described_class::AGGREGATED_REPORT_ISSUE_IID
+ ).and_return(aggregated_report_issue)
+ row = "\n| #{Time.now.utc} | 1 | 100.0% | 0.0% | 0.0%"
+ row << " | #{issue_web_url} | #{snippet_web_url} |"
+
+ expect(client).to receive(:edit_issue).with(
+ described_class::QA_EVALUATION_PROJECT_ID,
+ described_class::AGGREGATED_REPORT_ISSUE_IID,
+ { description: aggregated_report_issue.description + row }
+ )
+
+ reporter.run
+ end
+ end
+
+ context 'when the CI pipeline is not running with the commit in `master` branch' do
+ let(:ci_commit_branch) { 'foobar' }
+ let(:ci_default_branch) { 'master' }
+ let(:qa_eval_report_filename) { 'report.md' }
+ let(:merge_request_iid) { "123" }
+ let(:ci_project_id) { "456" }
+ let(:ci_project_dir) { "/builds/gitlab-org/gitlab" }
+ let(:base_dir) { "#{ci_project_dir}/#{qa_eval_report_filename}" }
+
+ before do
+ stub_env('QA_EVAL_REPORT_FILENAME', qa_eval_report_filename)
+ stub_env('CI_MERGE_REQUEST_IID', merge_request_iid)
+ stub_env('CI_PROJECT_ID', ci_project_id)
+ stub_env('CI_PROJECT_DIR', ci_project_dir)
+ end
+
+ context 'when a note does not already exist' do
+ let(:note) { nil } # rubocop: disable RSpec/VerifiedDoubles -- an internal detail of Gitlab gem.
+
+ it 'saves the report as a markdown file and creates a new MR note containing the report content' do
+ expect(File).to receive(:write).with(base_dir, reporter.markdown_report)
+
+ allow(reporter).to receive(:existing_report_note).and_return(note)
+ expect(client).to receive(:create_merge_request_note).with(
+ ci_project_id,
+ merge_request_iid,
+ reporter.markdown_report
+ )
+
+ reporter.run
+ end
+ end
+
+ context 'when a note exists' do
+ let(:note_id) { "1" }
+ let(:note) { double(id: note_id, type: "Note") } # rubocop: disable RSpec/VerifiedDoubles -- an internal detail of Gitlab gem.
+
+ it 'saves the report as a markdown file and updates the existing MR note containing the report content' do
+ expect(File).to receive(:write).with(base_dir, reporter.markdown_report)
+
+ allow(reporter).to receive(:existing_report_note).and_return(note)
+ expect(client).to receive(:edit_merge_request_note).with(
+ ci_project_id,
+ merge_request_iid,
+ note_id,
+ reporter.markdown_report
+ )
+
+ reporter.run
+ end
+ end
+ end
+ end
+
+ describe '#markdown_report' do
+ let(:mock_data) do
+ [
+ {
+ "question" => "question1",
+ "resource" => "resource",
+ "answer" => "answer1",
+ "tools_used" => ["foobar tool"],
+ "evaluations" => [
+ { "model" => "claude-2", "response" => "Grade: CORRECT" },
+ { "model" => "text-bison", "response" => "Grade: CORRECT" }
+ ]
+ },
+ {
+ "question" => "question2",
+ "resource" => "resource",
+ "answer" => "answer2",
+ "tools_used" => [],
+ "evaluations" => [
+ { "model" => "claude-2", "response" => " Grade: INCORRECT" },
+ { "model" => "text-bison", "response" => "Grade: INCORRECT" }
+ ]
+ },
+ {
+ "question" => "question3",
+ "resource" => "resource",
+ "answer" => "answer3",
+ "tools_used" => [],
+ "evaluations" => [
+ { "model" => "claude-2", "response" => " Grade: CORRECT" },
+ { "model" => "text-bison", "response" => "Grade: INCORRECT" }
+ ]
+ },
+ {
+ "question" => "question4",
+ "resource" => "resource",
+ "answer" => "answer4",
+ "tools_used" => [],
+ # Note: The first evaluation (claude-2) is considered invalid and ignored.
+ "evaluations" => [
+ { "model" => "claude-2", "response" => "???" },
+ { "model" => "text-bison", "response" => "Grade: CORRECT" }
+ ]
+ },
+ {
+ "question" => "question5",
+ "resource" => "resource",
+ "answer" => "answer5",
+ "tools_used" => [],
+ # Note: The second evaluation (text-bison) is considered invalid and ignored.
+ "evaluations" => [
+ { "model" => "claude-2", "response" => " Grade: INCORRECT" },
+ { "model" => "text-bison", "response" => "???" }
+ ]
+ },
+ {
+ "question" => "question6",
+ "resource" => "resource",
+ "answer" => "answer6",
+ "tools_used" => [],
+ # Note: Both evaluations are invalid as they contain neither `CORRECT` nor `INCORRECT`.
+ # It should be ignored in the report.
+ "evaluations" => [
+ { "model" => "claude-2", "response" => "???" },
+ { "model" => "text-bison", "response" => "???" }
+ ]
+ }
+ ]
+ end
+
+ before do
+ allow(reporter).to receive(:report_data).and_return(mock_data)
+ end
+
+ it "generates the correct summary stats and uses the correct emoji indicators" do
+ expect(reporter.markdown_report).to include "The total number of evaluations: 5"
+
+ expect(reporter.markdown_report).to include "all LLMs graded `CORRECT`: 2 (40.0%)"
+ expect(reporter.markdown_report).to include ":white_check_mark: :white_check_mark:"
+ expect(reporter.markdown_report).to include ":warning: :white_check_mark:"
+
+ expect(reporter.markdown_report).to include "all LLMs graded `INCORRECT`: 2 (40.0%)"
+ expect(reporter.markdown_report).to include ":x: :x:"
+ expect(reporter.markdown_report).to include ":x: :warning:"
+
+ expect(reporter.markdown_report).to include "in which LLMs disagreed: 1 (20.0%)"
+ expect(reporter.markdown_report).to include ":white_check_mark: :x:"
+ end
+
+ it "includes the tools used" do
+ expect(reporter.markdown_report).to include "[\"foobar tool\"]"
+ end
+
+ context 'when usernames are present' do
+ let(:mock_data) do
+ [
+ {
+ "question" => "@user's @root?",
+ "resource" => "resource",
+ "answer" => "@user2 and @user3",
+ "tools_used" => ["foobar tool"],
+ "evaluations" => [
+ { "model" => "claude-2", "response" => "Grade: CORRECT\n\n@user4" },
+ { "model" => "text-bison", "response" => "Grade: CORRECT\n\n@user5" }
+ ]
+ }
+ ]
+ end
+
+ it 'quotes the usernames with backticks' do
+ expect(reporter.markdown_report).to include "`@root`"
+ expect(reporter.markdown_report).to include "`@user`"
+ expect(reporter.markdown_report).to include "`@user2`"
+ expect(reporter.markdown_report).to include "`@user3`"
+ expect(reporter.markdown_report).to include "`@user4`"
+ expect(reporter.markdown_report).to include "`@user5`"
+ end
+ end
+ end
+end
diff --git a/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb b/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
index aa758e19dfa..9b191215739 100644
--- a/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
+++ b/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
@@ -224,8 +224,8 @@ RSpec.describe GenerateMessageToRunE2ePipeline, feature_category: :tooling do
<!-- Run e2e warning begin -->
@#{author_username} Some end-to-end (E2E) tests should run based on the stage label.
- Please start the `trigger-omnibus-and-follow-up-e2e` job in the `qa` stage and ensure tests in the `follow-up-e2e:package-and-test-ee` pipeline
- pass **before this MR is merged**.
+ Please start the `trigger-omnibus-and-follow-up-e2e` job in the `qa` stage and wait for the tests in the `follow-up-e2e:package-and-test-ee` pipeline
+ to pass **before merging this MR**. Do not use **Auto-merge**, unless these tests have already completed successfully, because a failure in these tests do not block the auto-merge.
(E2E tests are computationally intensive and don't run automatically for every push/rebase, so we ask you to run this job manually at least once.)
To run all E2E tests, apply the ~"pipeline:run-all-e2e" label and run a new pipeline.
@@ -235,7 +235,7 @@ RSpec.describe GenerateMessageToRunE2ePipeline, feature_category: :tooling do
Once done, apply the ✅ emoji on this comment.
- **Team members only:** for any questions or help, reach out on the internal `#quality` Slack channel.
+ **Team members only:** for any questions or help, reach out on the internal `#test-platform` Slack channel.
<!-- Run e2e warning end -->
MARKDOWN
end
diff --git a/spec/scripts/internal_events/cli_spec.rb b/spec/scripts/internal_events/cli_spec.rb
new file mode 100644
index 00000000000..d84a4498fe8
--- /dev/null
+++ b/spec/scripts/internal_events/cli_spec.rb
@@ -0,0 +1,866 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'tty/prompt/test'
+require_relative '../../../scripts/internal_events/cli'
+
+RSpec.describe Cli, feature_category: :service_ping do
+ let(:prompt) { TTY::Prompt::Test.new }
+ let(:files_to_cleanup) { [] }
+
+ let(:event1_filepath) { 'config/events/internal_events_cli_used.yml' }
+ let(:event1_content) { internal_event_fixture('events/event_with_identifiers.yml') }
+ let(:event2_filepath) { 'ee/config/events/internal_events_cli_opened.yml' }
+ let(:event2_content) { internal_event_fixture('events/ee_event_without_identifiers.yml') }
+ let(:event3_filepath) { 'config/events/internal_events_cli_closed.yml' }
+ let(:event3_content) { internal_event_fixture('events/secondary_event_with_identifiers.yml') }
+
+ before do
+ stub_milestone('16.6')
+ collect_file_writes(files_to_cleanup)
+ stub_product_groups(File.read('spec/fixtures/scripts/internal_events/stages.yml'))
+ stub_helper(:fetch_window_size, '50')
+ end
+
+ after do
+ delete_files(files_to_cleanup)
+ end
+
+ shared_examples 'creates the right defintion files' do |description, test_case = {}|
+ # For expected keystroke mapping, see https://github.com/piotrmurach/tty-reader/blob/master/lib/tty/reader/keys.rb
+ let(:keystrokes) { test_case.dig('inputs', 'keystrokes') || [] }
+ let(:input_files) { test_case.dig('inputs', 'files') || [] }
+ let(:output_files) { test_case.dig('outputs', 'files') || [] }
+
+ subject { run_with_verbose_timeout }
+
+ it "in scenario: #{description}" do
+      delete_old_outputs # just in case
+      prep_input_files
+      queue_cli_inputs(keystrokes)
+      expect_file_creation
+
+      subject
+    end
+
+    private
+
+    def delete_old_outputs
+ [input_files, output_files].flatten.each do |file_info|
+ FileUtils.rm_f(Rails.root.join(file_info['path']))
+ end
+ end
+
+ def prep_input_files
+ input_files.each do |file|
+ File.write(
+ Rails.root.join(file['path']),
+ File.read(Rails.root.join(file['content']))
+ )
+ end
+ end
+
+ def expect_file_creation
+ if output_files.any?
+ output_files.each do |file|
+ expect(File).to receive(:write).with(file['path'], File.read(file['content']))
+ end
+ else
+ expect(File).not_to receive(:write)
+ end
+ end
+ end
+
+ context 'when creating new events' do
+ YAML.safe_load(File.read('spec/fixtures/scripts/internal_events/new_events.yml')).each do |test_case|
+ it_behaves_like 'creates the right defintion files', test_case['description'], test_case
+ end
+ end
+
+ context 'when creating new metrics' do
+ YAML.safe_load(File.read('spec/fixtures/scripts/internal_events/new_metrics.yml')).each do |test_case|
+ it_behaves_like 'creates the right defintion files', test_case['description'], test_case
+ end
+
+ context 'when creating a metric from multiple events' do
+ let(:events) do
+ [{
+ action: '00_event1', category: 'InternalEventTracking',
+ product_section: 'dev', product_stage: 'plan', product_group: 'optimize'
+ }, {
+ action: '00_event2', category: 'InternalEventTracking',
+ product_section: 'dev', product_stage: 'create', product_group: 'ide'
+ }, {
+ action: '00_event3', category: 'InternalEventTracking',
+ product_section: 'dev', product_stage: 'create', product_group: 'source_code'
+ }]
+ end
+
+ before do
+ events.each do |event|
+ File.write("config/events/#{event[:action]}.yml", event.transform_keys(&:to_s).to_yaml)
+ end
+ end
+
+ it 'filters the product group options based on common section' do
+        # Select 00_event1 & 00_event2
+ queue_cli_inputs([
+ "2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ "2\n", # Enum-select: Multiple events -- count occurrences of several separate events or interactions
+ " ", # Multi-select: __event1
+ "\e[B", # Arrow down to: __event2
+ " ", # Multi-select: __event2
+ "\n", # Submit selections
+ "\n", # Select: Weekly/Monthly count of unique users
+ "aggregate metric description\n", # Submit description
+ "\n", # Accept description for weekly
+ "\n" # Copy & continue
+ ])
+
+ run_with_timeout
+
+ # Filter down to "dev" options
+ expect(plain_last_lines(9)).to eq <<~TEXT.chomp
+ ‣ dev:plan:project_management
+ dev:plan:product_planning
+ dev:plan:knowledge
+ dev:plan:optimize
+ dev:create:source_code
+ dev:create:code_review
+ dev:create:ide
+ dev:create:editor_extensions
+ dev:create:code_creation
+ TEXT
+ end
+
+ it 'filters the product group options based on common section & stage' do
+        # Select 00_event2 & 00_event3
+ queue_cli_inputs([
+ "2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ "2\n", # Enum-select: Multiple events -- count occurrences of several separate events or interactions
+ "\e[B", # Arrow down to: __event2
+ " ", # Multi-select: __event2
+ "\e[B", # Arrow down to: __event3
+ " ", # Multi-select: __event3
+ "\n", # Submit selections
+ "\n", # Select: Weekly/Monthly count of unique users
+ "aggregate metric description\n", # Submit description
+ "\n", # Accept description for weekly
+ "\n" # Copy & continue
+ ])
+
+ run_with_timeout
+
+ # Filter down to "dev:create" options
+ expect(plain_last_lines(5)).to eq <<~TEXT.chomp
+ ‣ dev:create:source_code
+ dev:create:code_review
+ dev:create:ide
+ dev:create:editor_extensions
+ dev:create:code_creation
+ TEXT
+ end
+ end
+
+ context 'when product group for event no longer exists' do
+ let(:event) do
+ {
+ action: '00_event1', category: 'InternalEventTracking',
+ product_section: 'other', product_stage: 'other', product_group: 'other'
+ }
+ end
+
+ before do
+ File.write("config/events/#{event[:action]}.yml", event.transform_keys(&:to_s).to_yaml)
+ end
+
+ it 'prompts user to select another group' do
+ queue_cli_inputs([
+ "2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ "1\n", # Enum-select: Single event -- count occurrences of a specific event or user interaction
+          "\n", # Select: 00_event1
+ "\n", # Select: Weekly/Monthly count of unique users
+ "aggregate metric description\n", # Submit description
+ "\n", # Accept description for weekly
+ "2\n" # Modify attributes
+ ])
+
+ run_with_timeout
+
+        # Falls back to prompting the user to pick a valid group
+ expect(plain_last_lines(50)).to include 'Select one: Which group owns the metric?'
+ end
+ end
+
+ context 'when creating a metric for an event which has metrics' do
+ before do
+ File.write(event1_filepath, File.read(event1_content))
+ end
+
+ it 'shows all metrics options' do
+ select_event_from_list
+
+ expect(plain_last_lines(5)).to eq <<~TEXT.chomp
+ ‣ Monthly/Weekly count of unique users [who triggered internal_events_cli_used]
+ Monthly/Weekly count of unique projects [where internal_events_cli_used occurred]
+ Monthly/Weekly count of unique namespaces [where internal_events_cli_used occurred]
+ Monthly/Weekly count of [internal_events_cli_used occurrences]
+ Total count of [internal_events_cli_used occurrences]
+ TEXT
+ end
+
+ context 'with an existing weekly metric' do
+ before do
+ File.write(
+ 'ee/config/metrics/counts_7d/count_total_internal_events_cli_used_weekly.yml',
+ File.read('spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml')
+ )
+ end
+
+ it 'partially filters metric options' do
+ select_event_from_list
+
+ expect(plain_last_lines(6)).to eq <<~TEXT.chomp
+ ‣ Monthly/Weekly count of unique users [who triggered internal_events_cli_used]
+ Monthly/Weekly count of unique projects [where internal_events_cli_used occurred]
+ Monthly/Weekly count of unique namespaces [where internal_events_cli_used occurred]
+ Monthly count of [internal_events_cli_used occurrences]
+ ✘ Weekly count of [internal_events_cli_used occurrences] (already defined)
+ Total count of [internal_events_cli_used occurrences]
+ TEXT
+ end
+ end
+
+ context 'with an existing total metric' do
+ before do
+ File.write(
+ 'ee/config/metrics/counts_all/count_total_internal_events_cli_used.yml',
+ File.read('spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml')
+ )
+ end
+
+ it 'filters whole metric options' do
+ select_event_from_list
+
+ expect(plain_last_lines(5)).to eq <<~TEXT.chomp
+ ‣ Monthly/Weekly count of unique users [who triggered internal_events_cli_used]
+ Monthly/Weekly count of unique projects [where internal_events_cli_used occurred]
+ Monthly/Weekly count of unique namespaces [where internal_events_cli_used occurred]
+ Monthly/Weekly count of [internal_events_cli_used occurrences]
+ ✘ Total count of [internal_events_cli_used occurrences] (already defined)
+ TEXT
+ end
+ end
+
+ private
+
+ def select_event_from_list
+ queue_cli_inputs([
+ "2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ "1\n", # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ 'internal_events_cli_used', # Filters to this event
+ "\n" # Select: config/events/internal_events_cli_used.yml
+ ])
+
+ run_with_timeout
+ end
+ end
+
+ context 'when event excludes identifiers' do
+ before do
+ File.write(event2_filepath, File.read(event2_content))
+ end
+
+ it 'filters unavailable identifiers' do
+ queue_cli_inputs([
+ "2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ "1\n", # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ 'internal_events_cli_opened', # Filters to this event
+ "\n" # Select: config/events/internal_events_cli_opened.yml
+ ])
+
+ run_with_timeout
+
+ expect(plain_last_lines(5)).to eq <<~TEXT.chomp
+ ✘ Monthly/Weekly count of unique users [who triggered internal_events_cli_opened] (user unavailable)
+ ✘ Monthly/Weekly count of unique projects [where internal_events_cli_opened occurred] (project unavailable)
+ ✘ Monthly/Weekly count of unique namespaces [where internal_events_cli_opened occurred] (namespace unavailable)
+ ‣ Monthly/Weekly count of [internal_events_cli_opened occurrences]
+ Total count of [internal_events_cli_opened occurrences]
+ TEXT
+ end
+ end
+
+ context 'when all metrics already exist' do
+ let(:event) { { action: '00_event1', category: 'InternalEventTracking' } }
+ let(:metric) { { options: { 'events' => ['00_event1'] }, events: [{ 'name' => '00_event1' }] } }
+
+ let(:files) do
+ [
+ ['config/events/00_event1.yml', event],
+ ['config/metrics/counts_all/count_total_00_event1.yml', metric.merge(time_frame: 'all')],
+ ['config/metrics/counts_7d/count_total_00_event1_weekly.yml', metric.merge(time_frame: '7d')],
+ ['config/metrics/counts_28d/count_total_00_event1_monthly.yml', metric.merge(time_frame: '28d')]
+ ]
+ end
+
+ before do
+ files.each do |path, content|
+ File.write(path, content.transform_keys(&:to_s).to_yaml)
+ end
+ end
+
+ it 'exits the script and directs user to search for existing metrics' do
+ queue_cli_inputs([
+ "2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ "1\n", # Enum-select: Single event -- count occurrences of a specific event or user interaction
+ '00_event1', # Filters to this event
+ "\n" # Select: config/events/00_event1.yml
+ ])
+
+ run_with_timeout
+
+ expect(plain_last_lines(15)).to include 'Looks like the potential metrics for this event ' \
+ 'either already exist or are unsupported.'
+ end
+ end
+ end
+
+ context 'when showing usage examples' do
+ let(:expected_example_prompt) do
+ <<~TEXT.chomp
+ Select one: Select a use-case to view examples for: (Press ↑/↓ arrow or 1-8 number to move and Enter to select)
+ ‣ 1. ruby/rails
+ 2. rspec
+ 3. javascript (vue)
+ 4. javascript (plain)
+ 5. vue template
+ 6. haml
+ 7. View examples for a different event
+ 8. Exit
+ TEXT
+ end
+
+ context 'for an event with identifiers' do
+ let(:expected_rails_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ # RAILS
+
+ Gitlab::InternalEvents.track_event(
+ 'internal_events_cli_used',
+ project: project,
+ namespace: project.namespace,
+ user: user
+ )
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ let(:expected_rspec_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ # RSPEC
+
+ it_behaves_like 'internal event tracking' do
+ let(:event) { 'internal_events_cli_used' }
+ let(:project) { project }
+ let(:namespace) { project.namespace }
+ let(:user) { user }
+ end
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ before do
+ File.write(event1_filepath, File.read(event1_content))
+ end
+
+ it 'shows backend examples' do
+ queue_cli_inputs([
+ "3\n", # Enum-select: View Usage -- look at code examples for an existing event
+ 'internal_events_cli_used', # Filters to this event
+ "\n", # Select: config/events/internal_events_cli_used.yml
+ "\n", # Select: ruby/rails
+ "\e[B", # Arrow down to: rspec
+ "\n", # Select: rspec
+ "8\n" # Exit
+ ])
+
+ run_with_timeout
+
+ output = plain_last_lines(100)
+
+ expect(output).to include expected_example_prompt
+ expect(output).to include expected_rails_example
+ expect(output).to include expected_rspec_example
+ end
+ end
+
+ context 'for an event without identifiers' do
+ let(:expected_rails_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ # RAILS
+
+ Gitlab::InternalEvents.track_event('internal_events_cli_opened')
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ let(:expected_rspec_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ # RSPEC
+
+ it_behaves_like 'internal event tracking' do
+ let(:event) { 'internal_events_cli_opened' }
+ end
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ let(:expected_vue_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ // VUE
+
+ <script>
+ import { InternalEvents } from '~/tracking';
+ import { GlButton } from '@gitlab/ui';
+
+ const trackingMixin = InternalEvents.mixin();
+
+ export default {
+ mixins: [trackingMixin],
+ components: { GlButton },
+ methods: {
+ performAction() {
+ this.trackEvent('internal_events_cli_opened');
+ },
+ },
+ };
+ </script>
+
+ <template>
+ <gl-button @click=performAction>Click Me</gl-button>
+ </template>
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ let(:expected_js_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ // FRONTEND -- RAW JAVASCRIPT
+
+ import { InternalEvents } from '~/tracking';
+
+ export const performAction = () => {
+ InternalEvents.trackEvent('internal_events_cli_opened');
+
+ return true;
+ };
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ let(:expected_vue_template_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ // VUE TEMPLATE -- ON-CLICK
+
+ <script>
+ import { GlButton } from '@gitlab/ui';
+
+ export default {
+ components: { GlButton }
+ };
+ </script>
+
+ <template>
+ <gl-button data-event-tracking="internal_events_cli_opened">
+ Click Me
+ </gl-button>
+ </template>
+
+ --------------------------------------------------
+ // VUE TEMPLATE -- ON-LOAD
+
+ <script>
+ import { GlButton } from '@gitlab/ui';
+
+ export default {
+ components: { GlButton }
+ };
+ </script>
+
+ <template>
+ <gl-button data-event-tracking-load="internal_events_cli_opened">
+ Click Me
+ </gl-button>
+ </template>
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ let(:expected_haml_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ # HAML -- ON-CLICK
+
+ .gl-display-inline-block{ data: { event_tracking: 'internal_events_cli_opened' } }
+ = _('Important Text')
+
+ --------------------------------------------------
+ # HAML -- COMPONENT ON-CLICK
+
+ = render Pajamas::ButtonComponent.new(button_options: { data: { event_tracking: 'internal_events_cli_opened' } })
+
+ --------------------------------------------------
+ # HAML -- COMPONENT ON-LOAD
+
+ = render Pajamas::ButtonComponent.new(button_options: { data: { event_tracking_load: true, event_tracking: 'internal_events_cli_opened' } })
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ before do
+ File.write(event2_filepath, File.read(event2_content))
+ end
+
+ it 'shows all examples' do
+ queue_cli_inputs([
+ "3\n", # Enum-select: View Usage -- look at code examples for an existing event
+ 'internal_events_cli_opened', # Filters to this event
+          "\n", # Select: ee/config/events/internal_events_cli_opened.yml
+ "\n", # Select: ruby/rails
+ "\e[B", # Arrow down to: rspec
+ "\n", # Select: rspec
+ "\e[B", # Arrow down to: js vue
+ "\n", # Select: js vue
+ "\e[B", # Arrow down to: js plain
+ "\n", # Select: js plain
+ "\e[B", # Arrow down to: vue template
+ "\n", # Select: vue template
+ "\e[B", # Arrow down to: haml
+ "\n", # Select: haml
+ "8\n" # Exit
+ ])
+
+ run_with_timeout
+
+ output = plain_last_lines(1000)
+
+ expect(output).to include expected_example_prompt
+ expect(output).to include expected_rails_example
+ expect(output).to include expected_rspec_example
+ expect(output).to include expected_vue_example
+ expect(output).to include expected_js_example
+ expect(output).to include expected_vue_template_example
+ expect(output).to include expected_haml_example
+ end
+ end
+
+ context 'when viewing examples for multiple events' do
+ let(:expected_event1_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ # RAILS
+
+ Gitlab::InternalEvents.track_event(
+ 'internal_events_cli_used',
+ project: project,
+ namespace: project.namespace,
+ user: user
+ )
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ let(:expected_event2_example) do
+ <<~TEXT.chomp
+ --------------------------------------------------
+ # RAILS
+
+ Gitlab::InternalEvents.track_event('internal_events_cli_opened')
+
+ --------------------------------------------------
+ TEXT
+ end
+
+ before do
+ File.write(event1_filepath, File.read(event1_content))
+ File.write(event2_filepath, File.read(event2_content))
+ end
+
+ it 'switches between events gracefully' do
+ queue_cli_inputs([
+ "3\n", # Enum-select: View Usage -- look at code examples for an existing event
+ 'internal_events_cli_used', # Filters to this event
+ "\n", # Select: config/events/internal_events_cli_used.yml
+ "\n", # Select: ruby/rails
+ "7\n", # Select: View examples for a different event
+ 'internal_events_cli_opened', # Filters to this event
+ "\n", # Select: config/events/internal_events_cli_opened.yml
+ "\n", # Select: ruby/rails
+ "8\n" # Exit
+ ])
+
+ run_with_timeout
+
+ output = plain_last_lines(300)
+
+ expect(output).to include expected_example_prompt
+ expect(output).to include expected_event1_example
+ expect(output).to include expected_event2_example
+ end
+ end
+ end
+
+ context 'when offline' do
+ before do
+ stub_product_groups(nil)
+ end
+
+ it_behaves_like 'creates the right defintion files',
+ 'Creates a new event with product stage/section/group input manually' do
+ let(:keystrokes) do
+ [
+ "1\n", # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ "Internal Event CLI is opened\n", # Submit description
+ "internal_events_cli_opened\n", # Submit action name
+ "6\n", # Select: None
+ "\n", # Skip MR URL
+ "analytics\n", # Input section
+ "monitor\n", # Input stage
+ "analytics_instrumentation\n", # Input group
+ "2\n", # Select [premium, ultimate]
+ "y\n", # Create file
+ "3\n" # Exit
+ ]
+ end
+
+ let(:output_files) { [{ 'path' => event2_filepath, 'content' => event2_content }] }
+ end
+
+ it_behaves_like 'creates the right defintion files',
+ 'Creates a new metric with product stage/section/group input manually' do
+ let(:keystrokes) do
+ [
+ "2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time
+ "2\n", # Enum-select: Multiple events -- count occurrences of several separate events or interactions
+ 'internal_events_cli', # Filters to the relevant events
+ ' ', # Multi-select: internal_events_cli_closed
+ "\e[B", # Arrow down to: internal_events_cli_used
+ ' ', # Multi-select: internal_events_cli_used
+ "\n", # Submit selections
+ "\e[B", # Arrow down to: Weekly count of unique projects
+ "\n", # Select: Weekly count of unique projects
+ "where a defition file was created with the CLI\n", # Input description
+ "\n", # Submit weekly description for monthly
+ "2\n", # Select: Modify attributes
+ "\n", # Accept section
+ "\n", # Accept stage
+ "\n", # Accept group
+ "\n", # Skip URL
+ "1\n", # Select: [free, premium, ultimate]
+ "y\n", # Create file
+ "y\n", # Create file
+ "2\n" # Exit
+ ]
+ end
+
+ let(:input_files) do
+ [
+ { 'path' => event1_filepath, 'content' => event1_content },
+ { 'path' => event3_filepath, 'content' => event3_content }
+ ]
+ end
+
+ let(:output_files) do
+ # rubocop:disable Layout/LineLength -- Long filepaths read better unbroken
+ [{
+ 'path' => 'config/metrics/counts_28d/count_distinct_project_id_from_internal_events_cli_closed_and_internal_events_cli_used_monthly.yml',
+ 'content' => 'spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml'
+ }, {
+ 'path' => 'config/metrics/counts_7d/count_distinct_project_id_from_internal_events_cli_closed_and_internal_events_cli_used_weekly.yml',
+ 'content' => 'spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml'
+ }]
+ # rubocop:enable Layout/LineLength
+ end
+ end
+ end
+
+ context 'when window size is unavailable' do
+ before do
+ # `tput <cmd>` returns empty string on error
+ stub_helper(:fetch_window_size, '')
+ stub_helper(:fetch_window_height, '')
+ end
+
+ it_behaves_like 'creates the right defintion files',
+ 'Terminal size does not prevent file creation' do
+ let(:keystrokes) do
+ [
+ "1\n", # Enum-select: New Event -- start tracking when an action or scenario occurs on gitlab instances
+ "Internal Event CLI is opened\n", # Submit description
+ "internal_events_cli_opened\n", # Submit action name
+ "6\n", # Select: None
+ "\n", # Skip MR URL
+ "instrumentation\n", # Filter & select group
+ "2\n", # Select [premium, ultimate]
+ "y\n", # Create file
+ "3\n" # Exit
+ ]
+ end
+
+ let(:output_files) { [{ 'path' => event2_filepath, 'content' => event2_content }] }
+ end
+ end
+
+ context "when user doesn't know what they're trying to do" do
+ it "handles when user isn't trying to track product usage" do
+ queue_cli_inputs([
+ "4\n", # Enum-select: ...am I in the right place?
+ "n\n" # No --> Are you trying to track customer usage of a GitLab feature?
+ ])
+
+ run_with_timeout
+
+ expect(plain_last_lines(50)).to include("Oh no! This probably isn't the tool you need!")
+ end
+
+ it "handles when product usage can't be tracked with events" do
+ queue_cli_inputs([
+ "4\n", # Enum-select: ...am I in the right place?
+ "y\n", # Yes --> Are you trying to track customer usage of a GitLab feature?
+ "n\n" # No --> Can usage for the feature be measured by tracking a specific user action?
+ ])
+
+ run_with_timeout
+
+ expect(plain_last_lines(50)).to include("Oh no! This probably isn't the tool you need!")
+ end
+
+ it 'handles when user needs to add a new event' do
+ queue_cli_inputs([
+ "4\n", # Enum-select: ...am I in the right place?
+ "y\n", # Yes --> Are you trying to track customer usage of a GitLab feature?
+ "y\n", # Yes --> Can usage for the feature be measured by tracking a specific user action?
+ "n\n", # No --> Is the event already tracked?
+ "n\n" # No --> Ready to start?
+ ])
+
+ run_with_timeout
+
+ expect(plain_last_lines(30)).to include("Okay! The next step is adding a new event! (~5 min)")
+ end
+
+ it 'handles when user needs to add a new metric' do
+ queue_cli_inputs([
+ "4\n", # Enum-select: ...am I in the right place?
+ "y\n", # Yes --> Are you trying to track customer usage of a GitLab feature?
+ "y\n", # Yes --> Can usage for the feature be measured by tracking a specific user action?
+ "y\n", # Yes --> Is the event already tracked?
+ "n\n" # No --> Ready to start?
+ ])
+
+ run_with_timeout
+
+ expect(plain_last_lines(30)).to include("Amazing! The next step is adding a new metric! (~8 min)")
+ end
+ end
+
+ private
+
+ def queue_cli_inputs(keystrokes)
+ prompt.input << keystrokes.join('')
+ prompt.input.rewind
+ end
+
+ def run_with_timeout(duration = 1)
+ Timeout.timeout(duration) { described_class.new(prompt).run }
+ rescue Timeout::Error
+ # Timeout is needed to break out of the CLI, but we may want
+ # to make assertions afterwards
+ end
+
+ def run_with_verbose_timeout(duration = 1)
+ Timeout.timeout(duration) { described_class.new(prompt).run }
+ rescue Timeout::Error => e
+ # Re-raise error so CLI output is printed with the error
+ message = <<~TEXT
+ Awaiting input too long. Entire CLI output:
+
+ #{
+ prompt.output.string.lines
+ .map { |line| "\e[0;37m#{line}\e[0m" } # wrap in white
+ .join('')
+ .gsub("\e[1G", "\e[1G ") # align to error indent
+ }
+
+
+ TEXT
+
+ raise e.class, message, e.backtrace
+ end
+
+ def plain_last_lines(size)
+ prompt.output.string
+ .lines
+ .last(size)
+ .join('')
+ .gsub(/\e[^\sm]{2,4}[mh]/, '')
+ end
+
+ def collect_file_writes(collector)
+ allow(File).to receive(:write).and_wrap_original do |original_method, *args, &block|
+ filepath = args.first
+ collector << filepath
+
+ dirname = Pathname.new(filepath).dirname
+ unless dirname.directory?
+ FileUtils.mkdir_p dirname
+ collector << dirname.to_s
+ end
+
+ original_method.call(*args, &block)
+ end
+ end
+
+ def stub_milestone(milestone)
+ stub_const("InternalEventsCli::Helpers::MILESTONE", milestone)
+ end
+
+ def stub_product_groups(body)
+ allow(Net::HTTP).to receive(:get)
+ .with(URI('https://gitlab.com/gitlab-com/www-gitlab-com/-/raw/master/data/stages.yml'))
+ .and_return(body)
+ end
+
+ def stub_helper(helper, value)
+ # rubocop:disable RSpec/AnyInstanceOf -- 'Next' helper not included in fast_spec_helper & next is insufficient
+ allow_any_instance_of(InternalEventsCli::Helpers).to receive(helper).and_return(value)
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ def delete_files(files)
+ files.each do |filepath|
+ FileUtils.rm_f(Rails.root.join(filepath))
+ end
+ end
+
+ def internal_event_fixture(filepath)
+ Rails.root.join('spec', 'fixtures', 'scripts', 'internal_events', filepath)
+ end
+end
diff --git a/spec/scripts/lib/glfm/update_specification_spec.rb b/spec/scripts/lib/glfm/update_specification_spec.rb
index 500e8685e77..8269256dc06 100644
--- a/spec/scripts/lib/glfm/update_specification_spec.rb
+++ b/spec/scripts/lib/glfm/update_specification_spec.rb
@@ -1,8 +1,9 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'gitlab/rspec/next_instance_of'
+
require_relative '../../../../scripts/lib/glfm/update_specification'
-require_relative '../../../support/helpers/next_instance_of'
# IMPORTANT NOTE: See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#update-specificationrb-script
# for details on the implementation and usage of the `update_specification.rb` script being tested.
diff --git a/spec/scripts/trigger-build_spec.rb b/spec/scripts/trigger-build_spec.rb
index f46adb1a9f1..a1bedd19ed3 100644
--- a/spec/scripts/trigger-build_spec.rb
+++ b/spec/scripts/trigger-build_spec.rb
@@ -236,14 +236,62 @@ RSpec.describe Trigger, feature_category: :tooling do
describe "TRIGGER_BRANCH" do
context 'when CNG_BRANCH is not set' do
- it 'sets TRIGGER_BRANCH to master' do
- stub_env('CI_PROJECT_NAMESPACE', 'gitlab-org')
- expect(subject.variables['TRIGGER_BRANCH']).to eq('master')
+ context 'with gitlab-org' do
+ before do
+ stub_env('CI_PROJECT_NAMESPACE', 'gitlab-org')
+ end
+
+ it 'sets TRIGGER_BRANCH to master if the commit ref is master' do
+ stub_env('CI_COMMIT_REF_NAME', 'master')
+ stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', nil)
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('master')
+ end
+
+ it 'sets the TRIGGER_BRANCH to master if the commit is part of an MR targeting master' do
+ stub_env('CI_COMMIT_REF_NAME', 'feature_branch')
+ stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', 'master')
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('master')
+ end
+
+ it 'sets TRIGGER_BRANCH to stable branch if the commit ref is a stable branch' do
+ stub_env('CI_COMMIT_REF_NAME', '16-6-stable-ee')
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('16-6-stable')
+ end
+
+ it 'sets the TRIGGER_BRANCH to stable branch if the commit is part of an MR targeting stable branch' do
+ stub_env('CI_COMMIT_REF_NAME', 'feature_branch')
+ stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', '16-6-stable-ee')
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('16-6-stable')
+ end
end
- it 'sets TRIGGER_BRANCH to main-jh on JH side' do
- stub_env('CI_PROJECT_NAMESPACE', 'gitlab-cn')
- expect(subject.variables['TRIGGER_BRANCH']).to eq('main-jh')
+ context 'with gitlab-cn' do
+ before do
+ stub_env('CI_PROJECT_NAMESPACE', 'gitlab-cn')
+ end
+
+ it 'sets TRIGGER_BRANCH to main-jh if commit ref is main-jh' do
+ stub_env('CI_COMMIT_REF_NAME', 'main-jh')
+ stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', nil)
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('main-jh')
+ end
+
+ it 'sets the TRIGGER_BRANCH to main-jh if the commit is part of an MR targeting main-jh' do
+ stub_env('CI_COMMIT_REF_NAME', 'feature_branch')
+ stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', 'main-jh')
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('main-jh')
+ end
+
+ it 'sets TRIGGER_BRANCH to 16-6-stable if commit ref is a stable branch' do
+ stub_env('CI_COMMIT_REF_NAME', '16-6-stable-jh')
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('16-6-stable')
+ end
+
+ it 'sets the TRIGGER_BRANCH to 16-6-stable if the commit is part of an MR targeting 16-6-stable-jh' do
+ stub_env('CI_COMMIT_REF_NAME', 'feature_branch')
+ stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', '16-6-stable-jh')
+ expect(subject.variables['TRIGGER_BRANCH']).to eq('16-6-stable')
+ end
end
end
diff --git a/spec/serializers/admin/abuse_report_details_entity_spec.rb b/spec/serializers/admin/abuse_report_details_entity_spec.rb
index 47904a4e7e5..67f6bdfee85 100644
--- a/spec/serializers/admin/abuse_report_details_entity_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_entity_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
it 'exposes correct attributes' do
expect(entity_hash.keys).to match_array([
:user,
- :report
+ :report,
+ :upload_note_attachment_path
])
end
diff --git a/spec/serializers/admin/abuse_report_details_serializer_spec.rb b/spec/serializers/admin/abuse_report_details_serializer_spec.rb
index 3bdd2e46ba3..f320d9538f5 100644
--- a/spec/serializers/admin/abuse_report_details_serializer_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_serializer_spec.rb
@@ -11,7 +11,8 @@ RSpec.describe Admin::AbuseReportDetailsSerializer, feature_category: :insider_t
it 'serializes an abuse report' do
is_expected.to match_array([
:user,
- :report
+ :report,
+ :upload_note_attachment_path
])
end
end
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index 874bcbfceaf..a899a798fa0 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -281,21 +281,11 @@ RSpec.describe BuildDetailsEntity do
end
context 'when the build has non public archive type artifacts' do
- let(:build) { create(:ci_build, :artifacts, :with_private_artifacts_config, pipeline: pipeline) }
+ let(:build) { create(:ci_build, :private_artifacts, :with_private_artifacts_config, pipeline: pipeline) }
it 'does not expose non public artifacts' do
expect(subject.keys).not_to include(:artifact)
end
-
- context 'with the non_public_artifacts feature flag disabled' do
- before do
- stub_feature_flags(non_public_artifacts: false)
- end
-
- it 'exposes artifact details' do
- expect(subject[:artifact].keys).to include(:download_path, :browse_path, :locked)
- end
- end
end
end
diff --git a/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb b/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb
index 7df6413f416..8645bbd49fb 100644
--- a/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb
+++ b/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe DeployKeys::BasicDeployKeyEntity do
+RSpec.describe DeployKeys::BasicDeployKeyEntity, feature_category: :continuous_delivery do
include RequestAwareEntity
let(:user) { create(:user) }
@@ -56,7 +56,18 @@ RSpec.describe DeployKeys::BasicDeployKeyEntity do
end
context 'project deploy key' do
+ let(:options) { { user: user, project: project } }
+
it { expect(entity.as_json).to include(can_edit: true) }
+ it { expect(entity.as_json).to include(edit_path: edit_project_deploy_key_path(options[:project], deploy_key)) }
+
+ it do
+ expect(entity.as_json).to include(enable_path: enable_project_deploy_key_path(options[:project], deploy_key))
+ end
+
+ it do
+ expect(entity.as_json).to include(disable_path: disable_project_deploy_key_path(options[:project], deploy_key))
+ end
end
context 'public deploy key' do
diff --git a/spec/serializers/discussion_entity_spec.rb b/spec/serializers/discussion_entity_spec.rb
index 0fe10ed2c6d..4b818ce35e6 100644
--- a/spec/serializers/discussion_entity_spec.rb
+++ b/spec/serializers/discussion_entity_spec.rb
@@ -53,13 +53,6 @@ RSpec.describe DiscussionEntity do
.to match_schema('entities/note_user_entity')
end
- it 'exposes the url for custom award emoji' do
- custom_emoji = create(:custom_emoji, group: group)
- create(:award_emoji, awardable: note, name: custom_emoji.name)
-
- expect(subject[:notes].last[:award_emoji].first.keys).to include(:url)
- end
-
context 'when is LegacyDiffDiscussion' do
let(:discussion) { create(:legacy_diff_note_on_merge_request, noteable: note.noteable, project: project).to_discussion }
diff --git a/spec/serializers/group_link/group_group_link_entity_spec.rb b/spec/serializers/group_link/group_group_link_entity_spec.rb
index 8f31c53e841..c1ee92e55ba 100644
--- a/spec/serializers/group_link/group_group_link_entity_spec.rb
+++ b/spec/serializers/group_link/group_group_link_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupLink::GroupGroupLinkEntity do
+RSpec.describe GroupLink::GroupGroupLinkEntity, feature_category: :groups_and_projects do
include_context 'group_group_link'
let_it_be(:current_user) { create(:user) }
@@ -17,6 +17,10 @@ RSpec.describe GroupLink::GroupGroupLinkEntity do
expect(entity.to_json).to match_schema('group_link/group_group_link')
end
+ it 'correctly exposes `valid_roles`' do
+ expect(entity.as_json[:valid_roles]).to include(Gitlab::Access.options_with_owner)
+ end
+
context 'source' do
it 'exposes `source`' do
expect(as_json[:source]).to include(
@@ -59,7 +63,7 @@ RSpec.describe GroupLink::GroupGroupLinkEntity do
allow(entity).to receive(:direct_member?).and_return(false)
end
- it 'exposes `can_update` and `can_remove` as `true`' do
+ it 'exposes `can_update` and `can_remove` as `false`' do
expect(as_json[:can_update]).to be false
expect(as_json[:can_remove]).to be false
end
diff --git a/spec/serializers/group_link/group_link_entity_spec.rb b/spec/serializers/group_link/group_link_entity_spec.rb
index 941445feaa2..01c01f492aa 100644
--- a/spec/serializers/group_link/group_link_entity_spec.rb
+++ b/spec/serializers/group_link/group_link_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupLink::GroupLinkEntity do
+RSpec.describe GroupLink::GroupLinkEntity, feature_category: :groups_and_projects do
include_context 'group_group_link'
let(:entity) { described_class.new(group_group_link) }
@@ -12,10 +12,6 @@ RSpec.describe GroupLink::GroupLinkEntity do
expect(entity.to_json).to match_schema('group_link/group_link')
end
- it 'correctly exposes `valid_roles`' do
- expect(entity_hash[:valid_roles]).to include(Gitlab::Access.options_with_owner)
- end
-
it 'correctly exposes `shared_with_group.avatar_url`' do
avatar_url = 'https://gitlab.com/uploads/-/system/group/avatar/24/foobar.png?width=40'
allow(shared_with_group).to receive(:avatar_url).with(only_path: false, size: Member::AVATAR_SIZE).and_return(avatar_url)
diff --git a/spec/serializers/group_link/project_group_link_entity_spec.rb b/spec/serializers/group_link/project_group_link_entity_spec.rb
index 00bfc43f17e..9f09020b91d 100644
--- a/spec/serializers/group_link/project_group_link_entity_spec.rb
+++ b/spec/serializers/group_link/project_group_link_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupLink::ProjectGroupLinkEntity do
+RSpec.describe GroupLink::ProjectGroupLinkEntity, feature_category: :groups_and_projects do
let_it_be(:current_user) { create(:user) }
let_it_be(:project_group_link) { create(:project_group_link) }
@@ -16,32 +16,86 @@ RSpec.describe GroupLink::ProjectGroupLinkEntity do
expect(entity.to_json).to match_schema('group_link/project_group_link')
end
- context 'when current user is a project maintainer' do
- before_all do
- project_group_link.project.add_maintainer(current_user)
+ context 'when current user is a direct member' do
+ before do
+ allow(entity).to receive(:direct_member?).and_return(true)
+ allow(entity).to receive(:can?).and_call_original
end
- it 'exposes `can_update` and `can_remove` as `true`' do
- expect(as_json[:can_update]).to be true
- expect(as_json[:can_remove]).to be true
- end
- end
+ describe 'can_update' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(
+ :can_admin_project_member,
+ :can_manage_group_link_with_owner_access,
+ :expected_can_update
+ ) do
+ false | false | false
+ true | false | false
+ true | true | true
+ end
- context 'when current user is a group owner' do
- before_all do
- project_group_link.group.add_owner(current_user)
+ with_them do
+ before do
+ allow(entity)
+ .to receive(:can?)
+ .with(current_user, :admin_project_member, project_group_link.shared_from)
+ .and_return(can_admin_project_member)
+ allow(entity)
+ .to receive(:can?)
+ .with(current_user, :manage_group_link_with_owner_access, project_group_link)
+ .and_return(can_manage_group_link_with_owner_access)
+ end
+
+ it "exposes `can_update` as `#{params[:expected_can_update]}`" do
+ expect(entity.as_json[:can_update]).to be expected_can_update
+ end
+ end
end
- it 'exposes `can_remove` as true' do
- expect(as_json[:can_remove]).to be true
+ describe 'can_remove' do
+ context 'when current user has `destroy_project_group_link` ability' do
+ before do
+ allow(entity)
+ .to receive(:can?)
+ .with(current_user, :destroy_project_group_link, project_group_link)
+ .and_return(true)
+ end
+
+ it 'exposes `can_remove` as `true`' do
+ expect(entity.as_json[:can_remove]).to be(true)
+ end
+ end
+
+ context 'when current user does not have `destroy_project_group_link` ability' do
+ before do
+ allow(entity)
+ .to receive(:can?)
+ .with(current_user, :destroy_project_group_link, project_group_link)
+ .and_return(false)
+ end
+
+ it 'exposes `can_remove` as `false`' do
+ expect(entity.as_json[:can_remove]).to be(false)
+ end
+ end
end
end
- context 'when current user is not a group owner' do
- it 'exposes `can_remove` as false' do
- expect(as_json[:can_remove]).to be false
+ context 'when current user is not a direct member' do
+ before do
+ allow(entity).to receive(:direct_member?).and_return(false)
end
+ it 'exposes `can_update` and `can_remove` as `false`' do
+ json = entity.as_json
+
+ expect(json[:can_update]).to be false
+ expect(json[:can_remove]).to be false
+ end
+ end
+
+ context 'when current user is not a project member' do
context 'when group is public' do
it 'does expose shared_with_group details' do
expect(as_json[:shared_with_group].keys).to include(:id, :avatar_url, :web_url, :name)
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 6b80609c348..171f2324cf1 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -22,6 +22,9 @@ RSpec.describe MergeRequestPollWidgetEntity do
.to eq(resource.default_merge_commit_message(include_description: true))
end
+ it { is_expected.to include(ff_only_enabled: false) }
+ it { is_expected.to include(ff_merge_possible: false) }
+
describe 'new_blob_path' do
context 'when user can push to project' do
it 'returns path' do
diff --git a/spec/serializers/personal_access_token_entity_spec.rb b/spec/serializers/personal_access_token_entity_spec.rb
index 8a77a4e0036..817d112d15f 100644
--- a/spec/serializers/personal_access_token_entity_spec.rb
+++ b/spec/serializers/personal_access_token_entity_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe PersonalAccessTokenEntity do
it 'has the correct attributes' do
expected_revoke_path = Gitlab::Routing.url_helpers
- .revoke_profile_personal_access_token_path(
+ .revoke_user_settings_personal_access_token_path(
{ id: token })
expect(json).to(
diff --git a/spec/services/activity_pub/projects/releases_follow_service_spec.rb b/spec/services/activity_pub/projects/releases_follow_service_spec.rb
new file mode 100644
index 00000000000..6d0d400b9c6
--- /dev/null
+++ b/spec/services/activity_pub/projects/releases_follow_service_spec.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::Projects::ReleasesFollowService, feature_category: :release_orchestration do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be_with_reload(:existing_subscription) { create(:activity_pub_releases_subscription, project: project) }
+
+ describe '#execute' do
+ let(:service) { described_class.new(project, payload) }
+ let(:payload) { nil }
+
+ before do
+ allow(ActivityPub::Projects::ReleasesSubscriptionWorker).to receive(:perform_async)
+ end
+
+ context 'with a valid payload' do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/new-actor#follow-1',
+ type: 'Follow',
+ actor: actor,
+ object: 'https://localhost/our/project/-/releases'
+ }.with_indifferent_access
+ end
+
+ let(:actor) { 'https://example.com/new-actor' }
+
+ context 'when there is no subscription for that actor' do
+ before do
+ allow(ActivityPub::ReleasesSubscription).to receive(:find_by_project_and_subscriber).and_return(nil)
+ end
+
+ it 'sets the subscriber url' do
+ service.execute
+ expect(ActivityPub::ReleasesSubscription.last.subscriber_url).to eq 'https://example.com/new-actor'
+ end
+
+ it 'sets the payload' do
+ service.execute
+ expect(ActivityPub::ReleasesSubscription.last.payload).to eq payload
+ end
+
+ it 'sets the project' do
+ service.execute
+ expect(ActivityPub::ReleasesSubscription.last.project_id).to eq project.id
+ end
+
+ it 'saves the subscription' do
+ expect { service.execute }.to change { ActivityPub::ReleasesSubscription.count }.by(1)
+ end
+
+ it 'queues the subscription job' do
+ service.execute
+ expect(ActivityPub::Projects::ReleasesSubscriptionWorker).to have_received(:perform_async)
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+ end
+
+ context 'when there is already a subscription for that actor' do
+ before do
+ allow(ActivityPub::ReleasesSubscription).to receive(:find_by_project_and_subscriber) { existing_subscription }
+ end
+
+ it 'does not save the subscription' do
+ expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count }
+ end
+
+ it 'does not queue the subscription job' do
+ service.execute
+ expect(ActivityPub::Projects::ReleasesSubscriptionWorker).not_to have_received(:perform_async)
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+ end
+ end
+
+ shared_examples 'invalid follow request' do
+ it 'does not save the subscription' do
+ expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count }
+ end
+
+ it 'does not queue the subscription job' do
+ service.execute
+ expect(ActivityPub::Projects::ReleasesSubscriptionWorker).not_to have_received(:perform_async)
+ end
+
+ it 'sets an error' do
+ service.execute
+ expect(service.errors).not_to be_empty
+ end
+
+ it 'returns false' do
+ expect(service.execute).to be_falsey
+ end
+ end
+
+ context 'when actor is missing' do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/new-actor',
+ type: 'Follow',
+ object: 'https://localhost/our/project/-/releases'
+ }.with_indifferent_access
+ end
+
+ it_behaves_like 'invalid follow request'
+ end
+
+ context 'when actor is an object with no id attribute' do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/new-actor',
+ type: 'Follow',
+ actor: { type: 'Person' },
+ object: 'https://localhost/our/project/-/releases'
+ }.with_indifferent_access
+ end
+
+ it_behaves_like 'invalid follow request'
+ end
+
+ context 'when actor is neither a string nor an object' do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/new-actor',
+ type: 'Follow',
+ actor: 27.13,
+ object: 'https://localhost/our/project/-/releases'
+ }.with_indifferent_access
+ end
+
+ it_behaves_like 'invalid follow request'
+ end
+ end
+end
diff --git a/spec/services/activity_pub/projects/releases_unfollow_service_spec.rb b/spec/services/activity_pub/projects/releases_unfollow_service_spec.rb
new file mode 100644
index 00000000000..c732d82a2ad
--- /dev/null
+++ b/spec/services/activity_pub/projects/releases_unfollow_service_spec.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::Projects::ReleasesUnfollowService, feature_category: :release_orchestration do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be_with_reload(:existing_subscription) { create(:activity_pub_releases_subscription, project: project) }
+
+ describe '#execute' do
+ let(:service) { described_class.new(project, payload) }
+ let(:payload) { nil }
+
+ context 'with a valid payload' do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/new-actor#unfollow-1',
+ type: 'Undo',
+ actor: actor,
+ object: {
+ id: 'https://example.com/new-actor#follow-1',
+ type: 'Follow',
+ actor: actor,
+ object: 'https://localhost/our/project/-/releases'
+ }
+ }.with_indifferent_access
+ end
+
+ let(:actor) { existing_subscription.subscriber_url }
+
+ context 'when there is a subscription for this actor' do
+ it 'deletes the subscription' do
+ service.execute
+ expect(ActivityPub::ReleasesSubscription.where(id: existing_subscription.id).first).to be_nil
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+ end
+
+ context 'when there is no subscription for this actor' do
+ before do
+ allow(ActivityPub::ReleasesSubscription).to receive(:find_by_project_and_subscriber).and_return(nil)
+ end
+
+ it 'does not delete anything' do
+ expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count }
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+ end
+ end
+
+ shared_examples 'invalid unfollow request' do
+ it 'does not delete anything' do
+ expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count }
+ end
+
+ it 'sets an error' do
+ service.execute
+ expect(service.errors).not_to be_empty
+ end
+
+ it 'returns false' do
+ expect(service.execute).to be_falsey
+ end
+ end
+
+ context 'when actor is missing' do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/new-actor#unfollow-1',
+ type: 'Undo',
+ object: {
+ id: 'https://example.com/new-actor#follow-1',
+ type: 'Follow',
+ object: 'https://localhost/our/project/-/releases'
+ }
+ }.with_indifferent_access
+ end
+
+ it_behaves_like 'invalid unfollow request'
+ end
+
+ context 'when actor is an object with no id attribute' do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/new-actor#unfollow-1',
+ actor: { type: 'Person' },
+ type: 'Undo',
+ object: {
+ id: 'https://example.com/new-actor#follow-1',
+ type: 'Follow',
+ actor: { type: 'Person' },
+ object: 'https://localhost/our/project/-/releases'
+ }
+ }.with_indifferent_access
+ end
+
+ it_behaves_like 'invalid unfollow request'
+ end
+
+ context 'when actor is neither a string nor an object' do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/new-actor#unfollow-1',
+ actor: 27.13,
+ type: 'Undo',
+ object: {
+ id: 'https://example.com/new-actor#follow-1',
+ type: 'Follow',
+ actor: 27.13,
+ object: 'https://localhost/our/project/-/releases'
+ }
+ }.with_indifferent_access
+ end
+
+ it_behaves_like 'invalid unfollow request'
+ end
+
+ context "when actor tries to delete someone else's subscription" do
+ let(:payload) do
+ {
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: 'https://example.com/actor#unfollow-1',
+ type: 'Undo',
+ actor: 'https://example.com/nasty-actor',
+ object: {
+ id: 'https://example.com/actor#follow-1',
+ type: 'Follow',
+ actor: existing_subscription.subscriber_url,
+ object: 'https://localhost/our/project/-/releases'
+ }
+ }.with_indifferent_access
+ end
+
+ it 'does not delete anything' do
+ expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count }
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+ end
+ end
+end
diff --git a/spec/services/auth/dependency_proxy_authentication_service_spec.rb b/spec/services/auth/dependency_proxy_authentication_service_spec.rb
index 3ef9c8fc96e..e81f59cff39 100644
--- a/spec/services/auth/dependency_proxy_authentication_service_spec.rb
+++ b/spec/services/auth/dependency_proxy_authentication_service_spec.rb
@@ -4,15 +4,17 @@ require 'spec_helper'
RSpec.describe Auth::DependencyProxyAuthenticationService, feature_category: :dependency_proxy do
let_it_be(:user) { create(:user) }
+ let_it_be(:params) { {} }
- let(:service) { described_class.new(nil, user) }
+ let(:authentication_abilities) { nil }
+ let(:service) { described_class.new(nil, user, params) }
before do
- stub_config(dependency_proxy: { enabled: true })
+ stub_config(dependency_proxy: { enabled: true }, registry: { enabled: true })
end
describe '#execute' do
- subject { service.execute(authentication_abilities: nil) }
+ subject { service.execute(authentication_abilities: authentication_abilities) }
shared_examples 'returning' do |status:, message:|
it "returns #{message}", :aggregate_failures do
@@ -21,9 +23,13 @@ RSpec.describe Auth::DependencyProxyAuthenticationService, feature_category: :de
end
end
- shared_examples 'returning a token' do
- it 'returns a token' do
- expect(subject[:token]).not_to be_nil
+ shared_examples 'returning a token with an encoded field' do |field|
+ it 'returns a token with encoded field' do
+ token = subject[:token]
+ expect(token).not_to be_nil
+
+ decoded_token = decode(token)
+ expect(decoded_token[field]).not_to be_nil
end
end
@@ -41,14 +47,73 @@ RSpec.describe Auth::DependencyProxyAuthenticationService, feature_category: :de
it_behaves_like 'returning', status: 403, message: 'access forbidden'
end
- context 'with a deploy token as user' do
- let_it_be(:user) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+ context 'with a deploy token' do
+ let_it_be(:deploy_token) { create(:deploy_token, :group, :dependency_proxy_scopes) }
+ let_it_be(:params) { { deploy_token: deploy_token } }
+
+ it_behaves_like 'returning a token with an encoded field', 'deploy_token'
+ end
+
+ context 'with a human user' do
+ it_behaves_like 'returning a token with an encoded field', 'user_id'
+ end
+
+ context 'all other user types' do
+ User::USER_TYPES.except(:human, :project_bot).each_value do |user_type|
+ context "with user_type #{user_type}" do
+ before do
+ user.update!(user_type: user_type)
+ end
+
+ it_behaves_like 'returning a token with an encoded field', 'user_id'
+ end
+ end
+ end
+
+ context 'with a group access token' do
+ let_it_be(:user) { create(:user, :project_bot) }
+ let_it_be_with_reload(:token) { create(:personal_access_token, user: user) }
+
+ context 'with insufficient authentication abilities' do
+ it_behaves_like 'returning', status: 403, message: 'access forbidden'
- it_behaves_like 'returning a token'
+ context 'packages_dependency_proxy_containers_scope_check disabled' do
+ before do
+ stub_feature_flags(packages_dependency_proxy_containers_scope_check: false)
+ end
+
+ it_behaves_like 'returning a token with an encoded field', 'user_id'
+ end
+ end
+
+ context 'with sufficient authentication abilities' do
+ let_it_be(:authentication_abilities) { Auth::DependencyProxyAuthenticationService::REQUIRED_ABILITIES }
+ let_it_be(:params) { { raw_token: token.token } }
+
+ subject { service.execute(authentication_abilities: authentication_abilities) }
+
+ it_behaves_like 'returning a token with an encoded field', 'user_id'
+
+ context 'revoked' do
+ before do
+ token.revoke!
+ end
+
+ it_behaves_like 'returning', status: 403, message: 'access forbidden'
+ end
+
+ context 'expired' do
+ before do
+ token.update_column(:expires_at, 1.day.ago)
+ end
+
+ it_behaves_like 'returning', status: 403, message: 'access forbidden'
+ end
+ end
end
- context 'with a user' do
- it_behaves_like 'returning a token'
+ def decode(token)
+ DependencyProxy::AuthTokenService.new(token).execute
end
end
end
diff --git a/spec/services/bulk_imports/batched_relation_export_service_spec.rb b/spec/services/bulk_imports/batched_relation_export_service_spec.rb
index dd85961befd..cb356b90c61 100644
--- a/spec/services/bulk_imports/batched_relation_export_service_spec.rb
+++ b/spec/services/bulk_imports/batched_relation_export_service_spec.rb
@@ -57,6 +57,16 @@ RSpec.describe BulkImports::BatchedRelationExportService, feature_category: :imp
expect(export.batches.count).to eq(11)
end
end
+
+ context 'when an error occurs during batches creation' do
+ it 'does not enqueue FinishBatchedRelationExportWorker' do
+ allow(service).to receive(:enqueue_batch_exports).and_raise(StandardError)
+
+ expect(BulkImports::FinishBatchedRelationExportWorker).not_to receive(:perform_async)
+
+ expect { service.execute }.to raise_error(StandardError)
+ end
+ end
end
context 'when there are no batches to export' do
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index 20872623802..024e7a0aa44 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -123,7 +123,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
)
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:validate_import_scopes!).and_raise(BulkImports::Error.scope_validation_failure)
+ allow(client).to receive(:validate_import_scopes!)
+ .and_raise(BulkImports::Error.scope_or_url_validation_failure)
end
result = subject.execute
@@ -132,8 +133,7 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
expect(result).to be_error
expect(result.message)
.to eq(
- "Personal access token does not " \
- "have the required 'api' scope or is no longer valid."
+ "Check that the source instance base URL and the personal access token meet the necessary requirements."
)
end
end
@@ -546,7 +546,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
- .to eq("Import failed. Destination 'destination-namespace' is invalid, or you don't have permission.")
+ .to eq("Import failed. Destination 'destination-namespace' is invalid, " \
+ "or you don't have permission.")
end
end
@@ -571,7 +572,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
- .to eq("Import failed. Destination '#{parent_group.path}' is invalid, or you don't have permission.")
+ .to eq("Import failed. Destination '#{parent_group.path}' is invalid, " \
+ "or you don't have permission.")
end
end
@@ -596,7 +598,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
- .to eq("Import failed. Destination '#{parent_group.path}' is invalid, or you don't have permission.")
+ .to eq("Import failed. Destination '#{parent_group.path}' is invalid, " \
+ "or you don't have permission.")
end
end
end
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index b2971c75bce..0c3eef69fa5 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -22,9 +22,10 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
}
end
- let(:chunk_size) { 100 }
let(:chunk_code) { 200 }
- let(:chunk_double) { double('chunk', size: chunk_size, code: chunk_code, http_response: double(to_hash: headers)) }
+ let(:chunk_double) do
+ double('chunk', size: 100, code: chunk_code, http_response: double(to_hash: headers), to_s: 'some chunk context')
+ end
subject(:service) do
described_class.new(
@@ -92,7 +93,9 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
it 'logs and raises an error' do
expect(import_logger).to receive(:warn).once.with(
message: 'Invalid content type',
- response_headers: headers
+ response_code: chunk_code,
+ response_headers: headers,
+ last_chunk_context: 'some chunk context'
)
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content type')
@@ -145,15 +148,26 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
end
context 'when chunk code is not 200' do
- let(:chunk_code) { 500 }
+ let(:chunk_code) { 404 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
- 'File download error 500'
+ 'File download error 404'
)
end
+ context 'when chunk code is retriable' do
+ let(:chunk_code) { 502 }
+
+ it 'raises a retriable error' do
+ expect { subject.execute }.to raise_error(
+ BulkImports::NetworkError,
+ 'Error downloading file from /test. Error code: 502'
+ )
+ end
+ end
+
context 'when chunk code is redirection' do
let(:chunk_code) { 303 }
diff --git a/spec/services/bulk_imports/process_service_spec.rb b/spec/services/bulk_imports/process_service_spec.rb
index f5566819039..a295b170c2f 100644
--- a/spec/services/bulk_imports/process_service_spec.rb
+++ b/spec/services/bulk_imports/process_service_spec.rb
@@ -205,28 +205,20 @@ RSpec.describe BulkImports::ProcessService, feature_category: :importers do
it 'logs an info message for the skipped pipelines' do
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original.twice
+
expect(logger).to receive(:info).with(
message: 'Pipeline skipped as source instance version not compatible with pipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
pipeline_class: 'PipelineClass4',
minimum_source_version: '15.1.0',
- maximum_source_version: nil,
- source_version: '15.0.0'
+ maximum_source_version: nil
)
expect(logger).to receive(:info).with(
message: 'Pipeline skipped as source instance version not compatible with pipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
pipeline_class: 'PipelineClass5',
minimum_source_version: '16.0.0',
- maximum_source_version: nil,
- source_version: '15.0.0'
+ maximum_source_version: nil
)
end
diff --git a/spec/services/ci/catalog/resources/destroy_service_spec.rb b/spec/services/ci/catalog/resources/destroy_service_spec.rb
new file mode 100644
index 00000000000..da5ba7ad0bc
--- /dev/null
+++ b/spec/services/ci/catalog/resources/destroy_service_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Catalog::Resources::DestroyService, feature_category: :pipeline_composition do
+ let_it_be(:project) { create(:project, :catalog_resource_with_components) }
+ let_it_be(:catalog_resource) { create(:ci_catalog_resource, project: project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:service) { described_class.new(project, user) }
+
+ before do
+ stub_licensed_features(ci_namespace_catalog: true)
+ end
+
+ describe '#execute' do
+ context 'with an unauthorized user' do
+ it 'raises an AccessDeniedError' do
+ expect { service.execute(catalog_resource) }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
+
+ context 'with an authorized user' do
+ before_all do
+ project.add_owner(user)
+ end
+
+ it 'destroys a catalog resource' do
+ expect(project.catalog_resource).to eq(catalog_resource)
+
+ response = service.execute(catalog_resource)
+
+ expect(project.reload.catalog_resource).to be_nil
+ expect(response.status).to be(:success)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/components/fetch_service_spec.rb b/spec/services/ci/components/fetch_service_spec.rb
index 21b7df19f4a..82be795e997 100644
--- a/spec/services/ci/components/fetch_service_spec.rb
+++ b/spec/services/ci/components/fetch_service_spec.rb
@@ -21,14 +21,15 @@ RSpec.describe Ci::Components::FetchService, feature_category: :pipeline_composi
project = create(
:project, :custom_repo,
files: {
- 'template.yml' => content,
- 'my-component/template.yml' => content,
- 'my-dir/my-component/template.yml' => content
+ 'templates/first-component.yml' => content,
+ 'templates/second-component/template.yml' => content
}
)
project.repository.add_tag(project.creator, 'v0.1', project.repository.commit.sha)
+ create(:release, project: project, tag: 'v0.1', sha: project.repository.commit.sha)
+
project
end
@@ -119,32 +120,27 @@ RSpec.describe Ci::Components::FetchService, feature_category: :pipeline_composi
context 'when address points to an external component' do
let(:address) { "#{current_host}/#{component_path}@#{version}" }
- context 'when component path is the full path to a project' do
- let(:component_path) { project.full_path }
- let(:component_yaml_path) { 'template.yml' }
+ context 'when component path points to a template file in a project' do
+ let(:component_path) { "#{project.full_path}/first-component" }
it_behaves_like 'an external component'
end
- context 'when component path points to a directory in a project' do
- let(:component_path) { "#{project.full_path}/my-component" }
- let(:component_yaml_path) { 'my-component/template.yml' }
+ context 'when component path points to a template directory in a project' do
+ let(:component_path) { "#{project.full_path}/second-component" }
it_behaves_like 'an external component'
end
- context 'when component path points to a nested directory in a project' do
- let(:component_path) { "#{project.full_path}/my-dir/my-component" }
- let(:component_yaml_path) { 'my-dir/my-component/template.yml' }
+ context 'when the project exists but the component does not' do
+ let(:component_path) { "#{project.full_path}/unknown-component" }
+ let(:version) { '~latest' }
- it_behaves_like 'an external component'
+ it 'returns a content not found error' do
+ expect(result).to be_error
+ expect(result.reason).to eq(:content_not_found)
+ end
end
end
end
-
- def stub_project_blob(ref, path, content)
- allow_next_instance_of(Repository) do |instance|
- allow(instance).to receive(:blob_data_at).with(ref, path).and_return(content)
- end
- end
end
diff --git a/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb b/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
new file mode 100644
index 00000000000..851c6f8fbea
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness,
+ feature_category: :pipeline_composition do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.first_owner }
+
+ let(:service) { described_class.new(project, user, { ref: 'master' }) }
+ let(:pipeline) { service.execute(:push).payload }
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ describe 'on_new_commit' do
+ context 'when is set to interruptible' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_new_commit: interruptible
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates a pipeline with on_new_commit' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ end
+ end
+
+ context 'when is set to invalid' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_new_commit: invalid
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates a pipeline with errors' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors.full_messages).to include(
+ 'workflow:auto_cancel on new commit must be one of: conservative, interruptible, disabled')
+ end
+ end
+ end
+
+ describe 'on_job_failure' do
+ context 'when is set to none' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_job_failure: none
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates a pipeline with on_job_failure' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ end
+ end
+
+ context 'when is set to all' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_job_failure: all
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates a pipeline with on_job_failure' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ end
+
+ context 'when auto_cancel_pipeline_on_job_failure feature flag is disabled' do
+ before do
+ stub_feature_flags(auto_cancel_pipeline_on_job_failure: false)
+ end
+
+ context 'when there are no other metadata settings present' do
+ it 'creates a pipeline without metadata' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata).to be_nil
+ end
+ end
+
+ context 'when other metadata settings are present' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ name: pipeline_name
+ auto_cancel:
+ on_job_failure: all
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ it 'creates a pipeline with on_job_failure' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ end
+ end
+ end
+ end
+
+ context 'when on_job_failure is set to invalid' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_job_failure: invalid
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates a pipeline with errors' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors.full_messages).to include(
+ 'workflow:auto_cancel on job failure must be one of: none, all')
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 19e55c22df8..7dea50ba270 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -1757,7 +1757,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:sha) do
components_project.repository.create_file(
user,
- 'my-component/template.yml',
+ 'templates/my-component/template.yml',
template,
message: 'Add my first CI component',
branch_name: 'master'
@@ -1894,7 +1894,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:sha) do
components_project.repository.create_file(
user,
- 'my-component/template.yml',
+ 'templates/my-component/template.yml',
template,
message: 'Add my first CI component',
branch_name: 'master'
diff --git a/spec/services/ci/generate_coverage_reports_service_spec.rb b/spec/services/ci/generate_coverage_reports_service_spec.rb
index 811431bf9d6..dac8dc57261 100644
--- a/spec/services/ci/generate_coverage_reports_service_spec.rb
+++ b/spec/services/ci/generate_coverage_reports_service_spec.rb
@@ -25,6 +25,21 @@ RSpec.describe Ci::GenerateCoverageReportsService, feature_category: :code_testi
expect(subject[:status]).to eq(:parsed)
expect(subject[:data]).to eq(files: {})
end
+
+ context 'when there is a parsing error' do
+ before do
+ allow_next_found_instance_of(MergeRequest) do |merge_request|
+ allow(merge_request).to receive(:new_paths).and_raise(StandardError)
+ end
+ end
+
+ it 'returns status with error message and tracks the error' do
+ expect(service).to receive(:track_exception).and_call_original
+
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:status_reason]).to include('An error occurred while fetching coverage reports.')
+ end
+ end
end
context 'when head pipeline does not have a coverage report artifact' do
@@ -38,6 +53,8 @@ RSpec.describe Ci::GenerateCoverageReportsService, feature_category: :code_testi
end
it 'returns status and error message' do
+ expect(service).not_to receive(:track_exception)
+
expect(subject[:status]).to eq(:error)
expect(subject[:status_reason]).to include('An error occurred while fetching coverage reports.')
end
@@ -48,6 +65,8 @@ RSpec.describe Ci::GenerateCoverageReportsService, feature_category: :code_testi
let!(:base_pipeline) { nil }
it 'returns status and error message' do
+ expect(service).not_to receive(:track_exception)
+
expect(subject[:status]).to eq(:error)
expect(subject[:status_reason]).to include('An error occurred while fetching coverage reports.')
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index a23ba250daf..0d6a15b0ea3 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -139,59 +139,19 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state
shared_examples_for 'handling accessibility' do
shared_examples 'public accessibility' do
it 'sets accessibility to public level' do
+ subject
+
+ expect(job.job_artifacts).not_to be_empty
expect(job.job_artifacts).to all be_public_accessibility
end
end
shared_examples 'private accessibility' do
it 'sets accessibility to private level' do
- expect(job.job_artifacts).to all be_private_accessibility
- end
- end
-
- context 'when non_public_artifacts flag is disabled' do
- before do
- stub_feature_flags(non_public_artifacts: false)
- end
-
- it_behaves_like 'public accessibility'
- end
-
- context 'when non_public_artifacts flag is enabled' do
- context 'and accessibility is defined in the params' do
- context 'and is passed as private' do
- before do
- params.merge!('accessibility' => 'private')
- end
-
- it_behaves_like 'private accessibility'
- end
-
- context 'and is passed as public' do
- before do
- params.merge!('accessibility' => 'public')
- end
-
- it_behaves_like 'public accessibility'
- end
- end
-
- context 'and accessibility is not defined in the params' do
- context 'and job has no public artifacts defined in its CI config' do
- it_behaves_like 'public accessibility'
- end
-
- context 'and job artifacts defined as private in the CI config' do
- let(:job) { create(:ci_build, :with_private_artifacts_config, project: project) }
-
- it_behaves_like 'private accessibility'
- end
-
- context 'and job artifacts defined as public in the CI config' do
- let(:job) { create(:ci_build, :with_public_artifacts_config, project: project) }
+ subject
- it_behaves_like 'public accessibility'
- end
+ expect(job.job_artifacts).not_to be_empty
+ expect(job.job_artifacts).to all be_private_accessibility
end
end
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index a5dda1d13aa..0d83187f9e4 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -207,12 +207,12 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
it 'does not cancel any builds' do
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
+ expect(build_statuses(parent_pipeline)).to contain_exactly('created', 'running', 'running')
execute
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
+ expect(build_statuses(parent_pipeline)).to contain_exactly('created', 'running', 'running')
end
end
@@ -227,6 +227,25 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
+ context 'when there are trigger jobs' do
+ before do
+ create(:ci_bridge, :created, pipeline: prev_pipeline)
+ create(:ci_bridge, :running, pipeline: prev_pipeline)
+ create(:ci_bridge, :success, pipeline: prev_pipeline)
+ create(:ci_bridge, :interruptible, :created, pipeline: prev_pipeline)
+ create(:ci_bridge, :interruptible, :running, pipeline: prev_pipeline)
+ create(:ci_bridge, :interruptible, :success, pipeline: prev_pipeline)
+ end
+
+ it 'still cancels the pipeline because auto-cancel is not affected by non-interruptible started triggers' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'canceled', 'success', 'canceled', 'canceled', 'canceled', 'success', 'canceled', 'canceled', 'success')
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
it 'does not cancel future pipelines' do
expect(prev_pipeline.id).to be < pipeline.id
expect(build_statuses(pipeline)).to contain_exactly('pending')
@@ -269,7 +288,8 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
private
- def build_statuses(pipeline)
- pipeline.builds.pluck(:status)
+ def job_statuses(pipeline)
+ pipeline.statuses.pluck(:status)
end
+ alias_method :build_statuses, :job_statuses
end
diff --git a/spec/services/ci/process_sync_events_service_spec.rb b/spec/services/ci/process_sync_events_service_spec.rb
index c58d73815b0..cea5eec294e 100644
--- a/spec/services/ci/process_sync_events_service_spec.rb
+++ b/spec/services/ci/process_sync_events_service_spec.rb
@@ -163,5 +163,70 @@ RSpec.describe Ci::ProcessSyncEventsService, feature_category: :continuous_integ
execute
end
end
+
+ context 'for Ci::Catalog::Resources::SyncEvent' do
+ let(:sync_event_class) { Ci::Catalog::Resources::SyncEvent }
+ let(:hierarchy_class) { Ci::Catalog::Resource }
+
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be_with_refind(:resource1) { create(:ci_catalog_resource, project: project1) }
+ let_it_be(:resource2) { create(:ci_catalog_resource, project: project2) }
+
+ before_all do
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource1, status: :processed)
+ # PG trigger adds an event for each update
+ project1.update!(name: 'Name 1', description: 'Test 1')
+ project1.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ project2.update!(name: 'Name 2', description: 'Test 2')
+ project2.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ it 'processes the events', :aggregate_failures do
+ # 2 pending events from resource1 + 2 pending events from resource2
+ expect { execute }.to change(Ci::Catalog::Resources::SyncEvent.status_pending, :count).from(4).to(0)
+
+ expect(resource1.reload.name).to eq(project1.name)
+ expect(resource2.reload.name).to eq(project2.name)
+ expect(resource1.reload.description).to eq(project1.description)
+ expect(resource2.reload.description).to eq(project2.description)
+ expect(resource1.reload.visibility_level).to eq(project1.visibility_level)
+ expect(resource2.reload.visibility_level).to eq(project2.visibility_level)
+ end
+
+ context 'when there are no remaining unprocessed events' do
+ it 'does not enqueue Ci::Catalog::Resources::ProcessSyncEventsWorker' do
+ stub_const("#{described_class}::BATCH_SIZE", 4)
+
+ expect(Ci::Catalog::Resources::ProcessSyncEventsWorker).not_to receive(:perform_async)
+
+ execute
+ end
+ end
+
+ context 'when there are remaining unprocessed events' do
+ it 'enqueues Ci::Catalog::Resources::ProcessSyncEventsWorker' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ expect(Ci::Catalog::Resources::ProcessSyncEventsWorker).to receive(:perform_async)
+
+ execute
+ end
+ end
+
+ # The `p_catalog_resource_sync_events` table does not enforce an FK on catalog_resource_id
+ context 'when there are orphaned sync events' do
+ it 'processes the events', :aggregate_failures do
+ resource1.destroy!
+
+ # 2 pending events from resource1 + 2 pending events from resource2
+ expect { execute }.to change(Ci::Catalog::Resources::SyncEvent.status_pending, :count).from(4).to(0)
+
+ expect(resource2.reload.name).to eq(project2.name)
+ expect(resource2.reload.description).to eq(project2.description)
+ expect(resource2.reload.visibility_level).to eq(project2.visibility_level)
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/runners/assign_runner_service_spec.rb b/spec/services/ci/runners/assign_runner_service_spec.rb
index 00fbb5e2d26..eb0b7478ad3 100644
--- a/spec/services/ci/runners/assign_runner_service_spec.rb
+++ b/spec/services/ci/runners/assign_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute', feature_category: :fleet_visibility do
subject(:execute) { described_class.new(runner, new_project, user).execute }
let_it_be(:owner_group) { create(:group) }
diff --git a/spec/services/ci/runners/bulk_delete_runners_service_spec.rb b/spec/services/ci/runners/bulk_delete_runners_service_spec.rb
index 5e697565972..b57cae00867 100644
--- a/spec/services/ci/runners/bulk_delete_runners_service_spec.rb
+++ b/spec/services/ci/runners/bulk_delete_runners_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::BulkDeleteRunnersService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::BulkDeleteRunnersService, '#execute', feature_category: :fleet_visibility do
subject(:execute) { described_class.new(**service_args).execute }
let_it_be(:admin_user) { create(:user, :admin) }
diff --git a/spec/services/ci/runners/create_runner_service_spec.rb b/spec/services/ci/runners/create_runner_service_spec.rb
index db337b0b005..eaba7b9e4db 100644
--- a/spec/services/ci/runners/create_runner_service_spec.rb
+++ b/spec/services/ci/runners/create_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::CreateRunnerService, "#execute", feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::CreateRunnerService, "#execute", feature_category: :fleet_visibility do
subject(:execute) { described_class.new(user: current_user, params: params).execute }
let(:runner) { execute.payload[:runner] }
diff --git a/spec/services/ci/runners/process_runner_version_update_service_spec.rb b/spec/services/ci/runners/process_runner_version_update_service_spec.rb
index f8b7aa281af..cc8df6579d4 100644
--- a/spec/services/ci/runners/process_runner_version_update_service_spec.rb
+++ b/spec/services/ci/runners/process_runner_version_update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateService, feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateService, feature_category: :fleet_visibility do
subject(:service) { described_class.new(version) }
let(:version) { '1.0.0' }
diff --git a/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb b/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
index 8d7e97e5ea8..88f0a930599 100644
--- a/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
+++ b/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute', feature_category: :fleet_visibility do
include RunnerReleasesHelper
subject(:execute) { described_class.new.execute }
diff --git a/spec/services/ci/runners/register_runner_service_spec.rb b/spec/services/ci/runners/register_runner_service_spec.rb
index 4b997855657..aabf30d975a 100644
--- a/spec/services/ci/runners/register_runner_service_spec.rb
+++ b/spec/services/ci/runners/register_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_category: :fleet_visibility do
let(:registration_token) { 'abcdefg123456' }
let(:token) {}
let(:args) { {} }
diff --git a/spec/services/ci/runners/reset_registration_token_service_spec.rb b/spec/services/ci/runners/reset_registration_token_service_spec.rb
index c8115236034..68faa9fa387 100644
--- a/spec/services/ci/runners/reset_registration_token_service_spec.rb
+++ b/spec/services/ci/runners/reset_registration_token_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::ResetRegistrationTokenService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::ResetRegistrationTokenService, '#execute', feature_category: :fleet_visibility do
subject(:execute) { described_class.new(scope, current_user).execute }
let_it_be(:user) { build(:user) }
diff --git a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
index 8d612174a0b..b617cb0a006 100644
--- a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
+++ b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute', feature_category: :fleet_visibility do
subject(:execute) do
described_class.new(runner: runner, current_user: user, project_ids: new_projects.map(&:id)).execute
end
diff --git a/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb b/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb
index 0a20c12bc15..4931f24d5d8 100644
--- a/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb
+++ b/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::StaleManagersCleanupService, feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::StaleManagersCleanupService, feature_category: :fleet_visibility do
let(:service) { described_class.new }
let!(:runner_manager3) { create(:ci_runner_machine, created_at: 6.months.ago, contacted_at: Time.current) }
diff --git a/spec/services/ci/runners/unassign_runner_service_spec.rb b/spec/services/ci/runners/unassign_runner_service_spec.rb
index e91d4249473..99cf087cf78 100644
--- a/spec/services/ci/runners/unassign_runner_service_spec.rb
+++ b/spec/services/ci/runners/unassign_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::UnassignRunnerService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::UnassignRunnerService, '#execute', feature_category: :fleet_visibility do
let_it_be(:project) { create(:project) }
let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
diff --git a/spec/services/ci/runners/unregister_runner_manager_service_spec.rb b/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
index 8bfda8e2083..590df18469d 100644
--- a/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
+++ b/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', feature_category: :fleet_visibility do
subject(:execute) { described_class.new(runner, 'some_token', system_id: system_id).execute }
context 'with runner registered with registration token' do
diff --git a/spec/services/ci/runners/unregister_runner_service_spec.rb b/spec/services/ci/runners/unregister_runner_service_spec.rb
index fb779e1a673..e73dcb2511e 100644
--- a/spec/services/ci/runners/unregister_runner_service_spec.rb
+++ b/spec/services/ci/runners/unregister_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::UnregisterRunnerService, '#execute', feature_category: :runner_fleet do
+RSpec.describe ::Ci::Runners::UnregisterRunnerService, '#execute', feature_category: :fleet_visibility do
subject(:execute) { described_class.new(runner, 'some_token').execute }
let(:runner) { create(:ci_runner) }
diff --git a/spec/services/ci/runners/update_runner_service_spec.rb b/spec/services/ci/runners/update_runner_service_spec.rb
index 86875df70a2..9483d122c35 100644
--- a/spec/services/ci/runners/update_runner_service_spec.rb
+++ b/spec/services/ci/runners/update_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::UpdateRunnerService, '#execute', feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::UpdateRunnerService, '#execute', feature_category: :fleet_visibility do
subject(:execute) { described_class.new(runner).execute(params) }
let(:runner) { create(:ci_runner) }
diff --git a/spec/services/ci/stuck_builds/drop_pending_service_spec.rb b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
index 9da63930057..b3045d838a1 100644
--- a/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
+++ b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::StuckBuilds::DropPendingService, feature_category: :runner_fleet do
+RSpec.describe Ci::StuckBuilds::DropPendingService, feature_category: :fleet_visibility do
let_it_be(:runner) { create(:ci_runner) }
let_it_be(:pipeline) { create(:ci_empty_pipeline) }
let_it_be_with_reload(:job) do
diff --git a/spec/services/ci/stuck_builds/drop_running_service_spec.rb b/spec/services/ci/stuck_builds/drop_running_service_spec.rb
index c2f8a643f24..74b02240ea5 100644
--- a/spec/services/ci/stuck_builds/drop_running_service_spec.rb
+++ b/spec/services/ci/stuck_builds/drop_running_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::StuckBuilds::DropRunningService, feature_category: :runner_fleet do
+RSpec.describe Ci::StuckBuilds::DropRunningService, feature_category: :fleet_visibility do
let!(:runner) { create :ci_runner }
let!(:job) { create(:ci_build, runner: runner, created_at: created_at, updated_at: updated_at, status: status) }
diff --git a/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb b/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
index 5560eaf9b40..5a95b55054f 100644
--- a/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
+++ b/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::StuckBuilds::DropScheduledService, feature_category: :runner_fleet do
+RSpec.describe Ci::StuckBuilds::DropScheduledService, feature_category: :fleet_visibility do
let_it_be(:runner) { create :ci_runner }
let!(:job) { create :ci_build, :scheduled, scheduled_at: scheduled_at, runner: runner }
diff --git a/spec/services/container_registry/protection/create_rule_service_spec.rb b/spec/services/container_registry/protection/create_rule_service_spec.rb
index 3c319caf25c..4559a8fb131 100644
--- a/spec/services/container_registry/protection/create_rule_service_spec.rb
+++ b/spec/services/container_registry/protection/create_rule_service_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
container_registry_protection_rule:
be_a(ContainerRegistry::Protection::Rule)
.and(have_attributes(
- container_path_pattern: params[:container_path_pattern],
+ repository_path_pattern: params[:repository_path_pattern],
push_protected_up_to_access_level: params[:push_protected_up_to_access_level].to_s,
delete_protected_up_to_access_level: params[:delete_protected_up_to_access_level].to_s
))
@@ -36,7 +36,7 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
expect(
ContainerRegistry::Protection::Rule.where(
project: project,
- container_path_pattern: params[:container_path_pattern],
+ repository_path_pattern: params[:repository_path_pattern],
push_protected_up_to_access_level: params[:push_protected_up_to_access_level]
)
).to exist
@@ -57,7 +57,7 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
expect(
ContainerRegistry::Protection::Rule.where(
project: project,
- container_path_pattern: params[:container_path_pattern],
+ repository_path_pattern: params[:repository_path_pattern],
push_protected_up_to_access_level: params[:push_protected_up_to_access_level]
)
).not_to exist
@@ -67,12 +67,12 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
it_behaves_like 'a successful service response'
context 'when fields are invalid' do
- context 'when container_path_pattern is invalid' do
- let(:params) { super().merge(container_path_pattern: '') }
+ context 'when repository_path_pattern is invalid' do
+ let(:params) { super().merge(repository_path_pattern: '') }
it_behaves_like 'an erroneous service response'
- it { is_expected.to have_attributes(message: match(/Container path pattern can't be blank/)) }
+ it { is_expected.to have_attributes(message: match(/Repository path pattern can't be blank/)) }
end
context 'when delete_protected_up_to_access_level is invalid' do
@@ -100,8 +100,8 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
context 'when container registry name pattern is slightly different' do
let(:params) do
super().merge(
- # The field `container_path_pattern` is unique; this is why we change the value in a minimum way
- container_path_pattern: "#{existing_container_registry_protection_rule.container_path_pattern}-unique",
+ # The field `repository_path_pattern` is unique; this is why we change the value in a minimum way
+ repository_path_pattern: "#{existing_container_registry_protection_rule.repository_path_pattern}-unique",
push_protected_up_to_access_level:
existing_container_registry_protection_rule.push_protected_up_to_access_level
)
@@ -110,17 +110,17 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
it_behaves_like 'a successful service response'
end
- context 'when field `container_path_pattern` is taken' do
+ context 'when field `repository_path_pattern` is taken' do
let(:params) do
super().merge(
- container_path_pattern: existing_container_registry_protection_rule.container_path_pattern,
+ repository_path_pattern: existing_container_registry_protection_rule.repository_path_pattern,
push_protected_up_to_access_level: :maintainer
)
end
it_behaves_like 'an erroneous service response'
- it { is_expected.to have_attributes(errors: ['Container path pattern has already been taken']) }
+ it { is_expected.to have_attributes(errors: ['Repository path pattern has already been taken']) }
it { expect { subject }.not_to change { existing_container_registry_protection_rule.updated_at } }
end
diff --git a/spec/services/container_registry/protection/delete_rule_service_spec.rb b/spec/services/container_registry/protection/delete_rule_service_spec.rb
new file mode 100644
index 00000000000..acefe6a55d0
--- /dev/null
+++ b/spec/services/container_registry/protection/delete_rule_service_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ContainerRegistry::Protection::DeleteRuleService, '#execute', feature_category: :container_registry do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
+ let_it_be_with_refind(:container_registry_protection_rule) do
+ create(:container_registry_protection_rule, project: project)
+ end
+
+ subject(:service_execute) do
+ described_class.new(container_registry_protection_rule, current_user: current_user).execute
+ end
+
+ shared_examples 'a successful service response' do
+ it { is_expected.to be_success }
+
+ it do
+ is_expected.to have_attributes(
+ errors: be_blank,
+ message: be_blank,
+ payload: { container_registry_protection_rule: container_registry_protection_rule }
+ )
+ end
+
+ it do
+ service_execute
+
+ expect { container_registry_protection_rule.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+ end
+
+ shared_examples 'an erroneous service response' do
+ it { is_expected.to be_error }
+
+ it do
+ is_expected.to have_attributes(message: be_present, payload: { container_registry_protection_rule: be_blank })
+ end
+
+ it do
+ expect { service_execute }.not_to change { ContainerRegistry::Protection::Rule.count }
+
+ expect { container_registry_protection_rule.reload }.not_to raise_error
+ end
+ end
+
+ it_behaves_like 'a successful service response'
+
+ it 'deletes the container registry protection rule in the database' do
+ expect { service_execute }
+ .to change {
+ project.reload.container_registry_protection_rules
+ }.from([container_registry_protection_rule]).to([])
+ .and change { ::ContainerRegistry::Protection::Rule.count }.from(1).to(0)
+ end
+
+ context 'with deleted container registry protection rule' do
+ let!(:container_registry_protection_rule) do
+ create(:container_registry_protection_rule, project: project,
+ repository_path_pattern: 'protection_rule_deleted').destroy!
+ end
+
+ it_behaves_like 'a successful service response'
+ end
+
+ context 'when error occurs during delete operation' do
+ before do
+ allow(container_registry_protection_rule).to receive(:destroy!).and_raise(StandardError.new('Some error'))
+ end
+
+ it_behaves_like 'an erroneous service response'
+
+ it { is_expected.to have_attributes message: /Some error/ }
+ end
+
+ context 'when current_user does not have permission' do
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ let_it_be(:anonymous) { create(:user) }
+
+ where(:current_user) do
+ [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
+ end
+
+ with_them do
+ it_behaves_like 'an erroneous service response'
+
+ it { is_expected.to have_attributes message: /Unauthorized to delete a container registry protection rule/ }
+ end
+ end
+
+ context 'without container registry protection rule' do
+ let(:container_registry_protection_rule) { nil }
+
+ it { expect { service_execute }.to raise_error(ArgumentError) }
+ end
+
+ context 'without current_user' do
+ let(:current_user) { nil }
+ let(:container_registry_protection_rule) { build_stubbed(:container_registry_protection_rule, project: project) }
+
+ it { expect { service_execute }.to raise_error(ArgumentError) }
+ end
+end
diff --git a/spec/services/container_registry/protection/update_rule_service_spec.rb b/spec/services/container_registry/protection/update_rule_service_spec.rb
new file mode 100644
index 00000000000..28933b5764a
--- /dev/null
+++ b/spec/services/container_registry/protection/update_rule_service_spec.rb
@@ -0,0 +1,167 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ContainerRegistry::Protection::UpdateRuleService, '#execute', feature_category: :container_registry do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
+ let_it_be_with_reload(:container_registry_protection_rule) do
+ create(:container_registry_protection_rule, project: project)
+ end
+
+ let(:service) { described_class.new(container_registry_protection_rule, current_user: current_user, params: params) }
+
+ let(:params) do
+ attributes_for(
+ :container_registry_protection_rule,
+ repository_path_pattern: "#{container_registry_protection_rule.repository_path_pattern}-updated",
+ delete_protected_up_to_access_level: 'owner',
+ push_protected_up_to_access_level: 'owner'
+ )
+ end
+
+ subject(:service_execute) { service.execute }
+
+ shared_examples 'a successful service response' do
+ let(:expected_attributes) { params }
+
+ it { is_expected.to be_success }
+
+ it do
+ is_expected.to have_attributes(
+ errors: be_blank,
+ message: be_blank,
+ payload: {
+ container_registry_protection_rule:
+ be_a(ContainerRegistry::Protection::Rule)
+ .and(have_attributes(expected_attributes))
+ }
+ )
+ end
+
+ it { expect { subject }.not_to change { ContainerRegistry::Protection::Rule.count } }
+
+ it { subject.tap { expect(container_registry_protection_rule.reload).to have_attributes expected_attributes } }
+ end
+
+ shared_examples 'an erroneous service response' do
+ it { is_expected.to be_error }
+
+ it do
+ is_expected.to have_attributes(
+ errors: be_present,
+ message: be_present,
+ payload: { container_registry_protection_rule: nil }
+ )
+ end
+
+ it { expect { subject }.not_to change { ContainerRegistry::Protection::Rule.count } }
+ it { expect { subject }.not_to change { container_registry_protection_rule.reload.updated_at } }
+ end
+
+ it_behaves_like 'a successful service response'
+
+ context 'with disallowed params' do
+ let(:params) { super().merge!(project_id: 1, unsupported_param: 'unsupported_param_value') }
+
+ it_behaves_like 'a successful service response' do
+ let(:expected_attributes) { params.except(:project_id, :unsupported_param) }
+ end
+ end
+
+ context 'with invalid params' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:params_invalid, :message_expected) do
+ { repository_path_pattern: '' } | [/Repository path pattern can't be blank/]
+ { delete_protected_up_to_access_level: 1000 } | /not a valid delete_protected_up_to_access_level/
+ { push_protected_up_to_access_level: 1000 } | /not a valid push_protected_up_to_access_level/
+ end
+
+ with_them do
+ let(:params) do
+ super().merge(params_invalid)
+ end
+
+ it_behaves_like 'an erroneous service response'
+
+ it { is_expected.to have_attributes message: message_expected }
+ end
+ end
+
+ context 'with empty params' do
+ let(:params) { {} }
+
+ it_behaves_like 'a successful service response' do
+ let(:expected_attributes) { container_registry_protection_rule.attributes }
+ end
+
+ it { expect { service_execute }.not_to change { container_registry_protection_rule.reload.updated_at } }
+ end
+
+ context 'with nil params' do
+ let(:params) { nil }
+
+ it_behaves_like 'a successful service response' do
+ let(:expected_attributes) { container_registry_protection_rule.attributes }
+ end
+
+ it { expect { service_execute }.not_to change { container_registry_protection_rule.reload.updated_at } }
+ end
+
+ context 'when updated field `repository_path_pattern` is already taken' do
+ let_it_be_with_reload(:other_existing_container_registry_protection_rule) do
+ create(:container_registry_protection_rule, project: project,
+ repository_path_pattern: "#{container_registry_protection_rule.repository_path_pattern}-other")
+ end
+
+ let(:params) do
+ { repository_path_pattern: other_existing_container_registry_protection_rule.repository_path_pattern }
+ end
+
+ it_behaves_like 'an erroneous service response'
+
+ it do
+ expect { service_execute }.not_to(
+ change { other_existing_container_registry_protection_rule.reload.repository_path_pattern }
+ )
+ end
+
+ it do
+ is_expected.to have_attributes(
+ errors: match_array([/Repository path pattern has already been taken/]),
+ message: match_array([/Repository path pattern has already been taken/])
+ )
+ end
+ end
+
+ context 'when current_user does not have permission' do
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ let_it_be(:anonymous) { create(:user) }
+
+ where(:current_user) do
+ [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
+ end
+
+ with_them do
+ it_behaves_like 'an erroneous service response'
+
+ it { is_expected.to have_attributes errors: match_array(/Unauthorized/), message: /Unauthorized/ }
+ end
+ end
+
+ context 'without container registry protection rule' do
+ let(:container_registry_protection_rule) { nil }
+ let(:params) { {} }
+
+ it { expect { service_execute }.to raise_error(ArgumentError) }
+ end
+
+ context 'without current_user' do
+ let(:current_user) { nil }
+
+ it { expect { service_execute }.to raise_error(ArgumentError) }
+ end
+end
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index 5534dea85b2..79274599b99 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -139,7 +139,7 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
end
it 'informs the new-version-worker' do
- expect(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer, false)
+ expect(DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer, false)
run_service
end
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index 8a4dd8b5fc2..4ab0080d8a2 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
issue.design_collection.repository.raw.delete_all_refs_except([Gitlab::Git::BLANK_SHA])
end
- allow(::DesignManagement::NewVersionWorker)
+ allow(DesignManagement::NewVersionWorker)
.to receive(:perform_async).with(Integer, false).and_return(nil)
end
@@ -293,7 +293,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
it 'has the correct side-effects' do
counter = Gitlab::UsageDataCounters::DesignsCounter
- expect(::DesignManagement::NewVersionWorker)
+ expect(DesignManagement::NewVersionWorker)
.to receive(:perform_async).once.with(Integer, false).and_return(nil)
expect { run_service }
@@ -327,7 +327,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
design_repository.create_if_not_exists
design_repository.has_visible_content?
- expect(::DesignManagement::NewVersionWorker)
+ expect(DesignManagement::NewVersionWorker)
.to receive(:perform_async).once.with(Integer, false).and_return(nil)
expect { service.execute }
diff --git a/spec/services/groups/participants_service_spec.rb b/spec/services/groups/participants_service_spec.rb
index 8359bf1670f..e934921317d 100644
--- a/spec/services/groups/participants_service_spec.rb
+++ b/spec/services/groups/participants_service_spec.rb
@@ -22,6 +22,12 @@ RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projec
stub_feature_flags(disable_all_mention: false)
end
+ it 'returns results in correct order' do
+ expect(service_result.pluck(:username)).to eq([
+ 'all', developer.username, parent_group.full_path, subgroup.full_path
+ ])
+ end
+
it 'includes `All Group Members`' do
group.add_developer(create(:user))
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 39832ee4b13..fc649b61426 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -5,12 +5,7 @@ require 'spec_helper'
RSpec.describe Import::GithubService, feature_category: :importers do
let_it_be(:user) { create(:user) }
let_it_be(:token) { 'complex-token' }
- let_it_be(:access_params) do
- {
- github_access_token: 'github-complex-token',
- additional_access_tokens: %w[foo bar]
- }
- end
+ let_it_be(:access_params) { { github_access_token: 'github-complex-token' } }
let(:settings) { instance_double(Gitlab::GithubImport::Settings) }
let(:user_namespace_path) { user.namespace_path }
@@ -37,7 +32,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to receive(:write)
.with(
optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
end
@@ -98,7 +92,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(settings)
.to have_received(:write)
.with(optional_stages: nil,
- additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -124,7 +117,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
- additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -157,7 +149,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
- additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -194,7 +185,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
end
@@ -210,7 +200,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
end
@@ -224,7 +213,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
- additional_access_tokens: %w[foo bar],
timeout_strategy: timeout_strategy
)
end
diff --git a/spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb b/spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb
new file mode 100644
index 00000000000..3f57add10e3
--- /dev/null
+++ b/spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Integrations::GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService, feature_category: :container_registry do
+ let_it_be(:project) { create(:project, :private) }
+
+ let(:user) { project.owner }
+ let(:gcp_project_id) { 'gcp_project_id' }
+ let(:gcp_location) { 'gcp_location' }
+ let(:gcp_repository) { 'gcp_repository' }
+ let(:gcp_wlif) { 'https://wlif.test' }
+ let(:service) do
+ described_class.new(
+ project: project,
+ current_user: user,
+ params: {
+ gcp_project_id: gcp_project_id,
+ gcp_location: gcp_location,
+ gcp_repository: gcp_repository,
+ gcp_wlif: gcp_wlif
+ }
+ )
+ end
+
+ describe '#execute' do
+ let(:page_token) { nil }
+ let(:list_docker_images_response) { dummy_list_response }
+ let(:client_double) { instance_double('::Integrations::GoogleCloudPlatform::ArtifactRegistry::Client') }
+
+ before do
+ allow(::Integrations::GoogleCloudPlatform::ArtifactRegistry::Client).to receive(:new)
+ .with(
+ project: project,
+ user: user,
+ gcp_project_id: gcp_project_id,
+ gcp_location: gcp_location,
+ gcp_repository: gcp_repository,
+ gcp_wlif: gcp_wlif
+ ).and_return(client_double)
+ allow(client_double).to receive(:list_docker_images)
+ .with(page_token: page_token)
+ .and_return(list_docker_images_response)
+ end
+
+ subject(:list) { service.execute(page_token: page_token) }
+
+ it 'returns the docker images' do
+ expect(list).to be_success
+ expect(list.payload).to include(images: an_instance_of(Array), next_page_token: an_instance_of(String))
+ end
+
+ context 'with the client returning an empty hash' do
+ let(:list_docker_images_response) { {} }
+
+ it 'returns an empty hash' do
+ expect(list).to be_success
+ expect(list.payload).to eq({})
+ end
+ end
+
+ context 'with not enough permissions' do
+ let_it_be(:user) { create(:user) }
+
+ it 'returns an error response' do
+ expect(list).to be_error
+ expect(list.message).to eq('Access denied')
+ end
+ end
+
+ private
+
+ def dummy_list_response
+ {
+ images: [
+ {
+ built_at: '2023-11-30T23:23:11.980068941Z',
+ media_type: 'application/vnd.docker.distribution.manifest.v2+json',
+ name: 'projects/project/locations/location/repositories/repo/dockerImages/image@sha256:6a',
+ size_bytes: 2827903,
+ tags: %w[tag1 tag2],
+ updated_at: '2023-12-07T11:48:50.840751Z',
+ uploaded_at: '2023-12-07T11:48:47.598511Z',
+ uri: 'location.pkg.dev/project/repo/image@sha256:6a'
+ }
+ ],
+ next_page_token: 'next_page_token'
+ }
+ end
+ end
+end
diff --git a/spec/services/issue_email_participants/create_service_spec.rb b/spec/services/issue_email_participants/create_service_spec.rb
new file mode 100644
index 00000000000..fcfdeeb08f3
--- /dev/null
+++ b/spec/services/issue_email_participants/create_service_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service_desk do
+ shared_examples 'a successful service execution' do
+ it 'creates new participants', :aggregate_failures do
+ response = service.execute
+ expect(response).to be_success
+
+ issue.reset
+ note = issue.notes.last
+ expect(note.system?).to be true
+ expect(note.author).to eq(user)
+
+ participants_emails = issue.email_participants_emails_downcase
+
+ expected_emails.each do |email|
+ expect(participants_emails).to include(email)
+ expect(response.message).to include(email)
+ expect(note.note).to include(email)
+ end
+ end
+ end
+
+ shared_examples 'a failed service execution' do
+ it 'returns error ServiceResponse with message', :aggregate_failures do
+ response = service.execute
+ expect(response).to be_error
+ expect(response.message).to eq(error_message)
+ end
+ end
+
+ describe '#execute' do
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:issue) { create(:issue, project: project) }
+
+ let(:emails) { nil }
+ let(:service) { described_class.new(target: issue, current_user: user, emails: emails) }
+ let(:expected_emails) { emails }
+
+ let(:error_feature_flag) { "Feature flag issue_email_participants is not enabled for this project." }
+ let(:error_underprivileged) { _("You don't have permission to add email participants.") }
+ let(:error_no_participants) do
+ _("No email participants were added. Either none were provided, or they already exist.")
+ end
+
+ context 'when the user is not a project member' do
+ let(:error_message) { error_underprivileged }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when user has reporter role in project' do
+ before_all do
+ project.add_reporter(user)
+ end
+
+ context 'when no emails are provided' do
+ let(:error_message) { error_no_participants }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when one email is provided' do
+ let(:emails) { ['user@example.com'] }
+
+ it_behaves_like 'a successful service execution'
+
+ context 'when email is already a participant of the issue' do
+ let(:error_message) { error_no_participants }
+
+ before do
+ issue.issue_email_participants.create!(email: emails.first)
+ end
+
+ it_behaves_like 'a failed service execution'
+
+ context 'when email is formatted in a different case' do
+ let(:emails) { ['USER@example.com'] }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when participants limit on issue is reached' do
+ before do
+ stub_const("#{described_class}::MAX_NUMBER_OF_RECORDS", 1)
+ end
+
+ let(:emails) { ['over-max@example.com'] }
+ let(:error_message) { error_no_participants }
+
+ it_behaves_like 'a failed service execution'
+
+ it 'logs count of emails above limit' do
+ expect(Gitlab::AppLogger).to receive(:info).with({ above_limit_count: 1 }).once
+ service.execute
+ end
+ end
+ end
+ end
+
+ context 'when multiple emails are provided' do
+ let(:emails) { ['user@example.com', 'other-user@example.com'] }
+
+ it_behaves_like 'a successful service execution'
+
+ context 'when duplicate email provided' do
+ let(:emails) { ['user@example.com', 'user@example.com'] }
+ let(:expected_emails) { emails[...-1] }
+
+ it_behaves_like 'a successful service execution'
+ end
+
+ context 'when an email is already a participant of the issue' do
+ let(:expected_emails) { emails[1...] }
+
+ before do
+ issue.issue_email_participants.create!(email: emails.first)
+ end
+
+ it_behaves_like 'a successful service execution'
+ end
+
+ context 'when only some emails can be added because of participants limit' do
+ before do
+ stub_const("#{described_class}::MAX_NUMBER_OF_RECORDS", 1)
+ end
+
+ let(:expected_emails) { emails[...-1] }
+
+ it_behaves_like 'a successful service execution'
+
+ it 'logs count of emails above limit' do
+ expect(Gitlab::AppLogger).to receive(:info).with({ above_limit_count: 1 }).once
+ service.execute
+ end
+ end
+ end
+
+ context 'when more than the allowed number of emails are provided' do
+ let(:emails) { (1..7).map { |i| "user#{i}@example.com" } }
+
+ let(:expected_emails) { emails[...-1] }
+
+ it_behaves_like 'a successful service execution'
+ end
+ end
+
+ context 'when feature flag issue_email_participants is disabled' do
+ let(:error_message) { error_feature_flag }
+
+ before do
+ stub_feature_flags(issue_email_participants: false)
+ end
+
+ it_behaves_like 'a failed service execution'
+ end
+ end
+end
diff --git a/spec/services/issues/import_csv_service_spec.rb b/spec/services/issues/import_csv_service_spec.rb
index 660686cf805..d3d7277e3e3 100644
--- a/spec/services/issues/import_csv_service_spec.rb
+++ b/spec/services/issues/import_csv_service_spec.rb
@@ -14,6 +14,8 @@ RSpec.describe Issues::ImportCsvService, feature_category: :team_planning do
described_class.new(user, project, uploader)
end
+ let!(:test_milestone) { create(:milestone, project: project, title: '15.10') }
+
include_examples 'issuable import csv service', 'issue' do
let(:issuables) { project.issues }
let(:email_method) { :import_issues_csv_email }
@@ -36,7 +38,8 @@ RSpec.describe Issues::ImportCsvService, feature_category: :team_planning do
description: 'Description',
time_estimate: 3600,
assignees: include(assignee),
- due_date: Date.new(2022, 6, 28)
+ due_date: Date.new(2022, 6, 28),
+ milestone_id: test_milestone.id
)
)
end
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index e7fe5c19fa3..8761aba432f 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
before do
project.add_developer(user)
+ stub_feature_flags ff_require_saml_auth_to_approve: false
end
context 'with invalid approval' do
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 25c75ae7244..199f5e3fd9a 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe MergeRequests::CloseService, feature_category: :code_review_workf
.with(@merge_request, 'close')
end
- it 'sends email to user2 about assign of new merge_request', :sidekiq_might_not_need_inline do
+ it 'sends email to user2 about assign of new merge_request', :sidekiq_inline do
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(merge_request.title)
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 51b1bed1dd3..bf52800b77e 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state, f
end.to change { project.open_merge_requests_count }.from(0).to(1)
end
- it 'creates exactly 1 create MR event', :sidekiq_might_not_need_inline do
+ it 'creates exactly 1 create MR event', :sidekiq_inline do
attributes = {
action: :created,
target_id: merge_request.id,
diff --git a/spec/services/merge_requests/mergeability/check_base_service_spec.rb b/spec/services/merge_requests/mergeability/check_base_service_spec.rb
index 806bde61c23..05f5b4f1315 100644
--- a/spec/services/merge_requests/mergeability/check_base_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_base_service_spec.rb
@@ -8,6 +8,22 @@ RSpec.describe MergeRequests::Mergeability::CheckBaseService, feature_category:
let(:merge_request) { double }
let(:params) { double }
+ describe '.identifier' do
+ it 'sets the identifier' do
+ described_class.identifier("test")
+
+ expect(described_class.identifier).to eq("test")
+ end
+ end
+
+ describe '.description' do
+ it 'sets the description' do
+ described_class.description("test")
+
+ expect(described_class.description).to eq("test")
+ end
+ end
+
describe '#merge_request' do
it 'returns the merge_request' do
expect(check_base_service.merge_request).to eq merge_request
diff --git a/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb
index b6ee1049bb9..f29289be86b 100644
--- a/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe MergeRequests::Mergeability::CheckBrokenStatusService, feature_ca
let(:merge_request) { build(:merge_request) }
+ it_behaves_like 'mergeability check service', :broken_status, 'Checks whether the merge request is broken'
+
describe '#execute' do
let(:result) { check_broken_status.execute }
@@ -19,7 +21,7 @@ RSpec.describe MergeRequests::Mergeability::CheckBrokenStatusService, feature_ca
it 'returns a check result with status failed' do
expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
- expect(result.payload[:reason]).to eq(:broken_status)
+ expect(result.payload[:identifier]).to eq(:broken_status)
end
end
diff --git a/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
index 067e87859e7..aa7920b9b40 100644
--- a/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe MergeRequests::Mergeability::CheckCiStatusService, feature_catego
let(:params) { { skip_ci_check: skip_check } }
let(:skip_check) { false }
+ it_behaves_like 'mergeability check service', :ci_must_pass, 'Checks whether CI has passed'
+
describe '#execute' do
let(:result) { check_ci_status.execute }
@@ -39,7 +41,7 @@ RSpec.describe MergeRequests::Mergeability::CheckCiStatusService, feature_catego
it 'returns a check result with status failed' do
expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
- expect(result.payload[:reason]).to eq :ci_must_pass
+ expect(result.payload[:identifier]).to eq :ci_must_pass
end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb
index 14173c19bfb..e35de4d4042 100644
--- a/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe MergeRequests::Mergeability::CheckConflictStatusService, feature_
let(:merge_request) { build(:merge_request) }
+ it_behaves_like 'mergeability check service', :conflict, 'Checks whether the merge request has a conflict'
+
describe '#execute' do
let(:result) { check_conflict_status.execute }
@@ -27,7 +29,7 @@ RSpec.describe MergeRequests::Mergeability::CheckConflictStatusService, feature_
it 'returns a check result with status failed' do
expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
- expect(result.payload[:reason]).to eq(:conflict)
+ expect(result.payload[:identifier]).to eq(:conflict)
end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb
index 4a8b28f603d..3d1fe0c838d 100644
--- a/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb
@@ -10,6 +10,9 @@ RSpec.describe MergeRequests::Mergeability::CheckDiscussionsStatusService, featu
let(:params) { { skip_discussions_check: skip_check } }
let(:skip_check) { false }
+ it_behaves_like 'mergeability check service', :discussions_not_resolved,
+ 'Checks whether the merge request has open discussions'
+
describe '#execute' do
let(:result) { check_discussions_status.execute }
@@ -39,7 +42,7 @@ RSpec.describe MergeRequests::Mergeability::CheckDiscussionsStatusService, featu
it 'returns a check result with status failed' do
expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
- expect(result.payload[:reason]).to eq(:discussions_not_resolved)
+ expect(result.payload[:identifier]).to eq(:discussions_not_resolved)
end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
index 3837022232d..cef8169e725 100644
--- a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService, feature_cat
let(:params) { { skip_draft_check: skip_check } }
let(:skip_check) { false }
+ it_behaves_like 'mergeability check service', :draft_status, 'Checks whether the merge request is draft'
+
describe '#execute' do
let(:result) { check_draft_status.execute }
@@ -22,7 +24,7 @@ RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService, feature_cat
it 'returns a check result with status failed' do
expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
- expect(result.payload[:reason]).to eq(:draft_status)
+ expect(result.payload[:identifier]).to eq(:draft_status)
end
end
diff --git a/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb
index 53ad77ea4df..f673e43931d 100644
--- a/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe MergeRequests::Mergeability::CheckOpenStatusService, feature_cate
let(:merge_request) { build(:merge_request) }
+ it_behaves_like 'mergeability check service', :not_open, 'Checks whether the merge request is open'
+
describe '#execute' do
let(:result) { check_open_status.execute }
@@ -27,7 +29,7 @@ RSpec.describe MergeRequests::Mergeability::CheckOpenStatusService, feature_cate
it 'returns a check result with status failed' do
expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
- expect(result.payload[:reason]).to eq(:not_open)
+ expect(result.payload[:identifier]).to eq(:not_open)
end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb
index d6948f72c0a..047cf5c13bf 100644
--- a/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe MergeRequests::Mergeability::CheckRebaseStatusService, feature_ca
let(:params) { { skip_rebase_check: skip_check } }
let(:skip_check) { false }
+ it_behaves_like 'mergeability check service', :need_rebase, 'Checks whether the merge request needs to be rebased'
+
describe '#execute' do
let(:result) { check_rebase_status.execute }
@@ -31,7 +33,7 @@ RSpec.describe MergeRequests::Mergeability::CheckRebaseStatusService, feature_ca
it 'returns a check result with status failed' do
expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
- expect(result.payload[:reason]).to eq(:need_rebase)
+ expect(result.payload[:identifier]).to eq(:need_rebase)
end
end
diff --git a/spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb b/spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb
index 66bcb948cb6..a3c5427ee82 100644
--- a/spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe ::MergeRequests::Mergeability::DetailedMergeStatusService, featur
merge_request.close!
end
- it 'returns the failure reason' do
+ it 'returns the failed check' do
expect(detailed_merge_status).to eq(:not_open)
end
end
@@ -77,7 +77,7 @@ RSpec.describe ::MergeRequests::Mergeability::DetailedMergeStatusService, featur
end
context 'when pipeline does not exist' do
- it 'returns the failure reason' do
+ it 'returns the failed check' do
expect(detailed_merge_status).to eq(:ci_must_pass)
end
end
@@ -97,15 +97,21 @@ RSpec.describe ::MergeRequests::Mergeability::DetailedMergeStatusService, featur
context 'when the pipeline is running' do
let(:ci_status) { :running }
- it 'returns the failure reason' do
+ it 'returns the failed check' do
expect(detailed_merge_status).to eq(:ci_still_running)
end
end
+ context 'when the pipeline is pending' do
+ let(:ci_status) { :pending }
+
+ it { expect(detailed_merge_status).to eq(:ci_still_running) }
+ end
+
context 'when the pipeline is not running' do
let(:ci_status) { :failed }
- it 'returns the failure reason' do
+ it 'returns the failed check' do
expect(detailed_merge_status).to eq(:ci_must_pass)
end
end
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
index 06e15356a92..a67b01b8069 100644
--- a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
end
context 'when one check fails' do
- let(:failed_result) { Gitlab::MergeRequests::Mergeability::CheckResult.failed(payload: { reason: 'failed' }) }
+ let(:failed_result) { Gitlab::MergeRequests::Mergeability::CheckResult.failed(payload: { identifier: 'failed' }) }
before do
allow_next_instance_of(MergeRequests::Mergeability::CheckOpenStatusService) do |service|
@@ -86,11 +86,11 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
end
end
- it 'returns the failure reason' do
+ it 'returns the failed check' do
result = execute
expect(result.success?).to eq(false)
- expect(execute.payload[:failure_reason]).to eq(:failed)
+ expect(execute.payload[:failed_check]).to eq(:failed)
end
it_behaves_like 'checks are all executed' do
diff --git a/spec/services/merge_requests/post_merge_service_spec.rb b/spec/services/merge_requests/post_merge_service_spec.rb
index f7526c169bd..61c754e30a9 100644
--- a/spec/services/merge_requests/post_merge_service_spec.rb
+++ b/spec/services/merge_requests/post_merge_service_spec.rb
@@ -153,5 +153,17 @@ RSpec.describe MergeRequests::PostMergeService, feature_category: :code_review_w
expect(deploy_job.reload.canceled?).to be false
end
end
+
+ context 'when the merge request has a pages deployment' do
+ it 'performs Pages::DeactivateMrDeploymentWorker asynchronously' do
+ expect(Pages::DeactivateMrDeploymentsWorker)
+ .to receive(:perform_async)
+ .with(merge_request)
+
+ subject
+
+ expect(merge_request.reload).to be_merged
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb
index e173cd382f2..f3ac55059bc 100644
--- a/spec/services/merge_requests/reopen_service_spec.rb
+++ b/spec/services/merge_requests/reopen_service_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe MergeRequests::ReopenService, feature_category: :code_review_work
expect(Integrations::GroupMentionWorker).not_to receive(:perform_async)
end
- it 'sends email to user2 about reopen of merge_request', :sidekiq_might_not_need_inline do
+ it 'sends email to user2 about reopen of merge_request', :sidekiq_inline do
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(merge_request.title)
diff --git a/spec/services/ml/create_candidate_service_spec.rb b/spec/services/ml/create_candidate_service_spec.rb
index fb3456b0bcc..b1a053711d7 100644
--- a/spec/services/ml/create_candidate_service_spec.rb
+++ b/spec/services/ml/create_candidate_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe ::Ml::CreateCandidateService, feature_category: :mlops do
describe '#execute' do
- let_it_be(:model_version) { create(:ml_model_versions) }
+ let_it_be(:model_version) { create(:ml_model_versions, candidate: nil) }
let_it_be(:experiment) { create(:ml_experiments, project: model_version.project) }
let(:params) { {} }
diff --git a/spec/services/ml/create_model_service_spec.rb b/spec/services/ml/create_model_service_spec.rb
index 212f0940635..74c1dd5fec7 100644
--- a/spec/services/ml/create_model_service_spec.rb
+++ b/spec/services/ml/create_model_service_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe ::Ml::CreateModelService, feature_category: :mlops do
let_it_be(:description) { 'description' }
let_it_be(:metadata) { [] }
+ before do
+ allow(Gitlab::InternalEvents).to receive(:track_event)
+ end
+
subject(:create_model) { described_class.new(project, name, user, description, metadata).execute }
describe '#execute' do
@@ -18,6 +22,10 @@ RSpec.describe ::Ml::CreateModelService, feature_category: :mlops do
it 'creates a model', :aggregate_failures do
expect { create_model }.to change { Ml::Model.count }.by(1)
+ expect(Gitlab::InternalEvents).to have_received(:track_event).with(
+ 'model_registry_ml_model_created',
+ { project: project, user: user }
+ )
expect(create_model.name).to eq(name)
end
@@ -29,6 +37,10 @@ RSpec.describe ::Ml::CreateModelService, feature_category: :mlops do
it 'creates a model', :aggregate_failures do
expect { create_model }.to change { Ml::Model.count }.by(1)
+ expect(Gitlab::InternalEvents).to have_received(:track_event).with(
+ 'model_registry_ml_model_created',
+ { project: project, user: user }
+ )
expect(create_model.name).to eq(name)
end
@@ -40,6 +52,7 @@ RSpec.describe ::Ml::CreateModelService, feature_category: :mlops do
it 'raises an error', :aggregate_failures do
expect { create_model }.to raise_error(ActiveRecord::RecordInvalid)
+ expect(Gitlab::InternalEvents).not_to have_received(:track_event)
end
end
diff --git a/spec/services/ml/create_model_version_service_spec.rb b/spec/services/ml/create_model_version_service_spec.rb
new file mode 100644
index 00000000000..b3aead4a92c
--- /dev/null
+++ b/spec/services/ml/create_model_version_service_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ml::CreateModelVersionService, feature_category: :mlops do
+ let(:model) { create(:ml_models) }
+ let(:params) { {} }
+
+ before do
+ allow(Gitlab::InternalEvents).to receive(:track_event)
+ end
+
+ subject(:service) { described_class.new(model, params).execute }
+
+ context 'when no versions exist' do
+ it 'creates a model version', :aggregate_failures do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1).and change { Ml::Candidate.count }.by(1)
+ expect(model.reload.latest_version.version).to eq('1.0.0')
+
+ expect(Gitlab::InternalEvents).to have_received(:track_event).with(
+ 'model_registry_ml_model_version_created',
+ { project: model.project, user: nil }
+ )
+ end
+ end
+
+ context 'when a version exists' do
+ before do
+ create(:ml_model_versions, model: model, version: '3.0.0')
+ end
+
+ it 'creates another model version and increments the version number', :aggregate_failures do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1).and change { Ml::Candidate.count }.by(1)
+ expect(model.reload.latest_version.version).to eq('4.0.0')
+
+ expect(Gitlab::InternalEvents).to have_received(:track_event).with(
+ 'model_registry_ml_model_version_created',
+ { project: model.project, user: nil }
+ )
+ end
+ end
+
+ context 'when a version is created' do
+ it 'creates a package' do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1).and change {
+ Packages::MlModel::Package.count
+ }.by(1)
+ expect(model.reload.latest_version.package.name).to eq(model.name)
+ expect(model.latest_version.package.version).to eq(model.latest_version.version)
+ end
+ end
+
+ context 'when a version is created and the package already exists' do
+ it 'does not create a package' do
+ next_version = Ml::IncrementVersionService.new(model.latest_version.try(:version)).execute
+ create(:ml_model_package, name: model.name, version: next_version, project: model.project)
+
+ expect { service }.to change { Ml::ModelVersion.count }.by(1).and not_change {
+ Packages::MlModel::Package.count
+ }
+ expect(model.reload.latest_version.package.name).to eq(model.name)
+ expect(model.latest_version.package.version).to eq(model.latest_version.version)
+ end
+ end
+
+ context 'when a version is created and an existing package supplied' do
+ it 'does not create a package' do
+ next_version = Ml::IncrementVersionService.new(model.latest_version.try(:version)).execute
+ package = create(:ml_model_package, name: model.name, version: next_version, project: model.project)
+ service = described_class.new(model, { package: package })
+
+ expect { service.execute }.to change { Ml::ModelVersion.count }.by(1).and not_change {
+ Packages::MlModel::Package.count
+ }
+ expect(model.reload.latest_version.package.name).to eq(model.name)
+ expect(model.latest_version.package.version).to eq(model.latest_version.version)
+ end
+ end
+end
diff --git a/spec/services/ml/destroy_model_service_spec.rb b/spec/services/ml/destroy_model_service_spec.rb
new file mode 100644
index 00000000000..79914ff8b22
--- /dev/null
+++ b/spec/services/ml/destroy_model_service_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ml::DestroyModelService, feature_category: :mlops do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:model) { create(:ml_models, :with_latest_version_and_package) }
+ let(:service) { described_class.new(model, user) }
+
+ describe '#execute' do
+ context 'when model name does not exist in the project' do
+ it 'returns nil' do
+ allow(model).to receive(:destroy).and_return(false)
+ expect(service.execute).to be nil
+ end
+ end
+
+ context 'when a model exists' do
+ it 'destroys the model' do
+ expect(Packages::MarkPackagesForDestructionService).to receive(:new).with(packages: model.all_packages,
+ current_user: user).and_return(instance_double('Packages::MarkPackagesForDestructionService').tap do |service|
+ expect(service).to receive(:execute)
+ end)
+ expect { service.execute }.to change { Ml::Model.count }.by(-1).and change { Ml::ModelVersion.count }.by(-1)
+ end
+ end
+ end
+end
diff --git a/spec/services/ml/find_or_create_model_version_service_spec.rb b/spec/services/ml/find_or_create_model_version_service_spec.rb
index e5ca7c3a450..88647f23ad9 100644
--- a/spec/services/ml/find_or_create_model_version_service_spec.rb
+++ b/spec/services/ml/find_or_create_model_version_service_spec.rb
@@ -35,21 +35,29 @@ RSpec.describe ::Ml::FindOrCreateModelVersionService, feature_category: :mlops d
end
end
- context 'when model version does not exist' do
+ context 'when model does not exist' do
let(:project) { existing_version.project }
let(:name) { 'a_new_model' }
let(:version) { '2.0.0' }
+
+ it 'does not create a new model version', :aggregate_failures do
+ expect { model_version }.to change { Ml::ModelVersion.count }.by(0)
+ end
+ end
+
+ context 'when model exists and model version does not' do
+ let(:project) { existing_version.project }
+ let(:name) { existing_version.name }
+ let(:version) { '2.0.0' }
let(:description) { 'A model version' }
let(:package) { create(:ml_model_package, project: project, name: name, version: version) }
it 'creates a new model version', :aggregate_failures do
- expect { model_version }.to change { Ml::ModelVersion.count }.by(1).and change { Ml::Candidate.count }.by(1)
+ expect { model_version }.to change { Ml::ModelVersion.count }.by(1)
- expect(model_version.name).to eq(name)
expect(model_version.version).to eq(version)
- expect(model_version.package).to eq(package)
- expect(model_version.candidate.model_version_id).to eq(model_version.id)
+ expect(model_version.model).to eq(existing_version.model)
expect(model_version.description).to eq(description)
end
end
diff --git a/spec/services/ml/increment_version_service_spec.rb b/spec/services/ml/increment_version_service_spec.rb
new file mode 100644
index 00000000000..7e8bf153e63
--- /dev/null
+++ b/spec/services/ml/increment_version_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ml::IncrementVersionService, feature_category: :mlops do
+ describe '#execute' do
+ let(:increment_type) { nil }
+ let(:finder) { described_class.new(version, increment_type) }
+
+ context 'when given an invalid version format' do
+ let(:version) { 'foo' }
+
+ it 'raises an error' do
+ expect { finder.execute }.to raise_error(RuntimeError, "Version must be in a valid SemVer format")
+ end
+ end
+
+ context 'when given a non-semver version format' do
+ let(:version) { 1 }
+
+ it 'raises an error' do
+ expect { finder.execute }.to raise_error(RuntimeError, "Version must be in a valid SemVer format")
+ end
+ end
+
+ context 'when given an unsupported increment type' do
+ let(:version) { '1.2.3' }
+ let(:increment_type) { 'foo' }
+
+ it 'raises an error' do
+ expect do
+ finder.execute
+ end.to raise_error(RuntimeError, "Increment type must be one of :patch, :minor, or :major")
+ end
+ end
+
+ context 'when valid inputs are provided' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:version, :increment_type, :result) do
+ nil | nil | '1.0.0'
+ '0.0.1' | nil | '1.0.1'
+ '1.0.0' | nil | '2.0.0'
+ '1.0.0' | :major | '2.0.0'
+ '1.0.0' | :minor | '1.1.0'
+ '1.0.0' | :patch | '1.0.1'
+ end
+
+ with_them do
+ subject { finder.execute }
+
+ it { is_expected.to eq(result) }
+ end
+ end
+ end
+end
diff --git a/spec/services/ml/model_versions/delete_service_spec.rb b/spec/services/ml/model_versions/delete_service_spec.rb
new file mode 100644
index 00000000000..1cc5a2f85a5
--- /dev/null
+++ b/spec/services/ml/model_versions/delete_service_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::ModelVersions::DeleteService, feature_category: :mlops do
+ let_it_be(:valid_model_version) do
+ create(:ml_model_versions, :with_package)
+ end
+
+ let(:project) { valid_model_version.project }
+ let(:user) { valid_model_version.project.owner }
+ let(:name) { valid_model_version.name }
+ let(:version) { valid_model_version.version }
+
+ subject(:execute_service) { described_class.new(project, name, version, user).execute }
+
+ describe '#execute' do
+ context 'when model version exists' do
+ it 'deletes the model version', :aggregate_failures do
+ expect(execute_service).to be_success
+ expect(Ml::ModelVersion.find_by(id: valid_model_version.id)).to be_nil
+ end
+ end
+
+ context 'when model version does not exist' do
+ let(:version) { 'wrong-version' }
+
+ it { is_expected.to be_error.and have_attributes(message: 'Model not found') }
+ end
+
+ context 'when model version has no package' do
+ before do
+ valid_model_version.update!(package: nil)
+ end
+
+ it 'does not trigger destroy package service', :aggregate_failures do
+ expect(Packages::MarkPackageForDestructionService).not_to receive(:new)
+ expect(execute_service).to be_success
+ end
+ end
+
+ context 'when package cannot be marked for destruction' do
+ before do
+ allow_next_instance_of(Packages::MarkPackageForDestructionService) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.error(message: 'error'))
+ end
+ end
+
+ it 'does not delete the model version', :aggregate_failures do
+ is_expected.to be_error.and have_attributes(message: 'error')
+ expect(Ml::ModelVersion.find_by(id: valid_model_version.id)).to eq(valid_model_version)
+ end
+ end
+ end
+end
diff --git a/spec/services/ml/model_versions/update_model_version_service_spec.rb b/spec/services/ml/model_versions/update_model_version_service_spec.rb
new file mode 100644
index 00000000000..99ea8b81df3
--- /dev/null
+++ b/spec/services/ml/model_versions/update_model_version_service_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::ModelVersions::UpdateModelVersionService, feature_category: :mlops do
+ let_it_be(:existing_version) { create(:ml_model_versions) }
+
+ let(:project) { existing_version.project }
+ let(:name) { existing_version.name }
+ let(:version) { existing_version.version }
+ let(:description) { 'A model version description' }
+
+ subject(:execute_service) { described_class.new(project, name, version, description).execute }
+
+ describe '#execute' do
+ context 'when model version exists' do
+ it { is_expected.to be_success }
+
+ it 'updates the model version description' do
+ execute_service
+
+ expect(execute_service.payload.description).to eq(description)
+ end
+ end
+
+ context 'when description is invalid' do
+ let(:description) { 'a' * 501 }
+
+ it { is_expected.to be_error }
+ end
+
+ context 'when model does not exist' do
+ let(:name) { 'a_new_model' }
+
+ it { is_expected.to be_error }
+ end
+
+ context 'when model version does not exist' do
+ let(:version) { '2.0.0' }
+
+ it { is_expected.to be_error }
+ end
+ end
+end
diff --git a/spec/services/namespaces/package_settings/update_service_spec.rb b/spec/services/namespaces/package_settings/update_service_spec.rb
index 8a4a51e409c..41f3499a1bb 100644
--- a/spec/services/namespaces/package_settings/update_service_spec.rb
+++ b/spec/services/namespaces/package_settings/update_service_spec.rb
@@ -45,7 +45,8 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
npm_package_requests_forwarding: nil,
lock_npm_package_requests_forwarding: false,
pypi_package_requests_forwarding: nil,
- lock_pypi_package_requests_forwarding: false
+ lock_pypi_package_requests_forwarding: false,
+ nuget_symbol_server_enabled: false
}, to: {
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'RELEASE',
@@ -58,7 +59,8 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
npm_package_requests_forwarding: true,
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
- lock_pypi_package_requests_forwarding: true
+ lock_pypi_package_requests_forwarding: true,
+ nuget_symbol_server_enabled: true
}
it_behaves_like 'returning a success'
@@ -109,7 +111,8 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
npm_package_requests_forwarding: true,
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
- lock_pypi_package_requests_forwarding: true
+ lock_pypi_package_requests_forwarding: true,
+ nuget_symbol_server_enabled: true
}
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index c1b15ec7681..a46a1438d18 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -395,27 +395,12 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
context 'is ipynb file' do
before do
allow_any_instance_of(::Gitlab::Diff::File).to receive(:ipynb?).and_return(true)
- stub_feature_flags(ipynbdiff_notes_tracker: false)
end
- context ':ipynbdiff_notes_tracker is off' do
- it 'does not track ipynb note usage data' do
- expect(::Gitlab::UsageDataCounters::IpynbDiffActivityCounter).not_to receive(:note_created)
+ it 'tracks ipynb diff note creation' do
+ expect(::Gitlab::UsageDataCounters::IpynbDiffActivityCounter).to receive(:note_created)
- described_class.new(project_with_repo, user, new_opts).execute
- end
- end
-
- context ':ipynbdiff_notes_tracker is on' do
- before do
- stub_feature_flags(ipynbdiff_notes_tracker: true)
- end
-
- it 'tracks ipynb diff note creation' do
- expect(::Gitlab::UsageDataCounters::IpynbDiffActivityCounter).to receive(:note_created)
-
- described_class.new(project_with_repo, user, new_opts).execute
- end
+ described_class.new(project_with_repo, user, new_opts).execute
end
end
end
diff --git a/spec/services/organizations/create_service_spec.rb b/spec/services/organizations/create_service_spec.rb
index 7d9bf64ddd3..aae89517c15 100644
--- a/spec/services/organizations/create_service_spec.rb
+++ b/spec/services/organizations/create_service_spec.rb
@@ -7,7 +7,10 @@ RSpec.describe Organizations::CreateService, feature_category: :cell do
let_it_be(:user) { create(:user) }
let(:current_user) { user }
- let(:params) { attributes_for(:organization) }
+ let(:params) { attributes_for(:organization).merge(extra_params) }
+ let(:avatar_filename) { nil }
+ let(:extra_params) { {} }
+ let(:created_organization) { response.payload[:organization] }
subject(:response) { described_class.new(current_user: current_user, params: params).execute }
@@ -23,17 +26,41 @@ RSpec.describe Organizations::CreateService, feature_category: :cell do
end
context 'when user has permission' do
- it 'creates an organization' do
- expect { response }.to change { Organizations::Organization.count }
+ shared_examples 'creating an organization' do
+ it 'creates the organization' do
+ expect { response }.to change { Organizations::Organization.count }
+ expect(response).to be_success
+ expect(created_organization.name).to eq(params[:name])
+ expect(created_organization.path).to eq(params[:path])
+ expect(created_organization.description).to eq(params[:description])
+ expect(created_organization.avatar.filename).to eq(avatar_filename)
+ end
+ end
+
+ it_behaves_like 'creating an organization'
- expect(response).to be_success
+ context 'with description' do
+ let(:description) { 'Organization description' }
+ let(:extra_params) { { description: description } }
+
+ it_behaves_like 'creating an organization'
end
- it 'returns an error when the organization is not persisted' do
- params[:name] = nil
+ context 'with avatar' do
+ let(:avatar_filename) { 'dk.png' }
+ let(:avatar) { fixture_file_upload("spec/fixtures/#{avatar_filename}") }
+ let(:extra_params) { { avatar: avatar } }
- expect(response).to be_error
- expect(response.message).to match_array(["Name can't be blank"])
+ it_behaves_like 'creating an organization'
+ end
+
+ context 'when the organization is not persisted' do
+ let(:extra_params) { { name: nil } }
+
+ it 'returns an error when the organization is not persisted' do
+ expect(response).to be_error
+ expect(response.message).to match_array(["Name can't be blank"])
+ end
end
end
end
diff --git a/spec/services/organizations/update_service_spec.rb b/spec/services/organizations/update_service_spec.rb
new file mode 100644
index 00000000000..148840770db
--- /dev/null
+++ b/spec/services/organizations/update_service_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::UpdateService, feature_category: :cell do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:organization) { create(:organization) }
+
+ let_it_be(:current_user) { user } # due to use in before_all
+ let(:name) { 'Name' }
+ let(:path) { 'path' }
+ let(:description) { nil }
+ let(:avatar_filename) { nil }
+ let(:params) { { name: name, path: path }.merge(extra_params) }
+ let(:extra_params) { {} }
+ let(:updated_organization) { response.payload[:organization] }
+
+ subject(:response) do
+ described_class.new(organization, current_user: current_user, params: params).execute
+ end
+
+ context 'when user does not have permission' do
+ let(:current_user) { nil }
+
+ it 'returns an error' do
+ expect(response).to be_error
+
+ expect(response.message).to match_array(['You have insufficient permissions to update the organization'])
+ end
+ end
+
+ context 'when user has permission' do
+ before_all do
+ create(:organization_user, organization: organization, user: current_user)
+ end
+
+ shared_examples 'updating an organization' do
+ it 'updates the organization' do
+ expect(response).to be_success
+ expect(updated_organization.name).to eq(name)
+ expect(updated_organization.path).to eq(path)
+ expect(updated_organization.description).to eq(description)
+ expect(updated_organization.avatar.filename).to eq(avatar_filename)
+ end
+ end
+
+ context 'with description' do
+ let(:description) { 'Organization description' }
+ let(:extra_params) { { description: description } }
+
+ it_behaves_like 'updating an organization'
+ end
+
+ context 'with avatar' do
+ let(:avatar_filename) { 'dk.png' }
+ let(:avatar) { fixture_file_upload("spec/fixtures/#{avatar_filename}") }
+ let(:extra_params) { { avatar: avatar } }
+
+ it_behaves_like 'updating an organization'
+ end
+
+ include_examples 'updating an organization'
+
+ context 'when the organization is not updated' do
+ let(:extra_params) { { name: nil } }
+
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(updated_organization).to be_instance_of Organizations::Organization
+ expect(response.message).to match_array(["Name can't be blank"])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/mark_package_for_destruction_service_spec.rb b/spec/services/packages/mark_package_for_destruction_service_spec.rb
index d65e62b84a6..bd69f995c77 100644
--- a/spec/services/packages/mark_package_for_destruction_service_spec.rb
+++ b/spec/services/packages/mark_package_for_destruction_service_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Packages::MarkPackageForDestructionService, feature_category: :pa
context 'when it is successful' do
it 'marks the package and package files as pending destruction' do
expect(package).to receive(:sync_maven_metadata).and_call_original
+ expect(package).to receive(:sync_npm_metadata_cache).and_call_original
expect(package).to receive(:mark_package_files_for_destruction).and_call_original
expect { service.execute }.to change { package.status }.from('default').to('pending_destruction')
end
@@ -45,6 +46,7 @@ RSpec.describe Packages::MarkPackageForDestructionService, feature_category: :pa
response = service.execute
expect(package).not_to receive(:sync_maven_metadata)
+ expect(package).not_to receive(:sync_npm_metadata_cache)
expect(response).to be_a(ServiceResponse)
expect(response).to be_error
expect(response.message).to eq("Failed to mark the package as pending destruction")
diff --git a/spec/services/packages/mark_packages_for_destruction_service_spec.rb b/spec/services/packages/mark_packages_for_destruction_service_spec.rb
index 22278f9927d..cd6426d39ad 100644
--- a/spec/services/packages/mark_packages_for_destruction_service_spec.rb
+++ b/spec/services/packages/mark_packages_for_destruction_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Packages::MarkPackagesForDestructionService, :sidekiq_inline, feature_category: :package_registry do
let_it_be(:project) { create(:project) }
- let_it_be_with_reload(:packages) { create_list(:npm_package, 3, project: project) }
+ let_it_be_with_reload(:packages) { create_list(:nuget_package, 3, project: project) }
let(:user) { project.owner }
@@ -15,6 +15,17 @@ RSpec.describe Packages::MarkPackagesForDestructionService, :sidekiq_inline, fea
describe '#execute' do
subject { service.execute }
+ shared_examples 'returning service response' do |status:, message:, reason: nil|
+ it 'returns service response' do
+ subject
+
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject.status).to eq(status)
+ expect(subject.message).to eq(message)
+ expect(subject.reason).to eq(reason) if reason
+ end
+ end
+
context 'when the user is authorized' do
before do
project.add_maintainer(user)
@@ -23,16 +34,16 @@ RSpec.describe Packages::MarkPackagesForDestructionService, :sidekiq_inline, fea
context 'when it is successful' do
it 'marks the packages as pending destruction' do
expect(::Packages::Maven::Metadata::SyncService).not_to receive(:new)
+ expect(::Packages::Npm::CreateMetadataCacheService).not_to receive(:new)
expect { subject }.to change { ::Packages::Package.pending_destruction.count }.from(0).to(3)
.and change { Packages::PackageFile.pending_destruction.count }.from(0).to(3)
packages.each { |pkg| expect(pkg.reload).to be_pending_destruction }
-
- expect(subject).to be_a(ServiceResponse)
- expect(subject).to be_success
- expect(subject.message).to eq('Packages were successfully marked as pending destruction')
end
+ it_behaves_like 'returning service response', status: :success,
+ message: 'Packages were successfully marked as pending destruction'
+
context 'with maven packages' do
let_it_be_with_reload(:packages) { create_list(:maven_package, 3, project: project) }
@@ -42,12 +53,11 @@ RSpec.describe Packages::MarkPackagesForDestructionService, :sidekiq_inline, fea
expect { subject }.to change { ::Packages::Package.pending_destruction.count }.from(0).to(3)
.and change { Packages::PackageFile.pending_destruction.count }.from(0).to(9)
packages.each { |pkg| expect(pkg.reload).to be_pending_destruction }
-
- expect(subject).to be_a(ServiceResponse)
- expect(subject).to be_success
- expect(subject.message).to eq('Packages were successfully marked as pending destruction')
end
+ it_behaves_like 'returning service response', status: :success,
+ message: 'Packages were successfully marked as pending destruction'
+
context 'without version' do
before do
::Packages::Package.id_in(package_ids).update_all(version: nil)
@@ -59,12 +69,26 @@ RSpec.describe Packages::MarkPackagesForDestructionService, :sidekiq_inline, fea
expect { subject }.to change { ::Packages::Package.pending_destruction.count }.from(0).to(3)
.and change { Packages::PackageFile.pending_destruction.count }.from(0).to(9)
packages.each { |pkg| expect(pkg.reload).to be_pending_destruction }
-
- expect(subject).to be_a(ServiceResponse)
- expect(subject).to be_success
- expect(subject.message).to eq('Packages were successfully marked as pending destruction')
end
+
+ it_behaves_like 'returning service response', status: :success,
+ message: 'Packages were successfully marked as pending destruction'
+ end
+ end
+
+ context 'with npm packages' do
+ let_it_be_with_reload(:packages) { create_list(:npm_package, 3, project: project, name: 'test-package') }
+
+ it 'marks the packages as pending destruction' do
+ expect(::Packages::Npm::CreateMetadataCacheService).to receive(:new).once.and_call_original
+
+ expect { subject }.to change { ::Packages::Package.pending_destruction.count }.from(0).to(3)
+ .and change { Packages::PackageFile.pending_destruction.count }.from(0).to(3)
+ packages.each { |package| expect(package.reload).to be_pending_destruction }
end
+
+ it_behaves_like 'returning service response', status: :success,
+ message: 'Packages were successfully marked as pending destruction'
end
end
@@ -73,7 +97,7 @@ RSpec.describe Packages::MarkPackagesForDestructionService, :sidekiq_inline, fea
allow(service).to receive(:can_destroy_packages?).and_raise(StandardError, 'test')
end
- it 'returns an error ServiceResponse' do
+ it 'does not mark the packages as pending destruction' do
expect(::Packages::Maven::Metadata::SyncService).not_to receive(:new)
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
@@ -83,30 +107,25 @@ RSpec.describe Packages::MarkPackagesForDestructionService, :sidekiq_inline, fea
expect { subject }.to not_change { ::Packages::Package.pending_destruction.count }
.and not_change { ::Packages::PackageFile.pending_destruction.count }
-
- expect(subject).to be_a(ServiceResponse)
- expect(subject).to be_error
- expect(subject.message).to eq("Failed to mark the packages as pending destruction")
- expect(subject.status).to eq(:error)
end
+
+ it_behaves_like 'returning service response', status: :error,
+ message: 'Failed to mark the packages as pending destruction'
end
end
context 'when the user is not authorized' do
let(:user) { nil }
- it 'returns an error ServiceResponse' do
+ it 'does not mark the packages as pending destruction' do
expect(::Packages::Maven::Metadata::SyncService).not_to receive(:new)
expect { subject }.to not_change { ::Packages::Package.pending_destruction.count }
.and not_change { ::Packages::PackageFile.pending_destruction.count }
-
- expect(subject).to be_a(ServiceResponse)
- expect(subject).to be_error
- expect(subject.message).to eq("You don't have the permission to perform this action")
- expect(subject.status).to eq(:error)
- expect(subject.reason).to eq(:unauthorized)
end
+
+ it_behaves_like 'returning service response', status: :error, reason: :unauthorized,
+ message: "You don't have the permission to perform this action"
end
end
end
diff --git a/spec/services/packages/ml_model/create_package_file_service_spec.rb b/spec/services/packages/ml_model/create_package_file_service_spec.rb
index 30a6bedd07b..505c8038976 100644
--- a/spec/services/packages/ml_model/create_package_file_service_spec.rb
+++ b/spec/services/packages/ml_model/create_package_file_service_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :m
let_it_be(:user) { create(:user) }
let_it_be(:pipeline) { create(:ci_pipeline, user: user, project: project) }
let_it_be(:file_name) { 'myfile.tar.gz.1' }
+ let_it_be(:model) { create(:ml_models, user: user, project: project) }
+ let_it_be(:model_version) { create(:ml_model_versions, :with_package, model: model, version: '0.1.0') }
let(:build) { instance_double(Ci::Build, pipeline: pipeline) }
@@ -26,47 +28,24 @@ RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :m
FileUtils.rm_f(temp_file)
end
- context 'without existing package' do
+ context 'when model version is nil' do
let(:params) do
{
- package_name: 'new_model',
- package_version: '1.0.0',
+ model_version: nil,
file: file,
file_name: file_name
}
end
- it 'creates package file', :aggregate_failures do
- expect { execute_service }
- .to change { Packages::MlModel::Package.count }.by(1)
- .and change { Packages::PackageFile.count }.by(1)
- .and change { Packages::PackageFileBuildInfo.count }.by(0)
- .and change { Ml::ModelVersion.count }.by(1)
-
- new_model = Packages::MlModel::Package.last
- package_file = new_model.package_files.last
- new_model_version = Ml::ModelVersion.last
-
- expect(new_model.name).to eq('new_model')
- expect(new_model.version).to eq('1.0.0')
- expect(new_model.status).to eq('default')
- expect(package_file.package).to eq(new_model)
- expect(package_file.file_name).to eq(file_name)
- expect(package_file.size).to eq(file.size)
- expect(package_file.file_sha256).to eq(sha256)
- expect(new_model_version.name).to eq('new_model')
- expect(new_model_version.version).to eq('1.0.0')
- expect(new_model_version.package).to eq(new_model)
+ it 'does not create package file', :aggregate_failures do
+ expect(execute_service).to be(nil)
end
end
- context 'with existing package' do
- let_it_be(:model) { create(:ml_model_package, creator: user, project: project, version: '0.1.0') }
-
+ context 'with existing model version' do
let(:params) do
{
- package_name: model.name,
- package_version: model.version,
+ model_version: model_version,
file: file,
file_name: file_name,
status: :hidden,
@@ -76,18 +55,16 @@ RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :m
it 'adds the package file and updates status and ci_build', :aggregate_failures do
expect { execute_service }
- .to change { project.packages.ml_model.count }.by(0)
- .and change { model.package_files.count }.by(1)
+ .to change { model_version.package.package_files.count }.by(1)
.and change { Packages::PackageFileBuildInfo.count }.by(1)
- model.reload
-
- package_file = model.package_files.last
+ package = model_version.reload.package
+ package_file = package.package_files.last
- expect(model.build_infos.first.pipeline).to eq(build.pipeline)
- expect(model.status).to eq('hidden')
+ expect(package.build_infos.first.pipeline).to eq(build.pipeline)
+ expect(package.status).to eq('hidden')
- expect(package_file.package).to eq(model)
+ expect(package_file.package).to eq(package)
expect(package_file.file_name).to eq(file_name)
expect(package_file.size).to eq(file.size)
expect(package_file.file_sha256).to eq(sha256)
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index 867dc582771..f02e53b67cb 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -322,7 +322,7 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
it { expect(subject[:message]).to eq 'Could not obtain package lease. Please try again.' }
end
- context 'when feature flag :packages_package_protection is disabled' do
+ context 'when feature flag :packages_protected_packages is disabled' do
let_it_be_with_reload(:package_protection_rule) { create(:package_protection_rule, package_type: :npm, project: project) }
before do
diff --git a/spec/services/packages/npm/generate_metadata_service_spec.rb b/spec/services/packages/npm/generate_metadata_service_spec.rb
index f5d7f13d22c..ad3d4fde665 100644
--- a/spec/services/packages/npm/generate_metadata_service_spec.rb
+++ b/spec/services/packages/npm/generate_metadata_service_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe ::Packages::Npm::GenerateMetadataService, feature_category: :package_registry do
using RSpec::Parameterized::TableSyntax
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
let_it_be(:package_name) { "@#{project.root_namespace.path}/test" }
let_it_be(:package1) { create(:npm_package, version: '2.0.4', project: project, name: package_name) }
let_it_be(:package2) { create(:npm_package, version: '2.0.6', project: project, name: package_name) }
@@ -156,6 +157,19 @@ RSpec.describe ::Packages::Npm::GenerateMetadataService, feature_category: :pack
end
end
end
+
+ context 'with duplicate tags' do
+ let_it_be(:project2) { create(:project, namespace: group) }
+ let_it_be(:package2) { create(:npm_package, version: '3.0.0', project: project2, name: package_name) }
+ let_it_be(:package_tag1) { create(:packages_tag, package: package1, name: 'latest') }
+ let_it_be(:package_tag2) { create(:packages_tag, package: package2, name: 'latest') }
+
+ let(:packages) { ::Packages::Package.for_projects([project.id, project2.id]).with_name(package_name) }
+
+ it "returns the tag of the latest package's version" do
+ expect(subject['latest']).to eq(package2.version)
+ end
+ end
end
end
diff --git a/spec/services/packages/protection/update_rule_service_spec.rb b/spec/services/packages/protection/update_rule_service_spec.rb
new file mode 100644
index 00000000000..70619a1caa3
--- /dev/null
+++ b/spec/services/packages/protection/update_rule_service_spec.rb
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Protection::UpdateRuleService, '#execute', feature_category: :environment_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
+ let_it_be_with_reload(:package_protection_rule) { create(:package_protection_rule, project: project) }
+
+ let(:service) { described_class.new(package_protection_rule, current_user: current_user, params: params) }
+
+ let(:params) do
+ attributes_for(
+ :package_protection_rule,
+ package_name_pattern: "#{package_protection_rule.package_name_pattern}-updated",
+ package_type: 'npm',
+ push_protected_up_to_access_level: 'owner'
+ )
+ end
+
+ subject(:service_execute) { service.execute }
+
+ shared_examples 'a successful service response' do
+ let(:expected_attributes) { params }
+
+ it { is_expected.to be_success }
+
+ it do
+ is_expected.to have_attributes(
+ errors: be_blank,
+ message: be_blank,
+ payload: { package_protection_rule: be_a(Packages::Protection::Rule).and(have_attributes(expected_attributes)) }
+ )
+ end
+
+ it { expect { subject }.not_to change { Packages::Protection::Rule.count } }
+
+ it { subject.tap { expect(package_protection_rule.reload).to have_attributes expected_attributes } }
+ end
+
+ shared_examples 'an erroneous service response' do
+ it { is_expected.to be_error }
+
+ it do
+ is_expected.to have_attributes(
+ errors: be_present,
+ message: be_present,
+ payload: { package_protection_rule: nil }
+ )
+ end
+
+ it { expect { subject }.not_to change { Packages::Protection::Rule.count } }
+ it { expect { subject }.not_to change { package_protection_rule.reload.updated_at } }
+ end
+
+ it_behaves_like 'a successful service response'
+
+ context 'with disallowed params' do
+ let(:params) { super().merge!(project_id: 1, unsupported_param: 'unsupported_param_value') }
+
+ it_behaves_like 'a successful service response' do
+ let(:expected_attributes) { params.except(:project_id, :unsupported_param) }
+ end
+ end
+
+ context 'when fields are invalid' do
+ let(:params) do
+ { package_name_pattern: '', package_type: 'unknown_package_type',
+ push_protected_up_to_access_level: 1000 }
+ end
+
+ it_behaves_like 'an erroneous service response'
+
+ it { is_expected.to have_attributes message: /'unknown_package_type' is not a valid package_type/ }
+ end
+
+ context 'with empty params' do
+ let(:params) { {} }
+
+ it_behaves_like 'a successful service response' do
+ let(:expected_attributes) { package_protection_rule.attributes }
+ end
+
+ it { expect { service_execute }.not_to change { package_protection_rule.reload.updated_at } }
+ end
+
+ context 'with nil params' do
+ let(:params) { nil }
+
+ it_behaves_like 'a successful service response' do
+ let(:expected_attributes) { package_protection_rule.attributes }
+ end
+
+ it { expect { service_execute }.not_to change { package_protection_rule.reload.updated_at } }
+ end
+
+ context 'when updated field `package_name_pattern` is already taken' do
+ let_it_be_with_reload(:other_existing_package_protection_rule) do
+ create(:package_protection_rule, project: project,
+ package_name_pattern: "#{package_protection_rule.package_name_pattern}-other")
+ end
+
+ let(:params) { { package_name_pattern: other_existing_package_protection_rule.package_name_pattern } }
+
+ it_behaves_like 'an erroneous service response'
+
+ it do
+ expect { service_execute }.not_to(
+ change { other_existing_package_protection_rule.reload.package_name_pattern }
+ )
+ end
+
+ it do
+ is_expected.to have_attributes(
+ errors: match_array([/Package name pattern has already been taken/]),
+ message: match_array([/Package name pattern has already been taken/])
+ )
+ end
+ end
+
+ context 'when current_user does not have permission' do
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ let_it_be(:anonymous) { create(:user) }
+
+ where(:current_user) do
+ [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
+ end
+
+ with_them do
+ it_behaves_like 'an erroneous service response'
+
+ it { is_expected.to have_attributes errors: match_array(/Unauthorized/), message: /Unauthorized/ }
+ end
+ end
+
+ context 'without package protection rule' do
+ let(:package_protection_rule) { nil }
+ let(:params) { {} }
+
+ it { expect { service_execute }.to raise_error(ArgumentError) }
+ end
+
+ context 'without current_user' do
+ let(:current_user) { nil }
+
+ it { expect { service_execute }.to raise_error(ArgumentError) }
+ end
+end
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 0e46391c0ad..63b5d54a18d 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -188,4 +188,28 @@ RSpec.describe PagesDomains::ObtainLetsEncryptCertificateService, feature_catego
service.execute
end
end
+
+ context 'when the domain URL is longer than 64 characters' do
+ let(:long_domain) { "a.b.c.#{'d' * 63}" }
+ let(:pages_domain) { create(:pages_domain, :without_certificate, :without_key, domain: long_domain) }
+ let(:service) { described_class.new(pages_domain) }
+
+ it 'logs an error and does not proceed with certificate acquisition' do
+ expect(Gitlab::AppLogger).to receive(:error).with(
+ hash_including(
+ message: "Domain name too long for Let's Encrypt certificate",
+ pages_domain: long_domain,
+ pages_domain_bytesize: long_domain.bytesize,
+ max_allowed_bytesize: described_class::MAX_DOMAIN_LENGTH,
+ project_id: pages_domain.project_id
+ )
+ )
+
+ # Ensure that the certificate acquisition is not attempted
+ expect(::PagesDomains::CreateAcmeOrderService).not_to receive(:new)
+ expect(PagesDomainSslRenewalWorker).not_to receive(:perform_in)
+
+ service.execute
+ end
+ end
end
diff --git a/spec/services/product_analytics/build_activity_graph_service_spec.rb b/spec/services/product_analytics/build_activity_graph_service_spec.rb
deleted file mode 100644
index 2eb35523da7..00000000000
--- a/spec/services/product_analytics/build_activity_graph_service_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ProductAnalytics::BuildActivityGraphService, feature_category: :product_analytics_data_management do
- let_it_be(:project) { create(:project) }
- let_it_be(:time_now) { Time.zone.now }
- let_it_be(:time_ago) { Time.zone.now - 5.days }
-
- let_it_be(:events) do
- [
- create(:product_analytics_event, project: project, collector_tstamp: time_now),
- create(:product_analytics_event, project: project, collector_tstamp: time_now),
- create(:product_analytics_event, project: project, collector_tstamp: time_now),
- create(:product_analytics_event, project: project, collector_tstamp: time_ago),
- create(:product_analytics_event, project: project, collector_tstamp: time_ago)
- ]
- end
-
- let(:params) { { timerange: 7 } }
-
- subject { described_class.new(project, params).execute }
-
- it 'returns a valid graph hash' do
- expected_hash = {
- id: 'collector_tstamp',
- keys: [time_ago.to_date, time_now.to_date],
- values: [2, 3]
- }
-
- expect(subject).to eq(expected_hash)
- end
-end
diff --git a/spec/services/product_analytics/build_graph_service_spec.rb b/spec/services/product_analytics/build_graph_service_spec.rb
deleted file mode 100644
index a850d69e53c..00000000000
--- a/spec/services/product_analytics/build_graph_service_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ProductAnalytics::BuildGraphService, feature_category: :product_analytics_data_management do
- let_it_be(:project) { create(:project) }
-
- let_it_be(:events) do
- [
- create(:product_analytics_event, project: project, platform: 'web'),
- create(:product_analytics_event, project: project, platform: 'web'),
- create(:product_analytics_event, project: project, platform: 'app'),
- create(:product_analytics_event, project: project, platform: 'mobile'),
- create(:product_analytics_event, project: project, platform: 'mobile', collector_tstamp: Time.zone.now - 60.days)
- ]
- end
-
- let(:params) { { graph: 'platform', timerange: 5 } }
-
- subject { described_class.new(project, params).execute }
-
- it 'returns a valid graph hash' do
- expect(subject[:id]).to eq(:platform)
- expect(subject[:keys]).to eq(%w[app mobile web])
- expect(subject[:values]).to eq([1, 1, 2])
- end
-end
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index 4b2569f6b2d..228dff6aa0b 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe Projects::AfterRenameService, feature_category: :groups_and_proje
let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
let(:hashed_path) { File.join(hashed_prefix, hash) }
+ let(:message) { "Repository #{full_path_before_rename} could not be renamed to #{full_path_after_rename}" }
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
@@ -35,6 +36,15 @@ RSpec.describe Projects::AfterRenameService, feature_category: :groups_and_proje
stub_application_setting(hashed_storage_enabled: true)
end
+ shared_examples 'logging and raising a RenameFailedError' do
+ it 'logs and raises a RenameFailedError' do
+ expect_any_instance_of(described_class).to receive(:log_error).with(message)
+
+ expect { service_execute }
+ .to raise_error(described_class::RenameFailedError)
+ end
+ end
+
it 'renames a repository' do
stub_container_registry_config(enabled: false)
@@ -47,8 +57,21 @@ RSpec.describe Projects::AfterRenameService, feature_category: :groups_and_proje
service_execute
end
+ context 'when renaming or migrating fails' do
+ before do
+ allow_any_instance_of(::Projects::HashedStorage::MigrationService)
+ .to receive(:execute).and_return(false)
+ end
+
+ it_behaves_like 'logging and raising a RenameFailedError'
+ end
+
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
+ let(:message) do
+ "Project #{full_path_before_rename} cannot be renamed because images are " \
+ "present in its container registry"
+ end
before do
stub_container_registry_config(enabled: true)
@@ -56,9 +79,36 @@ RSpec.describe Projects::AfterRenameService, feature_category: :groups_and_proje
project.container_repositories << container_repository
end
- it 'raises a RenameFailedError' do
- expect { service_execute }
- .to raise_error(described_class::RenameFailedError)
+ context 'when Gitlab API is not supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false)
+ end
+
+ it_behaves_like 'logging and raising a RenameFailedError'
+ end
+
+ context 'when Gitlab API Client is supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+ end
+
+ it 'renames the base repository in the registry' do
+ expect(ContainerRegistry::GitlabApiClient).to receive(:rename_base_repository_path)
+ .with(full_path_before_rename, name: path_after_rename).and_return(:ok)
+
+ service_execute
+ end
+
+ context 'when the base repository rename in the registry fails' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient)
+ .to receive(:rename_base_repository_path).and_return(:bad_request)
+ end
+
+ let(:message) { 'Renaming the base repository in the registry failed with error bad_request.' }
+
+ it_behaves_like 'logging and raising a RenameFailedError'
+ end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 899ed477180..5a7abf6cde8 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -812,6 +812,31 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an
end
end
+ context 'when SHA256 format is requested' do
+ let(:project) { create_project(user, opts) }
+ let(:opts) { super().merge(initialize_with_readme: true, repository_object_format: 'sha256') }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
+ it 'creates a repository with SHA256 commit hashes', :aggregate_failures do
+ expect(project.repository.commit_count).to be(1)
+ expect(project.commit.id.size).to eq 64
+ end
+
+ context 'when "support_sha256_repositories" feature flag is disabled' do
+ before do
+ stub_feature_flags(support_sha256_repositories: false)
+ end
+
+ it 'creates a repository with default SHA1 commit hash' do
+ expect(project.repository.commit_count).to be(1)
+ expect(project.commit.id.size).to eq 40
+ end
+ end
+ end
+
describe 'create integration for the project' do
subject(:project) { create_project(user, opts) }
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index a0064eadf13..3aea329a45f 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -454,6 +454,12 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect { destroy_project(forked_project, user) }
.not_to raise_error
end
+
+ it 'does not update project statistics for the deleted project' do
+ expect(ProjectCacheWorker).not_to receive(:perform_async)
+
+ destroy_project(forked_project, user)
+ end
end
context 'as the root of a fork network' do
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index ceb060445ad..e6418c7b4ea 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -387,7 +387,7 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
# Stub everything required to move a project to a Gitaly shard that does not exist
allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
- stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
+ stub_storage_settings('test_second_storage' => {})
allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_repository)
.and_return(true)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
diff --git a/spec/services/projects/group_links/create_service_spec.rb b/spec/services/projects/group_links/create_service_spec.rb
index e3f170ef3fe..6bc0b86545a 100644
--- a/spec/services/projects/group_links/create_service_spec.rb
+++ b/spec/services/projects/group_links/create_service_spec.rb
@@ -103,11 +103,57 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute', feature_category
it_behaves_like 'shareable'
end
end
+
+ context 'when sharing it to a group with OWNER access' do
+ let(:opts) do
+ {
+ link_group_access: Gitlab::Access::OWNER,
+ expires_at: nil
+ }
+ end
+
+ it 'does not share and returns a forbidden error' do
+ expect do
+ result = subject.execute
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(403)
+ end.not_to change { project.reload.project_group_links.count }
+ end
+ end
+ end
+
+ context 'when the user is an OWNER in the project' do
+ before do
+ project.add_owner(user)
+ end
+
+ it_behaves_like 'shareable'
+
+ context 'when sharing it to a group with OWNER access' do
+ let(:opts) do
+ {
+ link_group_access: Gitlab::Access::OWNER,
+ expires_at: nil
+ }
+ end
+
+ it_behaves_like 'shareable'
+ end
end
end
context 'when user does not have permissions to share the project with a group' do
it_behaves_like 'not shareable'
+
+ context 'when the user has less than MAINTAINER access in the project' do
+ before do
+ group.add_guest(user)
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'not shareable'
+ end
end
context 'when group is blank' do
diff --git a/spec/services/projects/group_links/destroy_service_spec.rb b/spec/services/projects/group_links/destroy_service_spec.rb
index 0cd003f6142..4fc441c9e26 100644
--- a/spec/services/projects/group_links/destroy_service_spec.rb
+++ b/spec/services/projects/group_links/destroy_service_spec.rb
@@ -19,8 +19,10 @@ RSpec.describe Projects::GroupLinks::DestroyService, '#execute', feature_categor
end
end
- shared_examples_for 'returns not_found' do
- it do
+ context 'if group_link is blank' do
+ let!(:group_link) { nil }
+
+ it 'returns 404 not found' do
expect do
result = subject.execute(group_link)
@@ -30,14 +32,15 @@ RSpec.describe Projects::GroupLinks::DestroyService, '#execute', feature_categor
end
end
- context 'if group_link is blank' do
- let!(:group_link) { nil }
-
- it_behaves_like 'returns not_found'
- end
-
context 'if the user does not have access to destroy the link' do
- it_behaves_like 'returns not_found'
+ it 'returns 404 not found' do
+ expect do
+ result = subject.execute(group_link)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:reason]).to eq(:not_found)
+ end.not_to change { project.reload.project_group_links.count }
+ end
end
context 'when the user has proper permissions to remove a group-link from a project' do
@@ -111,6 +114,41 @@ RSpec.describe Projects::GroupLinks::DestroyService, '#execute', feature_categor
end
end
end
+
+ context 'on trying to destroy a link with OWNER access' do
+ let(:group_access) { Gitlab::Access::OWNER }
+
+ it 'does not remove the group from project' do
+ expect do
+ result = subject.execute(group_link)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:reason]).to eq(:forbidden)
+ end.not_to change { project.reload.project_group_links.count }
+ end
+
+ context 'if the user is an OWNER of the group' do
+ before do
+ group.add_owner(user)
+ end
+
+ it_behaves_like 'removes group from project'
+ end
+ end
+ end
+
+ context 'when the user is an OWNER in the project' do
+ before do
+ project.add_owner(user)
+ end
+
+ it_behaves_like 'removes group from project'
+
+ context 'on trying to destroy a link with OWNER access' do
+ let(:group_access) { Gitlab::Access::OWNER }
+
+ it_behaves_like 'removes group from project'
+ end
end
end
diff --git a/spec/services/projects/group_links/update_service_spec.rb b/spec/services/projects/group_links/update_service_spec.rb
index b02614fa062..86ad1bcf286 100644
--- a/spec/services/projects/group_links/update_service_spec.rb
+++ b/spec/services/projects/group_links/update_service_spec.rb
@@ -20,8 +20,8 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute', feature_category
subject { described_class.new(link, user).execute(group_link_params) }
- shared_examples_for 'returns not_found' do
- it do
+ context 'when the user does not have proper permissions to update a project group link' do
+ it 'returns 404 not found' do
result = subject
expect(result[:status]).to eq(:error)
@@ -29,10 +29,6 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute', feature_category
end
end
- context 'when the user does not have proper permissions to update a project group link' do
- it_behaves_like 'returns not_found'
- end
-
context 'when user has proper permissions to update a project group link' do
context 'when the user is a MAINTAINER in the project' do
before do
@@ -92,6 +88,86 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute', feature_category
subject
end
end
+
+ context 'updating a link with OWNER access' do
+ let(:group_access) { Gitlab::Access::OWNER }
+
+ shared_examples_for 'returns :forbidden' do
+ it do
+ expect do
+ result = subject
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:reason]).to eq(:forbidden)
+ end.to not_change { link.expires_at }.and not_change { link.group_access }
+ end
+ end
+
+ context 'updating expires_at' do
+ let(:group_link_params) do
+ { expires_at: 7.days.from_now }
+ end
+
+ it_behaves_like 'returns :forbidden'
+ end
+
+ context 'updating group_access' do
+ let(:group_link_params) do
+ { group_access: Gitlab::Access::MAINTAINER }
+ end
+
+ it_behaves_like 'returns :forbidden'
+ end
+
+ context 'updating both expires_at and group_access' do
+ it_behaves_like 'returns :forbidden'
+ end
+ end
+ end
+
+ context 'when the user is an OWNER in the project' do
+ before do
+ project.add_owner(user)
+ end
+
+ context 'updating expires_at' do
+ let(:group_link_params) do
+ { expires_at: 7.days.from_now.to_date }
+ end
+
+ it 'updates existing link' do
+ expect do
+ result = subject
+
+ expect(result[:status]).to eq(:success)
+ end.to change { link.reload.expires_at }.to(group_link_params[:expires_at])
+ end
+ end
+
+ context 'updating group_access' do
+ let(:group_link_params) do
+ { group_access: Gitlab::Access::MAINTAINER }
+ end
+
+ it 'updates existing link' do
+ expect do
+ result = subject
+
+ expect(result[:status]).to eq(:success)
+ end.to change { link.reload.group_access }.to(group_link_params[:group_access])
+ end
+ end
+
+ context 'updating both expires_at and group_access' do
+ it 'updates existing link' do
+ expect do
+ result = subject
+
+ expect(result[:status]).to eq(:success)
+ end.to change { link.reload.group_access }.to(group_link_params[:group_access])
+ .and change { link.reload.expires_at }.to(group_link_params[:expires_at])
+ end
+ end
end
end
end
diff --git a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
index e32747ad907..4834af79225 100644
--- a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
+++ b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Projects::HashedStorage::BaseAttachmentService, feature_category:
describe '#move_folder!' do
context 'when old_path is not a directory' do
it 'adds information to the logger and returns true' do
- Tempfile.create do |old_path| # rubocop:disable Rails/SaveBang
+ Tempfile.create do |old_path|
new_path = "#{old_path}-new"
expect(subject.send(:move_folder!, old_path, new_path)).to be_truthy
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index 16b9d2618ca..dd6e82b9ef2 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -183,81 +183,6 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
-
- context 'when bitbucket_parallel_importer feature flag is disabled' do
- before do
- stub_feature_flags(bitbucket_parallel_importer: false)
- end
-
- it 'succeeds if repository import is successful' do
- expect(project.repository).to receive(:import_repository).and_return(true)
- expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
- expect(importer).to receive(:execute).and_return(true)
- end
-
- expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
- expect(service).to receive(:execute).and_return(status: :success)
- end
-
- result = subject.execute
-
- expect(result[:status]).to eq :success
- end
-
- it 'fails if repository import fails' do
- expect(project.repository)
- .to receive(:import_repository)
- .with('https://bitbucket.org/vim/vim.git', resolved_address: '')
- .and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
-
- result = subject.execute
-
- expect(result[:status]).to eq :error
- expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
- end
-
- context 'when lfs import fails' do
- it 'logs the error' do
- error_message = 'error message'
-
- expect(project.repository).to receive(:import_repository).and_return(true)
-
- expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
- expect(importer).to receive(:execute).and_return(true)
- end
-
- expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
- expect(service).to receive(:execute).and_return(status: :error, message: error_message)
- end
-
- expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
-
- subject.execute
- end
- end
-
- context 'when repository import scheduled' do
- before do
- expect(project.repository).to receive(:import_repository).and_return(true)
- allow(subject).to receive(:import_data)
- end
-
- it 'downloads lfs objects if lfs_enabled is enabled for project' do
- allow(project).to receive(:lfs_enabled?).and_return(true)
-
- expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute)
-
- subject.execute
- end
-
- it 'does not download lfs objects if lfs_enabled is not enabled for project' do
- allow(project).to receive(:lfs_enabled?).and_return(false)
- expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
-
- subject.execute
- end
- end
- end
end
end
end
@@ -352,13 +277,53 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
end
end
+ context 'when import is a local request' do
+ before do
+ project.import_url = "http://127.0.0.1/group/project"
+ end
+
+ context 'when local network requests are enabled' do
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
+ end
+
+ it 'returns an error' do
+ expect(project.repository).not_to receive(:import_repository)
+ expect(subject.execute).to include(
+ status: :error,
+ message: end_with('Requests to localhost are not allowed')
+ )
+ end
+
+ context 'when environment is development' do
+ before do
+ stub_rails_env('development')
+ end
+
+ it 'imports successfully' do
+ expect(project.repository)
+ .to receive(:import_repository)
+ .and_return(true)
+ expect(subject.execute[:status]).to eq(:success)
+ end
+ end
+ end
+ end
+
context 'when DNS rebind protection is disabled' do
before do
allow(Gitlab::CurrentSettings).to receive(:dns_rebinding_protection_enabled?).and_return(false)
project.import_url = "https://example.com/group/project"
allow(Gitlab::UrlBlocker).to receive(:validate!)
- .with(project.import_url, ports: Project::VALID_IMPORT_PORTS, schemes: Project::VALID_IMPORT_PROTOCOLS, dns_rebind_protection: false)
+ .with(
+ project.import_url,
+ ports: Project::VALID_IMPORT_PORTS,
+ schemes: Project::VALID_IMPORT_PROTOCOLS,
+ allow_local_network: false,
+ allow_localhost: false,
+ dns_rebind_protection: false
+ )
.and_return([Addressable::URI.parse("https://example.com/group/project"), nil])
end
@@ -386,7 +351,14 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
project.import_url = "https://example.com/group/project"
allow(Gitlab::UrlBlocker).to receive(:validate!)
- .with(project.import_url, ports: Project::VALID_IMPORT_PORTS, schemes: Project::VALID_IMPORT_PROTOCOLS, dns_rebind_protection: true)
+ .with(
+ project.import_url,
+ ports: Project::VALID_IMPORT_PORTS,
+ schemes: Project::VALID_IMPORT_PROTOCOLS,
+ allow_local_network: false,
+ allow_localhost: false,
+ dns_rebind_protection: true
+ )
.and_return([Addressable::URI.parse("https://172.16.123.1/group/project"), 'example.com'])
end
@@ -407,7 +379,14 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
project.import_url = 'https://gitlab.com/gitlab-org/gitlab-development-kit'
allow(Gitlab::UrlBlocker).to receive(:validate!)
- .with(project.import_url, ports: Project::VALID_IMPORT_PORTS, schemes: Project::VALID_IMPORT_PROTOCOLS, dns_rebind_protection: true)
+ .with(
+ project.import_url,
+ ports: Project::VALID_IMPORT_PORTS,
+ schemes: Project::VALID_IMPORT_PROTOCOLS,
+ allow_local_network: false,
+ allow_localhost: false,
+ dns_rebind_protection: true
+ )
.and_return([Addressable::URI.parse('https://[2606:4700:90:0:f22e:fbec:5bed:a9b9]/gitlab-org/gitlab-development-kit'), 'gitlab.com'])
end
@@ -430,7 +409,14 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
project.import_url = "http://example.com/group/project"
allow(Gitlab::UrlBlocker).to receive(:validate!)
- .with(project.import_url, ports: Project::VALID_IMPORT_PORTS, schemes: Project::VALID_IMPORT_PROTOCOLS, dns_rebind_protection: true)
+ .with(
+ project.import_url,
+ ports: Project::VALID_IMPORT_PORTS,
+ schemes: Project::VALID_IMPORT_PROTOCOLS,
+ allow_local_network: false,
+ allow_localhost: false,
+ dns_rebind_protection: true
+ )
.and_return([Addressable::URI.parse("http://172.16.123.1/group/project"), 'example.com'])
end
@@ -452,7 +438,14 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
project.import_url = "git://example.com/group/project.git"
allow(Gitlab::UrlBlocker).to receive(:validate!)
- .with(project.import_url, ports: Project::VALID_IMPORT_PORTS, schemes: Project::VALID_IMPORT_PROTOCOLS, dns_rebind_protection: true)
+ .with(
+ project.import_url,
+ ports: Project::VALID_IMPORT_PORTS,
+ schemes: Project::VALID_IMPORT_PROTOCOLS,
+ allow_local_network: false,
+ allow_localhost: false,
+ dns_rebind_protection: true
+ )
.and_return([Addressable::URI.parse("git://172.16.123.1/group/project"), 'example.com'])
end
diff --git a/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
index d3f053aaedc..5862ed15c2a 100644
--- a/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb
@@ -91,5 +91,23 @@ RSpec.describe Projects::LfsPointers::LfsLinkService, feature_category: :source_
# 3. Insert the lfs_objects_projects for that batch
expect { subject.execute(new_oid_list.keys) }.not_to exceed_query_limit(3)
end
+
+ context 'when MAX_OIDS is 5' do
+ let(:max_oids) { 5 }
+ let(:oids) { Array.new(max_oids) { |i| "oid-#{i}" } }
+
+ before do
+ stub_const("#{described_class}::MAX_OIDS", max_oids)
+ end
+
+ it 'does not raise an error when trying to link exactly the OID limit' do
+ expect { subject.execute(oids) }.not_to raise_error
+ end
+
+ it 'raises an error when trying to link more than OID limit' do
+ oids << 'the straw'
+ expect { subject.execute(oids) }.to raise_error(described_class::TooManyOidsError)
+ end
+ end
end
end
diff --git a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
index 591cd1cba8d..48cf963e6e1 100644
--- a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
+++ b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitlab_redis_buffered_counter, feature_category: :build_artifacts do
let(:service) { described_class.new }
describe '#execute' do
@@ -79,7 +79,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
end
before do
- allow(Gitlab::Redis::SharedState).to receive(:with).and_raise(StandardError, 'error')
+ allow(Gitlab::Redis::BufferedCounter).to receive(:with).and_raise(StandardError, 'error')
expect { service.execute }.to raise_error(StandardError)
end
diff --git a/spec/services/projects/unlink_fork_service_spec.rb b/spec/services/projects/unlink_fork_service_spec.rb
index 872e38aba1d..2e1a6c03c90 100644
--- a/spec/services/projects/unlink_fork_service_spec.rb
+++ b/spec/services/projects/unlink_fork_service_spec.rb
@@ -58,6 +58,26 @@ RSpec.describe Projects::UnlinkForkService, :use_clean_rails_memory_store_cachin
expect(source.forks_count).to be_zero
end
+ it 'refreshes the project statistics of the forked project' do
+ expect(ProjectCacheWorker).to receive(:perform_async).with(forked_project.id, [], [:repository_size])
+
+ subject.execute
+ end
+
+ it 'does not refresh project statistics when refresh_statistics is false' do
+ expect(ProjectCacheWorker).not_to receive(:perform_async)
+
+ subject.execute(refresh_statistics: false)
+ end
+
+ it 'does not refresh project statistics when the feature flag is disabled' do
+ stub_feature_flags(refresh_statistics_on_unlink_fork: false)
+
+ expect(ProjectCacheWorker).not_to receive(:perform_async)
+
+ subject.execute
+ end
+
context 'when the original project was deleted' do
it 'does not fail when the original project is deleted' do
source = forked_project.forked_from_project
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index b81fc8bf633..de7e6ae9a40 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -13,12 +13,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
before do
allow(Time).to receive(:now).and_return(time)
- stub_storage_settings(
- 'test_second_storage' => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- }
- )
+ stub_storage_settings('test_second_storage' => {})
end
context 'without wiki and design repository' do
@@ -76,6 +71,8 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('test_second_storage')
expect(project.project_repository.shard_name).to eq('test_second_storage')
+ expect(repository_storage_move.reload).to be_finished
+ expect(repository_storage_move.error_message).to be_nil
end
end
@@ -100,6 +97,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
expect(project).not_to be_repository_read_only
expect(repository_storage_move.reload).to be_failed
+ expect(repository_storage_move.error_message).to eq('Boom')
end
end
@@ -126,7 +124,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
- .and_raise(Gitlab::Git::CommandError)
+ .and_raise(Gitlab::Git::CommandError, 'Boom')
expect(project_repository_double).to receive(:remove)
expect do
@@ -136,6 +134,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
expect(repository_storage_move).to be_failed
+ expect(repository_storage_move.error_message).to eq('Boom')
end
end
@@ -216,29 +215,6 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
expect(object_pool_double).to have_received(:link).with(project.repository.raw)
end
- context 'when feature flag replicate_object_pool_on_move is disabled' do
- before do
- stub_feature_flags(replicate_object_pool_on_move: false)
- end
-
- it 'just moves the repository without the object pool' do
- result = subject.execute
- expect(result).to be_success
-
- project.reload.cleanup
-
- new_pool_repository = project.pool_repository
-
- expect(new_pool_repository).to eq(pool_repository)
- expect(new_pool_repository.shard).to eq(shard_default)
- expect(new_pool_repository.state).to eq('ready')
- expect(new_pool_repository.source_project).to eq(project)
-
- expect(object_pool_repository_double).not_to have_received(:replicate)
- expect(object_pool_double).not_to have_received(:link)
- end
- end
-
context 'when new shard has a repository pool' do
let!(:new_pool_repository) { create(:pool_repository, :ready, shard: shard_to, source_project: project) }
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 7ab85d8253a..a3f6c472f3b 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -356,7 +356,7 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
context 'when changes project features' do
# Using some sample features for testing.
# Not using all the features because some of them must be enabled/disabled together
- %w[issues wiki forking model_experiments].each do |feature_name|
+ %w[issues wiki forking model_experiments model_registry].each do |feature_name|
context "with feature_name:#{feature_name}" do
let(:feature) { "#{feature_name}_access_level" }
let(:params) do
@@ -415,17 +415,92 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
end
context 'when updating a project that contains container images' do
+ let(:new_name) { 'renamed' }
+
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: /image/, tags: %w[rc1])
create(:container_repository, project: project, name: :image)
end
- it 'does not allow to rename the project' do
- result = update_project(project, admin, path: 'renamed')
+ shared_examples 'renaming the project fails with message' do |error_message|
+ it 'does not allow to rename the project' do
+ result = update_project(project, admin, path: new_name)
+
+ expect(result).to include(status: :error)
+ expect(result[:message]).to match(error_message)
+ end
+ end
+
+ context 'when the GitlabAPI is not supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false)
+ end
+
+ it_behaves_like 'renaming the project fails with message', /contains container registry tags/
+ end
+
+ context 'when Gitlab API is supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+ end
+
+ it 'executes a dry run of the project rename' do
+ stub_rename_base_repository_in_registry(dry_run: true)
+
+ update_project(project, admin, path: new_name)
+
+ expect_rename_of_base_repository_in_registry(dry_run: true)
+ end
+
+ context 'when the dry run fails' do
+ before do
+ stub_rename_base_repository_in_registry(dry_run: true, result: :bad_request)
+ end
+
+ it_behaves_like 'renaming the project fails with message', /container registry path rename validation failed/
+
+ it 'logs the error' do
+ expect_any_instance_of(described_class).to receive(:log_error).with("Dry run failed for renaming project with tags: #{project.full_path}, error: bad_request")
+
+ update_project(project, admin, path: new_name)
+ end
+ end
+
+ context 'when the dry run succeeds' do
+ before do
+ stub_rename_base_repository_in_registry(dry_run: true, result: :accepted)
+ end
+
+ it 'continues with the project rename' do
+ stub_rename_base_repository_in_registry(dry_run: false, result: :ok)
+ old_project_full_path = project.full_path
- expect(result).to include(status: :error)
- expect(result[:message]).to match(/contains container registry tags/)
+ update_project(project, admin, path: new_name)
+
+ expect_rename_of_base_repository_in_registry(dry_run: true, path: old_project_full_path)
+ expect_rename_of_base_repository_in_registry(dry_run: false, path: old_project_full_path)
+ end
+ end
+
+ def stub_rename_base_repository_in_registry(dry_run:, result: nil)
+ options = { name: new_name }
+ options[:dry_run] = true if dry_run
+
+ allow(ContainerRegistry::GitlabApiClient)
+ .to receive(:rename_base_repository_path)
+ .with(project.full_path, options)
+ .and_return(result)
+ end
+
+ def expect_rename_of_base_repository_in_registry(dry_run:, path: nil)
+ options = { name: new_name }
+ options[:dry_run] = true if dry_run
+
+ expect(ContainerRegistry::GitlabApiClient)
+ .to have_received(:rename_base_repository_path)
+ .with(path || project.full_path, options)
+ end
end
it 'allows to update other settings' do
@@ -708,7 +783,7 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
let(:opts) { { repository_storage: 'test_second_storage' } }
before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ stub_storage_settings('test_second_storage' => {})
end
shared_examples 'the transfer was not scheduled' do
diff --git a/spec/services/protected_branches/create_service_spec.rb b/spec/services/protected_branches/create_service_spec.rb
index 625aa4fa377..abfb73c147e 100644
--- a/spec/services/protected_branches/create_service_spec.rb
+++ b/spec/services/protected_branches/create_service_spec.rb
@@ -16,6 +16,8 @@ RSpec.describe ProtectedBranches::CreateService, feature_category: :compliance_m
describe '#execute' do
let(:name) { 'master' }
+ let(:group_cache_service_double) { instance_double(ProtectedBranches::CacheService) }
+ let(:project_cache_service_double) { instance_double(ProtectedBranches::CacheService) }
it 'creates a new protected branch' do
expect { service.execute }.to change(ProtectedBranch, :count).by(1)
@@ -24,8 +26,12 @@ RSpec.describe ProtectedBranches::CreateService, feature_category: :compliance_m
end
it 'refreshes the cache' do
- expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
- expect(cache_service).to receive(:refresh)
+ expect(ProtectedBranches::CacheService).to receive(:new).with(entity, user, params).and_return(group_cache_service_double)
+ expect(group_cache_service_double).to receive(:refresh)
+
+ if entity.is_a?(Group)
+ expect(ProtectedBranches::CacheService).to receive(:new).with(project, user, params).and_return(project_cache_service_double)
+ expect(project_cache_service_double).to receive(:refresh)
end
service.execute
@@ -58,14 +64,14 @@ RSpec.describe ProtectedBranches::CreateService, feature_category: :compliance_m
context 'with entity project' do
let_it_be_with_reload(:entity) { create(:project) }
-
let(:user) { entity.first_owner }
it_behaves_like 'execute with entity'
end
context 'with entity group' do
- let_it_be_with_reload(:entity) { create(:group) }
+ let_it_be_with_reload(:project) { create(:project, :in_group) }
+ let_it_be_with_reload(:entity) { project.group }
let_it_be_with_reload(:user) { create(:user) }
before do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 1c9c6323e96..dc93fd96aee 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -554,14 +554,14 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
- shared_examples 'award command' do
- it 'toggle award 100 emoji if content contains /award :100:' do
+ shared_examples 'react command' do |command|
+ it "toggle award 100 emoji if content contains #{command} :100:" do
_, updates, _ = service.execute(content, issuable)
expect(updates).to eq(emoji_award: "100")
end
- it 'returns the award message' do
+ it 'returns the reaction message' do
_, _, message = service.execute(content, issuable)
expect(message).to eq('Toggled :100: emoji award.')
@@ -1861,56 +1861,59 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
- context '/award command' do
- it_behaves_like 'award command' do
- let(:content) { '/award :100:' }
- let(:issuable) { issue }
- end
-
- it_behaves_like 'award command' do
- let(:content) { '/award :100:' }
- let(:issuable) { merge_request }
- end
-
- it_behaves_like 'award command' do
- let(:content) { '/award :100:' }
- let(:issuable) { work_item }
- end
-
- context 'ignores command with no argument' do
- it_behaves_like 'failed command' do
- let(:content) { '/award' }
+ %w[/react /award].each do |command|
+ context "#{command} command" do
+ it_behaves_like 'react command', command do
+ let(:content) { "#{command} :100:" }
let(:issuable) { issue }
end
- it_behaves_like 'failed command' do
- let(:content) { '/award' }
- let(:issuable) { work_item }
+ it_behaves_like 'react command', command do
+ let(:content) { "#{command} :100:" }
+ let(:issuable) { merge_request }
end
- end
- context 'ignores non-existing / invalid emojis' do
- it_behaves_like 'failed command' do
- let(:content) { '/award noop' }
- let(:issuable) { issue }
+ it_behaves_like 'react command', command do
+ let(:content) { "#{command} :100:" }
+ let(:issuable) { work_item }
end
- it_behaves_like 'failed command' do
- let(:content) { '/award :lorem_ipsum:' }
- let(:issuable) { issue }
+ context 'ignores command with no argument' do
+ it_behaves_like 'failed command' do
+ let(:content) { command }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'failed command' do
+ let(:content) { command }
+ let(:issuable) { work_item }
+ end
end
- it_behaves_like 'failed command' do
- let(:content) { '/award :lorem_ipsum:' }
- let(:issuable) { work_item }
+ context 'ignores non-existing / invalid emojis' do
+ it_behaves_like 'failed command' do
+ let(:content) { "#{command} noop" }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'failed command' do
+ let(:content) { "#{command} :lorem_ipsum:" }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'failed command' do
+ let(:content) { "#{command} :lorem_ipsum:" }
+ let(:issuable) { work_item }
+ end
end
- end
- context 'if issuable is a Commit' do
- let(:content) { '/award :100:' }
- let(:issuable) { commit }
+ context 'if issuable is a Commit' do
+ let(:content) { "#{command} :100:" }
+ let(:issuable) { commit }
- it_behaves_like 'failed command', 'Could not apply award command.'
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/434446
+ it_behaves_like 'failed command', "Could not apply award command."
+ end
end
end
@@ -2185,6 +2188,36 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
expect(message).to eq('Submitted the current review.')
end
end
+
+ context 'when parameters are passed' do
+ context 'with approve parameter' do
+ it 'calls MergeRequests::ApprovalService service' do
+ expect_next_instance_of(
+ MergeRequests::ApprovalService, project: merge_request.project, current_user: current_user
+ ) do |service|
+ expect(service).to receive(:execute).with(merge_request)
+ end
+
+ _, _, message = service.execute('/submit_review approve', merge_request)
+
+ expect(message).to eq('Submitted the current review.')
+ end
+ end
+
+ context 'with review state parameter' do
+ it 'calls MergeRequests::UpdateReviewerStateService service' do
+ expect_next_instance_of(
+ MergeRequests::UpdateReviewerStateService, project: merge_request.project, current_user: current_user
+ ) do |service|
+ expect(service).to receive(:execute).with(merge_request, 'requested_changes')
+ end
+
+ _, _, message = service.execute('/submit_review requested_changes', merge_request)
+
+ expect(message).to eq('Submitted the current review.')
+ end
+ end
+ end
end
context 'request_changes command' do
@@ -2374,6 +2407,30 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
+ context 'when participants limit on issue is reached' do
+ before do
+ issue.issue_email_participants.create!(email: 'user@example.com')
+ stub_const("IssueEmailParticipants::CreateService::MAX_NUMBER_OF_RECORDS", 1)
+ end
+
+ let(:content) { '/invite_email a@gitlab.com' }
+
+ it_behaves_like 'failed command',
+ "No email participants were added. Either none were provided, or they already exist."
+ end
+
+ context 'when only some emails can be added because of participants limit' do
+ before do
+ stub_const("IssueEmailParticipants::CreateService::MAX_NUMBER_OF_RECORDS", 1)
+ end
+
+ let(:content) { '/invite_email a@gitlab.com b@gitlab.com' }
+
+ it 'only adds one new email' do
+ expect { add_emails }.to change { issue.issue_email_participants.count }.by(1)
+ end
+ end
+
context 'with feature flag disabled' do
before do
stub_feature_flags(issue_email_participants: false)
@@ -2384,6 +2441,18 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
end
+
+ it 'is part of the available commands' do
+ expect(service.available_commands(issuable)).to include(a_hash_including(name: :invite_email))
+ end
+
+ context 'with non-persisted issue' do
+ let(:issuable) { build(:issue) }
+
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :invite_email))
+ end
+ end
end
context 'severity command' do
diff --git a/spec/services/releases/destroy_service_spec.rb b/spec/services/releases/destroy_service_spec.rb
index de3ce2b6206..b7729043896 100644
--- a/spec/services/releases/destroy_service_spec.rb
+++ b/spec/services/releases/destroy_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
end
context 'when the release is for a catalog resource' do
- let!(:catalog_resource) { create(:ci_catalog_resource, project: project, state: 'published') }
+ let!(:catalog_resource) { create(:ci_catalog_resource, :published, project: project) }
let!(:version) { create(:ci_catalog_resource_version, catalog_resource: catalog_resource, release: release) }
it 'does not update the catalog resources if there are still releases' do
diff --git a/spec/services/service_desk/custom_email_verifications/create_service_spec.rb b/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
index 0046213e0b2..03c5743434e 100644
--- a/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
+++ b/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
let(:service) { described_class.new(project: project, current_user: user) }
- let(:error_feature_flag_disabled) { 'Feature flag service_desk_custom_email is not enabled' }
let(:error_user_not_authorized) { s_('ServiceDesk|User cannot manage project.') }
let(:error_settings_missing) { s_('ServiceDesk|Service Desk setting missing') }
let(:expected_error_message) { error_settings_missing }
@@ -45,7 +44,7 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
end
end
- shared_examples 'a verification process with ramp up error' do |error, error_identifier|
+ shared_examples 'a verification process with ramp up error' do
it 'aborts verification process', :aggregate_failures do
allow(message).to receive(:deliver).and_raise(error)
@@ -80,16 +79,6 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
it_behaves_like 'a verification process that exits early'
- context 'when feature flag :service_desk_custom_email is disabled' do
- let(:expected_error_message) { error_feature_flag_disabled }
-
- before do
- stub_feature_flags(service_desk_custom_email: false)
- end
-
- it_behaves_like 'a verification process that exits early'
- end
-
context 'when service desk setting exists' do
let(:settings) { create(:service_desk_setting, project: project, custom_email: 'user@example.com') }
let(:service) { described_class.new(project: settings.project, current_user: user) }
@@ -115,7 +104,7 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
end
context 'when user has maintainer role in project' do
- before do
+ before_all do
project.add_maintainer(user)
end
@@ -151,10 +140,25 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
end
- it_behaves_like 'a verification process with ramp up error', SocketError, 'smtp_host_issue'
- it_behaves_like 'a verification process with ramp up error', OpenSSL::SSL::SSLError, 'smtp_host_issue'
- it_behaves_like 'a verification process with ramp up error',
- Net::SMTPAuthenticationError.new('Invalid username or password'), 'invalid_credentials'
+ it_behaves_like 'a verification process with ramp up error' do
+ let(:error) { SocketError }
+ let(:error_identifier) { 'smtp_host_issue' }
+ end
+
+ it_behaves_like 'a verification process with ramp up error' do
+ let(:error) { OpenSSL::SSL::SSLError }
+ let(:error_identifier) { 'smtp_host_issue' }
+ end
+
+ it_behaves_like 'a verification process with ramp up error' do
+ let(:error) { Net::SMTPAuthenticationError.new('Invalid username or password') }
+ let(:error_identifier) { 'invalid_credentials' }
+ end
+
+ it_behaves_like 'a verification process with ramp up error' do
+ let(:error) { Net::ReadTimeout }
+ let(:error_identifier) { 'read_timeout' }
+ end
end
end
end
diff --git a/spec/services/service_desk/custom_email_verifications/update_service_spec.rb b/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
index f87952d1d0e..103caf0e6c5 100644
--- a/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
+++ b/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
let(:service) { described_class.new(project: settings.project, params: { mail: mail_object }) }
- let(:error_feature_flag_disabled) { 'Feature flag service_desk_custom_email is not enabled' }
let(:error_parameter_missing) { s_('ServiceDesk|Service Desk setting or verification object missing') }
let(:error_already_finished) { s_('ServiceDesk|Custom email address has already been verified.') }
let(:error_already_failed) do
@@ -28,6 +27,9 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
before do
allow(message_delivery).to receive(:deliver_later)
allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
+
+ stub_incoming_email_setting(enabled: true, address: 'support+%{key}@example.com')
+ stub_service_desk_email_setting(enabled: true, address: 'contact+%{key}@example.com')
end
shared_examples 'a failing verification process' do |expected_error_identifier|
@@ -86,26 +88,6 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
expect(settings).not_to be_custom_email_enabled
end
- context 'when feature flag :service_desk_custom_email is disabled' do
- let(:expected_error_message) { error_feature_flag_disabled }
-
- before do
- stub_feature_flags(service_desk_custom_email: false)
- end
-
- it 'exits early' do
- expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
-
- expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
- error_message: expected_error_message
- )).once
-
- response = service.execute
-
- expect(response).to be_error
- end
- end
-
context 'when verification exists' do
let!(:verification) { create(:service_desk_custom_email_verification, project: project) }
@@ -139,7 +121,34 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
verification.update!(token: 'ZROT4ZZXA-Y6') # token from email fixture
end
- let(:email_raw) { email_fixture('emails/service_desk_custom_email_address_verification.eml') }
+ let(:service_desk_address) { project.service_desk_incoming_address }
+ let(:verification_address) { 'custom-support-email+verify@example.com' }
+ let(:verification_token) { 'ZROT4ZZXA-Y6' }
+ let(:shared_email_raw) do
+ <<~EMAIL
+ From: Flight Support <custom-support-email@example.com>
+ Subject: Verify custom email address custom-support-email@example.com for Flight
+ Auto-Submitted: no
+
+
+ This email is auto-generated. It verifies the ownership of the entered Service Desk custom email address and
+ correct functionality of email forwarding.
+
+ Verification token: #{verification_token}
+ --
+
+ You're receiving this email because of your account on 127.0.0.1.
+ EMAIL
+ end
+
+ let(:email_raw) do
+ <<~EMAIL
+ Delivered-To: #{service_desk_address}
+ To: #{verification_address}
+ #{shared_email_raw}
+ EMAIL
+ end
+
let(:mail_object) { Mail::Message.new(email_raw) }
it 'verifies and sends result emails' do
@@ -181,6 +190,38 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it_behaves_like 'a failing verification process', 'mail_not_received_within_timeframe'
end
+ context 'and service desk address from service_desk_email was used as forwarding target' do
+ let(:service_desk_address) { project.service_desk_alias_address }
+
+ it_behaves_like 'a failing verification process', 'incorrect_forwarding_target'
+
+ context 'when multiple Delivered-To headers are present' do
+ let(:email_raw) do
+ <<~EMAIL
+ Delivered-To: other@example.com
+ Delivered-To: #{service_desk_address}
+ To: #{verification_address}
+ #{shared_email_raw}
+ EMAIL
+ end
+
+ it_behaves_like 'a failing verification process', 'incorrect_forwarding_target'
+ end
+
+ context 'when multiple To headers are present' do
+ # Microsoft Exchange forwards emails this way when forwarding
+ # to an external email address using a transport rule
+ let(:email_raw) do
+ <<~EMAIL
+ To: #{service_desk_address}, #{verification_address}
+ #{shared_email_raw}
+ EMAIL
+ end
+
+ it_behaves_like 'a failing verification process', 'incorrect_forwarding_target'
+ end
+ end
+
context 'when already verified' do
let(:expected_error_message) { error_already_finished }
diff --git a/spec/services/service_desk/custom_emails/create_service_spec.rb b/spec/services/service_desk/custom_emails/create_service_spec.rb
index e165131bcf9..a015817b380 100644
--- a/spec/services/service_desk/custom_emails/create_service_spec.rb
+++ b/spec/services/service_desk/custom_emails/create_service_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe ServiceDesk::CustomEmails::CreateService, feature_category: :serv
let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project: project, current_user: user, params: params) }
- let(:error_feature_flag_disabled) { 'Feature flag service_desk_custom_email is not enabled' }
let(:error_user_not_authorized) { s_('ServiceDesk|User cannot manage project.') }
let(:error_cannot_create_custom_email) { s_('ServiceDesk|Cannot create custom email') }
let(:error_custom_email_exists) { s_('ServiceDesk|Custom email already exists') }
@@ -52,16 +51,6 @@ RSpec.describe ServiceDesk::CustomEmails::CreateService, feature_category: :serv
end
end
- context 'when feature flag service_desk_custom_email is disabled' do
- let(:expected_error_message) { error_feature_flag_disabled }
-
- before do
- stub_feature_flags(service_desk_custom_email: false)
- end
-
- it_behaves_like 'a service that exits with error'
- end
-
context 'with illegitimate user' do
let(:expected_error_message) { error_user_not_authorized }
diff --git a/spec/services/service_desk/custom_emails/destroy_service_spec.rb b/spec/services/service_desk/custom_emails/destroy_service_spec.rb
index 7f53a941d4e..d77e408c31b 100644
--- a/spec/services/service_desk/custom_emails/destroy_service_spec.rb
+++ b/spec/services/service_desk/custom_emails/destroy_service_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe ServiceDesk::CustomEmails::DestroyService, feature_category: :ser
let(:user) { build_stubbed(:user) }
let(:service) { described_class.new(project: project, current_user: user) }
- let(:error_feature_flag_disabled) { 'Feature flag service_desk_custom_email is not enabled' }
let(:error_user_not_authorized) { s_('ServiceDesk|User cannot manage project.') }
let(:error_does_not_exist) { s_('ServiceDesk|Custom email does not exist') }
let(:expected_error_message) { nil }
@@ -45,16 +44,6 @@ RSpec.describe ServiceDesk::CustomEmails::DestroyService, feature_category: :ser
end
end
- context 'when feature flag service_desk_custom_email is disabled' do
- let(:expected_error_message) { error_feature_flag_disabled }
-
- before do
- stub_feature_flags(service_desk_custom_email: false)
- end
-
- it_behaves_like 'a service that exits with error'
- end
-
context 'with illegitimate user' do
let(:expected_error_message) { error_user_not_authorized }
diff --git a/spec/services/service_desk_settings/update_service_spec.rb b/spec/services/service_desk_settings/update_service_spec.rb
index a9e54012075..2c310bad247 100644
--- a/spec/services/service_desk_settings/update_service_spec.rb
+++ b/spec/services/service_desk_settings/update_service_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe ServiceDeskSettings::UpdateService, :aggregate_failures, feature_category: :service_desk do
@@ -12,7 +13,14 @@ RSpec.describe ServiceDeskSettings::UpdateService, :aggregate_failures, feature_
let_it_be(:user) { create(:user) }
context 'with valid params' do
- let(:params) { { outgoing_name: 'some name', project_key: 'foo', add_external_participants_from_cc: true } }
+ let(:params) do
+ {
+ outgoing_name: 'some name',
+ project_key: 'foo',
+ reopen_issue_on_external_participant_note: true,
+ add_external_participants_from_cc: true
+ }
+ end
it 'updates service desk settings' do
response = described_class.new(settings.project, user, params).execute
@@ -21,6 +29,7 @@ RSpec.describe ServiceDeskSettings::UpdateService, :aggregate_failures, feature_
expect(settings.reset).to have_attributes(
outgoing_name: 'some name',
project_key: 'foo',
+ reopen_issue_on_external_participant_note: true,
add_external_participants_from_cc: true
)
end
diff --git a/spec/services/snippets/update_repository_storage_service_spec.rb b/spec/services/snippets/update_repository_storage_service_spec.rb
index 66847a43335..84e687329cc 100644
--- a/spec/services/snippets/update_repository_storage_service_spec.rb
+++ b/spec/services/snippets/update_repository_storage_service_spec.rb
@@ -43,6 +43,8 @@ RSpec.describe Snippets::UpdateRepositoryStorageService, feature_category: :sour
expect(snippet).not_to be_repository_read_only
expect(snippet.repository_storage).to eq(destination)
expect(snippet.snippet_repository.shard_name).to eq(destination)
+ expect(repository_storage_move.reload).to be_finished
+ expect(repository_storage_move.error_message).to be_nil
end
end
@@ -66,7 +68,7 @@ RSpec.describe Snippets::UpdateRepositoryStorageService, feature_category: :sour
it 'unmarks the repository as read-only without updating the repository storage' do
expect(snippet_repository_double).to receive(:replicate)
.with(snippet.repository.raw)
- .and_raise(Gitlab::Git::CommandError)
+ .and_raise(Gitlab::Git::CommandError, 'Boom')
expect(snippet_repository_double).to receive(:remove)
expect do
@@ -76,6 +78,7 @@ RSpec.describe Snippets::UpdateRepositoryStorageService, feature_category: :sour
expect(snippet).not_to be_repository_read_only
expect(snippet.repository_storage).to eq('default')
expect(repository_storage_move).to be_failed
+ expect(repository_storage_move.error_message).to eq('Boom')
end
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index ca6feb6fde2..0ba20ee5be1 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -387,7 +387,7 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
describe 'note_body' do
context 'cross-project' do
let(:project2) { create(:project, :repository) }
- let(:mentioned_in) { create(:issue, project: project2) }
+ let(:mentioned_in) { create(:issue, :task, project: project2) }
context 'from Commit' do
let(:mentioned_in) { project2.repository.commit }
@@ -399,7 +399,7 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
context 'from non-Commit' do
it 'references the mentioning object' do
- expect(subject.note).to eq "mentioned in issue #{mentioned_in.to_reference(project)}"
+ expect(subject.note).to eq "mentioned in task #{mentioned_in.to_reference(project)}"
end
end
end
diff --git a/spec/services/users/in_product_marketing_email_records_spec.rb b/spec/services/users/in_product_marketing_email_records_spec.rb
deleted file mode 100644
index d214560b2a6..00000000000
--- a/spec/services/users/in_product_marketing_email_records_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Users::InProductMarketingEmailRecords, feature_category: :onboarding do
- let_it_be(:user) { create :user }
-
- subject(:records) { described_class.new }
-
- it 'initializes records' do
- expect(subject.records).to match_array []
- end
-
- describe '#save!' do
- before do
- allow(Users::InProductMarketingEmail).to receive(:bulk_insert!)
-
- records.add(user, track: :team_short, series: 0)
- records.add(user, track: :create, series: 1)
- end
-
- it 'bulk inserts added records' do
- expect(Users::InProductMarketingEmail).to receive(:bulk_insert!).with(records.records)
- records.save!
- end
-
- it 'resets its records' do
- records.save!
- expect(records.records).to match_array []
- end
- end
-
- describe '#add' do
- it 'adds a Users::InProductMarketingEmail record to its records', :aggregate_failures do
- freeze_time do
- records.add(user, track: :team_short, series: 0)
- records.add(user, track: :create, series: 1)
-
- first, second = records.records
-
- expect(first).to be_a Users::InProductMarketingEmail
- expect(first.track.to_sym).to eq :team_short
- expect(first.series).to eq 0
- expect(first.created_at).to eq Time.zone.now
- expect(first.updated_at).to eq Time.zone.now
-
- expect(second).to be_a Users::InProductMarketingEmail
- expect(second.track.to_sym).to eq :create
- expect(second.series).to eq 1
- expect(second.created_at).to eq Time.zone.now
- expect(second.updated_at).to eq Time.zone.now
- end
- end
- end
-end
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
index d6fb7a2954d..57378c07dd7 100644
--- a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -143,6 +143,13 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
let(:created_record) { create(:release, author: user) }
end
end
+
+ context 'for user achievements' do
+ include_examples 'migrating records to the ghost user', Achievements::UserAchievement,
+ [:awarded_by_user, :revoked_by_user] do
+ let(:created_record) { create(:user_achievement, awarded_by_user: user, revoked_by_user: user) }
+ end
+ end
end
context 'on post-migrate cleanups' do
@@ -358,6 +365,16 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
expect(Issue).not_to exist(issue.id)
end
+
+ it 'migrates awarded and revoked fields of user achievements' do
+ user_achievement = create(:user_achievement, awarded_by_user: user, revoked_by_user: user)
+
+ service.execute(hard_delete: true)
+ user_achievement.reload
+
+ expect(user_achievement.revoked_by_user).to eq(Users::Internal.ghost)
+ expect(user_achievement.awarded_by_user).to eq(Users::Internal.ghost)
+ end
end
end
end
diff --git a/spec/services/work_items/delete_task_service_spec.rb b/spec/services/work_items/delete_task_service_spec.rb
deleted file mode 100644
index dc01da65771..00000000000
--- a/spec/services/work_items/delete_task_service_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe WorkItems::DeleteTaskService, feature_category: :team_planning do
- let_it_be(:project) { create(:project) }
- let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
- let_it_be_with_refind(:task) { create(:work_item, project: project, author: developer) }
- let_it_be_with_refind(:list_work_item) do
- create(:work_item, project: project, description: "- [ ] #{task.to_reference}+")
- end
-
- let(:current_user) { developer }
- let(:line_number_start) { 1 }
- let(:params) do
- {
- line_number_start: line_number_start,
- line_number_end: 1,
- task: task
- }
- end
-
- before_all do
- create(:issue_link, source_id: list_work_item.id, target_id: task.id)
- end
-
- shared_examples 'failing WorkItems::DeleteTaskService' do |error_message|
- it { is_expected.to be_error }
-
- it 'does not remove work item or issue links' do
- expect do
- service_result
- list_work_item.reload
- end.to not_change(WorkItem, :count).and(
- not_change(IssueLink, :count)
- ).and(
- not_change(list_work_item, :description)
- )
- end
-
- it 'returns an error message' do
- expect(service_result.errors).to contain_exactly(error_message)
- end
- end
-
- describe '#execute' do
- subject(:service_result) do
- described_class.new(
- work_item: list_work_item,
- current_user: current_user,
- lock_version: list_work_item.lock_version,
- task_params: params
- ).execute
- end
-
- context 'when work item params are valid' do
- it { is_expected.to be_success }
-
- it 'deletes the work item and the related issue link' do
- expect do
- service_result
- end.to change(WorkItem, :count).by(-1).and(
- change(IssueLink, :count).by(-1)
- )
- end
-
- it 'removes the task list item with the work item reference' do
- expect do
- service_result
- end.to change(list_work_item, :description).from(list_work_item.description).to("- [ ] #{task.title}")
- end
- end
-
- context 'when first operation fails' do
- let(:line_number_start) { -1 }
-
- it_behaves_like 'failing WorkItems::DeleteTaskService', 'line_number_start must be greater than 0'
- end
-
- context 'when last operation fails' do
- let_it_be(:non_member_user) { create(:user) }
-
- let(:current_user) { non_member_user }
-
- it_behaves_like 'failing WorkItems::DeleteTaskService', 'User not authorized to delete work item'
- end
- end
-end
diff --git a/spec/services/work_items/task_list_reference_removal_service_spec.rb b/spec/services/work_items/task_list_reference_removal_service_spec.rb
deleted file mode 100644
index 0d34aaa3c1c..00000000000
--- a/spec/services/work_items/task_list_reference_removal_service_spec.rb
+++ /dev/null
@@ -1,152 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe WorkItems::TaskListReferenceRemovalService, feature_category: :team_planning do
- let_it_be(:developer) { create(:user) }
- let_it_be(:project) { create(:project, :repository).tap { |project| project.add_developer(developer) } }
- let_it_be(:task) { create(:work_item, project: project, title: 'Task title') }
- let_it_be(:single_line_work_item, refind: true) do
- create(:work_item, project: project, description: "- [ ] #{task.to_reference}+ single line")
- end
-
- let_it_be(:multiple_line_work_item, refind: true) do
- create(
- :work_item,
- project: project,
- description: <<~MARKDOWN
- Any text
-
- * [ ] Item to be converted
- #{task.to_reference}+ second line
- third line
- * [x] task
-
- More text
- MARKDOWN
- )
- end
-
- let(:line_number_start) { 3 }
- let(:line_number_end) { 5 }
- let(:work_item) { multiple_line_work_item }
- let(:lock_version) { work_item.lock_version }
-
- shared_examples 'successful work item task reference removal service' do |expected_description|
- it { is_expected.to be_success }
-
- it 'removes the task list item containing the task reference' do
- expect do
- result
- end.to change(work_item, :description).from(work_item.description).to(expected_description)
- end
-
- it 'creates system notes' do
- expect do
- result
- end.to change(Note, :count).by(1)
-
- expect(Note.last.note).to include('changed the description')
- end
- end
-
- shared_examples 'failing work item task reference removal service' do |error_message|
- it { is_expected.to be_error }
-
- it 'does not change the work item description' do
- expect do
- result
- work_item.reload
- end.to not_change(work_item, :description)
- end
-
- it 'returns an error message' do
- expect(result.errors).to contain_exactly(error_message)
- end
- end
-
- describe '#execute' do
- subject(:result) do
- described_class.new(
- work_item: work_item,
- task: task,
- line_number_start: line_number_start,
- line_number_end: line_number_end,
- lock_version: lock_version,
- current_user: developer
- ).execute
- end
-
- context 'when task mardown spans a single line' do
- let(:line_number_start) { 1 }
- let(:line_number_end) { 1 }
- let(:work_item) { single_line_work_item }
-
- it_behaves_like 'successful work item task reference removal service', '- [ ] Task title single line'
-
- context 'when description does not contain a task' do
- let_it_be(:no_matching_work_item) { create(:work_item, project: project, description: 'no matching task') }
-
- let(:work_item) { no_matching_work_item }
-
- it_behaves_like 'failing work item task reference removal service', 'Unable to detect a task on lines 1-1'
- end
-
- context 'when description reference does not exactly match the task reference' do
- before do
- work_item.update!(description: work_item.description.gsub(task.to_reference, "#{task.to_reference}200"))
- end
-
- it_behaves_like 'failing work item task reference removal service', 'Unable to detect a task on lines 1-1'
- end
- end
-
- context 'when task mardown spans multiple lines' do
- it_behaves_like 'successful work item task reference removal service',
- "Any text\n\n* [ ] Item to be converted\n Task title second line\n third line\n* [x] task\n\nMore text"
- end
-
- context 'when updating the work item fails' do
- before do
- work_item.title = nil
- end
-
- it_behaves_like 'failing work item task reference removal service', "Title can't be blank"
- end
-
- context 'when description is empty' do
- let_it_be(:empty_work_item) { create(:work_item, project: project, description: '') }
-
- let(:work_item) { empty_work_item }
-
- it_behaves_like 'failing work item task reference removal service', "Work item description can't be blank"
- end
-
- context 'when line_number_start is lower than 1' do
- let(:line_number_start) { 0 }
-
- it_behaves_like 'failing work item task reference removal service', 'line_number_start must be greater than 0'
- end
-
- context 'when line_number_end is lower than line_number_start' do
- let(:line_number_end) { line_number_start - 1 }
-
- it_behaves_like 'failing work item task reference removal service',
- 'line_number_end must be greater or equal to line_number_start'
- end
-
- context 'when lock_version is older than current' do
- let(:lock_version) { work_item.lock_version - 1 }
-
- it_behaves_like 'failing work item task reference removal service', 'Stale work item. Check lock version'
- end
-
- context 'when work item is stale before updating' do
- it_behaves_like 'failing work item task reference removal service', 'Stale work item. Check lock version' do
- before do
- ::WorkItem.where(id: work_item.id).update_all(lock_version: lock_version + 1)
- end
- end
- end
- end
-end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 2dd4e92eee9..7317b512ae4 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -38,7 +38,7 @@ require 'test_prof/factory_prof/nate_heckler'
require 'parslet/rig/rspec'
require 'axe-rspec'
-require 'rspec_flaky'
+require 'gitlab/rspec_flaky'
rspec_profiling_is_configured =
ENV['RSPEC_PROFILING_POSTGRES_URL'].present? ||
@@ -139,11 +139,6 @@ RSpec.configure do |config|
metadata[:migration] = true if metadata[:level] == :migration || metadata[:level] == :background_migration
end
- # Do not overwrite schema if it's already set
- unless metadata.key?(:schema)
- metadata[:schema] = :latest if metadata[:level] == :background_migration
- end
-
# Admin controller specs get auto admin mode enabled since they are
# protected by the 'EnforcesAdminAuthentication' concern
metadata[:enable_admin_mode] = true if %r{(ee)?/spec/controllers/admin/}.match?(location)
@@ -212,7 +207,6 @@ RSpec.configure do |config|
config.include_context 'when rendered has no HTML escapes', type: :view
include StubFeatureFlags
- include StubSaasFeatures
include StubSnowplow
include StubMember
@@ -230,9 +224,9 @@ RSpec.configure do |config|
config.exceptions_to_hard_fail = [DeprecationToolkitEnv::DeprecationBehaviors::SelectiveRaise::RaiseDisallowedDeprecation]
end
- if RspecFlaky::Config.generate_report?
+ if Gitlab::RspecFlaky::Config.generate_report?
config.reporter.register_listener(
- RspecFlaky::Listener.new,
+ Gitlab::RspecFlaky::Listener.new,
:example_passed,
:dump_summary)
end
@@ -327,6 +321,7 @@ RSpec.configure do |config|
# Postgres is the primary data source, and ClickHouse only when enabled in certain cases.
stub_feature_flags(clickhouse_data_collection: false)
+ # This is going to be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/431041
stub_feature_flags(vite: false)
else
unstub_all_feature_flags
diff --git a/spec/support/database/click_house/hooks.rb b/spec/support/database/click_house/hooks.rb
index 77b33b7aaa3..8c8afdbc689 100644
--- a/spec/support/database/click_house/hooks.rb
+++ b/spec/support/database/click_house/hooks.rb
@@ -26,19 +26,26 @@ class ClickHouseTestRunner
clear_db
# run the schema SQL files
- migrations_paths = ClickHouse::MigrationSupport::Migrator.migrations_paths
- schema_migration = ClickHouse::MigrationSupport::SchemaMigration
- migration_context = ClickHouse::MigrationSupport::MigrationContext.new(migrations_paths, schema_migration)
- migrate(nil, migration_context)
+ migrations_paths = ClickHouse::MigrationSupport::Migrator.migrations_paths(:main)
+ connection = ::ClickHouse::Connection.new(:main)
+ schema_migration = ClickHouse::MigrationSupport::SchemaMigration.new(connection)
+ schema_migration.ensure_table
+ migration_context = ClickHouse::MigrationSupport::MigrationContext.new(connection,
+ migrations_paths, schema_migration)
+ migrate(migration_context, nil)
@ensure_schema = true
end
+ def reset_schema_cache!
+ @ensure_schema = nil
+ end
+
private
def tables_for(db)
@tables ||= {}
- @tables[db] ||= lookup_tables(db) - [ClickHouse::MigrationSupport::SchemaMigration.table_name]
+ @tables[db] ||= lookup_tables(db) - %w[schema_migrations]
end
end
# rubocop: enable Gitlab/NamespacedClass
@@ -50,6 +57,7 @@ RSpec.configure do |config|
with_net_connect_allowed do
if example.example.metadata[:click_house] == :without_migrations
click_house_test_runner.clear_db
+ click_house_test_runner.reset_schema_cache!
else
click_house_test_runner.ensure_schema
click_house_test_runner.truncate_tables
diff --git a/spec/support/helpers/after_next_helpers.rb b/spec/support/helpers/after_next_helpers.rb
index 0a7844fdd8f..35e62b89aaf 100644
--- a/spec/support/helpers/after_next_helpers.rb
+++ b/spec/support/helpers/after_next_helpers.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require_relative './next_instance_of'
+require 'gitlab/rspec/next_instance_of'
module AfterNextHelpers
class DeferredExpectation
diff --git a/spec/support/helpers/click_house_test_helpers.rb b/spec/support/helpers/click_house_test_helpers.rb
index 24f81a3ec01..5befeabd82b 100644
--- a/spec/support/helpers/click_house_test_helpers.rb
+++ b/spec/support/helpers/click_house_test_helpers.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
module ClickHouseTestHelpers
- def migrate(target_version, migration_context)
- quietly { migration_context.up(target_version) }
+ def migrate(migration_context, target_version, step = nil)
+ quietly { migration_context.up(target_version, step) }
end
- def rollback(target_version, migration_context)
- quietly { migration_context.down(target_version) }
+ def rollback(migration_context, target_version, step = 1)
+ quietly { migration_context.down(target_version, step) }
end
def table_names(database = :main, configuration = ClickHouse::Client.configuration)
@@ -36,12 +36,13 @@ module ClickHouseTestHelpers
def clear_db(configuration = ClickHouse::Client.configuration)
configuration.databases.each_key do |db|
+ connection = ::ClickHouse::Connection.new(db, configuration)
# drop all tables
lookup_tables(db, configuration).each do |table|
- ClickHouse::Client.execute("DROP TABLE IF EXISTS #{table}", db, configuration)
+ connection.execute("DROP TABLE IF EXISTS #{table}")
end
- ClickHouse::MigrationSupport::SchemaMigration.create_table(db, configuration)
+ ClickHouse::MigrationSupport::SchemaMigration.new(connection).ensure_table
end
end
@@ -71,7 +72,7 @@ module ClickHouseTestHelpers
ClickHouse::Migration.verbose = was_verbose
end
- def clear_consts(fixtures_path)
+ def unload_click_house_migration_classes(fixtures_path)
$LOADED_FEATURES.select { |file| file.include? fixtures_path }.each do |file|
const = File.basename(file)
.scan(ClickHouse::Migration::MIGRATION_FILENAME_REGEXP)[0][1]
diff --git a/spec/support/helpers/database/duplicate_indexes.yml b/spec/support/helpers/database/duplicate_indexes.yml
index 1ebc45a9d81..ab9935a7a98 100644
--- a/spec/support/helpers/database/duplicate_indexes.yml
+++ b/spec/support/helpers/database/duplicate_indexes.yml
@@ -27,12 +27,6 @@ boards_epic_board_recent_visits:
boards_epic_user_preferences:
index_boards_epic_user_preferences_on_board_user_epic_unique:
- index_boards_epic_user_preferences_on_board_id
-bulk_import_batch_trackers:
- i_bulk_import_trackers_id_batch_number:
- - index_bulk_import_batch_trackers_on_tracker_id
-bulk_import_export_batches:
- i_bulk_import_export_batches_id_batch_number:
- - index_bulk_import_export_batches_on_export_id
ci_job_artifacts:
index_ci_job_artifacts_on_id_project_id_and_created_at:
- index_ci_job_artifacts_on_project_id
@@ -44,14 +38,10 @@ ci_pipeline_artifacts:
index_ci_pipeline_artifacts_on_pipeline_id_and_file_type:
- index_ci_pipeline_artifacts_on_pipeline_id
ci_stages:
- index_ci_stages_on_pipeline_id_and_name:
- - index_ci_stages_on_pipeline_id
index_ci_stages_on_pipeline_id_and_position:
- index_ci_stages_on_pipeline_id
- index_ci_stages_on_pipeline_id_convert_to_bigint_and_name:
- - index_ci_stages_on_pipeline_id_convert_to_bigint
- index_ci_stages_on_pipeline_id_convert_to_bigint_and_position:
- - index_ci_stages_on_pipeline_id_convert_to_bigint
+ index_ci_stages_on_pipeline_id_name_partition_id_unique:
+ - index_ci_stages_on_pipeline_id
dast_site_tokens:
index_dast_site_token_on_project_id_and_url:
- index_dast_site_tokens_on_project_id
@@ -91,25 +81,12 @@ issue_links:
issues:
index_issues_on_author_id_and_id_and_created_at:
- index_issues_on_author_id
-jira_connect_subscriptions:
- idx_jira_connect_subscriptions_on_installation_id_namespace_id:
- - idx_jira_connect_subscriptions_on_installation_id
list_user_preferences:
index_list_user_preferences_on_user_id_and_list_id:
- index_list_user_preferences_on_user_id
member_tasks:
index_member_tasks_on_member_id_and_project_id:
- index_member_tasks_on_member_id
-members:
- index_members_on_member_namespace_id_compound:
- - index_members_on_member_namespace_id
-merge_requests:
- index_merge_requests_on_author_id_and_created_at:
- - index_merge_requests_on_author_id
- index_merge_requests_on_author_id_and_id:
- - index_merge_requests_on_author_id
- index_merge_requests_on_author_id_and_target_project_id:
- - index_merge_requests_on_author_id
ml_candidate_params:
index_ml_candidate_params_on_candidate_id_on_name:
- index_ml_candidate_params_on_candidate_id
@@ -158,15 +135,9 @@ pm_package_versions:
project_compliance_standards_adherence:
u_project_compliance_standards_adherence_for_reporting:
- index_project_compliance_standards_adherence_on_project_id
-project_relation_exports:
- index_project_export_job_relation:
- - index_project_relation_exports_on_project_export_job_id
project_repositories:
index_project_repositories_on_shard_id_and_project_id:
- index_project_repositories_on_shard_id
-project_topics:
- index_project_topics_on_project_id_and_topic_id:
- - index_project_topics_on_project_id
protected_environments:
index_protected_environments_on_project_id_and_name:
- index_protected_environments_on_project_id
@@ -185,18 +156,8 @@ sbom_component_versions:
sbom_occurrences:
index_sbom_occurrences_for_input_file_path_search:
- index_sbom_occurrences_on_project_id_component_id
- - index_sbom_occurrences_on_project_id
- idx_sbom_occurrences_on_project_id_and_source_id:
- - index_sbom_occurrences_on_project_id
- index_sbom_occurrences_on_project_id_and_id:
- - index_sbom_occurrences_on_project_id
- index_sbom_occurrences_on_project_id_component_id:
- - index_sbom_occurrences_on_project_id
index_sbom_occurrences_on_project_id_and_component_id_and_id:
- index_sbom_occurrences_on_project_id_component_id
- - index_sbom_occurrences_on_project_id
- index_sbom_occurrences_on_project_id_and_package_manager:
- - index_sbom_occurrences_on_project_id
search_namespace_index_assignments:
index_search_namespace_index_assignments_uniqueness_index_type:
- index_search_namespace_index_assignments_on_namespace_id
@@ -217,9 +178,6 @@ todos:
user_callouts:
index_user_callouts_on_user_id_and_feature_name:
- index_user_callouts_on_user_id
-users:
- index_users_on_state_and_user_type:
- - index_users_on_state
vulnerabilities:
index_vulnerabilities_project_id_state_severity_default_branch:
- index_vulnerabilities_on_project_id_and_state_and_severity
@@ -235,12 +193,6 @@ vulnerability_finding_signatures:
vulnerability_flags:
index_vulnerability_flags_on_unique_columns:
- index_vulnerability_flags_on_vulnerability_occurrence_id
-web_hook_logs:
- index_web_hook_logs_on_web_hook_id_and_created_at:
- - index_web_hook_logs_part_on_web_hook_id
-web_hooks:
- index_web_hooks_on_project_id_recent_failures:
- - index_web_hooks_on_project_id
work_item_hierarchy_restrictions:
index_work_item_hierarchy_restrictions_on_parent_and_child:
- index_work_item_hierarchy_restrictions_on_parent_type_id
diff --git a/spec/support/helpers/design_management_test_helpers.rb b/spec/support/helpers/design_management_test_helpers.rb
index be723a47521..1ce2dedf2e1 100644
--- a/spec/support/helpers/design_management_test_helpers.rb
+++ b/spec/support/helpers/design_management_test_helpers.rb
@@ -6,15 +6,15 @@ module DesignManagementTestHelpers
end
def delete_designs(*designs)
- act_on_designs(designs) { ::DesignManagement::Action.deletion }
+ act_on_designs(designs) { DesignManagement::Action.deletion }
end
def restore_designs(*designs)
- act_on_designs(designs) { ::DesignManagement::Action.creation }
+ act_on_designs(designs) { DesignManagement::Action.creation }
end
def modify_designs(*designs)
- act_on_designs(designs) { ::DesignManagement::Action.modification }
+ act_on_designs(designs) { DesignManagement::Action.modification }
end
def path_for_design(design)
diff --git a/spec/support/helpers/email_helpers.rb b/spec/support/helpers/email_helpers.rb
index 9dffe035b2a..3583889d305 100644
--- a/spec/support/helpers/email_helpers.rb
+++ b/spec/support/helpers/email_helpers.rb
@@ -64,7 +64,11 @@ module EmailHelpers
def not_enqueue_mail_with(mailer_class, mail_method_name, *args)
args.map! { |arg| arg.is_a?(ActiveRecord::Base) ? arg.id : arg }
- not_enqueue_mail(mailer_class, mail_method_name).with(*args)
+
+ matcher = have_enqueued_mail(mailer_class, mail_method_name).with(*args)
+ description = proc { 'email has not been enqueued' }
+
+ RSpec::Matchers::AliasedNegatedMatcher.new(matcher, description)
end
def have_only_enqueued_mail_with_args(mailer_class, mailer_method, *args)
diff --git a/spec/support/helpers/features/invite_members_modal_helpers.rb b/spec/support/helpers/features/invite_members_modal_helpers.rb
index deb75cffe0d..c40e060bc8e 100644
--- a/spec/support/helpers/features/invite_members_modal_helpers.rb
+++ b/spec/support/helpers/features/invite_members_modal_helpers.rb
@@ -2,6 +2,8 @@
module Features
module InviteMembersModalHelpers
+ include ListboxHelpers
+
def invite_member(names, role: 'Guest', expires_at: nil)
click_on 'Invite members'
@@ -63,7 +65,10 @@ module Features
end
def choose_options(role, expires_at)
- select role, from: 'Select a role'
+ page.within role_dropdown_selector do
+ toggle_listbox
+ select_listbox_item(role, exact_text: true)
+ end
fill_in 'YYYY-MM-DD', with: expires_at.strftime('%Y-%m-%d') if expires_at
end
@@ -72,6 +77,10 @@ module Features
click_link "Groups"
end
+ def role_dropdown_selector
+ '[data-testid="access-level-dropdown"]'
+ end
+
def group_dropdown_selector
'[data-testid="group-select-dropdown"]'
end
diff --git a/spec/support/helpers/features/runners_helpers.rb b/spec/support/helpers/features/runners_helpers.rb
index 7c3618ee799..dac92a9f18e 100644
--- a/spec/support/helpers/features/runners_helpers.rb
+++ b/spec/support/helpers/features/runners_helpers.rb
@@ -29,8 +29,6 @@ module Features
click_on 'Search'
end
-
- wait_for_requests
end
def open_filtered_search_suggestions(filter)
@@ -39,8 +37,6 @@ module Features
page.within(search_bar_selector) do
click_on filter
end
-
- wait_for_requests
end
def input_filtered_search_filter_is_only(filter, value)
@@ -56,8 +52,6 @@ module Features
click_on 'Search'
end
-
- wait_for_requests
end
end
end
diff --git a/spec/support/helpers/features/sorting_helpers.rb b/spec/support/helpers/features/sorting_helpers.rb
index 8dda16af625..0ea7c5432fb 100644
--- a/spec/support/helpers/features/sorting_helpers.rb
+++ b/spec/support/helpers/features/sorting_helpers.rb
@@ -22,9 +22,11 @@ module Features
# pajamas_sort_by is used to sort new pajamas dropdowns. When
# all of the dropdowns are converted, pajamas_sort_by can be renamed to sort_by
# https://gitlab.com/groups/gitlab-org/-/epics/7551
- def pajamas_sort_by(value)
- find('.filter-dropdown-container .gl-new-dropdown').click
- find('.gl-new-dropdown-item', text: value).click
+ def pajamas_sort_by(value, from: nil)
+ raise ArgumentError, 'The :from option must be given' if from.nil?
+
+ click_button from
+ find('[role="option"]', text: value).click
end
end
end
diff --git a/spec/support/helpers/features/top_nav_spec_helpers.rb b/spec/support/helpers/features/top_nav_spec_helpers.rb
deleted file mode 100644
index ecc05189fb4..00000000000
--- a/spec/support/helpers/features/top_nav_spec_helpers.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-# These helpers help you interact within the Source Editor (single-file editor, snippets, etc.).
-#
-module Features
- module TopNavSpecHelpers
- def open_top_nav
- find('.js-top-nav-dropdown-toggle').click
- end
-
- def within_top_nav
- within('.js-top-nav-dropdown-menu') do
- yield
- end
- end
-
- def open_top_nav_projects
- open_top_nav
-
- within_top_nav do
- click_button('Projects')
- end
- end
-
- def open_top_nav_groups
- open_top_nav
-
- within_top_nav do
- click_button('Groups')
- end
- end
- end
-end
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index d35fa801638..913316c8622 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -3,6 +3,7 @@
require_relative 'devise_helpers'
module LoginHelpers
+ include AdminModeHelper
include DeviseHelpers
# Overriding Devise::Test::IntegrationHelpers#sign_in to store @current_user
@@ -48,12 +49,16 @@ module LoginHelpers
@current_user = user
end
- def gitlab_enable_admin_mode_sign_in(user)
- visit new_admin_session_path
- fill_in 'user_password', with: user.password
- click_button 'Enter admin mode'
+ def gitlab_enable_admin_mode_sign_in(user, use_mock_admin_mode: true)
+ if use_mock_admin_mode
+ enable_admin_mode!(user)
+ else
+ visit new_admin_session_path
+ fill_in 'user_password', with: user.password
+ click_button 'Enter admin mode'
- wait_for_requests
+ wait_for_requests
+ end
end
def gitlab_sign_in_via(provider, user, uid, saml_response = nil)
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index dcf61d57af7..9509040238b 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -120,11 +120,18 @@ module MigrationsHelpers
end
end
+ def finalized_by_version
+ ::Gitlab::Database::BackgroundMigration::BatchedBackgroundMigrationDictionary
+ .entry(described_class.to_s.demodulize)&.finalized_by
+ end
+
def migration_schema_version
metadata_schema = self.class.metadata[:schema]
if metadata_schema == :latest
migrations.last.version
+ elsif self.class.metadata[:level] == :background_migration
+ metadata_schema || finalized_by_version || migrations.last.version
else
metadata_schema || previous_migration.version
end
diff --git a/spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb b/spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb
index 81c2d2cb225..7bcb8e5fcac 100644
--- a/spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb
+++ b/spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb
@@ -25,7 +25,7 @@ module PartitioningTesting
end
end
-Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass|
+Ci::Partitionable::Testing.partitionable_models.each do |klass|
next if klass == 'Ci::Pipeline'
model = klass.safe_constantize
diff --git a/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb b/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
index a47aaffdb43..a6c0ad143c5 100644
--- a/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
+++ b/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
@@ -18,7 +18,6 @@ module Ci
each_partitionable_table do |table_name|
create_test_partition("p_#{table_name}", connection: connection)
end
- ensure_builds_id_uniquness(connection: connection)
end
def teardown(connection: Ci::ApplicationRecord.connection)
@@ -28,7 +27,7 @@ module Ci
end
def each_partitionable_table
- ::Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass|
+ ::Ci::Partitionable::Testing.partitionable_models.each do |klass|
model = klass.safe_constantize
table_name = model.table_name.delete_prefix('p_')
@@ -60,16 +59,6 @@ module Ci
SQL
end
- # This can be removed after https://gitlab.com/gitlab-org/gitlab/-/issues/421173
- # is implemented
- def ensure_builds_id_uniquness(connection:)
- connection.execute(<<~SQL.squish)
- CREATE TRIGGER assign_p_ci_builds_id_trigger
- BEFORE INSERT ON #{full_partition_name('ci_builds')}
- FOR EACH ROW EXECUTE FUNCTION assign_p_ci_builds_id_value();
- SQL
- end
-
def table_available?(table_name, connection:)
connection.table_exists?(table_name) &&
connection.column_exists?(table_name, :partition_id)
diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb
index 131c7597827..5519a6910a2 100644
--- a/spec/support/helpers/navbar_structure_helper.rb
+++ b/spec/support/helpers/navbar_structure_helper.rb
@@ -127,7 +127,7 @@ module NavbarStructureHelper
def project_analytics_sub_nav_item
[
_('Value stream analytics'),
- _('Contributor statistics'),
+ _('Contributor analytics'),
_('CI/CD analytics'),
_('Repository analytics'),
(_('Code review analytics') if Gitlab.ee?),
diff --git a/spec/support/helpers/next_found_instance_of.rb b/spec/support/helpers/next_found_instance_of.rb
deleted file mode 100644
index f53798c1856..00000000000
--- a/spec/support/helpers/next_found_instance_of.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-module NextFoundInstanceOf
- ERROR_MESSAGE = 'NextFoundInstanceOf mock helpers can only be used with ActiveRecord targets'
- HELPER_METHOD_PATTERN = /(?:allow|expect)_next_found_(?<number>\d+)_instances_of/
-
- def method_missing(method_name, ...)
- return super unless match_data = method_name.match(HELPER_METHOD_PATTERN)
-
- helper_method = method_name.to_s.sub("_#{match_data[:number]}", '')
-
- public_send(helper_method, *args, match_data[:number].to_i, &block)
- end
-
- def expect_next_found_instance_of(klass, &block)
- expect_next_found_instances_of(klass, nil, &block)
- end
-
- def expect_next_found_instances_of(klass, number)
- check_if_active_record!(klass)
-
- stub_allocate(expect(klass), klass, number) do |expectation|
- yield(expectation)
- end
- end
-
- def allow_next_found_instance_of(klass, &block)
- allow_next_found_instances_of(klass, nil, &block)
- end
-
- def allow_next_found_instances_of(klass, number)
- check_if_active_record!(klass)
-
- stub_allocate(allow(klass), klass, number) do |allowance|
- yield(allowance)
- end
- end
-
- private
-
- def check_if_active_record!(klass)
- raise ArgumentError, ERROR_MESSAGE unless klass < ActiveRecord::Base
- end
-
- def stub_allocate(target, klass, number)
- stub = receive(:allocate)
- stub.exactly(number).times if number
-
- target.to stub.and_wrap_original do |method|
- method.call.tap do |allocation|
- # ActiveRecord::Core.allocate returns a frozen object:
- # https://github.com/rails/rails/blob/291a3d2ef29a3842d1156ada7526f4ee60dd2b59/activerecord/lib/active_record/core.rb#L620
- # It's unexpected behavior and probably a bug in Rails
- # Let's work it around by setting the attributes to default to unfreeze the object for now
- allocation.instance_variable_set(:@attributes, klass._default_attributes)
-
- yield(allocation)
- end
- end
- end
-end
diff --git a/spec/support/helpers/next_instance_of.rb b/spec/support/helpers/next_instance_of.rb
deleted file mode 100644
index 5cc63fe5c6e..00000000000
--- a/spec/support/helpers/next_instance_of.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-module NextInstanceOf
- def expect_next_instance_of(klass, *new_args, &blk)
- stub_new(expect(klass), nil, false, *new_args, &blk)
- end
-
- def expect_next_instances_of(klass, number, ordered = false, *new_args, &blk)
- stub_new(expect(klass), number, ordered, *new_args, &blk)
- end
-
- def allow_next_instance_of(klass, *new_args, &blk)
- stub_new(allow(klass), nil, false, *new_args, &blk)
- end
-
- def allow_next_instances_of(klass, number, ordered = false, *new_args, &blk)
- stub_new(allow(klass), number, ordered, *new_args, &blk)
- end
-
- private
-
- def stub_new(target, number, ordered = false, *new_args, &blk)
- receive_new = receive(:new)
- receive_new.ordered if ordered
- receive_new.with(*new_args) if new_args.present?
-
- if number.is_a?(Range)
- receive_new.at_least(number.begin).times if number.begin
- receive_new.at_most(number.end).times if number.end
- elsif number
- receive_new.exactly(number).times
- end
-
- target.to receive_new.and_wrap_original do |*original_args, **original_kwargs|
- method, *original_args = original_args
- method.call(*original_args, **original_kwargs).tap(&blk)
- end
- end
-end
diff --git a/spec/support/helpers/search_helpers.rb b/spec/support/helpers/search_helpers.rb
index dd5ce63876e..a2d111bdf5d 100644
--- a/spec/support/helpers/search_helpers.rb
+++ b/spec/support/helpers/search_helpers.rb
@@ -15,12 +15,9 @@ module SearchHelpers
if page.has_css?('.search-page-form')
search_form = '.search-page-form'
# Open search modal from super sidebar
- elsif has_testid?('super-sidebar-search-button')
+ else
find_by_testid('super-sidebar-search-button').click
search_form = '#super-sidebar-search-modal'
- # Open legacy search dropdown in navigation
- else
- search_form = '.header-search-form'
end
page.within(search_form) do
@@ -38,6 +35,15 @@ module SearchHelpers
end
end
+ def submit_dashboard_search(query)
+ visit(search_path) unless page.has_css?('#dashboard_search')
+
+ search_form = page.find('input[name="search"]', match: :first)
+
+ search_form.fill_in(with: query)
+ search_form.send_keys(:enter)
+ end
+
def select_search_scope(scope)
within_testid('search-filter') do
click_link scope
diff --git a/spec/support/helpers/sign_up_helpers.rb b/spec/support/helpers/sign_up_helpers.rb
index 6259467232c..f1449ed643e 100644
--- a/spec/support/helpers/sign_up_helpers.rb
+++ b/spec/support/helpers/sign_up_helpers.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
module SignUpHelpers
- def fill_in_sign_up_form(new_user, submit_button_text = 'Register')
+ def fill_in_sign_up_form(new_user, submit_button_text = 'Register', invite: false)
fill_in 'new_user_first_name', with: new_user.first_name
fill_in 'new_user_last_name', with: new_user.last_name
fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
+ fill_in 'new_user_email', with: new_user.email unless invite
fill_in 'new_user_password', with: new_user.password
wait_for_all_requests
@@ -19,6 +19,12 @@ module SignUpHelpers
click_button submit_button_text
end
+ def confirm_email(new_user)
+ new_user_token = User.find_by_email(new_user.email).confirmation_token
+
+ visit user_confirmation_path(confirmation_token: new_user_token)
+ end
+
private
def expect_username_to_be_validated
diff --git a/spec/support/helpers/snippet_helpers.rb b/spec/support/helpers/snippet_helpers.rb
index 1ec50bce070..242661a4e23 100644
--- a/spec/support/helpers/snippet_helpers.rb
+++ b/spec/support/helpers/snippet_helpers.rb
@@ -8,7 +8,7 @@ module SnippetHelpers
def snippet_blob_file(blob)
{
"path" => blob.path,
- "raw_url" => gitlab_raw_snippet_blob_url(blob.container, blob.path, host: 'localhost')
+ "raw_url" => gitlab_raw_snippet_blob_url(blob.container, blob.path, host: Gitlab.config.gitlab.host)
}
end
end
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index 562805cec3d..e043d1249b9 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -93,10 +93,11 @@ module StubConfiguration
messages.deep_stringify_keys!
# Default storage is always required
- messages['default'] ||= Gitlab.config.repositories.storages.default
+ messages['default'] ||= Gitlab.config.repositories.storages[GitalySetup::REPOS_STORAGE]
messages.each do |storage_name, storage_hash|
- if !storage_hash.key?('path') || storage_hash['path'] == Gitlab::GitalyClient::StorageSettings::Deprecated
- storage_hash['path'] = Gitlab::GitalyClient::StorageSettings.allow_disk_access { TestEnv.repos_path }
+ # Default additional storages to connect to the default storage
+ unless storage_hash.key?('gitaly_address')
+ storage_hash['gitaly_address'] = Gitlab.config.repositories.storages[GitalySetup::REPOS_STORAGE].gitaly_address
end
messages[storage_name] = Gitlab::GitalyClient::StorageSettings.new(storage_hash.to_h)
diff --git a/spec/support/helpers/stub_feature_flags.rb b/spec/support/helpers/stub_feature_flags.rb
index 42bb9982144..76fc8ebf84d 100644
--- a/spec/support/helpers/stub_feature_flags.rb
+++ b/spec/support/helpers/stub_feature_flags.rb
@@ -29,7 +29,7 @@ module StubFeatureFlags
return unless Gitlab::SafeRequestStore.active?
new_request = Feature::FlipperRequest.new
- allow(new_request).to receive(:id).and_return(SecureRandom.uuid)
+ allow(new_request).to receive(:flipper_id).and_return("FlipperRequest:#{SecureRandom.uuid}")
allow(Feature).to receive(:current_request).and_return(new_request)
end
diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb
index c02ffe07159..0c92502ba99 100644
--- a/spec/support/helpers/stub_gitlab_calls.rb
+++ b/spec/support/helpers/stub_gitlab_calls.rb
@@ -109,6 +109,14 @@ module StubGitlabCalls
end
end
+ def stub_commonmark_sourcepos_enabled
+ engine = Banzai::Filter::MarkdownFilter.render_engine(nil)
+
+ allow_next_instance_of(engine) do |instance|
+ allow(instance).to receive(:sourcepos_disabled?).and_return(false)
+ end
+ end
+
private
def stub_container_registry_tag_manifest_content
diff --git a/spec/support/helpers/stub_requests.rb b/spec/support/helpers/stub_requests.rb
index a3810323fee..bde5535705e 100644
--- a/spec/support/helpers/stub_requests.rb
+++ b/spec/support/helpers/stub_requests.rb
@@ -18,9 +18,15 @@ module StubRequests
end
def stub_dns(url, ip_address:, port: 80)
+ debug_with_puts "beginning of stub_dns"
url = parse_url(url)
+ debug_with_puts "before socket = Socket.sockaddr_in"
socket = Socket.sockaddr_in(port, ip_address)
+ debug_with_puts "after socket = Socket.sockaddr_in"
+
+ debug_with_puts "before addr = Addrinfo.new(socket)"
addr = Addrinfo.new(socket)
+ debug_with_puts "after addr = Addrinfo.new(socket)"
# See Gitlab::UrlBlocker
allow(Addrinfo).to receive(:getaddrinfo)
@@ -52,4 +58,12 @@ module StubRequests
def parse_url(url)
url.is_a?(URI) ? url : URI(url)
end
+
+ # TODO: Remove the debug_with_puts statements below! Used for debugging purposes.
+ # TODO: https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/323#note_1688925316
+ def debug_with_puts(message)
+ return unless ENV['CI'] # rubocop:disable RSpec/AvoidConditionalStatements -- Debug information only in the CI
+
+ puts "[#{Time.current}] #{message}"
+ end
end
diff --git a/spec/support/helpers/stub_saas_features.rb b/spec/support/helpers/stub_saas_features.rb
deleted file mode 100644
index d0aa7108a6a..00000000000
--- a/spec/support/helpers/stub_saas_features.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-module StubSaasFeatures
- # Stub SaaS feature with `feature_name: true/false`
- #
- # @param [Hash] features where key is feature name and value is boolean whether enabled or not.
- #
- # Examples
- # - `stub_saas_features(onboarding: false)` ... Disable `onboarding`
- # SaaS feature globally.
- # - `stub_saas_features(onboarding: true)` ... Enable `onboarding`
- # SaaS feature globally.
- def stub_saas_features(features)
- features.each do |feature_name, value|
- raise ArgumentError, 'value must be boolean' unless value.in? [true, false]
-
- allow(::Gitlab::Saas).to receive(:feature_available?).with(feature_name).and_return(value)
- end
- end
-end
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index 3b8c0b42fe8..111886c1586 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -77,20 +77,10 @@ module UsageDataHelpers
USAGE_DATA_KEYS = %i[
counts
recorded_at
- mattermost_enabled
- signup_enabled
- ldap_enabled
- gravatar_enabled
- omniauth_enabled
- reply_by_email_enabled
- container_registry_enabled
- dependency_proxy_enabled
- gitlab_shared_runners_enabled
gitlab_pages
git
gitaly
database
- prometheus_metrics_enabled
object_store
topology
].freeze
diff --git a/spec/support/matchers/not_enqueue_mail_matcher.rb b/spec/support/matchers/not_enqueue_mail_matcher.rb
deleted file mode 100644
index 0975c038252..00000000000
--- a/spec/support/matchers/not_enqueue_mail_matcher.rb
+++ /dev/null
@@ -1,3 +0,0 @@
-# frozen_string_literal: true
-
-RSpec::Matchers.define_negated_matcher :not_enqueue_mail, :have_enqueued_mail
diff --git a/spec/support/rspec.rb b/spec/support/rspec.rb
index f2f93fff07e..dfc881caddc 100644
--- a/spec/support/rspec.rb
+++ b/spec/support/rspec.rb
@@ -2,6 +2,7 @@
require_relative 'rake'
require_relative 'rspec_order'
+require_relative 'rspec_run_time'
require_relative 'system_exit_detected'
require_relative 'helpers/stub_configuration'
require_relative 'helpers/stub_metrics'
diff --git a/spec/support/rspec_order.rb b/spec/support/rspec_order.rb
index 0305ae7241d..70e4d76f3b2 100644
--- a/spec/support/rspec_order.rb
+++ b/spec/support/rspec_order.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'yaml'
+require 'rspec/core/formatters/base_formatter'
module Support
module RspecOrder
@@ -28,7 +29,7 @@ module Support
end
def potential_order_dependent?(path)
- @todo ||= YAML.load_file(TODO_YAML).to_set # rubocop:disable Gitlab/PredicateMemoization
+ @todo ||= YAML.load_file(TODO_YAML).to_set # rubocop:disable Gitlab/PredicateMemoization -- @todo is never `nil` or `false`.
@todo.include?(path)
end
@@ -38,23 +39,31 @@ module Support
#
# Previously, we've modified metadata[:description] directly but that led
# to bugs. See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96137
- module DocumentationFormatterPatch
+ class RSpecFormatter < RSpec::Core::Formatters::BaseFormatter
+ RSpec::Core::Formatters.register self, :example_group_started
+
# See https://github.com/rspec/rspec-core/blob/v3.11.0/lib/rspec/core/formatters/documentation_formatter.rb#L24-L29
def example_group_started(notification)
- super
-
order = notification.group.metadata[:order]
- return unless order
- output.puts "#{current_indentation}# order #{order}"
+ output.puts " # order #{order}" if order
+ end
+
+ # Print order information only with `--format documentation`.
+ def self.add_formatter_to(config)
+ documentation_formatter = config.formatters
+ .find { |formatter| formatter.is_a?(RSpec::Core::Formatters::DocumentationFormatter) }
+ return unless documentation_formatter
+
+ config.add_formatter self, documentation_formatter.output
end
end
end
end
-RSpec::Core::Formatters::DocumentationFormatter.prepend Support::RspecOrder::DocumentationFormatterPatch
-
RSpec.configure do |config|
+ Support::RspecOrder::RSpecFormatter.add_formatter_to(config)
+
# Useful to find order-dependent specs.
config.register_ordering(:reverse, &:reverse)
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index da23f81e86e..81196fdcbfa 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -163,28 +163,6 @@
- './ee/spec/controllers/users_controller_spec.rb'
- './ee/spec/db/production/license_spec.rb'
- './ee/spec/elastic_integration/global_search_spec.rb'
-- './ee/spec/elastic/migrate/20201105181100_apply_max_analyzed_offset_spec.rb'
-- './ee/spec/elastic/migrate/20201116142400_add_new_data_to_issues_documents_spec.rb'
-- './ee/spec/elastic/migrate/20201123123400_migrate_issues_to_separate_index_spec.rb'
-- './ee/spec/elastic/migrate/20210112165500_delete_issues_from_original_index_spec.rb'
-- './ee/spec/elastic/migrate/20210127154600_remove_permissions_data_from_notes_documents_spec.rb'
-- './ee/spec/elastic/migrate/20210128163600_add_permissions_data_to_notes_documents_spec.rb'
-- './ee/spec/elastic/migrate/20210201104800_migrate_notes_to_separate_index_spec.rb'
-- './ee/spec/elastic/migrate/20210421140400_add_new_data_to_merge_requests_documents_spec.rb'
-- './ee/spec/elastic/migrate/20210429154500_migrate_merge_requests_to_separate_index_spec.rb'
-- './ee/spec/elastic/migrate/20210510113500_delete_merge_requests_from_original_index_spec.rb'
-- './ee/spec/elastic/migrate/20210510143200_delete_notes_from_original_index_spec.rb'
-- './ee/spec/elastic/migrate/20210623081800_add_upvotes_to_issues_spec.rb'
-- './ee/spec/elastic/migrate/20210722112500_add_upvotes_mappings_to_merge_requests_spec.rb'
-- './ee/spec/elastic/migrate/20210813134600_add_namespace_ancestry_to_issues_mapping_spec.rb'
-- './ee/spec/elastic/migrate/20210825110300_backfill_namespace_ancestry_for_issues_spec.rb'
-- './ee/spec/elastic/migrate/20210910094600_add_namespace_ancestry_ids_to_issues_mapping_spec.rb'
-- './ee/spec/elastic/migrate/20210910100000_redo_backfill_namespace_ancestry_ids_for_issues_spec.rb'
-- './ee/spec/elastic/migrate/20220118150500_delete_orphaned_commits_spec.rb'
-- './ee/spec/elastic/migrate/20220119120500_populate_commit_permissions_in_main_index_spec.rb'
-- './ee/spec/elastic/migrate/20220512150000_pause_indexing_for_unsupported_es_versions_spec.rb'
-- './ee/spec/elastic/migrate/20220613120500_migrate_commits_to_separate_index_spec.rb'
-- './ee/spec/elastic/migrate/20220713103500_delete_commits_from_original_index_spec.rb'
- './ee/spec/features/account_recovery_regular_check_spec.rb'
- './ee/spec/features/admin/admin_credentials_inventory_spec.rb'
- './ee/spec/features/admin/admin_dashboard_spec.rb'
@@ -194,7 +172,6 @@
- './ee/spec/features/admin/admin_interacts_with_push_rules_spec.rb'
- './ee/spec/features/admin/admin_merge_requests_approvals_spec.rb'
- './ee/spec/features/admin/admin_reset_pipeline_minutes_spec.rb'
-- './ee/spec/features/admin/admin_runners_spec.rb'
- './ee/spec/features/admin/admin_sends_notification_spec.rb'
- './ee/spec/features/admin/admin_settings_spec.rb'
- './ee/spec/features/admin/admin_show_new_user_signups_cap_alert_spec.rb'
@@ -373,7 +350,6 @@
- './ee/spec/features/pending_project_memberships_spec.rb'
- './ee/spec/features/profiles/account_spec.rb'
- './ee/spec/features/profiles/billing_spec.rb'
-- './ee/spec/features/profiles/password_spec.rb'
- './ee/spec/features/profiles/user_visits_public_profile_spec.rb'
- './ee/spec/features/projects/active_tabs_spec.rb'
- './ee/spec/features/projects/audit_events_spec.rb'
@@ -492,7 +468,6 @@
- './ee/spec/finders/boards/boards_finder_spec.rb'
- './ee/spec/finders/boards/epic_boards_finder_spec.rb'
- './ee/spec/finders/boards/milestones_finder_spec.rb'
-- './ee/spec/finders/boards/users_finder_spec.rb'
- './ee/spec/finders/clusters/environments_finder_spec.rb'
- './ee/spec/finders/compliance_management/merge_requests/compliance_violations_finder_spec.rb'
- './ee/spec/finders/custom_project_templates_finder_spec.rb'
@@ -886,8 +861,6 @@
- './ee/spec/graphql/types/work_items/type_spec.rb'
- './ee/spec/graphql/types/work_items/widget_interface_spec.rb'
- './ee/spec/helpers/admin/emails_helper_spec.rb'
-- './ee/spec/helpers/admin/ip_restriction_helper_spec.rb'
-- './ee/spec/helpers/admin/repo_size_limit_helper_spec.rb'
- './ee/spec/helpers/analytics/code_review_helper_spec.rb'
- './ee/spec/helpers/application_helper_spec.rb'
- './ee/spec/helpers/audit_events_helper_spec.rb'
@@ -1068,13 +1041,11 @@
- './ee/spec/lib/ee/gitlab/auth/request_authenticator_spec.rb'
- './ee/spec/lib/ee/gitlab/auth/saml/identity_linker_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/backfill_iteration_cadence_id_for_boards_spec.rb'
-- './ee/spec/lib/ee/gitlab/background_migration/backfill_project_statistics_container_repository_size_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/delete_invalid_epic_issues_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_check_progress_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_in_batch_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/migrate_shared_vulnerability_scanners_spec.rb'
-- './ee/spec/lib/ee/gitlab/background_migration/purge_stale_security_scans_spec.rb'
- './ee/spec/lib/ee/gitlab/checks/push_rule_check_spec.rb'
- './ee/spec/lib/ee/gitlab/checks/push_rules/branch_check_spec.rb'
- './ee/spec/lib/ee/gitlab/checks/push_rules/commit_check_spec.rb'
@@ -1239,7 +1210,6 @@
- './ee/spec/lib/gitlab/auth/smartcard/san_extension_spec.rb'
- './ee/spec/lib/gitlab/auth/smartcard/session_enforcer_spec.rb'
- './ee/spec/lib/gitlab/auth/smartcard/session_spec.rb'
-- './ee/spec/lib/gitlab/background_migration/migrate_requirements_to_work_items_spec.rb'
- './ee/spec/lib/gitlab/bullet/exclusions_spec.rb'
- './ee/spec/lib/gitlab/cache_spec.rb'
- './ee/spec/lib/gitlab/checks/changes_access_spec.rb'
@@ -1363,7 +1333,6 @@
- './ee/spec/lib/gitlab/geo/oauth/logout_token_spec.rb'
- './ee/spec/lib/gitlab/geo/oauth/session_spec.rb'
- './ee/spec/lib/gitlab/geo/registry_batcher_spec.rb'
-- './ee/spec/lib/gitlab/geo/replication/blob_downloader_spec.rb'
- './ee/spec/lib/gitlab/geo/replication/blob_retriever_spec.rb'
- './ee/spec/lib/gitlab/geo/replicator_spec.rb'
- './ee/spec/lib/gitlab/geo/signed_data_spec.rb'
@@ -1477,10 +1446,6 @@
- './ee/spec/lib/gitlab/usage/metrics/instrumentations/user_cap_setting_enabled_metric_spec.rb'
- './ee/spec/lib/gitlab/user_access_spec.rb'
- './ee/spec/lib/gitlab/visibility_level_spec.rb'
-- './ee/spec/lib/gitlab/vulnerabilities/base_vulnerability_spec.rb'
-- './ee/spec/lib/gitlab/vulnerabilities/container_scanning_vulnerability_spec.rb'
-- './ee/spec/lib/gitlab/vulnerabilities/parser_spec.rb'
-- './ee/spec/lib/gitlab/vulnerabilities/standard_vulnerability_spec.rb'
- './ee/spec/lib/gitlab/web_ide/config/entry/schema/match_spec.rb'
- './ee/spec/lib/gitlab/web_ide/config/entry/schema_spec.rb'
- './ee/spec/lib/gitlab/web_ide/config/entry/schemas_spec.rb'
@@ -1518,9 +1483,6 @@
- './ee/spec/mailers/emails/user_cap_spec.rb'
- './ee/spec/mailers/license_mailer_spec.rb'
- './ee/spec/mailers/notify_spec.rb'
-- './ee/spec/migrations/20220411173544_cleanup_orphans_approval_project_rules_spec.rb'
-- './ee/spec/migrations/20220517144749_remove_vulnerability_approval_rules_spec.rb'
-- './ee/spec/migrations/backfill_delayed_group_deletion_spec.rb'
- './ee/spec/migrations/geo/fix_state_column_in_file_registry_spec.rb'
- './ee/spec/migrations/geo/fix_state_column_in_lfs_object_registry_spec.rb'
- './ee/spec/migrations/geo/migrate_ci_job_artifacts_to_separate_registry_spec.rb'
@@ -1916,7 +1878,6 @@
- './ee/spec/presenters/project_member_presenter_spec.rb'
- './ee/spec/presenters/security/scan_presenter_spec.rb'
- './ee/spec/presenters/subscription_presenter_spec.rb'
-- './ee/spec/presenters/subscriptions/new_plan_presenter_spec.rb'
- './ee/spec/presenters/vulnerabilities/finding_presenter_spec.rb'
- './ee/spec/presenters/vulnerability_presenter_spec.rb'
- './ee/spec/presenters/web_hooks/group/hook_presenter_spec.rb'
@@ -2546,7 +2507,7 @@
- './ee/spec/services/ee/merge_requests/after_create_service_spec.rb'
- './ee/spec/services/ee/merge_requests/base_service_spec.rb'
- './ee/spec/services/ee/merge_requests/create_approval_event_service_spec.rb'
-- './ee/spec/services/ee/merge_requests/create_from_vulnerability_data_service_spec.rb'
+- './ee/spec/services/merge_requests/create_from_vulnerability_data_service_spec.rb'
- './ee/spec/services/ee/merge_requests/create_pipeline_service_spec.rb'
- './ee/spec/services/ee/merge_requests/create_service_spec.rb'
- './ee/spec/services/ee/merge_requests/execute_approval_hooks_service_spec.rb'
@@ -2709,7 +2670,6 @@
- './ee/spec/services/merge_requests/build_service_spec.rb'
- './ee/spec/services/merge_requests/mergeability/check_approved_service_spec.rb'
- './ee/spec/services/merge_requests/mergeability/check_blocked_by_other_mrs_service_spec.rb'
-- './ee/spec/services/merge_requests/mergeability/check_denied_policies_service_spec.rb'
- './ee/spec/services/merge_requests/merge_service_spec.rb'
- './ee/spec/services/merge_requests/merge_to_ref_service_spec.rb'
- './ee/spec/services/merge_requests/push_options_handler_service_spec.rb'
@@ -2933,13 +2893,8 @@
- './ee/spec/views/groups/settings/reporting/show.html.haml_spec.rb'
- './ee/spec/views/layouts/application.html.haml_spec.rb'
- './ee/spec/views/layouts/checkout.html.haml_spec.rb'
-- './ee/spec/views/layouts/header/_current_user_dropdown.html.haml_spec.rb'
- './ee/spec/views/layouts/header/_ee_subscribable_banner.html.haml_spec.rb'
- './ee/spec/views/layouts/header/_read_only_banner.html.haml_spec.rb'
-- './ee/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb'
-- './ee/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb'
-- './ee/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb'
-- './ee/spec/views/layouts/nav/sidebar/_push_rules_link.html.haml_spec.rb'
- './ee/spec/views/operations/environments.html.haml_spec.rb'
- './ee/spec/views/operations/index.html.haml_spec.rb'
- './ee/spec/views/profiles/preferences/show.html.haml_spec.rb'
@@ -3256,14 +3211,13 @@
- './spec/controllers/omniauth_callbacks_controller_spec.rb'
- './spec/controllers/passwords_controller_spec.rb'
- './spec/controllers/profiles/accounts_controller_spec.rb'
-- './spec/controllers/profiles/active_sessions_controller_spec.rb'
+- './spec/controllers/user_settings/active_sessions_controller_spec.rb'
- './spec/controllers/profiles/avatars_controller_spec.rb'
- './spec/controllers/profiles_controller_spec.rb'
- './spec/controllers/profiles/emails_controller_spec.rb'
- './spec/controllers/profiles/gpg_keys_controller_spec.rb'
- './spec/controllers/profiles/keys_controller_spec.rb'
- './spec/controllers/profiles/notifications_controller_spec.rb'
-- './spec/controllers/profiles/personal_access_tokens_controller_spec.rb'
- './spec/controllers/profiles/preferences_controller_spec.rb'
- './spec/controllers/profiles/two_factor_auths_controller_spec.rb'
- './spec/controllers/profiles/webauthn_registrations_controller_spec.rb'
@@ -3406,7 +3360,6 @@
- './spec/features/admin/admin_mode_spec.rb'
- './spec/features/admin/admin_mode/workers_spec.rb'
- './spec/features/admin/admin_projects_spec.rb'
-- './spec/features/admin/admin_runners_spec.rb'
- './spec/features/admin/admin_search_settings_spec.rb'
- './spec/features/admin/admin_sees_background_migrations_spec.rb'
- './spec/features/admin/admin_sees_projects_statistics_spec.rb'
@@ -3524,7 +3477,6 @@
- './spec/features/groups/dependency_proxy_spec.rb'
- './spec/features/groups/empty_states_spec.rb'
- './spec/features/groups/group_page_with_external_authorization_service_spec.rb'
-- './spec/features/groups/group_runners_spec.rb'
- './spec/features/groups/group_settings_spec.rb'
- './spec/features/groups/import_export/connect_instance_spec.rb'
- './spec/features/groups/import_export/export_file_spec.rb'
@@ -3765,15 +3717,13 @@
- './spec/features/password_reset_spec.rb'
- './spec/features/populate_new_pipeline_vars_with_params_spec.rb'
- './spec/features/profiles/account_spec.rb'
-- './spec/features/profiles/active_sessions_spec.rb'
+- './spec/features/user_settings/active_sessions_spec.rb'
- './spec/features/profiles/chat_names_spec.rb'
- './spec/features/profiles/emails_spec.rb'
- './spec/features/profiles/gpg_keys_spec.rb'
- './spec/features/profiles/keys_spec.rb'
- './spec/features/profiles/oauth_applications_spec.rb'
-- './spec/features/profiles/password_spec.rb'
- './spec/features/profile_spec.rb'
-- './spec/features/profiles/personal_access_tokens_spec.rb'
- './spec/features/profiles/two_factor_auths_spec.rb'
- './spec/features/profiles/user_changes_notified_of_own_activity_spec.rb'
- './spec/features/profiles/user_edit_preferences_spec.rb'
@@ -4879,7 +4829,6 @@
- './spec/helpers/merge_requests_helper_spec.rb'
- './spec/helpers/namespaces_helper_spec.rb'
- './spec/helpers/nav/new_dropdown_helper_spec.rb'
-- './spec/helpers/nav/top_nav_helper_spec.rb'
- './spec/helpers/notes_helper_spec.rb'
- './spec/helpers/notifications_helper_spec.rb'
- './spec/helpers/notify_helper_spec.rb'
@@ -5376,12 +5325,10 @@
- './spec/lib/gitlab/avatar_cache_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb'
-- './spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_note_discussion_id_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb'
-- './spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_project_repositories_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb'
@@ -5394,9 +5341,7 @@
- './spec/lib/gitlab/background_migration/batching_strategies/dismissed_vulnerabilities_strategy_spec.rb'
- './spec/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy_spec.rb'
- './spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb'
-- './spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb'
- './spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb'
-- './spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb'
- './spec/lib/gitlab/background_migration/disable_legacy_open_source_licence_for_recent_public_projects_spec.rb'
- './spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_inactive_public_projects_spec.rb'
- './spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb'
@@ -5417,7 +5362,6 @@
- './spec/lib/gitlab/background_task_spec.rb'
- './spec/lib/gitlab/backtrace_cleaner_spec.rb'
- './spec/lib/gitlab/batch_worker_context_spec.rb'
-- './spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- './spec/lib/gitlab/bitbucket_import/project_creator_spec.rb'
- './spec/lib/gitlab/bitbucket_import/wiki_formatter_spec.rb'
- './spec/lib/gitlab/blame_spec.rb'
@@ -6029,7 +5973,6 @@
- './spec/lib/gitlab/email/failure_handler_spec.rb'
- './spec/lib/gitlab/email/handler/create_issue_handler_spec.rb'
- './spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb'
-- './spec/lib/gitlab/email/handler/create_note_handler_spec.rb'
- './spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb'
- './spec/lib/gitlab/email/handler/service_desk_handler_spec.rb'
- './spec/lib/gitlab/email/handler_spec.rb'
@@ -6063,7 +6006,6 @@
- './spec/lib/gitlab/etag_caching/router/rails_spec.rb'
- './spec/lib/gitlab/etag_caching/router_spec.rb'
- './spec/lib/gitlab/etag_caching/store_spec.rb'
-- './spec/lib/gitlab/event_store/event_spec.rb'
- './spec/lib/gitlab/event_store/store_spec.rb'
- './spec/lib/gitlab/exception_log_formatter_spec.rb'
- './spec/lib/gitlab/exceptions_app_spec.rb'
@@ -6150,7 +6092,6 @@
- './spec/lib/gitlab/github_import/importer/events/renamed_spec.rb'
- './spec/lib/gitlab/github_import/importer/events/reopened_spec.rb'
- './spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb'
-- './spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb'
- './spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb'
- './spec/lib/gitlab/github_import/importer/issue_importer_spec.rb'
- './spec/lib/gitlab/github_import/importer/issues_importer_spec.rb'
@@ -6622,7 +6563,6 @@
- './spec/lib/gitlab/redis/rate_limiting_spec.rb'
- './spec/lib/gitlab/redis/sessions_spec.rb'
- './spec/lib/gitlab/redis/shared_state_spec.rb'
-- './spec/lib/gitlab/redis/sidekiq_status_spec.rb'
- './spec/lib/gitlab/redis/trace_chunks_spec.rb'
- './spec/lib/gitlab/redis/wrapper_spec.rb'
- './spec/lib/gitlab/reference_counter_spec.rb'
@@ -6929,7 +6869,6 @@
- './spec/lib/peek/views/external_http_spec.rb'
- './spec/lib/peek/views/memory_spec.rb'
- './spec/lib/peek/views/redis_detailed_spec.rb'
-- './spec/lib/product_analytics/event_params_spec.rb'
- './spec/lib/prometheus/cleanup_multiproc_dir_service_spec.rb'
- './spec/lib/prometheus/pid_provider_spec.rb'
- './spec/lib/quality/seeders/issues_spec.rb'
@@ -6979,7 +6918,6 @@
- './spec/lib/sidebars/projects/menus/scope_menu_spec.rb'
- './spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb'
- './spec/lib/sidebars/projects/menus/settings_menu_spec.rb'
-- './spec/lib/sidebars/projects/menus/shimo_menu_spec.rb'
- './spec/lib/sidebars/projects/menus/snippets_menu_spec.rb'
- './spec/lib/sidebars/projects/menus/wiki_menu_spec.rb'
- './spec/lib/sidebars/projects/menus/zentao_menu_spec.rb'
@@ -6991,8 +6929,6 @@
- './spec/lib/system_check/base_check_spec.rb'
- './spec/lib/system_check/incoming_email_check_spec.rb'
- './spec/lib/system_check/incoming_email/imap_authentication_check_spec.rb'
-- './spec/lib/system_check/orphans/namespace_check_spec.rb'
-- './spec/lib/system_check/orphans/repository_check_spec.rb'
- './spec/lib/system_check/sidekiq_check_spec.rb'
- './spec/lib/system_check/simple_executor_spec.rb'
- './spec/lib/system_check_spec.rb'
@@ -7384,7 +7320,6 @@
- './spec/models/integrations/pumble_spec.rb'
- './spec/models/integrations/pushover_spec.rb'
- './spec/models/integrations/redmine_spec.rb'
-- './spec/models/integrations/shimo_spec.rb'
- './spec/models/integrations/slack_slash_commands_spec.rb'
- './spec/models/integrations/slack_spec.rb'
- './spec/models/integrations/teamcity_spec.rb'
@@ -7523,7 +7458,6 @@
- './spec/models/preloaders/merge_request_diff_preloader_spec.rb'
- './spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb'
- './spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb'
-- './spec/models/product_analytics_event_spec.rb'
- './spec/models/programming_language_spec.rb'
- './spec/models/project_authorization_spec.rb'
- './spec/models/project_auto_devops_spec.rb'
@@ -7613,7 +7547,6 @@
- './spec/models/user_custom_attribute_spec.rb'
- './spec/models/user_detail_spec.rb'
- './spec/models/user_highest_role_spec.rb'
-- './spec/models/user_interacted_project_spec.rb'
- './spec/models/user_mentions/commit_user_mention_spec.rb'
- './spec/models/user_mentions/issue_user_mention_spec.rb'
- './spec/models/user_mentions/merge_request_user_mention_spec.rb'
@@ -8004,7 +7937,6 @@
- './spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb'
- './spec/requests/api/graphql/mutations/work_items/create_spec.rb'
- './spec/requests/api/graphql/mutations/work_items/delete_spec.rb'
-- './spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb'
- './spec/requests/api/graphql/mutations/work_items/update_spec.rb'
- './spec/requests/api/graphql/mutations/work_items/update_task_spec.rb'
- './spec/requests/api/graphql/namespace/package_settings_spec.rb'
@@ -8199,7 +8131,6 @@
- './spec/requests/api/users_spec.rb'
- './spec/requests/api/wikis_spec.rb'
- './spec/requests/concerns/planning_hierarchy_spec.rb'
-- './spec/requests/content_security_policy_spec.rb'
- './spec/requests/dashboard_controller_spec.rb'
- './spec/requests/dashboard/projects_controller_spec.rb'
- './spec/requests/git_http_spec.rb'
@@ -8252,7 +8183,6 @@
- './spec/requests/projects/harbor/artifacts_controller_spec.rb'
- './spec/requests/projects/harbor/repositories_controller_spec.rb'
- './spec/requests/projects/harbor/tags_controller_spec.rb'
-- './spec/requests/projects/integrations/shimos_controller_spec.rb'
- './spec/requests/projects/issue_links_controller_spec.rb'
- './spec/requests/projects/issues_controller_spec.rb'
- './spec/requests/projects/issues/discussions_spec.rb'
@@ -9003,8 +8933,6 @@
- './spec/services/personal_access_tokens/revoke_service_spec.rb'
- './spec/services/post_receive_service_spec.rb'
- './spec/services/preview_markdown_service_spec.rb'
-- './spec/services/product_analytics/build_activity_graph_service_spec.rb'
-- './spec/services/product_analytics/build_graph_service_spec.rb'
- './spec/services/projects/after_rename_service_spec.rb'
- './spec/services/projects/alerting/notify_service_spec.rb'
- './spec/services/projects/all_issues_count_service_spec.rb'
@@ -9227,10 +9155,8 @@
- './spec/services/work_items/create_from_task_service_spec.rb'
- './spec/services/work_items/create_service_spec.rb'
- './spec/services/work_items/delete_service_spec.rb'
-- './spec/services/work_items/delete_task_service_spec.rb'
- './spec/services/work_items/parent_links/create_service_spec.rb'
- './spec/services/work_items/parent_links/destroy_service_spec.rb'
-- './spec/services/work_items/task_list_reference_removal_service_spec.rb'
- './spec/services/work_items/task_list_reference_replacement_service_spec.rb'
- './spec/services/work_items/update_service_spec.rb'
- './spec/services/work_items/widgets/assignees_service/update_service_spec.rb'
@@ -9399,14 +9325,7 @@
- './spec/views/layouts/devise.html.haml_spec.rb'
- './spec/views/layouts/_flash.html.haml_spec.rb'
- './spec/views/layouts/fullscreen.html.haml_spec.rb'
-- './spec/views/layouts/header/_gitlab_version.html.haml_spec.rb'
-- './spec/views/layouts/header/_new_dropdown.haml_spec.rb'
-- './spec/views/layouts/_header_search.html.haml_spec.rb'
- './spec/views/layouts/_head.html.haml_spec.rb'
-- './spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb'
-- './spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb'
-- './spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb'
-- './spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb'
- './spec/views/layouts/profile.html.haml_spec.rb'
- './spec/views/layouts/_published_experiments.html.haml_spec.rb'
- './spec/views/layouts/signup_onboarding.html.haml_spec.rb'
@@ -9474,7 +9393,6 @@
- './spec/views/shared/milestones/_issuables.html.haml_spec.rb'
- './spec/views/shared/_milestones_sort_dropdown.html.haml_spec.rb'
- './spec/views/shared/milestones/_top.html.haml_spec.rb'
-- './spec/views/shared/nav/_sidebar.html.haml_spec.rb'
- './spec/views/shared/projects/_inactive_project_deletion_alert.html.haml_spec.rb'
- './spec/views/shared/projects/_list.html.haml_spec.rb'
- './spec/views/shared/projects/_project.html.haml_spec.rb'
diff --git a/spec/support/rspec_run_time.rb b/spec/support/rspec_run_time.rb
new file mode 100644
index 00000000000..977d4885624
--- /dev/null
+++ b/spec/support/rspec_run_time.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'os'
+require 'yaml'
+require 'rspec/core/formatters/base_formatter'
+require_relative '../../tooling/lib/tooling/helpers/duration_formatter'
+
+module Support
+ module RSpecRunTime
+ class RSpecFormatter < RSpec::Core::Formatters::BaseFormatter
+ include Tooling::Helpers::DurationFormatter
+
+ TIME_LIMIT_IN_MINUTES = 80
+
+ RSpec::Core::Formatters.register self, :example_group_started, :example_group_finished
+
+ def start(_notification)
+ @group_level = 0
+ @rspec_test_suite_start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
+ output.puts "\n# [RSpecRunTime] Starting RSpec timer..."
+
+ init_expected_duration_report
+ end
+
+ def example_group_started(notification)
+ if @last_elapsed_seconds && @last_elapsed_seconds > TIME_LIMIT_IN_MINUTES * 60
+ RSpec::Expectations.fail_with(
+ "Rspec suite is exceeding the #{TIME_LIMIT_IN_MINUTES} minute limit and is forced to exit with error.")
+ end
+
+ if @group_level == 0
+ @current_group_start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
+ file_path = spec_file_path(notification)
+ output.puts "# [RSpecRunTime] Starting example group #{file_path}. #{expected_run_time(file_path)}"
+ end
+
+ @group_level += 1
+ end
+
+ def example_group_finished(notification)
+ @group_level -= 1 if @group_level > 0
+
+ if @group_level == 0
+ file_path = spec_file_path(notification)
+ time_now = Process.clock_gettime(Process::CLOCK_MONOTONIC)
+ actual_duration = time_now - @current_group_start_time
+
+ output.puts "\n# [RSpecRunTime] Finishing example group #{file_path}. " \
+ "It took #{readable_duration(actual_duration)}. " \
+ "#{expected_run_time(file_path)}"
+ end
+
+ output_elapsed_time
+ end
+
+ private
+
+ def expected_duration_report
+ report_path = ENV['KNAPSACK_RSPEC_SUITE_REPORT_PATH']
+
+ return unless report_path && File.exist?(report_path)
+
+ # rubocop:disable Gitlab/Json -- regular JSON is sufficient
+ @expected_duration_report ||= JSON.parse(File.read(report_path))
+ # rubocop:enable Gitlab/Json
+ end
+ alias_method :init_expected_duration_report, :expected_duration_report
+
+ def spec_file_path(notification)
+ notification.group.metadata[:file_path].sub('./', '')
+ end
+
+ def expected_run_time(spec_file_path)
+ return '' unless expected_duration_report
+
+ expected_duration = expected_duration_report[spec_file_path]
+ return "Missing expected duration from Knapsack report for #{spec_file_path}." unless expected_duration
+
+ "Expected to take #{readable_duration(expected_duration)}."
+ end
+
+ def output_elapsed_time
+ time_now = Process.clock_gettime(Process::CLOCK_MONOTONIC)
+ elapsed_seconds = time_now - @rspec_test_suite_start_time
+
+ # skip the output unless the duration increased by at least 1 second
+ unless @last_elapsed_seconds.nil? || elapsed_seconds - @last_elapsed_seconds < 1
+ output.puts \
+ "# [RSpecRunTime] RSpec elapsed time: #{readable_duration(elapsed_seconds)}. " \
+ "#{current_rss_in_megabytes}\n\n"
+ end
+
+ @last_elapsed_seconds = elapsed_seconds
+ end
+
+ def current_rss_in_megabytes
+ rss_in_megabytes = OS.rss_bytes / 1024 / 1024
+
+ "Current RSS: ~#{rss_in_megabytes.round}M"
+ end
+ end
+ end
+end
+
+RSpec.configure do |config|
+ config.add_formatter Support::RSpecRunTime::RSpecFormatter if ENV['GITLAB_CI']
+end
diff --git a/spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb b/spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb
index 3c9bb980b46..3eeaa52d221 100644
--- a/spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb
+++ b/spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb
@@ -3,14 +3,14 @@
RSpec.shared_context 'when there are catalog resources with versions' do
let_it_be(:current_user) { create(:user) }
- let_it_be(:project1) { create(:project, :repository) }
- let_it_be(:project2) { create(:project, :repository) }
- let_it_be(:project3) { create(:project, :repository) }
+ let_it_be(:project1) { create(:project, :repository, name: 'A') }
+ let_it_be(:project2) { create(:project, :repository, name: 'Z') }
+ let_it_be(:project3) { create(:project, :repository, name: 'L', description: 'Z') }
let_it_be_with_reload(:resource1) { create(:ci_catalog_resource, project: project1) }
let_it_be_with_reload(:resource2) { create(:ci_catalog_resource, project: project2) }
let_it_be(:resource3) { create(:ci_catalog_resource, project: project3) }
- let_it_be(:release_v1_0) { create(:release, project: project1, tag: 'v1.0', released_at: 4.days.ago) }
+ let_it_be_with_reload(:release_v1_0) { create(:release, project: project1, tag: 'v1.0', released_at: 4.days.ago) }
let_it_be(:release_v1_1) { create(:release, project: project1, tag: 'v1.1', released_at: 3.days.ago) }
let_it_be(:release_v2_0) { create(:release, project: project2, tag: 'v2.0', released_at: 2.days.ago) }
let_it_be(:release_v2_1) { create(:release, project: project2, tag: 'v2.1', released_at: 1.day.ago) }
diff --git a/spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb b/spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb
index fadd46a7e12..c8552e02d0c 100644
--- a/spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb
+++ b/spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb
@@ -7,8 +7,8 @@ RSpec.shared_context 'project integration Jira context' do
def fill_form(disable: false)
click_active_checkbox if disable
- fill_in 'service_url', with: url
- fill_in 'service_username', with: 'username'
- fill_in 'service_password', with: 'password'
+ fill_in 'service-url', with: url
+ fill_in 'service-username', with: 'username'
+ fill_in 'service-password', with: 'password'
end
end
diff --git a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
index 16d23f63fd0..2ad2fcec39e 100644
--- a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
@@ -63,6 +63,11 @@ RSpec.shared_context 'IssuesFinder context' do
)
end
+ let_it_be(:group_level_item) { create(:issue, :epic, :group_level, namespace: group, author: user) }
+ let_it_be(:group_level_confidential_item) do
+ create(:issue, :confidential, :epic, :group_level, namespace: group, author: user2)
+ end
+
let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: item1) }
let_it_be(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: item2) }
let_it_be(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: item3) }
diff --git a/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb
index 1118039d164..39e2819235b 100644
--- a/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb
@@ -63,6 +63,25 @@ RSpec.shared_context 'WorkItemsFinder context' do
)
end
+ let_it_be(:group_level_item) do
+ create(
+ :work_item,
+ :epic,
+ namespace: group,
+ author: user
+ )
+ end
+
+ let_it_be(:group_level_confidential_item) do
+ create(
+ :work_item,
+ :confidential,
+ :epic,
+ namespace: group,
+ author: user2
+ )
+ end
+
let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: item1) }
let_it_be(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: item2) }
let_it_be(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: item3) }
diff --git a/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb b/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
index 257ccc553fe..6ab41d87f44 100644
--- a/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
+++ b/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
@@ -43,6 +43,7 @@ RSpec.shared_context 'with FOSS query type fields' do
:user,
:users,
:work_item,
+ :work_items_by_reference,
:audit_event_definitions,
:abuse_report,
:abuse_report_labels
diff --git a/spec/support/shared_contexts/merge_request_edit_shared_context.rb b/spec/support/shared_contexts/merge_request_edit_shared_context.rb
index f0e89b0c5f9..cceaa14b3d2 100644
--- a/spec/support/shared_contexts/merge_request_edit_shared_context.rb
+++ b/spec/support/shared_contexts/merge_request_edit_shared_context.rb
@@ -5,7 +5,7 @@ RSpec.shared_context 'merge request edit context' do
let(:user2) { create(:user) }
let!(:milestone) { create(:milestone, project: target_project) }
let!(:label) { create(:label, project: target_project) }
- let!(:label2) { create(:label, project: target_project) }
+ let!(:label2) { create(:label, project: target_project, lock_on_merge: true) }
let(:target_project) { create(:project, :public, :repository) }
let(:source_project) { target_project }
let(:merge_request) do
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index a5ccce27aa5..2adb9e410a7 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -236,29 +236,6 @@ RSpec.shared_context '"Explore" navbar structure' do
nav_sub_items: []
},
{
- nav_item: _("Topics"),
- nav_sub_items: []
- },
- {
- nav_item: _("Snippets"),
- nav_sub_items: []
- }
- ]
- end
-end
-
-RSpec.shared_context '"Explore" navbar structure with global_ci_catalog FF' do
- let(:structure) do
- [
- {
- nav_item: _("Projects"),
- nav_sub_items: []
- },
- {
- nav_item: _("Groups"),
- nav_sub_items: []
- },
- {
nav_item: _("CI/CD Catalog"),
nav_sub_items: []
},
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 4564fa23236..c6ea665a160 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -12,14 +12,14 @@ RSpec.shared_context 'GroupPolicy context' do
let(:public_permissions) do
%i[
- read_group read_counts read_issue
+ read_group read_counts read_issue read_namespace
read_label read_issue_board_list read_milestone read_issue_board
]
end
let(:guest_permissions) do
%i[
- read_label read_group upload_file read_namespace read_namespace_via_membership read_group_activity
+ read_label read_group upload_file read_namespace_via_membership read_group_activity
read_group_issues read_group_boards read_group_labels read_group_milestones
read_group_merge_requests
]
diff --git a/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
index 11f6d816fc1..04bbbcb1c36 100644
--- a/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
@@ -83,16 +83,16 @@ RSpec.shared_context 'ProjectPolicyTable context' do
:public | :anonymous | nil | 1
:internal | :admin | true | 1
- :internal | :admin | false | 0
- :internal | :reporter | nil | 0
- :internal | :guest | nil | 0
- :internal | :non_member | nil | 0
+ :internal | :admin | false | 1
+ :internal | :reporter | nil | 1
+ :internal | :guest | nil | 1
+ :internal | :non_member | nil | 1
:internal | :anonymous | nil | 0
:private | :admin | true | 1
:private | :admin | false | 0
- :private | :reporter | nil | 0
- :private | :guest | nil | 0
+ :private | :reporter | nil | 1
+ :private | :guest | nil | 1
:private | :non_member | nil | 0
:private | :anonymous | nil | 0
end
diff --git a/spec/support/shared_contexts/requests/api/graphql/work_items/work_item_types_shared_context.rb b/spec/support/shared_contexts/requests/api/graphql/work_items/work_item_types_shared_context.rb
new file mode 100644
index 00000000000..3a062d77b4f
--- /dev/null
+++ b/spec/support/shared_contexts/requests/api/graphql/work_items/work_item_types_shared_context.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'with work item types request context' do
+ let(:work_item_type_fields) do
+ <<~GRAPHQL
+ id
+ name
+ iconName
+ widgetDefinitions {
+ type
+ ... on WorkItemWidgetDefinitionAssignees {
+ canInviteMembers
+ }
+ ... on WorkItemWidgetDefinitionHierarchy {
+ allowedChildTypes {
+ nodes { id name }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ # This is necessary so we can overwrite attributes in EE
+ let(:widget_attributes) { base_widget_attributes }
+ let(:base_widget_attributes) do
+ {
+ assignees: {
+ 'canInviteMembers' => false
+ }
+ }
+ end
+
+ def expected_work_item_type_response(work_item_type = nil)
+ base_scope = WorkItems::Type.default
+ base_scope = base_scope.id_in(work_item_type.id) if work_item_type
+
+ base_scope.map do |type|
+ hash_including(
+ 'id' => type.to_global_id.to_s,
+ 'name' => type.name,
+ 'iconName' => type.icon_name,
+ 'widgetDefinitions' => match_array(widgets_for(type))
+ )
+ end
+ end
+
+ def widgets_for(work_item_type)
+ work_item_type.widgets.map do |widget|
+ base_attributes = { 'type' => widget.type.to_s.upcase }
+ next hierarchy_widget_attributes(work_item_type, base_attributes) if widget == WorkItems::Widgets::Hierarchy
+ next base_attributes unless widget_attributes[widget.type]
+
+ base_attributes.merge(widget_attributes[widget.type])
+ end
+ end
+
+ def hierarchy_widget_attributes(work_item_type, base_attributes)
+ fields = work_item_type.allowed_child_types_by_name.map do |child_type|
+ { "id" => child_type.to_global_id.to_s, "name" => child_type.name }
+ end
+
+ base_attributes.merge({ 'allowedChildTypes' => { 'nodes' => fields } })
+ end
+end
diff --git a/spec/support/shared_contexts/requests/api/terraform_modules_shared_context.rb b/spec/support/shared_contexts/requests/api/terraform_modules_shared_context.rb
new file mode 100644
index 00000000000..359664ee4f3
--- /dev/null
+++ b/spec/support/shared_contexts/requests/api/terraform_modules_shared_context.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'for terraform modules api setup' do
+ include PackagesManagerApiSpecHelpers
+ include WorkhorseHelpers
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:project) { create(:project, namespace: group) }
+ let_it_be(:package) { create(:terraform_module_package, project: project) }
+ let_it_be(:personal_access_token) { create(:personal_access_token) }
+ let_it_be(:user) { personal_access_token.user }
+ let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
+ let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
+
+ let(:headers) { {} }
+ let(:token) { tokens[token_type] }
+
+ let(:tokens) do
+ {
+ personal_access_token: personal_access_token.token,
+ deploy_token: deploy_token.token,
+ job_token: job.token,
+ invalid: 'invalid-token123'
+ }
+ end
+end
diff --git a/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb b/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
index 24f0d22da47..b51c68f4958 100644
--- a/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
+++ b/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
@@ -50,7 +50,8 @@ RSpec.shared_context 'stubbed service ping metrics definitions' do
'value_type' => value_type,
'status' => status,
'instrumentation_class' => instrumentation_class,
- 'time_frame' => 'all'
+ 'time_frame' => 'all',
+ 'data_source' => 'redis_hll'
}
end
end
diff --git a/spec/support/shared_contexts/single_change_access_checks_shared_context.rb b/spec/support/shared_contexts/single_change_access_checks_shared_context.rb
index bf90c26047b..5945302824b 100644
--- a/spec/support/shared_contexts/single_change_access_checks_shared_context.rb
+++ b/spec/support/shared_contexts/single_change_access_checks_shared_context.rb
@@ -21,7 +21,7 @@ RSpec.shared_context 'change access checks context' do
)
end
- subject { described_class.new(change_access) }
+ subject(:change_check) { described_class.new(change_access) }
before do
project.add_developer(user)
diff --git a/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb b/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb
index cf539174587..ddf3b1d7d17 100644
--- a/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb
+++ b/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb
@@ -137,7 +137,9 @@ RSpec.shared_examples 'unlicensed cycle analytics request params' do
it 'disables all paid features' do
is_expected.to match(a_hash_including(enable_tasks_by_type_chart: 'false',
enable_customizable_stages: 'false',
- enable_projects_filter: 'false'))
+ enable_projects_filter: 'false',
+ enable_vsd_link: 'false'
+ ))
end
end
diff --git a/spec/support/shared_examples/ci/stuck_builds_shared_examples.rb b/spec/support/shared_examples/ci/stuck_builds_shared_examples.rb
index dd0a57c6b6d..b86b70de4f1 100644
--- a/spec/support/shared_examples/ci/stuck_builds_shared_examples.rb
+++ b/spec/support/shared_examples/ci/stuck_builds_shared_examples.rb
@@ -11,7 +11,7 @@ RSpec.shared_examples 'job is dropped with failure reason' do |failure_reason|
context 'when job has data integrity problem' do
it 'drops the job and logs the reason' do
- job.update_columns(yaml_variables: '[{"key" => "value"}]')
+ allow(::Gitlab::Ci::Build::Status::Reason).to receive(:fabricate).and_raise(StandardError.new)
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
diff --git a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
index 53d80c64827..bdc2bac0573 100644
--- a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
+++ b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
@@ -120,29 +120,15 @@ RSpec.shared_examples 'every metric definition' do
end
let(:ignored_classes) do
- [
- Gitlab::Usage::Metrics::Instrumentations::IssuesWithAlertManagementAlertsMetric,
- Gitlab::Usage::Metrics::Instrumentations::IssuesWithPrometheusAlertEvents
- ].freeze
- end
-
- let(:removed_classes) do
- [
- Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailCtaClickedMetric,
- Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailSentMetric
- ].freeze
- end
-
- def metric_not_used?(constant)
- parent_metric_classes.include?(constant) ||
- ignored_classes.include?(constant) ||
- removed_classes.include?(constant)
+ Gitlab::Usage::Metrics::Instrumentations::IssuesCreatedFromAlertsMetric::ISSUES_FROM_ALERTS_METRICS +
+ Gitlab::Usage::Metrics::Instrumentations::UniqueUsersAllImportsMetric::IMPORTS_METRICS
end
def assert_uses_all_nested_classes(parent_module)
parent_module.constants(false).each do |const_name|
constant = parent_module.const_get(const_name, false)
- next if metric_not_used?(constant)
+ next if parent_metric_classes.include?(constant) ||
+ ignored_classes.include?(constant)
case constant
when Class
diff --git a/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb b/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
new file mode 100644
index 00000000000..2eab533ef7f
--- /dev/null
+++ b/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+# Requires `request` subject to be defined
+#
+# subject(:request) { get root_path }
+RSpec.shared_examples 'Base action controller' do
+ describe 'security headers' do
+ describe 'Cross-Security-Policy' do
+ context 'when configuring snowplow' do
+ let(:snowplow_host) { 'snowplow.example.com' }
+
+ shared_examples 'snowplow is not in the CSP' do
+ it 'does not add the snowplow collector hostname to the CSP' do
+ request
+
+ expect(response.headers['Content-Security-Policy']).not_to include(snowplow_host)
+ end
+ end
+
+ context 'when snowplow is enabled' do
+ before do
+ stub_application_setting(snowplow_enabled: true, snowplow_collector_hostname: snowplow_host)
+ end
+
+ it 'adds snowplow to the csp' do
+ request
+
+ expect(response.headers['Content-Security-Policy']).to include(snowplow_host)
+ end
+ end
+
+ context 'when snowplow is enabled but host is not configured' do
+ before do
+ stub_application_setting(snowplow_enabled: true, snowplow_collector_hostname: nil)
+ end
+
+ it_behaves_like 'snowplow is not in the CSP'
+ end
+
+ context 'when snowplow is disabled' do
+ before do
+ stub_application_setting(snowplow_enabled: false, snowplow_collector_hostname: snowplow_host)
+ end
+
+ it_behaves_like 'snowplow is not in the CSP'
+ end
+ end
+
+ context 'when configuring vite' do
+ let(:vite_origin) { "#{ViteRuby.instance.config.host}:#{ViteRuby.instance.config.port}" }
+
+ context 'when vite enabled during development',
+ skip: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424334' do
+ before do
+ stub_rails_env('development')
+ stub_feature_flags(vite: true)
+ end
+
+ it 'adds vite csp' do
+ request
+
+ expect(response.headers['Content-Security-Policy']).to include(vite_origin)
+ end
+ end
+
+ context 'when vite disabled' do
+ before do
+ stub_feature_flags(vite: false)
+ end
+
+ it "doesn't add vite csp" do
+ request
+
+ expect(response.headers['Content-Security-Policy']).not_to include(vite_origin)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb b/spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb
index b448ea16128..efb05709924 100644
--- a/spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb
@@ -6,20 +6,20 @@ RSpec.shared_examples Onboarding::Redirectable do
context 'when the new user already has any accepted group membership' do
let!(:single_member) { create(:group_member, invite_email: email) }
- it 'redirects to activity group path with a flash message' do
+ it 'redirects to the group path with a flash message' do
post_create
- expect(response).to redirect_to activity_group_path(single_member.source)
+ expect(response).to redirect_to group_path(single_member.source)
expect(controller).to set_flash[:notice].to(/You have been granted/)
end
context 'when the new user already has more than 1 accepted group membership' do
let!(:last_member) { create(:group_member, invite_email: email) }
- it 'redirects to the last member activity group path without a flash message' do
+ it 'redirects to the last member group path without a flash message' do
post_create
- expect(response).to redirect_to activity_group_path(last_member.source)
+ expect(response).to redirect_to group_path(last_member.source)
expect(controller).not_to set_flash[:notice].to(/You have been granted/)
end
end
diff --git a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
index 7f33ece854b..10d80dd5219 100644
--- a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
@@ -13,6 +13,9 @@ RSpec.shared_examples 'tracking unique visits' do |method|
.to receive(:track_event).with(id, values: anything)
end
+ # allow other method calls in addition to the expected one
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+
get method, params: request_params, format: :html
end
@@ -24,6 +27,9 @@ RSpec.shared_examples 'tracking unique visits' do |method|
.to receive(:track_event).with(id, values: anything)
end
+ # allow other method calls in addition to the expected one
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+
stub_do_not_track('0')
get method, params: request_params, format: :html
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index 6bfb60c3f34..d0b2d0c9cae 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -449,6 +449,9 @@ RSpec.shared_examples 'edits content using the content editor' do |params = {
before do
switch_to_content_editor
+ type_in_content_editor [modifier_key, 'a']
+ type_in_content_editor :delete
+
type_in_content_editor "Some **rich** _text_ ~~content~~ [link](https://gitlab.com)"
type_in_content_editor [modifier_key, 'a']
@@ -488,6 +491,26 @@ RSpec.shared_examples 'edits content using the content editor' do |params = {
end
end
+ it 'pastes raw markdown with formatting when pasting inside a markdown code block' do
+ type_in_content_editor '```md'
+ type_in_content_editor :enter
+ type_in_content_editor [modifier_key, 'v']
+
+ page.within content_editor_testid do
+ expect(page).to have_selector('pre', text: 'Some **rich** _text_ ~~content~~ [link](https://gitlab.com)')
+ end
+ end
+
+ it 'pastes raw markdown without formatting when pasting inside a plaintext code block' do
+ type_in_content_editor '```'
+ type_in_content_editor :enter
+ type_in_content_editor [modifier_key, 'v']
+
+ page.within content_editor_testid do
+ expect(page).to have_selector('pre', text: 'Some rich text content link')
+ end
+ end
+
it 'pastes raw text without formatting, stripping whitespaces, if shift + ctrl + v is pressed' do
type_in_content_editor " Some **rich**"
type_in_content_editor :enter
@@ -570,7 +593,7 @@ RSpec.shared_examples 'edits content using the content editor' do |params = {
type_in_content_editor '/assign'
expect(find(suggestions_dropdown)).to have_text('/assign')
- send_keys [:arrow_down, :enter]
+ send_keys :enter
expect(page).to have_text('/assign @')
end
@@ -579,7 +602,7 @@ RSpec.shared_examples 'edits content using the content editor' do |params = {
type_in_content_editor '/label'
expect(find(suggestions_dropdown)).to have_text('/label')
- send_keys [:arrow_down, :enter]
+ send_keys :enter
expect(page).to have_text('/label ~')
end
@@ -588,10 +611,23 @@ RSpec.shared_examples 'edits content using the content editor' do |params = {
type_in_content_editor '/milestone'
expect(find(suggestions_dropdown)).to have_text('/milestone')
- send_keys [:arrow_down, :enter]
+ send_keys :enter
expect(page).to have_text('/milestone %')
end
+
+ it 'scrolls selected item into view when navigating with keyboard' do
+ type_in_content_editor '/'
+
+ expect(find(suggestions_dropdown)).to have_text('label')
+
+ expect(dropdown_scroll_top).to be 0
+
+ send_keys :arrow_up
+
+ expect(dropdown_scroll_top).to be > 100
+ end
+
end
it 'shows suggestions for members with descriptions' do
@@ -603,7 +639,18 @@ RSpec.shared_examples 'edits content using the content editor' do |params = {
type_in_content_editor 'bc'
- send_keys [:arrow_down, :enter]
+ send_keys :enter
+
+ expect(page).not_to have_css(suggestions_dropdown)
+ expect(page).to have_text('@abc123')
+ end
+
+ it 'allows selecting element with tab key' do
+ type_in_content_editor '@abc'
+
+ expect(find(suggestions_dropdown)).to have_text('abc123')
+
+ send_keys :tab
expect(page).not_to have_css(suggestions_dropdown)
expect(page).to have_text('@abc123')
@@ -701,11 +748,11 @@ RSpec.shared_examples 'edits content using the content editor' do |params = {
expect(find(suggestions_dropdown)).to have_text('😃 smiley')
expect(find(suggestions_dropdown)).to have_text('😸 smile_cat')
- send_keys [:arrow_down, :enter]
+ send_keys :enter
expect(page).not_to have_css(suggestions_dropdown)
- expect(page).to have_text('😃')
+ expect(page).to have_text('😄')
end
it 'doesn\'t show suggestions dropdown if there are no suggestions to show' do
@@ -718,18 +765,6 @@ RSpec.shared_examples 'edits content using the content editor' do |params = {
expect(page).not_to have_css(suggestions_dropdown)
end
- it 'scrolls selected item into view when navigating with keyboard' do
- type_in_content_editor ':'
-
- expect(find(suggestions_dropdown)).to have_text('grinning')
-
- expect(dropdown_scroll_top).to be 0
-
- send_keys :arrow_up
-
- expect(dropdown_scroll_top).to be > 100
- end
-
def dropdown_scroll_top
evaluate_script("document.querySelector('#{suggestions_dropdown}').scrollTop")
end
diff --git a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
index 9f884683f47..1bee8184e61 100644
--- a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
@@ -54,6 +54,8 @@ RSpec.shared_examples 'an editable merge request' do
page.within '.labels' do
expect(page).to have_content label.title
expect(page).to have_content label2.title
+
+ expect(page).to have_selector("[data-testid='close-icon']", count: 1)
end
end
end
diff --git a/spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb b/spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb
index 148ff2cfb54..5fdd91ebd2d 100644
--- a/spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb
+++ b/spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb
@@ -6,7 +6,7 @@ RSpec.shared_examples 'user activates the Mattermost Slash Command integration'
end
it 'shows a token placeholder' do
- token_placeholder = find_field('service_token')['placeholder']
+ token_placeholder = find_field('service-token')['placeholder']
expect(token_placeholder).to eq('')
end
@@ -14,7 +14,7 @@ RSpec.shared_examples 'user activates the Mattermost Slash Command integration'
it 'redirects to the integrations page after saving but not activating' do
token = ('a'..'z').to_a.join
- fill_in 'service_token', with: token
+ fill_in 'service-token', with: token
click_active_checkbox
click_save_integration
@@ -25,7 +25,7 @@ RSpec.shared_examples 'user activates the Mattermost Slash Command integration'
it 'redirects to the integrations page after activating' do
token = ('a'..'z').to_a.join
- fill_in 'service_token', with: token
+ fill_in 'service-token', with: token
click_save_integration
expect(page).to have_current_path(edit_path, ignore_query: true)
diff --git a/spec/support/shared_examples/features/inviting_members_shared_examples.rb b/spec/support/shared_examples/features/inviting_members_shared_examples.rb
index 178f85cb85b..b479d71b274 100644
--- a/spec/support/shared_examples/features/inviting_members_shared_examples.rb
+++ b/spec/support/shared_examples/features/inviting_members_shared_examples.rb
@@ -181,6 +181,7 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
visit subentity_members_page_path
click_on _('Invite members')
+ wait_for_requests
page.within invite_modal_selector do
choose_options(role, nil)
diff --git a/spec/support/shared_examples/features/runners_shared_examples.rb b/spec/support/shared_examples/features/runners_shared_examples.rb
index 861c205337a..8e1e2b10bef 100644
--- a/spec/support/shared_examples/features/runners_shared_examples.rb
+++ b/spec/support/shared_examples/features/runners_shared_examples.rb
@@ -4,67 +4,41 @@ RSpec.shared_examples 'shows and resets runner registration token' do
include Features::RunnersHelpers
include Spec::Support::Helpers::ModalHelpers
- before do
+ it 'shows registration instructions' do
click_on dropdown_text
- end
-
- describe 'shows registration instructions' do
- before do
- click_on 'Show runner installation and registration instructions'
-
- wait_for_requests
- end
-
- it 'opens runner installation modal', :aggregate_failures do
- within_modal do
- expect(page).to have_text "Install a runner"
- expect(page).to have_text "Environment"
- expect(page).to have_text "Architecture"
- expect(page).to have_text "Download and install binary"
- end
- end
-
- it 'dismisses runner installation modal' do
- within_modal do
- click_button('Close', match: :first)
- end
+ click_on 'Show runner installation and registration instructions'
- expect(page).not_to have_text "Install a runner"
+ within_modal do
+ expect(page).to have_text "Install a runner"
+ expect(page).to have_text "Environment"
+ expect(page).to have_text "Architecture"
+ expect(page).to have_text "Download and install binary"
end
end
- it 'has a registration token' do
+ it 'resets current registration token' do
+ click_on dropdown_text
click_on 'Click to reveal'
- expect(page.find_field('token-value').value).to have_content(registration_token)
- end
- describe 'reset registration token' do
- let!(:old_registration_token) { find_field('token-value').value }
+ # current registration token
+ expect(page.find_field('token-value').value).to eq(registration_token)
- before do
- click_on 'Reset registration token'
-
- within_modal do
- click_button('Reset token', match: :first)
- end
-
- wait_for_requests
+ # reset registration token
+ click_on 'Reset registration token'
+ within_modal do
+ click_button('Reset token', match: :first)
end
- it 'changes registration token' do
- expect(find('.gl-toast')).to have_content('New registration token generated!')
-
- click_on dropdown_text
- click_on 'Click to reveal'
-
- expect(find_field('token-value').value).not_to eq old_registration_token
- end
+ # new registration token
+ click_on dropdown_text
+ expect(find_field('token-value').value).not_to eq(registration_token)
+ expect(find('.gl-toast')).to have_content('New registration token generated!')
end
end
RSpec.shared_examples 'shows no runners registered' do
it 'shows 0 count and the empty state' do
- expect(find('[data-testid="runner-type-tabs"]')).to have_text "#{s_('Runners|All')} 0"
+ expect(find('[data-testid="runner-type-tabs"]')).to have_text "All 0"
# No stats are shown
expect(page).not_to have_text s_('Runners|Online')
@@ -82,22 +56,19 @@ RSpec.shared_examples 'shows no runners found' do
end
end
-RSpec.shared_examples 'shows runner in list' do
- it 'shows runner row and no empty state' do
+RSpec.shared_examples 'shows runner summary and navigates to details' do
+ it 'shows runner summary and details' do
+ expect(page).not_to have_content s_('Runners|Get started with runners')
+
+ # see runner summary in list
within_runner_row(runner.id) do
expect(page).to have_text "##{runner.id}"
expect(page).to have_text runner.short_sha
expect(page).to have_text runner.description
end
- expect(page).not_to have_content s_('Runners|Get started with runners')
- end
-end
-
-RSpec.shared_examples 'shows runner details from list' do
- it 'shows runner details page' do
+ # navigate to see runner details
click_link("##{runner.id} (#{runner.short_sha})")
-
expect(current_url).to include(runner_page_path)
expect(page).to have_selector 'h1', text: "##{runner.id} (#{runner.short_sha})"
@@ -131,12 +102,10 @@ RSpec.shared_examples 'pauses, resumes and deletes a runner' do
end
end
- it 'shows a confirmation modal' do
+ it 'confirms runner deletion' do
expect(page).to have_text "Delete runner ##{runner.id} (#{runner.short_sha})?"
expect(page).to have_text "Are you sure you want to continue?"
- end
- it 'deletes a runner' do
within_modal do
click_on 'Permanently delete runner'
end
@@ -150,8 +119,6 @@ RSpec.shared_examples 'pauses, resumes and deletes a runner' do
click_on 'Cancel'
end
- wait_for_requests
-
expect(page).to have_content runner.description
end
end
@@ -166,8 +133,6 @@ RSpec.shared_examples 'deletes runners in bulk' do
within_modal do
click_on "Permanently delete #{runner_count} runners"
end
-
- wait_for_requests
end
it_behaves_like 'shows no runners registered'
@@ -187,17 +152,11 @@ RSpec.shared_examples 'filters by tag' do
end
RSpec.shared_examples 'shows runner jobs tab' do
- context 'when clicking on jobs tab' do
- before do
- click_on("#{s_('Runners|Jobs')} #{job_count}")
+ it 'show jobs in tab' do
+ click_on("#{s_('Runners|Jobs')} #{job_count}")
- wait_for_requests
- end
-
- it 'shows job in list' do
- within "[data-testid='job-row-#{job.id}']" do
- expect(page).to have_link("##{job.id}")
- end
+ within "[data-testid='job-row-#{job.id}']" do
+ expect(page).to have_link("##{job.id}")
end
end
end
@@ -217,29 +176,17 @@ RSpec.shared_examples 'submits edit runner form' do
end
end
- describe 'runner header', :js do
- it 'contains the runner id' do
- expect(page).to have_content("##{runner.id} (#{runner.short_sha})")
- end
- end
-
context 'when a runner is updated', :js do
before do
- fill_in s_('Runners|Runner description'), with: 'new-runner-description'
+ fill_in s_('Runners|Runner description'), with: 'new-runner-description', fill_options: { clear: :backspace }
click_on _('Save changes')
- wait_for_requests
end
- it 'redirects to runner page' do
+ it 'redirects to runner page and shows succesful update' do
expect(current_url).to match(runner_page_path)
- end
- it 'show success alert' do
expect(page.find('[data-testid="alert-success"]')).to have_content('saved')
- end
-
- it 'shows updated information' do
expect(page).to have_content("#{s_('Runners|Description')} new-runner-description")
end
end
@@ -251,7 +198,6 @@ RSpec.shared_examples 'creates runner and shows register page' do
fill_in s_('Runners|Runner description'), with: 'runner-foo'
fill_in s_('Runners|Tags'), with: 'tag1'
click_on s_('Runners|Create runner')
- wait_for_requests
end
it 'navigates to registration page and opens install instructions drawer' do
diff --git a/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb b/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
index b438a23aafd..04e73cfeee7 100644
--- a/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
@@ -140,7 +140,10 @@ RSpec.shared_examples 'variable list drawer' do
toggle_masked
fill_variable('EMPTY_MASK_KEY', '???')
- expect(page).to have_content('This variable value does not meet the masking requirements.')
+ # rubocop:disable Layout/LineLength -- error message is too long
+ expect(page).to have_content('This value cannot be masked because it contains the following characters: ?. The value must have at least 8 characters.')
+ # rubocop:enable Layout/LineLength
+
page.within('[data-testid="ci-variable-drawer"]') do
expect(find_button('Add variable', disabled: true)).to be_present
end
diff --git a/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
index 55c89977a99..9e6002f2958 100644
--- a/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
@@ -22,7 +22,7 @@ RSpec.shared_examples 'User deletes wiki page' do
specify 'deletes a page', :js do
click_on('Edit')
click_on('Delete')
- find('[data-testid="confirm_deletion_button"]').click
+ find('[data-testid="confirm-deletion-button"]').click
expect(page).to have_content('Wiki page was successfully deleted.')
end
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index a48ff8a5f77..09444084eb9 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -25,7 +25,7 @@ RSpec.shared_examples 'User updates wiki page' do
end
it 'redirects back to the home edit page' do
- page.within(:css, '.wiki-form .form-actions') do
+ page.within(:css, '[data-testid="wiki-form-actions"]') do
click_on('Cancel')
end
@@ -128,7 +128,7 @@ RSpec.shared_examples 'User updates wiki page' do
end
it 'cancels editing of a page' do
- page.within(:css, '.wiki-form .form-actions') do
+ page.within(:css, '[data-testid="wiki-form-actions"]') do
click_on('Cancel')
end
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
index 3fac7e7093c..c6454f62f34 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
@@ -16,6 +16,10 @@ RSpec.shared_examples 'User views a wiki page' do
)
end
+ let(:more_actions_dropdown) do
+ find('[data-testid="wiki-more-dropdown"] button')
+ end
+
before do
sign_in(user)
end
@@ -38,10 +42,12 @@ RSpec.shared_examples 'User views a wiki page' do
expect(page).to have_content('Wiki page was successfully created.')
end
- it 'shows the history of a page that has a path' do
+ it 'shows the history of a page that has a path', :js do
expect(page).to have_current_path(%r{one/two/three-test})
first(:link, text: 'three').click
+
+ more_actions_dropdown.click
click_on('Page history')
expect(page).to have_current_path(%r{one/two/three-test})
@@ -69,6 +75,7 @@ RSpec.shared_examples 'User views a wiki page' do
expect(page).to have_content('Wiki page was successfully updated.')
+ more_actions_dropdown.click
click_on('Page history')
within('.wiki-page-header') do
@@ -119,11 +126,12 @@ RSpec.shared_examples 'User views a wiki page' do
wiki_page.update(message: 'updated home', content: 'updated [some link](other-page)') # rubocop:disable Rails/SaveBang
end
- it 'shows the page history' do
+ it 'shows the page history', :js do
visit(wiki_page_path(wiki, wiki_page))
expect(page).to have_selector('[data-testid="wiki-edit-button"]')
+ more_actions_dropdown.click
click_on('Page history')
expect(page).to have_content(user.name)
diff --git a/spec/support/shared_examples/features/work_items_shared_examples.rb b/spec/support/shared_examples/features/work_items_shared_examples.rb
index 30605c81312..3dfd7604914 100644
--- a/spec/support/shared_examples/features/work_items_shared_examples.rb
+++ b/spec/support/shared_examples/features/work_items_shared_examples.rb
@@ -495,30 +495,30 @@ end
RSpec.shared_examples 'work items parent' do |type|
let(:work_item_parent) { create(:work_item, type, project: project) }
- def set_parent(parent_dropdown, parent_text)
- parent_dropdown.click
-
+ def set_parent(parent_text)
find('[data-testid="listbox-search-input"] .gl-listbox-search-input',
visible: true).send_keys "\"#{parent_text}\""
wait_for_requests
- find('.gl-new-dropdown-item').click
- wait_for_requests
+ find('.gl-new-dropdown-item', text: parent_text).click
+ wait_for_all_requests
end
- let(:parent_dropdown_selector) { 'work-item-parent-listbox' }
-
it 'searches and sets or removes parent for the work item' do
+ find_by_testid('edit-parent').click
within_testid('work-item-parent-form') do
- set_parent(find_by_testid(parent_dropdown_selector), work_item_parent.title)
+ set_parent(work_item_parent.title)
+ end
- expect(find_by_testid(parent_dropdown_selector)).to have_text(work_item_parent.title)
+ expect(find_by_testid('work-item-parent-link')).to have_text(work_item_parent.title)
+ wait_for_requests
- find_by_testid(parent_dropdown_selector).click
+ page.refresh
+ find_by_testid('edit-parent').click
- click_button('Unassign')
+ click_button('Unassign')
+ wait_for_requests
- expect(find_by_testid(parent_dropdown_selector)).to have_text('None')
- end
+ expect(find_by_testid('work-item-parent-none')).to have_text('None')
end
end
diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
index 043d6db66d3..a5fee9c5fed 100644
--- a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
+++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
@@ -269,6 +269,34 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
end
end
end
+
+ context 'when querying group-level items' do
+ let(:params) { { group_id: group.id, issue_types: %w[issue epic] } }
+
+ it 'includes group-level items' do
+ expect(items).to contain_exactly(item1, item5, group_level_item)
+ end
+
+ context 'when user has access to confidential items' do
+ before do
+ group.add_reporter(user)
+ end
+
+ it 'includes confidential group-level items' do
+ expect(items).to contain_exactly(item1, item5, group_level_item, group_level_confidential_item)
+ end
+ end
+
+ context 'when namespace_level_work_items is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ end
+
+ it 'only returns project-level items' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+ end
+ end
end
context 'filtering by author' do
diff --git a/spec/support/shared_examples/graphql/mutations/boards_list_create_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/boards_list_create_shared_examples.rb
index 13d2447754c..eb58cb97a75 100644
--- a/spec/support/shared_examples/graphql/mutations/boards_list_create_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/boards_list_create_shared_examples.rb
@@ -10,7 +10,7 @@ RSpec.shared_examples 'board lists create mutation' do
let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
let(:list_create_params) { {} }
- subject { mutation.resolve(board_id: board.to_global_id.to_s, **list_create_params) }
+ subject { mutation.resolve(board_id: board.to_global_id, **list_create_params) }
describe '#ready?' do
it 'raises an error if required arguments are missing' do
diff --git a/spec/support/shared_examples/graphql/resolvers/users/pages_visits_resolvers_shared_examples.rb b/spec/support/shared_examples/graphql/resolvers/users/pages_visits_resolvers_shared_examples.rb
index 0dca28a4e74..a850784839c 100644
--- a/spec/support/shared_examples/graphql/resolvers/users/pages_visits_resolvers_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/resolvers/users/pages_visits_resolvers_shared_examples.rb
@@ -15,16 +15,6 @@ RSpec.shared_examples 'namespace visits resolver' do
context 'when user is logged in' do
let_it_be(:current_user) { create(:user) }
- context 'when the frecent_namespaces_suggestions feature flag is disabled' do
- before do
- stub_feature_flags(frecent_namespaces_suggestions: false)
- end
-
- it 'raises a "Resource not available" exception' do
- expect(resolve_items).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
- end
-
it 'returns frecent groups' do
expect(resolve_items).to be_an_instance_of(Array)
end
diff --git a/spec/support/shared_examples/keys/meets_ssh_key_restrictions_shared_examples.rb b/spec/support/shared_examples/keys/meets_ssh_key_restrictions_shared_examples.rb
new file mode 100644
index 00000000000..a8515ef0be4
--- /dev/null
+++ b/spec/support/shared_examples/keys/meets_ssh_key_restrictions_shared_examples.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Requires a context with:
+# - subject
+#
+RSpec.shared_examples 'meets ssh key restrictions' do
+ where(:factory, :minimum, :result) do
+ forbidden = ApplicationSetting::FORBIDDEN_KEY_VALUE
+
+ [
+ [:rsa_key_2048, 0, true],
+ [:dsa_key_2048, 0, true],
+ [:ecdsa_key_256, 0, true],
+ [:ed25519_key_256, 0, true],
+ [:ecdsa_sk_key_256, 0, true],
+ [:ed25519_sk_key_256, 0, true],
+
+ [:rsa_key_2048, 1024, true],
+ [:rsa_key_2048, 2048, true],
+ [:rsa_key_2048, 4096, false],
+
+ [:dsa_key_2048, 1024, true],
+ [:dsa_key_2048, 2048, true],
+ [:dsa_key_2048, 4096, false],
+
+ [:ecdsa_key_256, 256, true],
+ [:ecdsa_key_256, 384, false],
+
+ [:ed25519_key_256, 256, true],
+ [:ed25519_key_256, 384, false],
+
+ [:ecdsa_sk_key_256, 256, true],
+ [:ecdsa_sk_key_256, 384, false],
+
+ [:ed25519_sk_key_256, 256, true],
+ [:ed25519_sk_key_256, 384, false],
+
+ [:rsa_key_2048, forbidden, false],
+ [:dsa_key_2048, forbidden, false],
+ [:ecdsa_key_256, forbidden, false],
+ [:ed25519_key_256, forbidden, false],
+ [:ecdsa_sk_key_256, forbidden, false],
+ [:ed25519_sk_key_256, forbidden, false]
+ ]
+ end
+
+ with_them do
+ let(:ssh_key) { build(factory).key }
+ let(:type) { Gitlab::SSHPublicKey.new(ssh_key).type }
+
+ before do
+ stub_application_setting("#{type}_key_restriction" => minimum)
+ end
+
+ it 'validates that the key is valid' do
+ subject.key = ssh_key
+
+ expect(subject.valid?).to eq(result)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
index 4eae8632467..45248f57683 100644
--- a/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
@@ -65,5 +65,17 @@ RSpec.shared_examples Gitlab::BitbucketServerImport::ObjectImporter do
it_behaves_like 'notifies the waiter'
end
+
+ context 'when project import has failed' do
+ let_it_be(:project_id) { create(:project, :import_failed).id }
+
+ it 'does not call the importer' do
+ expect_next(worker.importer_class).not_to receive(:execute)
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+
+ it_behaves_like 'notifies the waiter'
+ end
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
index c172e73ce9e..ed365b0e524 100644
--- a/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
@@ -63,7 +63,7 @@ RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
next_worker = described_class::STAGES[next_stage.to_sym]
expect_next_found_instance_of(import_state.class) do |state|
- expect(state).to receive(:refresh_jid_expiration)
+ expect(state).to receive(:refresh_jid_expiration).twice
end
expect(next_worker).to receive(:perform_async).with(project.id)
@@ -124,7 +124,7 @@ RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
freeze_time do
next_worker = described_class::STAGES[next_stage.to_sym]
- expect(next_worker).not_to receive(:perform_async).with(project.id)
+ expect(next_worker).not_to receive(:perform_async)
expect_next_instance_of(described_class) do |klass|
expect(klass).to receive(:find_import_state).and_call_original
end
diff --git a/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb
deleted file mode 100644
index a5e4df1c272..00000000000
--- a/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'multi store feature flags' do |use_primary_and_secondary_stores, use_primary_store_as_default|
- context "with feature flag :#{use_primary_and_secondary_stores} is enabled" do
- before do
- stub_feature_flags(use_primary_and_secondary_stores => true)
- end
-
- it 'multi store is enabled' do
- subject.with do |redis_instance|
- expect(redis_instance.use_primary_and_secondary_stores?).to be true
- end
- end
- end
-
- context "with feature flag :#{use_primary_and_secondary_stores} is disabled" do
- before do
- stub_feature_flags(use_primary_and_secondary_stores => false)
- end
-
- it 'multi store is disabled' do
- subject.with do |redis_instance|
- expect(redis_instance.use_primary_and_secondary_stores?).to be false
- end
- end
- end
-
- context "with feature flag :#{use_primary_store_as_default} is enabled" do
- before do
- stub_feature_flags(use_primary_store_as_default => true)
- end
-
- it 'primary store is enabled' do
- subject.with do |redis_instance|
- expect(redis_instance.use_primary_store_as_default?).to be true
- end
- end
- end
-
- context "with feature flag :#{use_primary_store_as_default} is disabled" do
- before do
- stub_feature_flags(use_primary_store_as_default => false)
- end
-
- it 'primary store is disabled' do
- subject.with do |redis_instance|
- expect(redis_instance.use_primary_store_as_default?).to be false
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb b/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb
index 5c2f66e08db..4249b90bf66 100644
--- a/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb
+++ b/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb
@@ -31,11 +31,13 @@ RSpec.shared_examples 'a correct instrumented metric query' do |params|
end
before do
- allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false)
+ if metric.respond_to?(:relation, true) && metric.send(:relation).respond_to?(:connection)
+ allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false)
+ end
end
it 'has correct generate query' do
- expect(metric.to_sql).to eq(expected_query)
+ expect(metric.instrumentation).to eq(expected_query)
end
end
diff --git a/spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb b/spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb
index 52f0e7847b0..13a7f71fdb2 100644
--- a/spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb
+++ b/spec/support/shared_examples/models/concerns/analytics/cycle_analytics/stage_event_model_examples.rb
@@ -14,7 +14,8 @@ RSpec.shared_examples 'StageEventModel' do
milestone_id: 6,
state_id: 1,
start_event_timestamp: time,
- end_event_timestamp: time
+ end_event_timestamp: time,
+ duration_in_milliseconds: 3
},
{
stage_event_hash_id: 7,
@@ -25,7 +26,8 @@ RSpec.shared_examples 'StageEventModel' do
milestone_id: 13,
state_id: 1,
start_event_timestamp: time,
- end_event_timestamp: time
+ end_event_timestamp: time,
+ duration_in_milliseconds: 5
}
]
end
@@ -40,7 +42,8 @@ RSpec.shared_examples 'StageEventModel' do
:milestone_id,
:state_id,
:start_event_timestamp,
- :end_event_timestamp
+ :end_event_timestamp,
+ :duration_in_milliseconds
]
end
diff --git a/spec/support/shared_examples/models/concerns/avatarable_shared_examples.rb b/spec/support/shared_examples/models/concerns/avatarable_shared_examples.rb
new file mode 100644
index 00000000000..6057a10df92
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/avatarable_shared_examples.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples Avatarable do
+ describe '#avatar_type' do
+ it 'is true if avatar is image' do
+ model.update_attribute(:avatar, 'uploads/avatar.png')
+
+ expect(model.avatar_type).to be_truthy
+ end
+
+ it 'is false if avatar is html page' do
+ model.update_attribute(:avatar, 'uploads/avatar.html')
+ model.avatar_type
+
+ msg = 'file format is not supported. Please try one of the following supported formats: ' \
+ 'png, jpg, jpeg, gif, bmp, tiff, ico, webp'
+ expect(model.errors.added?(:avatar, msg)).to be true
+ end
+ end
+
+ describe '#avatar_url' do
+ context 'when avatar file is uploaded' do
+ it 'shows correct avatar url' do
+ url = [Gitlab.config.gitlab.url, model.avatar.url].join
+ expect(model.avatar_url).to eq(model.avatar.url)
+ expect(model.avatar_url(only_path: false)).to eq(url)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
index d8a8d1e1cea..993c94e2695 100644
--- a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
@@ -68,7 +68,7 @@ RSpec.shared_examples 'handles repository moves' do
describe 'state transitions' do
before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ stub_storage_settings('test_second_storage' => {})
end
context 'when in the default state' do
diff --git a/spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb b/spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb
index 255b6efa518..191b954bfb1 100644
--- a/spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb
@@ -45,6 +45,8 @@ RSpec.shared_examples 'updates namespace statistics' do
context 'when deleting' do
it 'schedules a statistic refresh' do
+ statistic_source.save!
+
expect(Groups::UpdateStatisticsWorker)
.to receive(:perform_async)
diff --git a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
index 3f532629961..9da130d2750 100644
--- a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
+++ b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
@@ -59,7 +59,9 @@ RSpec.shared_examples 'value stream analytics stage' do
it { expect(stage).not_to be_valid }
end
+ end
+ describe 'scopes' do
# rubocop: disable Rails/SaveBang
describe '.by_value_stream' do
it 'finds stages by value stream' do
@@ -71,6 +73,17 @@ RSpec.shared_examples 'value stream analytics stage' do
expect(result).to eq([stage1])
end
end
+
+      describe '.by_value_streams_ids' do
+ it 'finds stages by array of value streams ids' do
+ stages = create_list(factory, 2)
+ create(factory) # To be left out of the results
+
+ result = described_class.by_value_streams_ids(stages.map(&:value_stream_id))
+
+ expect(result).to match_array(stages)
+ end
+ end
# rubocop: enable Rails/SaveBang
end
diff --git a/spec/support/shared_examples/models/disable_sti_shared_examples.rb b/spec/support/shared_examples/models/disable_sti_shared_examples.rb
new file mode 100644
index 00000000000..090592827d1
--- /dev/null
+++ b/spec/support/shared_examples/models/disable_sti_shared_examples.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+# Checks whether STI is disabled in +models+.
+#
+# Parameter:
+# - models: List of model classes
+RSpec.shared_examples 'Model disables STI' do
+ skip_sti_check = Gitlab::Utils.to_boolean(ENV['SKIP_STI_CHECK'], default: false)
+
+ it 'does not allow STI', :aggregate_failures, unless: skip_sti_check do
+ models.each do |model|
+ next unless model
+ next unless model < ApplicationRecord
+ next if model == model.base_class
+ next if model.allow_legacy_sti_class
+
+ expect(model).not_to have_attribute(model.inheritance_column),
+ "Do not use Single Table Inheritance (`#{model.name}` inherits `#{model.base_class.name}`). " \
+ "See https://docs.gitlab.com/ee/development/database/single_table_inheritance.html"
+ end
+ end
+end
+
+RSpec.shared_examples 'STI disabled', type: :model do # rubocop:disable RSpec/SharedGroupsMetadata -- Shared example is run within every spec tagged `type: :model`
+ include_examples 'Model disables STI' do
+ let(:models) { [described_class] }
+ end
+end
diff --git a/spec/support/shared_examples/npm_sync_metadata_cache_worker_shared_examples.rb b/spec/support/shared_examples/npm_sync_metadata_cache_worker_shared_examples.rb
index de2dc4c3725..39870c41e1c 100644
--- a/spec/support/shared_examples/npm_sync_metadata_cache_worker_shared_examples.rb
+++ b/spec/support/shared_examples/npm_sync_metadata_cache_worker_shared_examples.rb
@@ -19,12 +19,4 @@ RSpec.shared_examples 'enqueue a worker to sync a metadata cache' do
subject
end
-
- context 'with npm_metadata_cache disabled' do
- before do
- stub_feature_flags(npm_metadata_cache: false)
- end
-
- it_behaves_like 'does not enqueue a worker to sync a metadata cache'
- end
end
diff --git a/spec/support/shared_examples/policies/project_policy_shared_examples.rb b/spec/support/shared_examples/policies/project_policy_shared_examples.rb
index d1f5a01b10c..349e15aa79d 100644
--- a/spec/support/shared_examples/policies/project_policy_shared_examples.rb
+++ b/spec/support/shared_examples/policies/project_policy_shared_examples.rb
@@ -58,7 +58,7 @@ RSpec.shared_examples 'project policies as anonymous' do
let(:group) { create(:group, :public) }
let(:project) { create(:project, :public, namespace: group) }
let(:user_permissions) { [:create_merge_request_in, :create_project, :create_issue, :create_note, :upload_file, :award_emoji, :create_incident] }
- let(:anonymous_permissions) { guest_permissions - user_permissions }
+ let(:anonymous_permissions) { base_guest_permissions - user_permissions }
let(:current_user) { anonymous }
before do
diff --git a/spec/support/shared_examples/policies/protected_branches.rb b/spec/support/shared_examples/policies/protected_branches.rb
new file mode 100644
index 00000000000..39652434acb
--- /dev/null
+++ b/spec/support/shared_examples/policies/protected_branches.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'allows protected branch crud' do
+ it { is_expected.to be_allowed(:read_protected_branch) }
+ it { is_expected.to be_allowed(:create_protected_branch) }
+ it { is_expected.to be_allowed(:update_protected_branch) }
+ it { is_expected.to be_allowed(:destroy_protected_branch) }
+end
+
+RSpec.shared_examples 'disallows protected branch crud' do
+ it { is_expected.not_to be_allowed(:read_protected_branch) }
+ it { is_expected.not_to be_allowed(:create_protected_branch) }
+ it { is_expected.not_to be_allowed(:update_protected_branch) }
+ it { is_expected.not_to be_allowed(:destroy_protected_branch) }
+end
+
+RSpec.shared_examples 'disallows protected branch changes' do
+ it { is_expected.not_to be_allowed(:create_protected_branch) }
+ it { is_expected.not_to be_allowed(:update_protected_branch) }
+ it { is_expected.not_to be_allowed(:destroy_protected_branch) }
+end
diff --git a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
index 1cd529aa50b..f9df6878a69 100644
--- a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
@@ -171,6 +171,16 @@ RSpec.shared_examples 'issuable quick actions' do
}
),
QuickAction.new(
+ action_text: "/react :heart:",
+ expectation: ->(noteable, can_use_quick_action) {
+ if can_use_quick_action
+ expect(noteable.award_emoji.last.name).to eq("heart")
+ else
+ expect(noteable.award_emoji).to be_empty
+ end
+ }
+ ),
+ QuickAction.new(
action_text: "/estimate 1d 2h 3m",
expectation: ->(noteable, can_use_quick_action) {
expect(noteable.time_estimate == 36180).to eq(can_use_quick_action)
diff --git a/spec/support/shared_examples/redis/redis_shared_examples.rb b/spec/support/shared_examples/redis/redis_shared_examples.rb
index f184f678283..796b483820b 100644
--- a/spec/support/shared_examples/redis/redis_shared_examples.rb
+++ b/spec/support/shared_examples/redis/redis_shared_examples.rb
@@ -223,7 +223,8 @@ RSpec.shared_examples "redis_shared_examples" do
end
it 'instantiates a connection pool with size 5' do
- expect(ConnectionPool).to receive(:new).with(size: 5).and_call_original
+ expect(ConnectionPool).to receive(:new)
+ .with(size: 5, name: described_class.store_name.underscore).and_call_original
described_class.with { |_redis_shared_example| true }
end
@@ -236,7 +237,8 @@ RSpec.shared_examples "redis_shared_examples" do
end
it 'instantiates a connection pool with a size based on the concurrency of the worker' do
- expect(ConnectionPool).to receive(:new).with(size: 18 + 5).and_call_original
+ expect(ConnectionPool).to receive(:new)
+ .with(size: 18 + 5, name: described_class.store_name.underscore).and_call_original
described_class.with { |_redis_shared_example| true }
end
diff --git a/spec/support/shared_examples/requests/api/discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
index 2996c794e52..5cc87fb9654 100644
--- a/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
@@ -1,23 +1,29 @@
# frozen_string_literal: true
RSpec.shared_examples 'with cross-reference system notes' do
- let(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.project }
- let(:new_merge_request) { create(:merge_request) }
- let(:commit) { new_merge_request.project.commit }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user) }
+ let_it_be(:project) { create(:project, :small_repo) }
+ let_it_be(:project2) { create(:project, :small_repo) }
+ let_it_be(:project3) { create(:project, :small_repo) }
+
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:new_merge_request) { create(:merge_request, source_project: project2) }
+ let_it_be(:hidden_merge_request) { create(:merge_request, source_project: project3) }
+
let!(:note) { create(:system_note, noteable: merge_request, project: project, note: cross_reference) }
let!(:note_metadata) { create(:system_note_metadata, note: note, action: 'cross_reference') }
let(:cross_reference) { "test commit #{commit.to_reference(project)}" }
- let(:pat) { create(:personal_access_token, user: user) }
+ let(:commit) { new_merge_request.project.commit }
- before do
- project.add_developer(user)
- new_merge_request.project.add_developer(user)
+ let!(:new_note) { create(:system_note, noteable: merge_request, project: project, note: hidden_cross_reference) }
+ let!(:new_note_metadata) { create(:system_note_metadata, note: new_note, action: 'cross_reference') }
+ let(:hidden_cross_reference) { "test commit #{hidden_commit.to_reference(project)}" }
+ let(:hidden_commit) { hidden_merge_request.project.commit }
- hidden_merge_request = create(:merge_request)
- new_cross_reference = "test commit #{hidden_merge_request.project.commit.to_reference(project)}"
- new_note = create(:system_note, noteable: merge_request, project: project, note: new_cross_reference)
- create(:system_note_metadata, note: new_note, action: 'cross_reference')
+ before_all do
+ project.add_developer(user)
+ project2.add_developer(user)
end
it 'returns only the note that the user should see' do
diff --git a/spec/support/shared_examples/requests/api/graphql/work_item_type_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/work_item_type_list_shared_examples.rb
new file mode 100644
index 00000000000..beb3085a606
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/work_item_type_list_shared_examples.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'graphql work item type list request spec' do |context_name = nil|
+ include_context context_name || 'with work item types request context'
+
+ context 'when user has access to the group' do
+ it_behaves_like 'a working graphql query that returns data' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ it 'returns all default work item types' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(parent_key, :workItemTypes, :nodes)).to match_array(expected_work_item_type_response)
+ end
+
+ it 'prevents N+1 queries' do
+ # Destroy 2 existing types
+ WorkItems::Type.by_type([:issue, :task]).delete_all
+
+ post_graphql(query, current_user: current_user) # warm-up
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { post_graphql(query, current_user: current_user) }
+ expect(graphql_errors).to be_blank
+
+ # Add back the 2 deleted types
+ expect do
+ Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.upsert_types
+ end.to change { WorkItems::Type.count }.by(2)
+
+ expect { post_graphql(query, current_user: current_user) }.to issue_same_number_of_queries_as(control)
+ expect(graphql_errors).to be_blank
+ end
+ end
+
+ context "when user doesn't have access to the parent" do
+ let(:current_user) { create(:user) }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'does not return the parent' do
+ expect(graphql_data).to eq(parent_key.to_s => nil)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/graphql_rest/milestones_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql_rest/milestones_shared_examples.rb
new file mode 100644
index 00000000000..8e147f43091
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql_rest/milestones_shared_examples.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+# Examples for both GraphQL and REST APIs
+RSpec.shared_examples 'group milestones including ancestors and descendants' do
+ context 'for group milestones' do
+ let_it_be(:current_user) { create(:user) }
+
+ context 'when including descendant milestones in a public group' do
+ let_it_be(:group) { create(:group, :public) }
+
+ let(:params) { { include_descendants: true } }
+
+ it 'finds milestones only in accessible projects and groups' do
+ accessible_group = create(:group, :private, parent: group)
+ accessible_project = create(:project, group: accessible_group)
+ accessible_group.add_developer(current_user)
+ inaccessible_group = create(:group, :private, parent: group)
+ inaccessible_project = create(:project, :private, group: group)
+ milestone1 = create(:milestone, group: group)
+ milestone2 = create(:milestone, group: accessible_group)
+ milestone3 = create(:milestone, project: accessible_project)
+ create(:milestone, group: inaccessible_group)
+ create(:milestone, project: inaccessible_project)
+
+ milestone_ids = query_group_milestone_ids(params)
+
+ expect(milestone_ids).to match_array([milestone1, milestone2, milestone3].pluck(:id))
+ end
+ end
+
+ describe 'include_descendants and include_ancestors' do
+ let_it_be(:parent_group) { create(:group, :public) }
+ let_it_be(:group) { create(:group, :public, parent: parent_group) }
+ let_it_be(:accessible_group) { create(:group, :private, parent: group) }
+ let_it_be(:accessible_project) { create(:project, group: accessible_group) }
+ let_it_be(:inaccessible_group) { create(:group, :private, parent: group) }
+ let_it_be(:inaccessible_project) { create(:project, :private, group: group) }
+ let_it_be(:milestone1) { create(:milestone, group: group) }
+ let_it_be(:milestone2) { create(:milestone, group: accessible_group) }
+ let_it_be(:milestone3) { create(:milestone, project: accessible_project) }
+ let_it_be(:milestone4) { create(:milestone, group: inaccessible_group) }
+ let_it_be(:milestone5) { create(:milestone, project: inaccessible_project) }
+ let_it_be(:milestone6) { create(:milestone, group: parent_group) }
+
+ before_all do
+ accessible_group.add_developer(current_user)
+ end
+
+ context 'when including neither ancestor nor descendant milestones in a public group' do
+ let(:params) { {} }
+
+ it 'finds milestones only in accessible projects and groups' do
+ expect(query_group_milestone_ids(params)).to match_array([milestone1.id])
+ end
+ end
+
+ context 'when including descendant milestones in a public group' do
+ let(:params) { { include_descendants: true } }
+
+ it 'finds milestones only in accessible projects and groups' do
+ expect(query_group_milestone_ids(params)).to match_array([milestone1, milestone2, milestone3].pluck(:id))
+ end
+ end
+
+ context 'when including ancestor milestones in a public group' do
+ let(:params) { { include_ancestors: true } }
+
+ it 'finds milestones only in accessible projects and groups' do
+ expect(query_group_milestone_ids(params)).to match_array([milestone1, milestone6].pluck(:id))
+ end
+ end
+
+ context 'when including both ancestor and descendant milestones in a public group' do
+ let(:params) { { include_descendants: true, include_ancestors: true } }
+
+ it 'finds milestones only in accessible projects and groups' do
+ expect(query_group_milestone_ids(params))
+ .to match_array([milestone1, milestone2, milestone3, milestone6].pluck(:id))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
index 7489dc7c1d6..de458bc87db 100644
--- a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
@@ -84,7 +84,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
end
end
- context 'the hook has URL variables' do
+ context 'the hook has URL variables', if: prefix != '/projects/:id' do
before do
hook.update!(url_variables: { 'token' => 'supers3cret' })
end
diff --git a/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb b/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
index 7978f43610d..5ae0b8b10b6 100644
--- a/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
@@ -1,5 +1,53 @@
# frozen_string_literal: true
+RSpec.shared_examples 'MLflow|an endpoint that requires authentication' do
+ context 'when not authenticated' do
+ let(:headers) { {} }
+
+ it "is Unauthorized" do
+ is_expected.to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when user does not have access' do
+ let(:access_token) { tokens[:different_user] }
+
+ it "is Not Found" do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+end
+
+RSpec.shared_examples 'MLflow|an endpoint that requires read_model_registry' do
+ context 'when user does not have read_model_registry' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(current_user, :read_model_registry, project)
+ .and_return(false)
+ end
+
+ it "is Not Found" do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+end
+
+RSpec.shared_examples 'MLflow|an endpoint that requires write_model_registry' do
+  context 'when user does not have write_model_registry' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(current_user, :write_model_registry, project)
+ .and_return(false)
+ end
+
+    it "is Unauthorized" do
+ is_expected.to have_gitlab_http_status(:unauthorized)
+ end
+ end
+end
+
RSpec.shared_examples 'MLflow|Not Found - Resource Does Not Exist' do
it "is Resource Does Not Exist", :aggregate_failures do
is_expected.to have_gitlab_http_status(:not_found)
@@ -44,21 +92,7 @@ RSpec.shared_examples 'MLflow|Bad Request' do
end
RSpec.shared_examples 'MLflow|shared error cases' do
- context 'when not authenticated' do
- let(:headers) { {} }
-
- it "is Unauthorized" do
- is_expected.to have_gitlab_http_status(:unauthorized)
- end
- end
-
- context 'when user does not have access' do
- let(:access_token) { tokens[:different_user] }
-
- it "is Not Found" do
- is_expected.to have_gitlab_http_status(:not_found)
- end
- end
+ it_behaves_like 'MLflow|an endpoint that requires authentication'
context 'when model experiments is unavailable' do
before do
@@ -75,34 +109,8 @@ RSpec.shared_examples 'MLflow|shared error cases' do
end
RSpec.shared_examples 'MLflow|shared model registry error cases' do
- context 'when not authenticated' do
- let(:headers) { {} }
-
- it "is Unauthorized" do
- is_expected.to have_gitlab_http_status(:unauthorized)
- end
- end
-
- context 'when user does not have access' do
- let(:access_token) { tokens[:different_user] }
-
- it "is Not Found" do
- is_expected.to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when model registry is unavailable' do
- before do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?)
- .with(current_user, :read_model_registry, project)
- .and_return(false)
- end
-
- it "is Not Found" do
- is_expected.to have_gitlab_http_status(:not_found)
- end
- end
+ it_behaves_like 'MLflow|an endpoint that requires authentication'
+ it_behaves_like 'MLflow|an endpoint that requires read_model_registry'
end
RSpec.shared_examples 'MLflow|Bad Request on missing required' do |keys|
@@ -110,9 +118,27 @@ RSpec.shared_examples 'MLflow|Bad Request on missing required' do |keys|
context "when \"#{key}\" is missing" do
let(:params) { default_params.tap { |p| p.delete(key) } }
- it "is Bad Request" do
- is_expected.to have_gitlab_http_status(:bad_request)
- end
+ it_behaves_like 'MLflow|Bad Request'
end
end
end
+
+RSpec.shared_examples 'MLflow|an invalid request' do
+ it_behaves_like 'MLflow|Bad Request'
+end
+
+RSpec.shared_examples 'MLflow|an authenticated resource' do
+ it_behaves_like 'MLflow|an endpoint that requires authentication'
+ it_behaves_like 'MLflow|Requires read_api scope'
+end
+
+RSpec.shared_examples 'MLflow|a read-only model registry resource' do
+ it_behaves_like 'MLflow|an endpoint that requires authentication'
+ it_behaves_like 'MLflow|an endpoint that requires read_model_registry'
+end
+
+RSpec.shared_examples 'MLflow|a read/write model registry resource' do
+ it_behaves_like 'MLflow|an endpoint that requires authentication'
+ it_behaves_like 'MLflow|an endpoint that requires read_model_registry'
+ it_behaves_like 'MLflow|an endpoint that requires write_model_registry'
+end
diff --git a/spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb
index 47cbd268a65..30a1398bf94 100644
--- a/spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb
@@ -15,11 +15,31 @@ RSpec.shared_examples 'Endpoint not found if read_model_registry not available'
end
end
-RSpec.shared_examples 'creates model experiments package files' do
+RSpec.shared_examples 'Endpoint not found if write_model_registry not available' do
+ context 'when write_model_registry is disabled for current project' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :write_model_registry, project)
+ .and_return(false)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+end
+
+RSpec.shared_examples 'Not found when model version does not exist' do
+ context 'when model version does not exist' do
+ let(:version) { "#{non_existing_record_id}.0.0" }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+end
+
+RSpec.shared_examples 'creates package files for model versions' do
it 'creates package files', :aggregate_failures do
expect { api_response }
- .to change { project.packages.count }.by(1)
- .and change { Packages::PackageFile.count }.by(1)
+ .to change { Packages::PackageFile.count }.by(1)
expect(api_response).to have_gitlab_http_status(:created)
package_file = project.packages.last.package_files.reload.last
@@ -59,7 +79,7 @@ RSpec.shared_examples 'process ml model package upload' do
context 'with correct params' do
it_behaves_like 'package workhorse uploads'
- it_behaves_like 'creates model experiments package files'
+ it_behaves_like 'creates package files for model versions'
# To be reactivated with https://gitlab.com/gitlab-org/gitlab/-/issues/414270
# it_behaves_like 'a package tracking event', '::API::MlModelPackages', 'push_package'
end
@@ -81,7 +101,7 @@ RSpec.shared_examples 'process ml model package upload' do
stub_package_file_object_storage(direct_upload: true)
end
- it_behaves_like 'creates model experiments package files'
+ it_behaves_like 'creates package files for model versions'
['123123', '../../123123'].each do |remote_id|
context "with invalid remote_id: #{remote_id}" do
@@ -102,7 +122,7 @@ RSpec.shared_examples 'process ml model package upload' do
stub_package_file_object_storage(direct_upload: false)
end
- it_behaves_like 'creates model experiments package files'
+ it_behaves_like 'creates package files for model versions'
end
end
end
@@ -112,13 +132,5 @@ RSpec.shared_examples 'process ml model package download' do
it { is_expected.to have_gitlab_http_status(:success) }
end
- context 'when record does not exist' do
- it 'response is not found' do
- expect_next_instance_of(::Packages::MlModel::PackageFinder) do |instance|
- expect(instance).to receive(:execute!).and_raise(ActiveRecord::RecordNotFound)
- end
-
- expect(api_response).to have_gitlab_http_status(:not_found)
- end
- end
+ it_behaves_like 'Not found when model version does not exist'
end
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index c23d514abfc..8281b7d4024 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -741,3 +741,71 @@ RSpec.shared_examples 'process nuget delete request' do |user_type, status|
end
end
end
+
+RSpec.shared_examples 'nuget symbol file endpoint' do
+ let_it_be(:symbol) { create(:nuget_symbol) }
+ let_it_be(:filename) { symbol.file.filename }
+ let_it_be(:signature) { symbol.signature }
+ let_it_be(:checksum) { symbol.file_sha256.delete("\n") }
+
+ let(:headers) { { 'Symbolchecksum' => "SHA256:#{checksum}" } }
+
+ subject { get api(url), headers: headers }
+
+ it { is_expected.to have_request_urgency(:low) }
+
+ context 'with nuget_symbol_server_enabled setting enabled' do
+ before do
+ allow_next_instance_of(::Namespace::PackageSetting) do |setting|
+ allow(setting).to receive(:nuget_symbol_server_enabled).and_return(true)
+ end
+ end
+
+ context 'with valid target' do
+ it 'returns the symbol file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ expect(response.body).to eq(symbol.file.read)
+ end
+ end
+
+ context 'when target does not exist' do
+ let(:target) { double(id: 1234567890) }
+
+ it_behaves_like 'returning response status', :not_found
+ end
+
+ context 'when target exists' do
+ context 'when symbol file does not exist' do
+ let(:filename) { 'non-existent-file.pdb' }
+ let(:signature) { 'non-existent-signature' }
+
+ it_behaves_like 'returning response status', :not_found
+ end
+
+ context 'when symbol file checksum does not match' do
+ let(:checksum) { 'non-matching-checksum' }
+
+ it_behaves_like 'returning response status', :not_found
+ end
+
+ context 'when symbol file checksum is missing' do
+ let(:headers) { {} }
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+ end
+ end
+
+ context 'with nuget_symbol_server_enabled setting disabled' do
+ before do
+ allow_next_instance_of(::Namespace::PackageSetting) do |setting|
+ allow(setting).to receive(:nuget_symbol_server_enabled).and_return(false)
+ end
+ end
+
+ it_behaves_like 'returning response status', :forbidden
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb b/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
index 181bab41e09..f400c5fa201 100644
--- a/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
@@ -90,7 +90,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
end
before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ stub_storage_settings('test_second_storage' => {})
end
it 'schedules a container repository storage move' do
@@ -203,7 +203,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
end
before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ stub_storage_settings('test_second_storage' => {})
end
it 'schedules the worker' do
diff --git a/spec/support/shared_examples/security/policies_shared_examples.rb b/spec/support/shared_examples/security/policies_shared_examples.rb
new file mode 100644
index 00000000000..6edd2aa5c59
--- /dev/null
+++ b/spec/support/shared_examples/security/policies_shared_examples.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+# Requires the url to the policies list:
+# - path_to_policies_list
+RSpec.shared_examples 'policies list' do
+ before do
+ allow_next_found_instance_of(Security::OrchestrationPolicyConfiguration) do |policy|
+ allow(policy).to receive(:policy_configuration_valid?).and_return(true)
+ allow(policy).to receive(:policy_hash).and_return(policy_yaml)
+ allow(policy).to receive(:policy_last_updated_at).and_return(Time.current)
+ end
+ sign_in(owner)
+ stub_licensed_features(security_orchestration_policies: true)
+ end
+
+ it "shows the policies list with policies" do
+ visit(path_to_policies_list)
+
+ # Scan Execution Policy from ee/spec/fixtures/security_orchestration.yml
+ expect(page).to have_content 'Run DAST in every pipeline'
+ # Scan Result Policy from ee/spec/fixtures/security_orchestration.yml
+ expect(page).to have_content 'critical vulnerability CS approvals'
+ end
+end
+
+# Requires the url to the policy editor:
+# - path_to_policy_editor
+RSpec.shared_examples 'policy editor' do
+ before do
+ sign_in(owner)
+ stub_licensed_features(security_orchestration_policies: true)
+ end
+
+ it "can create a policy when a policy project exists" do
+ visit(path_to_policy_editor)
+ page.within(".gl-card:nth-child(1)") do
+ click_button _('Select policy')
+ end
+ fill_in _('Name'), with: 'Prevent vulnerabilities'
+ click_button _('Select scan type')
+ select_listbox_item _('Security Scan')
+ page.within(find_by_testid('actions-section')) do
+ click_button _('Remove')
+ end
+ click_button _('Configure with a merge request')
+ expect(page).to have_current_path(project_merge_request_path(policy_management_project, 1))
+ end
+end
diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
index f6be45b0cf8..6fb0516e173 100644
--- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
@@ -286,21 +286,21 @@ RSpec.shared_examples 'a container registry auth service' do
describe '.push_pull_nested_repositories_access_token' do
let_it_be(:project) { create(:project) }
-
- let(:token) { described_class.push_pull_nested_repositories_access_token(project.full_path) }
+ let(:name) { project.full_path }
+ let(:token) { described_class.push_pull_nested_repositories_access_token(name) }
let(:access) do
[
{
'type' => 'repository',
- 'name' => project.full_path,
+ 'name' => name,
'actions' => %w[pull push],
- 'meta' => { 'project_path' => project.full_path }
+ 'meta' => { 'project_path' => name }
},
{
'type' => 'repository',
- 'name' => "#{project.full_path}/*",
+ 'name' => "#{name}/*",
'actions' => %w[pull],
- 'meta' => { 'project_path' => project.full_path }
+ 'meta' => { 'project_path' => name }
}
]
end
@@ -311,6 +311,12 @@ RSpec.shared_examples 'a container registry auth service' do
expect(payload).to include('access' => access)
end
+ it 'sends the name as the override project path for the access token' do
+ expect(described_class).to receive(:access_token).with(anything, override_project_path: name)
+
+ subject
+ end
+
it_behaves_like 'a valid token'
it_behaves_like 'not a container repository factory'
@@ -1345,9 +1351,9 @@ RSpec.shared_examples 'a container registry auth service' do
describe '#access_token' do
let(:token) { described_class.access_token({ bad_project.full_path => ['pull'] }) }
let(:access) do
- [{ 'type' => 'repository',
- 'name' => bad_project.full_path,
- 'actions' => ['pull'] }]
+ { 'type' => 'repository',
+ 'name' => bad_project.full_path,
+ 'actions' => ['pull'] }
end
subject { { token: token } }
@@ -1355,7 +1361,23 @@ RSpec.shared_examples 'a container registry auth service' do
it_behaves_like 'a valid token'
it 'has the correct scope' do
- expect(payload).to include('access' => access)
+ expect(payload).to include('access' => [access])
+ end
+
+ context 'with an override project path' do
+ let(:override_project_path) { 'group/project-override' }
+ let(:token) do
+ described_class.access_token(
+ { bad_project.full_path => ['pull'] },
+ override_project_path: override_project_path
+ )
+ end
+
+ it 'returns the override project path in the metadata' do
+ expect(payload).to include('access' => [
+ access.merge("meta" => { "project_path" => override_project_path })
+ ])
+ end
end
end
end
diff --git a/spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb b/spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb
index 8a3ab07bbfe..aa31bd2b604 100644
--- a/spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb
@@ -30,7 +30,7 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
context 'with a file generated by Gitlab CSV export' do
let(:file) { fixture_file_upload('spec/fixtures/csv_gitlab_export.csv') }
- let!(:test_milestone) { create(:milestone, project: project, title: 'v1.0') }
+ let_it_be(:test_milestone) { create(:milestone, project: project, title: 'v1.0') }
it 'imports the CSV without errors' do
expect(subject[:success]).to eq(4)
@@ -41,7 +41,19 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
it 'correctly sets the issuable attributes' do
expect { subject }.to change { issuables.count }.by 4
- expect(issuables.reload).to include(have_attributes({ title: 'Test Title', description: 'Test Description' }))
+ if issuable_type == 'issue'
+ expect(issuables.reload).to include(
+ have_attributes({ title: 'Test Title', description: 'Test Description', milestone_id: test_milestone.id }),
+ have_attributes({ title: 'Title', milestone_id: test_milestone.id }),
+ have_attributes(
+ { title: 'Nihil impedit neque quos totam ut aut enim cupiditate doloribus molestiae.',
+ description: 'Omnis aliquid sint laudantium quam.',
+ milestone_id: test_milestone.id })
+ )
+
+ else
+ expect(issuables.reload).to include(have_attributes({ title: 'Test Title', description: 'Test Description' }))
+ end
end
it_behaves_like 'importer with email notification'
diff --git a/spec/support/shared_examples/services/mergeability_checks_service_shared_examples.rb b/spec/support/shared_examples/services/mergeability_checks_service_shared_examples.rb
new file mode 100644
index 00000000000..56f9275b0aa
--- /dev/null
+++ b/spec/support/shared_examples/services/mergeability_checks_service_shared_examples.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'mergeability check service' do |identifier, description|
+ it 'sets the identifier' do
+ expect(described_class.identifier).to eq(identifier)
+ end
+
+ it 'sets the description' do
+ expect(described_class.description).to eq(description)
+ end
+end
diff --git a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
index 6f0fd1aa4ed..d288c74ae4b 100644
--- a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
+++ b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
@@ -11,6 +11,7 @@ RSpec.shared_examples 'updating the namespace package setting attributes' do |to
.and change { namespace.package_settings.reload.generic_duplicate_exception_regex }.from(from[:generic_duplicate_exception_regex]).to(to[:generic_duplicate_exception_regex])
.and change { namespace.package_settings.reload.nuget_duplicates_allowed }.from(from[:nuget_duplicates_allowed]).to(to[:nuget_duplicates_allowed])
.and change { namespace.package_settings.reload.nuget_duplicate_exception_regex }.from(from[:nuget_duplicate_exception_regex]).to(to[:nuget_duplicate_exception_regex])
+ .and change { namespace.package_settings.reload.nuget_symbol_server_enabled }.from(from[:nuget_symbol_server_enabled]).to(to[:nuget_symbol_server_enabled])
end
end
@@ -34,6 +35,7 @@ RSpec.shared_examples 'creating the namespace package setting' do
expect(namespace.package_setting_relation.generic_duplicate_exception_regex).to eq(package_settings[:generic_duplicate_exception_regex])
expect(namespace.package_setting_relation.nuget_duplicates_allowed).to eq(package_settings[:nuget_duplicates_allowed])
expect(namespace.package_setting_relation.nuget_duplicate_exception_regex).to eq(package_settings[:nuget_duplicate_exception_regex])
+ expect(namespace.package_setting_relation.nuget_symbol_server_enabled).to eq(package_settings[:nuget_symbol_server_enabled])
end
it_behaves_like 'returning a success'
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
index 7d786dbeb87..dc7ac9a44e0 100644
--- a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -48,20 +48,12 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
end
it "moves the project and its #{repository_type} repository to the new storage and unmarks the repository as read-only" do
- old_project_repository_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- project.repository.path_to_repo
- end
-
- old_repository_path = repository.full_path
-
result = subject.execute
project.reload
expect(result).to be_success
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('test_second_storage')
- expect(gitlab_shell.repository_exists?('default', old_project_repository_path)).to be(false)
- expect(gitlab_shell.repository_exists?('default', old_repository_path)).to be(false)
end
end
diff --git a/spec/support/shared_examples/services/protected_branches_shared_examples.rb b/spec/support/shared_examples/services/protected_branches_shared_examples.rb
index 80e2f09ed44..6d4b82730da 100644
--- a/spec/support/shared_examples/services/protected_branches_shared_examples.rb
+++ b/spec/support/shared_examples/services/protected_branches_shared_examples.rb
@@ -12,7 +12,7 @@ RSpec.shared_context 'with scan result policy blocking protected branches' do
end
let(:scan_result_policy) do
- build(:scan_result_policy, branches: [branch_name], approval_settings: { block_unprotecting_branches: true })
+ build(:scan_result_policy, branches: [branch_name], approval_settings: { block_branch_modification: true })
end
before do
diff --git a/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb b/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb
index acf15730180..6fd5b0b169c 100644
--- a/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb
+++ b/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb
@@ -5,7 +5,7 @@ RSpec.shared_examples 'moves repository shard in bulk' do
let(:destination_storage_name) { 'test_second_storage' }
before do
- stub_storage_settings(destination_storage_name => { 'path' => 'tmp/tests/extra_storage' })
+ stub_storage_settings(destination_storage_name => {})
end
describe '#execute' do
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
index 003b8d07819..e4e4bdd9e6c 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -341,15 +341,21 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
context 'health status' do
subject(:migration_run) { described_class.new.perform }
- it 'puts migration on hold when there is autovaccum activity on related tables' do
- swapout_view_for_table(:postgres_autovacuum_activity, connection: connection)
- create(
- :postgres_autovacuum_activity,
- table: migration.table_name,
- table_identifier: "public.#{migration.table_name}"
- )
+ context 'with skip_autovacuum_health_check_for_ci_builds FF disabled' do
+ before do
+ stub_feature_flags(skip_autovacuum_health_check_for_ci_builds: false)
+ end
- expect { migration_run }.to change { migration.reload.on_hold? }.from(false).to(true)
+ it 'puts migration on hold when there is autovaccum activity on related tables' do
+ swapout_view_for_table(:postgres_autovacuum_activity, connection: connection)
+ create(
+ :postgres_autovacuum_activity,
+ table: migration.table_name,
+ table_identifier: "public.#{migration.table_name}"
+ )
+
+ expect { migration_run }.to change { migration.reload.on_hold? }.from(false).to(true)
+ end
end
it 'puts migration on hold when the pending WAL count is above the limit' do
diff --git a/spec/support/shared_examples/workers/idempotency_shared_examples.rb b/spec/support/shared_examples/workers/idempotency_shared_examples.rb
index be43ea7d5f0..4d752afa00e 100644
--- a/spec/support/shared_examples/workers/idempotency_shared_examples.rb
+++ b/spec/support/shared_examples/workers/idempotency_shared_examples.rb
@@ -5,7 +5,7 @@
#
# Usage:
#
-# include_examples 'an idempotent worker' do
+# it_behaves_like 'an idempotent worker' do
# it 'checks the side-effects for multiple calls' do
# # it'll call the job's perform method 3 times
# # by default.
diff --git a/spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb b/spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb
index 6707f65eb69..4b73c1720bc 100644
--- a/spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb
+++ b/spec/support/shared_examples/workers/schedule_bulk_repository_shard_moves_shared_examples.rb
@@ -6,7 +6,7 @@ RSpec.shared_examples 'schedules bulk repository shard moves' do
describe "#perform" do
before do
- stub_storage_settings(destination_storage_name => { 'path' => 'tmp/tests/extra_storage' })
+ stub_storage_settings(destination_storage_name => {})
allow(worker_klass).to receive(:perform_async)
end
diff --git a/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb b/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb
index 9b7183a9eac..cb3cd81f5ce 100644
--- a/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb
+++ b/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb
@@ -1,43 +1,166 @@
# frozen_string_literal: true
RSpec.shared_examples 'an update storage move worker' do
+ let(:worker) { described_class.new }
+
it 'has the `until_executed` deduplicate strategy' do
expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
end
- describe '#perform' do
+ describe '#perform', :clean_gitlab_redis_shared_state do
let(:service) { double(:update_repository_storage_service) }
before do
allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%w[default test_second_storage])
end
- context 'without repository storage move' do
- it 'calls the update repository storage service' do
- expect(service_klass).to receive(:new).and_return(service)
- expect(service).to receive(:execute)
+ describe 'deprecated method signature' do
+ # perform(container_id, new_repository_storage_key, repository_storage_move_id = nil)
+ subject { worker.perform(container.id, 'test_second_storage', repository_storage_move_id) }
- expect do
- subject.perform(container.id, 'test_second_storage')
- end.to change { repository_storage_move_klass.count }.by(1)
-
- storage_move = container.repository_storage_moves.last
- expect(storage_move).to have_attributes(
- source_storage_name: 'default',
- destination_storage_name: 'test_second_storage'
- )
+ context 'without repository storage move' do
+ let(:repository_storage_move_id) { nil }
+
+ it 'calls the update repository storage service' do
+ expect(service_klass).to receive(:new).and_return(service)
+ expect(service).to receive(:execute)
+
+ expect do
+ worker.perform(container.id, 'test_second_storage')
+ end.to change { repository_storage_move_klass.count }.by(1)
+
+ storage_move = container.repository_storage_moves.last
+ expect(storage_move).to have_attributes(
+ source_storage_name: 'default',
+ destination_storage_name: 'test_second_storage'
+ )
+ end
+ end
+
+ context 'with repository storage move' do
+ let(:repository_storage_move_id) { repository_storage_move.id }
+
+ before do
+ allow(service_klass).to receive(:new).and_return(service)
+ end
+
+ it 'calls the update repository storage service' do
+ expect(service).to receive(:execute)
+
+ expect do
+ subject
+ end.not_to change { repository_storage_move_klass.count }
+ end
+
+ context 'when repository storage move raises an exception' do
+ let(:exception) { RuntimeError.new('boom') }
+
+ it 'releases the exclusive lock' do
+ expect(service).to receive(:execute).and_raise(exception)
+
+ allow_next_instance_of(Gitlab::ExclusiveLease) do |lease|
+ expect(lease).to receive(:cancel)
+ end
+
+ expect { subject }.to raise_error(exception)
+ end
+ end
+
+ context 'when exclusive lease already set' do
+ let(:lease_key) { [described_class.name.underscore, container.id].join(':') }
+ let(:exclusive_lease) { Gitlab::ExclusiveLease.new(lease_key, uuid: uuid, timeout: 1.minute) }
+ let(:uuid) { 'other_worker_jid' }
+
+ it 'does not call the update repository storage service' do
+ expect(exclusive_lease.try_obtain).to eq(uuid)
+ expect(service).not_to receive(:execute)
+
+ subject
+
+ expect(repository_storage_move.reload).to be_failed
+ end
+
+ context 'when exclusive lease was taken by the current worker' do
+ let(:uuid) { 'existing_worker_jid' }
+
+ before do
+ allow(worker).to receive(:jid).and_return(uuid)
+ end
+
+ it 'marks storage migration as failed' do
+ expect(exclusive_lease.try_obtain).to eq(worker.jid)
+ expect(service).not_to receive(:execute)
+
+ subject
+
+ expect(repository_storage_move.reload).to be_failed
+ end
+ end
+ end
end
end
- context 'with repository storage move' do
+ describe 'new method signature' do
+ # perform(repository_storage_move_id)
+ subject { worker.perform(repository_storage_move.id) }
+
+ before do
+ allow(service_klass).to receive(:new).and_return(service)
+ end
+
it 'calls the update repository storage service' do
- expect(service_klass).to receive(:new).and_return(service)
expect(service).to receive(:execute)
expect do
- subject.perform(nil, nil, repository_storage_move.id)
+ subject
end.not_to change { repository_storage_move_klass.count }
end
+
+ context 'when repository storage move raises an exception' do
+ let(:exception) { RuntimeError.new('boom') }
+
+ it 'releases the exclusive lock' do
+ expect(service).to receive(:execute).and_raise(exception)
+
+ allow_next_instance_of(Gitlab::ExclusiveLease) do |lease|
+ expect(lease).to receive(:cancel)
+ end
+
+ expect { subject }.to raise_error(exception)
+ end
+ end
+
+ context 'when exclusive lease already set' do
+ let(:lease_key) { [described_class.name.underscore, repository_storage_move.container_id].join(':') }
+ let(:exclusive_lease) { Gitlab::ExclusiveLease.new(lease_key, uuid: uuid, timeout: 1.minute) }
+ let(:uuid) { 'other_worker_jid' }
+
+ it 'does not call the update repository storage service' do
+ expect(exclusive_lease.try_obtain).to eq(uuid)
+ expect(service).not_to receive(:execute)
+
+ subject
+
+ expect(repository_storage_move.reload).to be_failed
+ end
+
+ context 'when exclusive lease was taken by the current worker' do
+ let(:uuid) { 'existing_worker_jid' }
+
+ before do
+ allow(worker).to receive(:jid).and_return(uuid)
+ end
+
+ it 'marks storage migration as failed' do
+ expect(exclusive_lease.try_obtain).to eq(worker.jid)
+ expect(service).not_to receive(:execute)
+
+ subject
+
+ expect(repository_storage_move.reload).to be_failed
+ end
+ end
+ end
end
end
end
diff --git a/spec/support_specs/helpers/stub_saas_features_spec.rb b/spec/support_specs/helpers/stub_saas_features_spec.rb
deleted file mode 100644
index c3cec3f47aa..00000000000
--- a/spec/support_specs/helpers/stub_saas_features_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe StubSaasFeatures, feature_category: :shared do
- describe '#stub_saas_features' do
- using RSpec::Parameterized::TableSyntax
-
- let(:feature_name) { :some_saas_feature }
-
- context 'when checking global state' do
- where(:feature_value) do
- [true, false]
- end
-
- with_them do
- before do
- stub_saas_features(feature_name => feature_value)
- end
-
- it { expect(::Gitlab::Saas.feature_available?(feature_name)).to eq(feature_value) }
- end
- end
-
- context 'when value is not boolean' do
- it 'raises an error' do
- expect do
- stub_saas_features(feature_name => '_not_boolean_')
- end.to raise_error(ArgumentError, /value must be boolean/)
- end
- end
-
- it 'subsequent run changes state' do
- # enable FF on all
- stub_saas_features({ feature_name => true })
- expect(::Gitlab::Saas.feature_available?(feature_name)).to eq(true)
-
- # disable FF on all
- stub_saas_features({ feature_name => false })
- expect(::Gitlab::Saas.feature_available?(feature_name)).to eq(false)
- end
-
- it 'handles multiple features' do
- stub_saas_features(feature_name => false, some_new_feature: true)
-
- expect(::Gitlab::Saas.feature_available?(feature_name)).to eq(false)
- expect(::Gitlab::Saas.feature_available?(:some_new_feature)).to eq(true)
- end
- end
-end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 75be7b97a67..4f27ba57bcb 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -453,10 +453,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
before do
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,terraform_state,registry')
- stub_storage_settings(second_storage_name => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- })
+ stub_storage_settings(second_storage_name => {})
end
shared_examples 'includes repositories in all repository storages' do
diff --git a/spec/tasks/gitlab/check_rake_spec.rb b/spec/tasks/gitlab/check_rake_spec.rb
index 4a73ef78022..cf339a5e86a 100644
--- a/spec/tasks/gitlab/check_rake_spec.rb
+++ b/spec/tasks/gitlab/check_rake_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'check.rake', :silence_stdout, feature_category: :gitaly do
+RSpec.describe 'check.rake', :delete, :silence_stdout, feature_category: :gitaly do
before do
Rake.application.rake_require 'tasks/gitlab/check'
diff --git a/spec/tasks/gitlab/click_house/migration_rake_spec.rb b/spec/tasks/gitlab/click_house/migration_rake_spec.rb
index 75a1c1a1856..37ae9d7f090 100644
--- a/spec/tasks/gitlab/click_house/migration_rake_spec.rb
+++ b/spec/tasks/gitlab/click_house/migration_rake_spec.rb
@@ -8,125 +8,227 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca
# We don't need to delete data since we don't modify Postgres data
self.use_transactional_tests = false
- let(:migrations_base_dir) { 'click_house/migrations' }
- let(:migrations_dirname) { '' }
- let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
- let(:verbose) { nil }
-
before(:all) do
Rake.application.rake_require 'tasks/gitlab/click_house/migration'
end
- before do
- stub_env('VERBOSE', verbose) if verbose
- end
+ it 'migrates and rolls back the database' do
+ expect { run_rake_task('gitlab:clickhouse:migrate:main') }.to change { active_schema_migrations_count }.from(0)
+ .and output.to_stdout
- describe 'migrate' do
- subject(:migration) { run_rake_task('gitlab:clickhouse:migrate') }
+ expect { run_rake_task('gitlab:clickhouse:rollback:main') }.to change { active_schema_migrations_count }.by(-1)
+ .and output.to_stdout
- let(:target_version) { nil }
+ stub_env('VERSION', 0)
+ expect { run_rake_task('gitlab:clickhouse:rollback:main') }.to change { active_schema_migrations_count }.to(0)
+ .and output.to_stdout
+ end
- around do |example|
- ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
+ context 'when clickhouse database is not configured' do
+ before do
+ allow(::ClickHouse::Client).to receive(:configuration).and_return(::ClickHouse::Client::Configuration.new)
+ end
- example.run
+ it 'raises an error' do
+ expect { run_rake_task('gitlab:clickhouse:migrate:main') }.to raise_error(ClickHouse::Client::ConfigurationError)
+ end
- clear_consts(expand_fixture_path(migrations_base_dir))
+ it 'prints the error message and exits successfully if skip_unless_configured is passed' do
+ expect do
+ run_rake_task('gitlab:clickhouse:migrate:main', true)
+ end.to output(/The 'main' ClickHouse database is not configured, skipping migrations/).to_stdout
end
+ end
- before do
- stub_env('VERSION', target_version) if target_version
+ describe 'gitlab:clickhouse:migrate' do
+ it 'delegates to gitlab:clickhouse:migrate:main' do
+ task = Rake::Task['gitlab:clickhouse:migrate:main']
+ task.reenable # re-enabling task in case other tests already run it
+ expect(task).to receive(:invoke).with("true").and_call_original
+
+ expect do
+ run_rake_task('gitlab:clickhouse:migrate', true)
+ end.to change { active_schema_migrations_count }.from(0).and output.to_stdout
end
+ end
+
+ context 'with migration fixtures', :silence_stdout do
+ let(:migrations_base_dir) { 'click_house/migrations' }
+ let(:migrations_dirname) { 'undefined' }
+ let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
+
+ describe 'migrate:main' do
+ subject(:migration) { run_rake_task('gitlab:clickhouse:migrate:main') }
+
+ let(:verbose) { nil }
+ let(:target_version) { nil }
+ let(:step) { nil }
- describe 'when creating a table' do
- let(:migrations_dirname) { 'plain_table_creation' }
+ before do
+ allow(ClickHouse::MigrationSupport::Migrator).to receive(:migrations_paths).with(:main)
+ .and_return(migrations_dir)
- it 'creates a table' do
- expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
- .and output.to_stdout
+ stub_env('VERBOSE', verbose) if verbose
+ stub_env('VERSION', target_version) if target_version
+ stub_env('STEP', step.to_s) if step
+ end
- expect(describe_table('some')).to match({
- id: a_hash_including(type: 'UInt64'),
- date: a_hash_including(type: 'Date')
- })
+ after do
+ unload_click_house_migration_classes(expand_fixture_path(migrations_dir))
end
- context 'when VERBOSE is false' do
- let(:verbose) { 'false' }
+ describe 'when creating a table' do
+ let(:migrations_dirname) { 'plain_table_creation' }
- it 'does not write to stdout' do
- expect { migration }.not_to output.to_stdout
+ it 'creates a table' do
+ expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
+ .and output.to_stdout
expect(describe_table('some')).to match({
id: a_hash_including(type: 'UInt64'),
date: a_hash_including(type: 'Date')
})
end
+
+ context 'when VERBOSE is false' do
+ let(:verbose) { 'false' }
+
+ it 'does not write to stdout' do
+ expect { migration }.not_to output.to_stdout
+
+ expect(describe_table('some')).to match({
+ id: a_hash_including(type: 'UInt64'),
+ date: a_hash_including(type: 'Date')
+ })
+ end
+ end
end
- end
- describe 'when dropping a table' do
- let(:migrations_dirname) { 'drop_table' }
- let(:target_version) { 2 }
+ describe 'when dropping a table' do
+ let(:migrations_dirname) { 'drop_table' }
+
+ context 'with VERSION set' do
+ let(:target_version) { 2 }
+
+ it 'drops table' do
+ stub_env('VERSION', 1)
+ run_rake_task('gitlab:clickhouse:migrate:main')
+
+ expect(table_names).to include('some')
+
+ stub_env('VERSION', target_version)
+ migration
+ expect(table_names).not_to include('some')
+ end
+
+ context 'with STEP also set' do
+ let(:step) { 1 }
+
+ it 'ignores STEP and executes both migrations' do
+ migration
+
+ expect(table_names).not_to include('some')
+ end
+ end
+ end
+
+ context 'with STEP set to 1' do
+ let(:step) { 1 }
- it 'drops table' do
- stub_env('VERSION', 1)
- run_rake_task('gitlab:clickhouse:migrate')
+ it 'executes only first step and creates table' do
+ migration
+
+ expect(table_names).to include('some')
+ end
+ end
+
+ context 'with STEP set to 0' do
+ let(:step) { 0 }
+
+ it 'executes only first step and creates table' do
+ expect { migration }.to raise_error ArgumentError, 'STEP should be a positive number'
+ end
+ end
- expect(table_names).to include('some')
+ context 'with STEP set to not-a-number' do
+ let(:step) { 'NaN' }
- stub_env('VERSION', target_version)
- migration
- expect(table_names).not_to include('some')
+ it 'raises an error' do
+ expect { migration }.to raise_error ArgumentError, 'invalid value for Integer(): "NaN"'
+ end
+ end
+
+ context 'with STEP set to empty string' do
+ let(:step) { '' }
+
+ it 'raises an error' do
+ expect { migration }.to raise_error ArgumentError, 'invalid value for Integer(): ""'
+ end
+ end
end
- end
- describe 'with VERSION is invalid' do
- let(:migrations_dirname) { 'plain_table_creation' }
- let(:target_version) { 'invalid' }
+ context 'with VERSION is invalid' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+ let(:target_version) { 'invalid' }
- it { expect { migration }.to raise_error RuntimeError, 'Invalid format of target version: `VERSION=invalid`' }
+ it { expect { migration }.to raise_error RuntimeError, 'Invalid format of target version: `VERSION=invalid`' }
+ end
end
- end
- describe 'rollback' do
- subject(:migration) { run_rake_task('gitlab:clickhouse:rollback') }
+ describe 'rollback:main' do
+ subject(:migration) { run_rake_task('gitlab:clickhouse:rollback:main') }
- let(:schema_migration) { ClickHouse::MigrationSupport::SchemaMigration }
+ let(:target_version) { nil }
+ let(:rollback_step) { nil }
+ let(:migrations_dirname) { 'table_creation_with_down_method' }
- around do |example|
- ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
- migrate(nil, ClickHouse::MigrationSupport::MigrationContext.new(migrations_dir, schema_migration))
+ before do
+ allow(ClickHouse::MigrationSupport::Migrator).to receive(:migrations_paths).with(:main)
+ .and_return(migrations_dir)
- example.run
+ run_rake_task('gitlab:clickhouse:migrate:main')
- clear_consts(expand_fixture_path(migrations_base_dir))
- end
+ stub_env('VERSION', target_version) if target_version
+ stub_env('STEP', rollback_step.to_s) if rollback_step
+ end
- context 'when migrating back all the way to 0' do
- let(:target_version) { 0 }
+ after do
+ unload_click_house_migration_classes(expand_fixture_path(migrations_dir))
+ end
- context 'when down method is present' do
- let(:migrations_dirname) { 'table_creation_with_down_method' }
+ context 'with VERSION set' do
+ context 'when migrating back all the way to 0' do
+ let(:target_version) { 0 }
- it 'removes migration' do
- expect(table_names).to include('some')
+ it 'rolls back all migrations' do
+ expect(table_names).to include('some', 'another')
+
+ migration
+ expect(table_names).not_to include('some', 'another')
+ end
+
+ context 'with STEP also set' do
+ let(:rollback_step) { 1 }
- migration
- expect(table_names).not_to include('some')
+ it 'ignores STEP and rolls back all migrations' do
+ expect(table_names).to include('some', 'another')
+
+ migration
+ expect(table_names).not_to include('some', 'another')
+ end
+ end
end
end
- end
- end
- %w[gitlab:clickhouse:migrate].each do |task|
- context "when running #{task}" do
- it "does run gitlab:clickhouse:prepare_schema_migration_table before" do
- expect(Rake::Task['gitlab:clickhouse:prepare_schema_migration_table']).to receive(:execute).and_return(true)
- expect(Rake::Task[task]).to receive(:execute).and_return(true)
+ context 'with STEP set to 1' do
+ let(:rollback_step) { 1 }
- Rake::Task['gitlab:clickhouse:prepare_schema_migration_table'].reenable
- run_rake_task(task)
+ it 'executes only first step and drops "another" table' do
+ run_rake_task('gitlab:clickhouse:rollback:main')
+
+ expect(table_names).to include('some')
+ expect(table_names).not_to include('another')
+ end
end
end
end
diff --git a/spec/tasks/gitlab/db/decomposition/migrate_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/migrate_rake_spec.rb
new file mode 100644
index 00000000000..300c7009154
--- /dev/null
+++ b/spec/tasks/gitlab/db/decomposition/migrate_rake_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'gitlab:db:decomposition:migrate', feature_category: :cell do
+ before(:all) do
+ skip_if_database_exists(:ci)
+
+ Rake.application.rake_require 'tasks/gitlab/db/decomposition/migrate'
+ end
+
+ subject(:migrate_task) { run_rake_task('gitlab:db:decomposition:migrate') }
+
+ before do
+ allow_next_instance_of(Gitlab::Database::Decomposition::Migrate) do |instance|
+ allow(instance).to receive(:process!)
+ end
+ end
+
+ it 'calls Gitlab::Database::Decomposition::Migrate#process!' do
+ expect_next_instance_of(Gitlab::Database::Decomposition::Migrate) do |instance|
+ expect(instance).to receive(:process!)
+ end
+
+ migrate_task
+ end
+
+ context 'when a Gitlab::Database::Decomposition::Migrate::Error is raised' do
+ before do
+ allow_next_instance_of(Gitlab::Database::Decomposition::Migrate) do |instance|
+ allow(instance).to receive(:process!).and_raise(Gitlab::Database::Decomposition::MigrateError, 'some error')
+ end
+ end
+
+ it 'renders error' do
+ expect { migrate_task }.to output("some error\n").to_stdout.and raise_error(SystemExit)
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index a966f2118b0..d7bcb501425 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
+ Rake.application.rake_require 'tasks/gitlab/click_house/migration'
Rake.application.rake_require 'tasks/gitlab/db'
Rake.application.rake_require 'tasks/gitlab/db/lock_writes'
end
@@ -357,6 +358,43 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
end
end
end
+
+ describe 'clickhouse migrations' do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+ let(:main_config) { double(:config, name: 'main') }
+
+ before do
+ # stub normal migrations
+ allow(ActiveRecord::Base).to receive_message_chain('configurations.configs_for').and_return([main_config])
+ allow(connection).to receive(:tables).and_return(%w[table1 table2])
+ allow(Rake::Task['db:migrate']).to receive(:invoke)
+ end
+
+ it 'migrates clickhouse database' do
+ expect(Rake::Task['gitlab:clickhouse:migrate']).to receive(:invoke).with(true)
+
+ run_rake_task('gitlab:db:configure')
+ end
+
+ it 'does not run clickhouse migrations if feature flag is disabled' do
+ stub_feature_flags(run_clickhouse_migrations_automatically: false)
+
+ expect(Rake::Task['gitlab:clickhouse:migrate']).not_to receive(:invoke)
+
+ run_rake_task('gitlab:db:configure')
+ end
+
+ it 'does not fail if clickhouse is not configured' do
+ allow(::ClickHouse::Client).to receive(:configuration).and_return(::ClickHouse::Client::Configuration.new)
+
+ Rake::Task['gitlab:clickhouse:migrate'].reenable
+ Rake::Task['gitlab:clickhouse:migrate:main'].reenable
+
+ expect do
+ run_rake_task('gitlab:db:configure')
+ end.to output(/The 'main' ClickHouse database is not configured, skipping migrations/).to_stdout
+ end
+ end
end
describe 'schema inconsistencies' do
@@ -550,7 +588,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
with_them do
it 'outputs changed message for automation after operations happen' do
- allow(ActiveRecord::Base.connection.schema_migration).to receive(:table_exists?).and_return(schema_migration_table_exists)
+ allow(ActiveRecord::Base.connection).to receive_message_chain(:schema_migration, :table_exists?).and_return(schema_migration_table_exists)
allow_any_instance_of(ActiveRecord::MigrationContext).to receive(:needs_migration?).and_return(needs_migrations)
expect { run_rake_task('gitlab:db:unattended') }.to output(/^#{rake_output}$/).to_stdout
end
@@ -740,6 +778,18 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
.to raise_error(/Don't know how to build task 'gitlab:db:create_dynamic_partitions:geo'/)
end
end
+
+ context 'with jh configured' do
+ before do
+ skip 'Skipping because the jh database is not configured' unless
+ !!ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'jh')
+ end
+
+ it 'does not create a task for the jh database' do
+ expect { run_rake_task('gitlab:db:create_dynamic_partitions:jh') }
+ .to raise_error(/Don't know how to build task 'gitlab:db:create_dynamic_partitions:jh'/)
+ end
+ end
end
describe 'reindex' do
diff --git a/spec/tasks/gitlab/doctor/secrets_rake_spec.rb b/spec/tasks/gitlab/doctor/secrets_rake_spec.rb
index 91ef3c57d73..123b8745d65 100644
--- a/spec/tasks/gitlab/doctor/secrets_rake_spec.rb
+++ b/spec/tasks/gitlab/doctor/secrets_rake_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'gitlab:doctor:reset_encrypted_tokens', :silence_stdout, feature_category: :runner_fleet do
+RSpec.describe 'gitlab:doctor:reset_encrypted_tokens', :silence_stdout, feature_category: :fleet_visibility do
let(:model_names) { 'Project,Group' }
let(:token_names) { 'runners_token' }
diff --git a/spec/tasks/gitlab/ldap_rake_spec.rb b/spec/tasks/gitlab/ldap_rake_spec.rb
index 877bed7cacf..508fd240750 100644
--- a/spec/tasks/gitlab/ldap_rake_spec.rb
+++ b/spec/tasks/gitlab/ldap_rake_spec.rb
@@ -89,6 +89,21 @@ RSpec.describe 'gitlab:ldap:secret rake tasks' do
stub_env('EDITOR', nil)
expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/No \$EDITOR specified to open file. Please provide one when running the command/).to_stderr
end
+
+ it 'when $EDITOR contains multiple arguments' do
+ stub_env('EDITOR', 'cat -v')
+
+ expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/File encrypted and saved./).to_stdout
+ expect(File.exist?(ldap_secret_file)).to be true
+ value = Settings.encrypted(ldap_secret_file)
+ expect(value.read).to match(/password: '123'/)
+ end
+
+ it 'when $EDITOR is set to a non-existent binary' do
+ stub_env('EDITOR', "nothing-#{SecureRandom.hex}")
+
+ expect { run_rake_task('gitlab:ldap:secret:edit') }.to raise_error(/Unable to run \$EDITOR/)
+ end
end
describe 'write' do
diff --git a/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb b/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb
index b8503e2bc1b..ae4e5425d0c 100644
--- a/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb
+++ b/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'gitlab:seed:runner_fleet rake task', :silence_stdout, feature_category: :runner_fleet do
+RSpec.describe 'gitlab:seed:runner_fleet rake task', :silence_stdout, feature_category: :fleet_visibility do
let(:registration_prefix) { 'rf-' }
let(:runner_count) { 10 }
let(:job_count) { 20 }
diff --git a/spec/tasks/gitlab/shell_rake_spec.rb b/spec/tasks/gitlab/shell_rake_spec.rb
index 7bcf88aeea2..4ca46a9ce40 100644
--- a/spec/tasks/gitlab/shell_rake_spec.rb
+++ b/spec/tasks/gitlab/shell_rake_spec.rb
@@ -11,12 +11,8 @@ RSpec.describe 'gitlab:shell rake tasks', :silence_stdout do
describe 'install task' do
it 'installs and compiles gitlab-shell' do
- storages = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- Gitlab.config.repositories.storages.values.map(&:legacy_disk_path)
- end
-
expect_any_instance_of(Gitlab::TaskHelpers).to receive(:checkout_or_clone_version)
- allow(Kernel).to receive(:system).with('bin/install', *storages).and_return(true)
+ allow(Kernel).to receive(:system).with('bin/install').and_return(true)
allow(Kernel).to receive(:system).with('make', 'build').and_return(true)
run_rake_task('gitlab:shell:install')
diff --git a/spec/tasks/rubocop_rake_spec.rb b/spec/tasks/rubocop_rake_spec.rb
index 33ebd9ed2b0..c68e1bc2a8a 100644
--- a/spec/tasks/rubocop_rake_spec.rb
+++ b/spec/tasks/rubocop_rake_spec.rb
@@ -4,9 +4,9 @@
require 'fast_spec_helper'
require 'tmpdir'
require 'fileutils'
+require 'gitlab/rspec/next_instance_of'
require_relative '../support/silence_stdout'
-require_relative '../support/helpers/next_instance_of'
require_relative '../support/matchers/abort_matcher'
require_relative '../../rubocop/formatter/todo_formatter'
require_relative '../../rubocop/todo_dir'
diff --git a/spec/tooling/danger/outdated_todo_spec.rb b/spec/tooling/danger/outdated_todo_spec.rb
index 3a3909c69ac..3079c26bafc 100644
--- a/spec/tooling/danger/outdated_todo_spec.rb
+++ b/spec/tooling/danger/outdated_todo_spec.rb
@@ -15,69 +15,78 @@ RSpec.describe Tooling::Danger::OutdatedTodo, feature_category: :tooling do
]
end
- subject(:plugin) { described_class.new(filenames, context: fake_danger, todos: todos) }
-
- context 'when the filenames are mentioned in single todo' do
- let(:filenames) { ['app/controllers/acme_challenges_controller.rb'] }
-
- it 'warns about mentions' do
- expect(fake_danger)
- .to receive(:warn)
- .with <<~MESSAGE
- `app/controllers/acme_challenges_controller.rb` was removed but is mentioned in:
- - `spec/fixtures/tooling/danger/rubocop_todo/cop1.yml:5`
- MESSAGE
-
- plugin.check
- end
- end
-
- context 'when the filenames are mentioned in multiple todos' do
- let(:filenames) do
- [
- 'app/controllers/application_controller.rb',
- 'app/controllers/acme_challenges_controller.rb'
- ]
- end
-
- it 'warns about mentions' do
- expect(fake_danger)
- .to receive(:warn)
- .with(<<~FIRSTMESSAGE)
- `app/controllers/application_controller.rb` was removed but is mentioned in:
- - `spec/fixtures/tooling/danger/rubocop_todo/cop1.yml:4`
- - `spec/fixtures/tooling/danger/rubocop_todo/cop2.yml:4`
- FIRSTMESSAGE
-
- expect(fake_danger)
- .to receive(:warn)
- .with(<<~SECONDMESSAGE)
- `app/controllers/acme_challenges_controller.rb` was removed but is mentioned in:
- - `spec/fixtures/tooling/danger/rubocop_todo/cop1.yml:5`
- SECONDMESSAGE
-
- plugin.check
- end
- end
-
- context 'when the filenames are not mentioned in todos' do
- let(:filenames) { ['any/inexisting/file.rb'] }
-
- it 'does not warn' do
- expect(fake_danger).not_to receive(:warn)
-
- plugin.check
- end
- end
-
- context 'when there is no todos' do
- let(:filenames) { ['app/controllers/acme_challenges_controller.rb'] }
- let(:todos) { [] }
-
- it 'does not warn' do
- expect(fake_danger).not_to receive(:warn)
-
- plugin.check
+ subject(:plugin) { described_class.new(filenames, context: fake_danger, todos: todos, allow_fail: allow_fail) }
+
+ [true, false].each do |allow_failure|
+ context "with allow_fail set to #{allow_failure}" do
+ let(:allow_fail) { allow_failure }
+ let(:expected_method) do
+ allow_failure ? :fail : :warn
+ end
+
+ context 'when the filenames are mentioned in single todo' do
+ let(:filenames) { ['app/controllers/acme_challenges_controller.rb'] }
+
+ it 'warns about mentions' do
+ expect(fake_danger)
+ .to receive(expected_method)
+ .with <<~MESSAGE
+ `app/controllers/acme_challenges_controller.rb` was removed but is mentioned in:
+ - `spec/fixtures/tooling/danger/rubocop_todo/cop1.yml:5`
+ MESSAGE
+
+ plugin.check
+ end
+ end
+
+ context 'when the filenames are mentioned in multiple todos' do
+ let(:filenames) do
+ [
+ 'app/controllers/application_controller.rb',
+ 'app/controllers/acme_challenges_controller.rb'
+ ]
+ end
+
+ it 'warns about mentions' do
+ expect(fake_danger)
+ .to receive(expected_method)
+ .with(<<~FIRSTMESSAGE)
+ `app/controllers/application_controller.rb` was removed but is mentioned in:
+ - `spec/fixtures/tooling/danger/rubocop_todo/cop1.yml:4`
+ - `spec/fixtures/tooling/danger/rubocop_todo/cop2.yml:4`
+ FIRSTMESSAGE
+
+ expect(fake_danger)
+ .to receive(expected_method)
+ .with(<<~SECONDMESSAGE)
+ `app/controllers/acme_challenges_controller.rb` was removed but is mentioned in:
+ - `spec/fixtures/tooling/danger/rubocop_todo/cop1.yml:5`
+ SECONDMESSAGE
+
+ plugin.check
+ end
+ end
+
+ context 'when the filenames are not mentioned in todos' do
+ let(:filenames) { ['any/inexisting/file.rb'] }
+
+ it 'does not warn' do
+ expect(fake_danger).not_to receive(expected_method)
+
+ plugin.check
+ end
+ end
+
+ context 'when there is no todos' do
+ let(:filenames) { ['app/controllers/acme_challenges_controller.rb'] }
+ let(:todos) { [] }
+
+ it 'does not warn' do
+ expect(fake_danger).not_to receive(expected_method)
+
+ plugin.check
+ end
+ end
end
end
end
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 2da90ddbd67..a41aba17f56 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -8,7 +8,7 @@ require 'gitlab/dangerfiles/spec_helper'
require_relative '../../../danger/plugins/project_helper'
-RSpec.describe Tooling::Danger::ProjectHelper do
+RSpec.describe Tooling::Danger::ProjectHelper, feature_category: :tooling do
include StubENV
include_context "with dangerfile"
@@ -130,6 +130,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'lib/gitlab/background_migration.rb' | [:database, :backend]
'lib/gitlab/background_migration/foo' | [:database, :backend]
'ee/lib/gitlab/background_migration/foo' | [:database, :backend]
+ 'ee/lib/ee/gitlab/background_migration/foo' | [:database, :backend]
'lib/gitlab/database.rb' | [:database, :backend]
'lib/gitlab/database/foo' | [:database, :backend]
'ee/lib/gitlab/database/foo' | [:database, :backend]
@@ -238,7 +239,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
it { is_expected.to eq(expected_categories) }
end
- context 'having specific changes' do
+ context 'when having specific changes' do
where(:expected_categories, :patch, :changed_files) do
[:analytics_instrumentation] | '+data-track-action' | ['components/welcome.vue']
[:analytics_instrumentation] | '+ data: { track_label:' | ['admin/groups/_form.html.haml']
diff --git a/spec/tooling/lib/tooling/api/job_spec.rb b/spec/tooling/lib/tooling/api/job_spec.rb
new file mode 100644
index 00000000000..dfb047f0e6d
--- /dev/null
+++ b/spec/tooling/lib/tooling/api/job_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require_relative '../../../../../tooling/lib/tooling/api/job'
+require_relative '../../../../../tooling/lib/tooling/api/request'
+
+RSpec.describe Tooling::API::Job, feature_category: :tooling do
+ describe '#rspec_failed_files' do
+ let(:job) { described_class.new('api_token', 'project_id', 'job_id') }
+ let(:failures) { '' }
+ let(:log) do
+ <<~LOG
+ lots of content at the top of the file
+ #{failures}
+ some content at the bottom of the file
+ LOG
+ end
+
+ subject(:rspec_failed_files) { job.rspec_failed_files }
+
+ shared_context 'with stubbed API request' do
+ before do
+ # Stub the API request.
+ allow(job).to receive(:get_job_log).and_return(log)
+ end
+ end
+
+ it 'will fetch job logs' do
+ uri = URI("https://gitlab.com/api/v4/projects/project_id/jobs/job_id/trace")
+
+ response_double = instance_double(Net::HTTPOK, body: log)
+ expect(Tooling::API::Request).to receive(:get).with('api_token', uri).and_return(response_double)
+
+ rspec_failed_files
+ end
+
+ context 'when there are no failures' do
+ include_context 'with stubbed API request'
+
+ let(:failures) { '' }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when a spec fails on a specified line' do
+ include_context 'with stubbed API request'
+
+ let(:failures) { 'rspec ./spec/foo_spec.rb:123' }
+
+ it { is_expected.to eq(%w[spec/foo_spec.rb]) }
+ end
+
+ context 'when a nested spec fails' do
+ include_context 'with stubbed API request'
+
+ let(:failures) { %(rspec './spec/foo_spec.rb[123:456]') }
+
+ it { is_expected.to eq(%w[spec/foo_spec.rb]) }
+ end
+
+ context 'when there are multiple spec failures' do
+ include_context 'with stubbed API request'
+
+ let(:failures) do
+ <<~LOG
+ rspec spec/foo_spec.rb:123
+ rspec spec/bar_spec.rb:456
+ rspec 'spec/ro_spec.rb[1:2]'
+ rspec 'spec/sham_spec.rb[3:4]'
+ rspec 'spec/bo_spec.rb[5:6]'
+ LOG
+ end
+
+ it do
+ is_expected.to match_array(
+ %w[spec/bar_spec.rb spec/bo_spec.rb spec/foo_spec.rb spec/ro_spec.rb spec/sham_spec.rb]
+ )
+ end
+ end
+
+ context 'when there are multiple spec failures in the same file' do
+ include_context 'with stubbed API request'
+
+ let(:failures) do
+ <<~LOG
+ rspec ./spec/foo_spec.rb:123
+ rspec ./spec/foo_spec.rb:456
+ rspec './spec/bar_spec.rb[1:2]'
+ rspec './spec/bar_spec.rb[3:4]'
+ LOG
+ end
+
+ it { is_expected.to eq(%w[spec/foo_spec.rb spec/bar_spec.rb]) }
+ end
+ end
+end
diff --git a/spec/tooling/lib/tooling/api/pipeline_spec.rb b/spec/tooling/lib/tooling/api/pipeline_spec.rb
new file mode 100644
index 00000000000..1c641328796
--- /dev/null
+++ b/spec/tooling/lib/tooling/api/pipeline_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require_relative '../../../../../tooling/lib/tooling/api/pipeline'
+require_relative '../../../../../tooling/lib/tooling/api/job'
+
+RSpec.describe Tooling::API::Pipeline, feature_category: :tooling do
+ let(:pipeline) { described_class.new('api_token', 'project_id', 'pipeline_id') }
+
+ describe '#failed_jobs' do
+ subject { pipeline.failed_jobs }
+
+ context 'when there are failed jobs' do
+ let(:jobs) { [{ 'id' => '123' }, { 'id' => '456' }] }
+ let(:response) { instance_double(Net::HTTPOK, body: jobs.to_json, '[]' => nil) }
+
+ it 'returns the jobs' do
+ allow(Tooling::API::Request).to receive(:get).and_yield(response)
+
+ expect(pipeline.failed_jobs).to eq(jobs)
+ end
+ end
+ end
+
+ describe '#failed_spec_files' do
+ let(:job1) { { 'id' => 1 } }
+ let(:job2) { { 'id' => 2 } }
+ let(:failed_jobs) { [job1, job2] }
+ let(:job1_failed_files) { %w[spec/foo_spec.rb spec/bar_spec.rb] }
+ let(:job2_failed_files) { %w[spec/baz_spec.rb spec/qux_spec.rb] }
+ let(:failed_files) { job1_failed_files + job2_failed_files }
+
+ subject { pipeline.failed_spec_files }
+
+ before do
+ allow(pipeline).to receive(:failed_jobs).and_return(failed_jobs)
+
+ allow(Tooling::API::Job).to receive(:new).with(anything, anything, job1['id']).and_return(job1)
+ allow(job1).to receive(:rspec_failed_files).and_return(job1_failed_files)
+
+ allow(Tooling::API::Job).to receive(:new).with(anything, anything, job2['id']).and_return(job2)
+ allow(job2).to receive(:rspec_failed_files).and_return(job2_failed_files)
+ end
+
+ it 'returns the failed spec files' do
+ expect(pipeline.failed_spec_files).to match_array(failed_files)
+ end
+
+ context 'when Tooling::Debug is enabled' do
+ around do |example|
+ Tooling::Debug.debug = true
+ example.run
+ ensure
+ Tooling::Debug.debug = false
+ end
+
+ it 'outputs the job logs' do
+ expect { pipeline.failed_spec_files }.to output(/Fetching failed jobs... found 2/).to_stdout
+ expect { pipeline.failed_spec_files }.to output(/Fetching job logs for #1/).to_stdout
+ expect { pipeline.failed_spec_files }.to output(/Fetching job logs for #2/).to_stdout
+ end
+ end
+ end
+end
diff --git a/spec/tooling/lib/tooling/api/request_spec.rb b/spec/tooling/lib/tooling/api/request_spec.rb
new file mode 100644
index 00000000000..091bd860cbd
--- /dev/null
+++ b/spec/tooling/lib/tooling/api/request_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require_relative '../../../../../tooling/lib/tooling/api/request'
+
+require 'webmock/rspec'
+
+RSpec.describe Tooling::API::Request, feature_category: :tooling do
+ let(:base_url) { 'https://gitlab.com/api/v4/projects/project_id/pipelines/pipeline_id/jobs' }
+
+ describe '.get' do
+ let(:body) { 'body' }
+
+ subject(:response) { described_class.get('api_token', URI(base_url)) }
+
+ context 'when the response is successful' do
+ before do
+ stub_request(:get, base_url).to_return(status: 200, body: body)
+ end
+
+ it { expect(response.body).to eq(body) }
+ end
+
+ context 'when the response is not successful' do
+ before do
+ stub_request(:get, base_url).to_return(status: 500)
+ end
+
+ it { expect(response.body).to be_empty }
+ end
+
+ context 'when there are multiple pages' do
+ let(:body1) { 'body1' }
+ let(:body2) { 'body2' }
+
+ before do
+ stub_request(:get, base_url).to_return(
+ status: 200, body: body1, headers: { 'Link' => %(<#{base_url}&page=2>; rel="next") }
+ )
+ stub_request(:get, "#{base_url}&page=2").to_return(status: 200, body: body2, headers: { 'Link' => '' })
+ end
+
+ it 'yields each page' do
+ expected = [body1, body2]
+
+ expected_yield = proc do |response|
+ expect(response.body).to eq(expected.shift)
+ end
+
+ described_class.get('api_token', URI(base_url), &expected_yield)
+ end
+ end
+ end
+end
diff --git a/spec/tooling/lib/tooling/debug_spec.rb b/spec/tooling/lib/tooling/debug_spec.rb
new file mode 100644
index 00000000000..c506a93a24a
--- /dev/null
+++ b/spec/tooling/lib/tooling/debug_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require_relative '../../../../tooling/lib/tooling/debug'
+
+RSpec.describe Tooling::Debug, feature_category: :tooling do
+ let(:some_class) do
+ Class.new do
+ include Tooling::Debug
+
+ def print_hello_world
+ print 'hello world'
+ end
+
+ def puts_hello_world
+ puts 'hello world'
+ end
+ end
+ end
+
+ after do
+ # Ensure that debug mode is default disabled at start of specs.
+ described_class.debug = false
+ end
+
+ shared_context 'when debug is enabled' do
+ before do
+ described_class.debug = true
+ end
+ end
+
+ shared_context 'when debug is disabled' do
+ before do
+ described_class.debug = false
+ end
+ end
+
+ shared_examples 'writes to stdout' do |str|
+ it 'writes to stdout' do
+ expect { subject }.to output(str).to_stdout
+ end
+ end
+
+ shared_examples 'does not write to stdout' do
+ it 'does not write to stdout' do
+ expect { subject }.not_to output.to_stdout
+ end
+ end
+
+ describe '#print' do
+ subject { some_class.new.print_hello_world }
+
+ context 'when debug is enabled' do
+ include_context 'when debug is enabled'
+ include_examples 'writes to stdout', 'hello world'
+ end
+
+ context 'when debug is disabled' do
+ include_context 'when debug is disabled'
+ include_examples 'does not write to stdout'
+ end
+ end
+
+ describe '#puts' do
+ subject { some_class.new.puts_hello_world }
+
+ context 'when debug is enabled' do
+ include_context 'when debug is enabled'
+ include_examples 'writes to stdout', "hello world\n"
+ end
+
+ context 'when debug is disabled' do
+ include_context 'when debug is disabled'
+ include_examples 'does not write to stdout'
+ end
+ end
+end
diff --git a/spec/tooling/lib/tooling/helpers/duration_formatter_spec.rb b/spec/tooling/lib/tooling/helpers/duration_formatter_spec.rb
new file mode 100644
index 00000000000..59703a87ea1
--- /dev/null
+++ b/spec/tooling/lib/tooling/helpers/duration_formatter_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../../tooling/lib/tooling/helpers/duration_formatter'
+
+module TestModule
+ class MockClass
+ include Tooling::Helpers::DurationFormatter
+ end
+end
+
+RSpec.describe Tooling::Helpers::DurationFormatter, feature_category: :tooling do
+ subject(:result) { TestModule::MockClass.new.readable_duration(argument) }
+
+ context 'when duration is less than 1 second' do
+ let(:argument) { 0.11111 }
+
+ it { expect(result).to eq('0.11 second') }
+ end
+
+ context 'when duration is less than 60 seconds' do
+ let(:argument) { 5.2 }
+
+ it { expect(result).to eq('5.2 seconds') }
+ end
+
+ context 'when duration is exactly 60 seconds' do
+ let(:argument) { 60 }
+
+ it { expect(result).to eq('1 minute') }
+ end
+
+ context 'when duration is 60.02 seconds' do
+ let(:argument) { 60.02 }
+
+ it { expect(result).to eq('1 minute 0.02 second') }
+ end
+
+ context 'when duration is 65.5 seconds' do
+ let(:argument) { 65.5 }
+
+ it { expect(result).to eq('1 minute 5.5 seconds') }
+ end
+
+ context 'when duration is more than 2 minutes' do
+ let(:argument) { 120.5 }
+
+ it { expect(result).to eq('2 minutes 0.5 second') }
+ end
+end
diff --git a/spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb b/spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb
index b7b39c37819..ed9fdc3825b 100644
--- a/spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb
+++ b/spec/tooling/lib/tooling/parallel_rspec_runner_spec.rb
@@ -1,15 +1,17 @@
# frozen_string_literal: true
+require 'fast_spec_helper'
require 'tempfile'
+require 'fileutils'
require_relative '../../../../tooling/lib/tooling/parallel_rspec_runner'
-RSpec.describe Tooling::ParallelRSpecRunner do # rubocop:disable RSpec/FilePath
+RSpec.describe Tooling::ParallelRSpecRunner, feature_category: :tooling do # rubocop:disable RSpec/FilePath -- parallel_r_spec_runner_spec.rb is too long
describe '#run' do
let(:test_dir) { 'spec' }
let(:node_tests) { %w[01_spec.rb 03_spec.rb] }
let(:allocator) { instance_double(Knapsack::Allocator, test_dir: test_dir, node_tests: node_tests) }
- let(:allocator_builder) { double(Knapsack::AllocatorBuilder, allocator: allocator) } # rubocop:disable RSpec/VerifiedDoubles
+ let(:allocator_builder) { instance_double(Knapsack::AllocatorBuilder, allocator: allocator) }
let(:filter_tests) { [] }
let(:filter_tests_file) { nil }
@@ -17,7 +19,6 @@ RSpec.describe Tooling::ParallelRSpecRunner do # rubocop:disable RSpec/FilePath
before do
allow(Knapsack::AllocatorBuilder).to receive(:new).and_return(allocator_builder)
- allow(Knapsack.logger).to receive(:info)
end
after do
@@ -27,28 +28,34 @@ RSpec.describe Tooling::ParallelRSpecRunner do # rubocop:disable RSpec/FilePath
end
end
- subject { described_class.new(filter_tests_file: filter_tests_file_path, rspec_args: rspec_args) }
+ subject(:runner) do
+ described_class.new(filter_tests_file: filter_tests_file_path, rspec_args: rspec_args)
+ end
shared_examples 'runs node tests' do
let(:rspec_args) { nil }
+ before do
+ allow(Knapsack.logger).to receive(:info)
+ end
+
it 'runs rspec with tests allocated for this node' do
expect(allocator_builder).to receive(:filter_tests=).with(filter_tests)
expect_command(%W[bundle exec rspec#{rspec_args} --] + node_tests)
- subject.run
+ runner.run
end
end
context 'without filter_tests_file option' do
- subject { described_class.new(rspec_args: rspec_args) }
+ subject(:runner) { described_class.new(rspec_args: rspec_args) }
it_behaves_like 'runs node tests'
end
context 'given filter tests file' do
let(:filter_tests_file) do
- Tempfile.create.tap do |f| # rubocop:disable Rails/SaveBang
+ Tempfile.create.tap do |f|
f.write(filter_tests.join(' '))
f.rewind
end
@@ -79,8 +86,88 @@ RSpec.describe Tooling::ParallelRSpecRunner do # rubocop:disable RSpec/FilePath
it_behaves_like 'runs node tests'
end
+ # rubocop:disable Gitlab/Json -- standard JSON is sufficient
+ context 'when KNAPSACK_RSPEC_SUITE_REPORT_PATH set' do
+ let(:rspec_args) { nil }
+ let(:master_report_file_name) { 'master-report1.json' }
+ let(:master_report) do
+ {
+ "01_spec.rb" => 65,
+ "02_spec.rb" => 60
+ }
+ end
+
+ let(:master_report_file) do
+ Tempfile.open(master_report_file_name) do |f|
+ f.write(JSON.dump(master_report))
+ f
+ end
+ end
+
+ let(:expected_report_file_path) do
+ "#{File.dirname(master_report_file.path)}/node_specs_expected_duration.json"
+ end
+
+ let(:expected_report_content) { JSON.dump({ "01_spec.rb" => 65 }) }
+
+ before do
+ stub_env('KNAPSACK_RSPEC_SUITE_REPORT_PATH', master_report_file.path)
+ allow(allocator_builder).to receive(:filter_tests=).with(filter_tests)
+ allow(runner).to receive(:exec)
+ end
+
+ after do
+ master_report_file.close
+ master_report_file.unlink
+ end
+
+ context 'when GITLAB_CI env var is not set' do
+ before do
+ stub_env('GITLAB_CI', nil)
+ end
+
+ it 'does not parse expected rspec report' do
+ expected_output = <<~MARKDOWN.chomp
+ Running command: bundle exec rspec -- 01_spec.rb 03_spec.rb
+
+ MARKDOWN
+
+ expect(File).not_to receive(:write).with(expected_report_file_path, expected_report_content)
+
+ expect { runner.run }.to output(expected_output).to_stdout
+ end
+ end
+
+ context 'with GITLAB_CI env var set to true' do
+ before do
+ stub_env('GITLAB_CI', true)
+ end
+
+ it 'parses expected rspec report' do
+ expected_output = <<~MARKDOWN.chomp
+ Parsing expected rspec suite duration...
+ 03_spec.rb not found in master report
+ RSpec suite is expected to take 1 minute 5 seconds.
+ Expected duration for tests:
+
+ {
+ "01_spec.rb": 65
+ }
+
+ Running command: bundle exec rspec -- 01_spec.rb 03_spec.rb
+
+ MARKDOWN
+
+ expect(File).to receive(:write).with(expected_report_file_path, expected_report_content)
+
+ expect { runner.run }.to output(expected_output).to_stdout
+ end
+ end
+ end
+ # rubocop:enable Gitlab/Json
+
def expect_command(cmd)
- expect(subject).to receive(:exec).with(*cmd)
+ expect(runner).to receive(:exec).with(*cmd)
end
end
end
diff --git a/spec/tooling/lib/tooling/predictive_tests_spec.rb b/spec/tooling/lib/tooling/predictive_tests_spec.rb
index fdb7d09a3e2..e49daa70295 100644
--- a/spec/tooling/lib/tooling/predictive_tests_spec.rb
+++ b/spec/tooling/lib/tooling/predictive_tests_spec.rb
@@ -106,6 +106,7 @@ RSpec.describe Tooling::PredictiveTests, feature_category: :tooling do
context 'when some files used for frontend fixtures were changed' do
let(:changed_files_content) { 'app/models/todo.rb' }
let(:changed_files_matching_test) { 'spec/models/todo_spec.rb' }
+ let(:additional_matching_tests) { 'spec/models/every_model_spec.rb' }
let(:matching_frontend_fixture) { 'tmp/tests/frontend/fixtures-ee/todos/todos.html' }
let(:fixtures_mapping_content) do
JSON.dump(changed_files_matching_test => [matching_frontend_fixture]) # rubocop:disable Gitlab/Json
@@ -120,7 +121,7 @@ RSpec.describe Tooling::PredictiveTests, feature_category: :tooling do
it 'appends the spec file to RSPEC_MATCHING_TESTS_PATH' do
expect { subject }.to change { File.read(matching_tests.path) }
.from(matching_tests_initial_content)
- .to("#{matching_tests_initial_content} #{changed_files_matching_test}")
+ .to("#{matching_tests_initial_content} #{additional_matching_tests} #{changed_files_matching_test}")
end
it 'does not change files other than RSPEC_CHANGED_FILES_PATH nor RSPEC_MATCHING_TESTS_PATH' do
diff --git a/spec/tooling/merge_request_spec.rb b/spec/tooling/merge_request_spec.rb
index de6fd48ad9f..667b2869fe9 100644
--- a/spec/tooling/merge_request_spec.rb
+++ b/spec/tooling/merge_request_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
require 'webmock/rspec'
+require 'gitlab/rspec/next_instance_of'
require_relative '../../tooling/merge_request'
-require_relative '../support/helpers/next_instance_of'
RSpec.describe Tooling::MergeRequest do
let(:project_path) { 'gitlab-org/gitlab' }
diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index d7d04015b48..b6c469929ea 100644
--- a/spec/tooling/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Quality::TestLevel, feature_category: :tooling do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
- .to eq("spec/{bin,channels,click_house,components,config,contracts,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb")
+ .to eq("spec/{bin,channels,components,config,contracts,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,keeps,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb")
end
end
@@ -121,7 +121,7 @@ RSpec.describe Quality::TestLevel, feature_category: :tooling do
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
- .to eq(%r{spec/(bin|channels|click_house|components|config|contracts|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)/})
+ .to eq(%r{spec/(bin|channels|components|config|contracts|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|keeps|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)/})
end
end
@@ -230,6 +230,10 @@ RSpec.describe Quality::TestLevel, feature_category: :tooling do
expect(subject.level_for('spec/features/abuse_report_spec.rb')).to eq(:system)
end
+ it 'returns the correct level for a keep test' do
+ expect(subject.level_for('spec/keeps/helpers/postgres_ai_spec.rb')).to eq(:unit)
+ end
+
it 'raises an error for an unknown level' do
expect { subject.level_for('spec/unknown/foo_spec.rb') }
.to raise_error(described_class::UnknownTestLevelError,
diff --git a/spec/uploaders/gitlab_uploader_spec.rb b/spec/uploaders/gitlab_uploader_spec.rb
index bd86f1fe08a..df32bc98392 100644
--- a/spec/uploaders/gitlab_uploader_spec.rb
+++ b/spec/uploaders/gitlab_uploader_spec.rb
@@ -56,6 +56,12 @@ RSpec.describe GitlabUploader, feature_category: :shared do
end
end
+ describe '#empty_size?' do
+ it 'is true' do
+ expect(subject.empty_size?).to eq(true)
+ end
+ end
+
describe '#cache!' do
it 'moves the file from the working directory to the cache directory' do
# One to get the work dir, the other to remove it
diff --git a/spec/validators/ip_cidr_array_validator_spec.rb b/spec/validators/ip_cidr_array_validator_spec.rb
index 4ea46d714c2..6adb0bc70db 100644
--- a/spec/validators/ip_cidr_array_validator_spec.rb
+++ b/spec/validators/ip_cidr_array_validator_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe IpCidrArrayValidator, feature_category: :shared do
using RSpec::Parameterized::TableSyntax
- # noinspection RubyMismatchedArgumentType - RubyMine is resolving `#|` from Array, instead of Rspec::Parameterized
+ # noinspection RubyMismatchedArgumentType - https://handbook.gitlab.com/handbook/tools-and-tips/editors-and-ides/jetbrains-ides/tracked-jetbrains-issues/#ruby-32041
where(:cidr_array, :validity, :errors) do
# rubocop:disable Layout/LineLength -- The RSpec table syntax often requires long lines for errors
nil | false | { cidr_array: ["must be an array of CIDR values"] }
diff --git a/spec/validators/json_schema_validator_spec.rb b/spec/validators/json_schema_validator_spec.rb
index 01caf4ab0bd..f3a22892407 100644
--- a/spec/validators/json_schema_validator_spec.rb
+++ b/spec/validators/json_schema_validator_spec.rb
@@ -58,5 +58,30 @@ RSpec.describe JsonSchemaValidator do
end
end
end
+
+ context 'when detail_errors is true' do
+ let(:validator) { described_class.new(attributes: [:data], detail_errors: true, filename: "build_report_result_data") }
+
+ context 'when data is valid' do
+ it 'returns no errors' do
+ subject
+
+ expect(build_report_result.errors).to be_empty
+ end
+ end
+
+ context 'when data is invalid' do
+ it 'returns json schema is invalid' do
+ build_report_result.data = { invalid: 'data' }
+
+ subject
+
+ expect(build_report_result.errors.size).to eq(1)
+ expect(build_report_result.errors.full_messages).to match_array(
+ ["Data '/invalid' must be a valid 'schema'"]
+ )
+ end
+ end
+ end
end
end
diff --git a/spec/validators/kubernetes_container_resources_validator_spec.rb b/spec/validators/kubernetes_container_resources_validator_spec.rb
new file mode 100644
index 00000000000..aea561bafb9
--- /dev/null
+++ b/spec/validators/kubernetes_container_resources_validator_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe KubernetesContainerResourcesValidator, feature_category: :shared do
+ let(:model) do
+ Class.new do
+ include ActiveModel::Model
+ include ActiveModel::Validations
+
+ attr_accessor :resources
+ alias_method :resources_before_type_cast, :resources
+
+ validates :resources, kubernetes_container_resources: true
+ end.new
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ # noinspection RubyMismatchedArgumentType - https://handbook.gitlab.com/handbook/tools-and-tips/editors-and-ides/jetbrains-ides/tracked-jetbrains-issues/#ruby-32041
+ where(:resources, :validity, :errors) do
+ # rubocop:disable Layout/LineLength -- The RSpec table syntax often requires long lines for errors
+ nil | false | { resources: ["must be a hash"] }
+ '' | false | { resources: ["must be a hash"] }
+ {} | false | { resources: ["must be a hash containing 'cpu' and 'memory' attribute of type string"] }
+ { cpu: nil, memory: nil } | false | { resources: ["'cpu: ' must be a string", "'memory: ' must be a string"] }
+ { cpu: "123di", memory: "123oi" } | false | { resources: ["'cpu: 123di' must match the regex '^(\\d+m|\\d+(\\.\\d*)?)$'", "'memory: 123oi' must match the regex '^\\d+(\\.\\d*)?([EPTGMK]|[EPTGMK]i)?$'"] }
+    { cpu: "100m", memory: "123oi" } | false | { resources: ["'memory: 123oi' must match the regex '^\\d+(\\.\\d*)?([EPTGMK]|[EPTGMK]i)?$'"] }
+ { cpu: "100m", memory: "123Mi" } | true | {}
+ # rubocop:enable Layout/LineLength
+ end
+
+ with_them do
+ before do
+ model.resources = resources
+ model.validate
+ end
+
+ it { expect(model.valid?).to eq(validity) }
+ it { expect(model.errors.messages).to eq(errors) }
+ end
+end
diff --git a/spec/views/admin/application_settings/_security_txt.html.haml_spec.rb b/spec/views/admin/application_settings/_security_txt.html.haml_spec.rb
new file mode 100644
index 00000000000..9f420d018e5
--- /dev/null
+++ b/spec/views/admin/application_settings/_security_txt.html.haml_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'admin/application_settings/_security_txt.html.haml', feature_category: :compliance_management do
+ let(:app_settings) { build(:application_setting) }
+
+ before do
+ assign(:application_setting, app_settings)
+ allow(view).to receive(:expanded).and_return(true)
+ end
+
+ context 'when security contact information is not set' do
+ it 'renders the form correctly' do
+ render
+
+ expect(rendered).to have_selector(
+ 'textarea',
+ id: 'application_setting_security_txt_content',
+ exact_text: ''
+ )
+ end
+ end
+
+ context 'when security contact information is set' do
+ let(:app_settings) { build(:application_setting, security_txt_content: 'HELLO') }
+
+ it 'renders the form correctly' do
+ render
+
+ expect(rendered).to have_selector(
+ 'textarea',
+ id: 'application_setting_security_txt_content',
+ exact_text: 'HELLO'
+ )
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/general.html.haml_spec.rb b/spec/views/admin/application_settings/general.html.haml_spec.rb
index 99564003d59..ad581ee6093 100644
--- a/spec/views/admin/application_settings/general.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/general.html.haml_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe 'admin/application_settings/general.html.haml' do
end
# for the licensed tests, refer to ee/spec/views/admin/application_settings/general.html.haml_spec.rb
- describe 'instance-level code suggestions settings', :without_license, feature_category: :code_suggestions do
+ describe 'instance-level ai-powered settings', :without_license, feature_category: :code_suggestions do
before do
allow(::Gitlab).to receive(:org_or_com?).and_return(gitlab_org_or_com?)
@@ -125,6 +125,7 @@ RSpec.describe 'admin/application_settings/general.html.haml' do
shared_examples 'does not render the form' do
it 'does not render the form' do
expect(rendered).not_to have_field('application_setting_instance_level_code_suggestions_enabled')
+ expect(rendered).not_to have_field('application_setting_instance_level_ai_beta_features_enabled')
end
end
diff --git a/spec/views/dashboard/projects/index.html.haml_spec.rb b/spec/views/dashboard/projects/index.html.haml_spec.rb
index 08e88e4bdcf..8e051b821c6 100644
--- a/spec/views/dashboard/projects/index.html.haml_spec.rb
+++ b/spec/views/dashboard/projects/index.html.haml_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe 'dashboard/projects/index.html.haml' do
allow(view).to receive(:limited_counter_with_delimiter)
allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:time_ago_with_tooltip)
- allow(view).to receive(:project_icon)
assign(:projects, [build(:project, name: 'awesome stuff')])
end
diff --git a/spec/views/devise/shared/_signup_box.html.haml_spec.rb b/spec/views/devise/shared/_signup_box.html.haml_spec.rb
index 377e29e18e7..eba036083be 100644
--- a/spec/views/devise/shared/_signup_box.html.haml_spec.rb
+++ b/spec/views/devise/shared/_signup_box.html.haml_spec.rb
@@ -8,13 +8,13 @@ RSpec.describe 'devise/shared/_signup_box' do
let(:translation_com) do
s_("SignUp|By clicking %{button_text} or registering through a third party you "\
- "accept the GitLab%{link_start} Terms of Use and acknowledge the Privacy Policy "\
+ "accept the GitLab%{link_start} Terms of Use and acknowledge the Privacy Statement "\
"and Cookie Policy%{link_end}")
end
let(:translation_non_com) do
s_("SignUp|By clicking %{button_text} or registering through a third party you "\
- "accept the%{link_start} Terms of Use and acknowledge the Privacy Policy and "\
+ "accept the%{link_start} Terms of Use and acknowledge the Privacy Statement and "\
"Cookie Policy%{link_end}")
end
diff --git a/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb b/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb
index cc3ee52e73b..c3e4bd76b30 100644
--- a/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb
+++ b/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb
@@ -47,4 +47,14 @@ RSpec.describe 'devise/shared/_signup_omniauth_provider_list', feature_category:
it_behaves_like 'sso buttons have snowplow tracking'
end
+
+ context 'when rendering button' do
+ before do
+ render
+ end
+
+ it 'renders button in form' do
+ expect(rendered).to have_css('form[action="/users/auth/github"]')
+ end
+ end
end
diff --git a/spec/views/groups/_home_panel.html.haml_spec.rb b/spec/views/groups/_home_panel.html.haml_spec.rb
index e76862cdaea..ab556a3db01 100644
--- a/spec/views/groups/_home_panel.html.haml_spec.rb
+++ b/spec/views/groups/_home_panel.html.haml_spec.rb
@@ -9,12 +9,6 @@ RSpec.describe 'groups/_home_panel' do
assign(:group, group)
end
- it 'renders the group ID' do
- render
-
- expect(rendered).to have_content("Group ID: #{group.id}")
- end
-
context 'admin area link' do
it 'renders admin area link for admin' do
allow(view).to receive(:current_user).and_return(create(:admin))
diff --git a/spec/views/layouts/_header_search.html.haml_spec.rb b/spec/views/layouts/_header_search.html.haml_spec.rb
deleted file mode 100644
index 3a21bb3a92c..00000000000
--- a/spec/views/layouts/_header_search.html.haml_spec.rb
+++ /dev/null
@@ -1,113 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'layouts/_header_search' do
- let(:project) { nil }
- let(:group) { nil }
- let(:scope) { nil }
- let(:ref) { nil }
- let(:code_search) { false }
- let(:for_snippets) { false }
-
- let(:header_search_context) do
- {
- project: project,
- group: group,
- scope: scope,
- ref: ref,
- code_search: code_search,
- for_snippets: for_snippets
- }
- end
-
- before do
- allow(view).to receive(:header_search_context).and_return(header_search_context)
- end
-
- shared_examples 'hidden fields are properly set' do
- context 'when search_context has a scope value' do
- let(:scope) { 'issues' }
-
- it 'sets scope input to issues' do
- render
-
- expect(rendered).to have_css("input[name='scope'][value='#{scope}']", count: 1, visible: false)
- end
- end
-
- context 'when search_context has a code_search value' do
- let(:code_search) { true }
-
- it 'sets search_code input to true' do
- render
-
- expect(rendered).to have_css("input[name='search_code'][value='#{code_search}']", count: 1, visible: false)
- end
- end
-
- context 'when search_context has a ref value' do
- let(:ref) { 'test-branch' }
-
- it 'sets repository_ref input to test-branch' do
- render
-
- expect(rendered).to have_css("input[name='repository_ref'][value='#{ref}']", count: 1, visible: false)
- end
- end
-
- context 'when search_context has a for_snippets value' do
- let(:for_snippets) { true }
-
- it 'sets for_snippets input to true' do
- render
-
- expect(rendered).to have_css("input[name='snippets'][value='#{for_snippets}']", count: 1, visible: false)
- end
- end
-
- context 'nav_source' do
- it 'always set to navbar' do
- render
-
- expect(rendered).to have_css("input[name='nav_source'][value='navbar']", count: 1, visible: false)
- end
- end
-
- context 'submit button' do
- it 'always renders for specs' do
- render
-
- expect(rendered).to have_css('noscript button', text: 'Search')
- end
- end
- end
-
- context 'when doing a project level search' do
- let(:project) do
- { id: 123, name: 'foo' }
- end
-
- it 'sets project_id field' do
- render
-
- expect(rendered).to have_css("input[name='project_id'][value='#{project[:id]}']", count: 1, visible: false)
- end
-
- it_behaves_like 'hidden fields are properly set'
- end
-
- context 'when doing a group level search' do
- let(:group) do
- { id: 123, name: 'bar' }
- end
-
- it 'sets group_id field' do
- render
-
- expect(rendered).to have_css("input[name='group_id'][value='#{group[:id]}']", count: 1, visible: false)
- end
-
- it_behaves_like 'hidden fields are properly set'
- end
-end
diff --git a/spec/views/layouts/fullscreen.html.haml_spec.rb b/spec/views/layouts/fullscreen.html.haml_spec.rb
index 2309e885b75..c763b53ac6c 100644
--- a/spec/views/layouts/fullscreen.html.haml_spec.rb
+++ b/spec/views/layouts/fullscreen.html.haml_spec.rb
@@ -4,8 +4,6 @@ require 'spec_helper'
RSpec.describe 'layouts/fullscreen' do
let_it_be(:template) { 'layouts/fullscreen' }
- let_it_be(:top_nav_partial) { 'layouts/header/_default' }
- let_it_be(:top_nav_responsive_partial) { 'layouts/nav/_top_nav_responsive' }
let_it_be(:user) { create(:user) }
@@ -27,58 +25,6 @@ RSpec.describe 'layouts/fullscreen' do
expect(rendered).to have_selector(".flash-container.flash-container-no-margin")
end
- it 'renders top nav' do
- render
-
- expect(view).to render_template(top_nav_partial)
- expect(view).to render_template(top_nav_responsive_partial)
- end
-
it_behaves_like 'a layout which reflects the application theme setting'
it_behaves_like 'a layout which reflects the preferred language'
-
- describe 'sidebar' do
- context 'when nav is set' do
- before do
- allow(view).to receive(:nav).and_return("admin")
- render
- end
-
- it 'renders the sidebar' do
- expect(rendered).to render_template("layouts/nav/sidebar/_admin")
- expect(rendered).to have_selector("aside.nav-sidebar")
- end
-
- it 'adds the proper classes' do
- expect(rendered).to have_selector(".layout-page.gl-mt-0\\!")
- end
- end
-
- describe 'when nav is not set' do
- before do
- allow(view).to receive(:nav).and_return(nil)
- render
- end
-
- it 'does not render the sidebar' do
- expect(rendered).not_to render_template("layouts/nav/sidebar/_admin")
- expect(rendered).not_to have_selector("aside.nav-sidebar")
- end
-
- it 'not add classes' do
- expect(rendered).not_to have_selector(".layout-page.gl-mt-0\\!")
- end
- end
- end
-
- context 'when minimal is set' do
- subject { render(template: template, formats: :html, locals: { minimal: true }) }
-
- it 'does not render top nav' do
- subject
-
- expect(view).not_to render_template(top_nav_partial)
- expect(view).not_to render_template(top_nav_responsive_partial)
- end
- end
end
diff --git a/spec/views/layouts/header/_gitlab_version.html.haml_spec.rb b/spec/views/layouts/header/_gitlab_version.html.haml_spec.rb
deleted file mode 100644
index a027bdd6357..00000000000
--- a/spec/views/layouts/header/_gitlab_version.html.haml_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'layouts/header/_gitlab_version' do
- describe 'when show_version_check? is true' do
- before do
- allow(view).to receive(:show_version_check?).and_return(true)
- render
- end
-
- it 'renders the version check badge' do
- expect(rendered).to have_selector('.js-gitlab-version-check-badge')
- end
-
- it 'renders the container as a link' do
- expect(rendered).to have_selector(
- 'a[data-testid="gitlab-version-container"][href="/help/update/index"]'
- )
- end
-
- it 'renders the container with correct data-tracking attributes' do
- expect(rendered).to have_selector(
- 'a[data-testid="gitlab-version-container"][data-track-action="click_link"]'
- )
-
- expect(rendered).to have_selector(
- 'a[data-testid="gitlab-version-container"][data-track-label="version_help_dropdown"]'
- )
-
- expect(rendered).to have_selector(
- 'a[data-testid="gitlab-version-container"][data-track-property="navigation_top"]'
- )
- end
- end
-end
diff --git a/spec/views/layouts/header/_new_dropdown.haml_spec.rb b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
deleted file mode 100644
index ef028da7ab9..00000000000
--- a/spec/views/layouts/header/_new_dropdown.haml_spec.rb
+++ /dev/null
@@ -1,204 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'layouts/header/_new_dropdown', feature_category: :navigation do
- let_it_be(:user) { create(:user) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
-
- shared_examples_for 'invite member selector' do
- context 'with ability to invite members' do
- it { is_expected.to have_selector('.js-invite-members-trigger') }
- end
-
- context 'without ability to invite members' do
- let(:invite_member) { false }
-
- it { is_expected.not_to have_selector('.js-invite-members-trigger') }
- end
- end
-
- context 'with group-specific links' do
- let_it_be(:group) { create(:group) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
-
- before do
- stub_current_user(user)
-
- assign(:group, group)
- end
-
- context 'as a Group owner' do
- before do
- group.add_owner(user)
- end
-
- it 'has a "New project" link' do
- render
-
- expect(rendered).to have_link('New project', href: new_project_path(namespace_id: group.id))
- end
-
- it 'has a "New subgroup" link' do
- render
-
- expect(rendered)
- .to have_link('New subgroup', href: new_group_path(parent_id: group.id, anchor: 'create-group-pane'))
- end
- end
-
- describe 'invite members item' do
- let(:href) { group_group_members_path(group) }
- let(:invite_member) { true }
-
- before do
- allow(view).to receive(:can?).with(user, :create_projects, group).and_return(true)
- allow(view).to receive(:can?).with(user, :admin_group_member, group).and_return(invite_member)
- allow(view).to receive(:can_admin_group_member?).and_return(invite_member)
- end
-
- subject do
- render
-
- rendered
- end
-
- it_behaves_like 'invite member selector'
- end
- end
-
- context 'with project-specific links' do
- let_it_be(:project) { create(:project, creator: user, namespace: user.namespace) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
-
- before do
- assign(:project, project)
- end
-
- context 'as a Project owner' do
- before do
- stub_current_user(user)
- end
-
- it 'has a "New issue" link' do
- render
-
- expect(rendered).to have_link('New issue', href: new_project_issue_path(project))
- end
-
- it 'has a "New merge request" link' do
- render
-
- expect(rendered).to have_link('New merge request', href: project_new_merge_request_path(project))
- end
-
- it 'has a "New snippet" link' do
- render
-
- expect(rendered).to have_link('New snippet', href: new_project_snippet_path(project))
- end
- end
-
- context 'as a Project guest' do
- let_it_be(:guest) { create(:user) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
-
- before do
- stub_current_user(guest)
- project.add_guest(guest)
- end
-
- it 'has no "New merge request" link' do
- render
-
- expect(rendered).not_to have_link('New merge request')
- end
-
- it 'has no "New snippet" link' do
- render
-
- expect(rendered).not_to have_link('New snippet', href: new_project_snippet_path(project))
- end
- end
-
- describe 'invite members item' do
- let(:invite_member) { true }
- let(:href) { project_project_members_path(project) }
-
- before do
- allow(view).to receive(:can_admin_project_member?).and_return(invite_member)
- stub_current_user(user)
- end
-
- subject do
- render
-
- rendered
- end
-
- it_behaves_like 'invite member selector'
- end
- end
-
- context 'with global links' do
- before do
- stub_current_user(user)
- end
-
- it 'has a "New project" link' do
- render
-
- expect(rendered).to have_link('New project', href: new_project_path)
- end
-
- it 'has a "New group" link' do
- render
-
- expect(rendered).to have_link('New group', href: new_group_path)
- end
-
- it 'has a "New snippet" link' do
- render
-
- expect(rendered).to have_link('New snippet', href: new_snippet_path)
- end
-
- context 'when partial exists in a menu item' do
- it 'renders the menu item partial without rendering invite modal partial' do
- view_model = {
- title: '_title_',
- menu_sections: [
- {
- title: '_section_title_',
- menu_items: [
- ::Gitlab::Nav::TopNavMenuItem
- .build(id: '_id_', title: '_title_', partial: 'groups/invite_members_top_nav_link')
- ]
- }
- ]
- }
-
- allow(view).to receive(:new_dropdown_view_model).and_return(view_model)
-
- render
-
- expect(response).to render_template(partial: 'groups/_invite_members_top_nav_link')
- end
- end
-
- context 'when the user is not allowed to do anything' do
- let(:user) { create(:user, :external) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
-
- before do
- allow(user).to receive(:can?).and_call_original
- allow(user).to receive(:can?).with(:create_organization).and_return(false)
- end
-
- it 'is nil' do
- # We have to use `view.render` because `render` causes issues
- # https://github.com/rails/rails/issues/41320
- expect(view.render("layouts/header/new_dropdown")).to be_nil
- end
- end
- end
-
- def stub_current_user(current_user)
- allow(view).to receive(:current_user).and_return(current_user)
- end
-end
diff --git a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
deleted file mode 100644
index 3097598aaca..00000000000
--- a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
+++ /dev/null
@@ -1,161 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'layouts/nav/sidebar/_admin', feature_category: :navigation do
- let(:user) { build(:admin) }
-
- before do
- allow(user).to receive(:can_admin_all_resources?).and_return(true)
- allow(view).to receive(:current_user).and_return(user)
- end
-
- shared_examples 'page has active tab' do |title|
- it "activates #{title} tab" do
- render
-
- expect(rendered).to have_selector('.nav-sidebar .sidebar-top-level-items > li.active', count: 1)
- expect(rendered).to have_css('.nav-sidebar .sidebar-top-level-items > li.active', text: title)
- end
- end
-
- shared_examples 'page has active sub tab' do |title|
- it "activates #{title} sub tab" do
- render
-
- expect(rendered).to have_css('.sidebar-sub-level-items > li.active', text: title)
- end
- end
-
- context 'on home page' do
- before do
- allow(controller).to receive(:controller_name).and_return('dashboard')
- end
-
- it_behaves_like 'page has active tab', 'Overview'
- end
-
- it_behaves_like 'has nav sidebar'
-
- context 'on projects' do
- before do
- allow(controller).to receive(:controller_name).and_return('admin/projects')
- allow(controller).to receive(:controller_path).and_return('admin/projects')
- end
-
- it_behaves_like 'page has active tab', 'Overview'
- it_behaves_like 'page has active sub tab', 'Projects'
- end
-
- context 'on groups' do
- before do
- allow(controller).to receive(:controller_name).and_return('groups')
- end
-
- it_behaves_like 'page has active tab', 'Overview'
- it_behaves_like 'page has active sub tab', 'Groups'
- end
-
- context 'on users' do
- before do
- allow(controller).to receive(:controller_name).and_return('users')
- end
-
- it_behaves_like 'page has active tab', 'Overview'
- it_behaves_like 'page has active sub tab', 'Users'
- end
-
- context 'on topics' do
- before do
- allow(controller).to receive(:controller_name).and_return('admin/topics')
- end
-
- it_behaves_like 'page has active tab', 'Overview'
- it_behaves_like 'page has active sub tab', 'Topics'
- end
-
- context 'on runners' do
- before do
- allow(controller).to receive(:controller_name).and_return('runners')
- end
-
- it_behaves_like 'page has active tab', 'CI/CD'
- it_behaves_like 'page has active sub tab', 'Runners'
- end
-
- context 'on jobs' do
- before do
- allow(controller).to receive(:controller_name).and_return('jobs')
- end
-
- it_behaves_like 'page has active tab', 'CI/CD'
- it_behaves_like 'page has active sub tab', 'Jobs'
- end
-
- context 'on messages' do
- before do
- allow(controller).to receive(:controller_name).and_return('broadcast_messages')
- end
-
- it_behaves_like 'page has active tab', 'Messages'
- end
-
- context 'on analytics' do
- before do
- allow(controller).to receive(:controller_name).and_return('dev_ops_report')
- end
-
- it_behaves_like 'page has active tab', 'Analytics'
- end
-
- context 'on hooks' do
- before do
- allow(controller).to receive(:controller_name).and_return('hooks')
- end
-
- it_behaves_like 'page has active tab', 'Hooks'
- end
-
- context 'on background jobs' do
- before do
- allow(controller).to receive(:controller_name).and_return('background_jobs')
- end
-
- it_behaves_like 'page has active tab', 'Monitoring'
- it_behaves_like 'page has active sub tab', 'Background Jobs'
- end
-
- context 'on settings' do
- let(:gitlab_com?) { false }
-
- before do
- allow(::Gitlab).to receive(:com?) { gitlab_com? }
-
- render
- end
-
- it 'includes General link' do
- expect(rendered).to have_link('General', href: general_admin_application_settings_path)
- end
-
- context 'when GitLab.com' do
- let(:gitlab_com?) { true }
-
- it 'does not include Integrations link' do
- expect(rendered).not_to have_link('Integrations', href: integrations_admin_application_settings_path)
- end
- end
-
- context 'when not GitLab.com' do
- it 'includes Integrations link' do
- expect(rendered).to have_link('Integrations', href: integrations_admin_application_settings_path)
- end
- end
-
- context 'when GitLab FOSS' do
- it 'does not include Templates link' do
- expect(rendered).not_to have_link('Templates', href: '/admin/application_settings/templates')
- end
- end
- end
-end
diff --git a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
deleted file mode 100644
index 472a2f3cb34..00000000000
--- a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
+++ /dev/null
@@ -1,187 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'layouts/nav/sidebar/_group' do
- let_it_be(:owner) { create(:user) }
- let_it_be(:group) do
- create(:group).tap do |g|
- g.add_owner(owner)
- end
- end
-
- before do
- assign(:group, group)
-
- allow(view).to receive(:current_user).and_return(owner)
- end
-
- it_behaves_like 'has nav sidebar'
- it_behaves_like 'sidebar includes snowplow attributes', 'render', 'groups_side_navigation', 'groups_side_navigation'
-
- describe 'Group context menu' do
- it 'has a link to the group path' do
- render
-
- expect(rendered).to have_link(group.name, href: group_path(group))
- end
- end
-
- describe 'Group information' do
- it 'has a link to the group activity path' do
- render
-
- expect(rendered).to have_link('Group information', href: activity_group_path(group))
- end
-
- it 'has a link to the group labels path' do
- render
-
- expect(rendered).to have_link('Labels', href: group_labels_path(group))
- end
-
- it 'has a link to the members page' do
- render
-
- expect(rendered).to have_link('Members', href: group_group_members_path(group))
- end
- end
-
- describe 'Issues' do
- it 'has a default link to the issue list path' do
- render
-
- expect(rendered).to have_link('Issues', href: issues_group_path(group))
- end
-
- it 'has a link to the issue list page' do
- render
-
- expect(rendered).to have_link('List', href: issues_group_path(group))
- end
-
- it 'has a link to the boards page' do
- render
-
- expect(rendered).to have_link('Board', href: group_boards_path(group))
- end
-
- it 'has a link to the milestones page' do
- render
-
- expect(rendered).to have_link('Milestones', href: group_milestones_path(group))
- end
- end
-
- describe 'Merge Requests' do
- it 'has a link to the merge request list path' do
- render
-
- expect(rendered).to have_link('Merge requests', href: merge_requests_group_path(group))
- end
-
- it 'shows pill with the number of merge requests' do
- render
-
- expect(rendered).to have_css('span.badge.badge-pill.merge_counter.js-merge-counter')
- end
- end
-
- describe 'CI/CD' do
- it 'has a default link to the runners list path' do
- render
-
- expect(rendered).to have_link('CI/CD', href: group_runners_path(group))
- end
-
- it 'has a link to the runners list page' do
- render
-
- expect(rendered).to have_link('Runners', href: group_runners_path(group))
- end
- end
-
- describe 'Kubernetes menu', :request_store do
- it 'has a link to the group cluster list path' do
- render
-
- expect(rendered).to have_link('Kubernetes', href: group_clusters_path(group))
- end
- end
-
- describe 'Packages and registries' do
- it 'has a link to the package registry page' do
- stub_config(packages: { enabled: true })
-
- render
-
- expect(rendered).to have_link('Package Registry', href: group_packages_path(group))
- end
-
- it 'has a link to the container registry page' do
- stub_container_registry_config(enabled: true)
-
- render
-
- expect(rendered).to have_link('Container Registry', href: group_container_registries_path(group))
- end
-
- it 'has a link to the dependency proxy page' do
- stub_config(dependency_proxy: { enabled: true })
-
- render
-
- expect(rendered).to have_link('Dependency Proxy', href: group_dependency_proxy_path(group))
- end
- end
-
- describe 'Settings' do
- it 'default link points to edit group page' do
- render
-
- expect(rendered).to have_link('Settings', href: edit_group_path(group))
- end
-
- it 'has a link to the General settings page' do
- render
-
- expect(rendered).to have_link('General', href: edit_group_path(group))
- end
-
- it 'has a link to the Integrations settings page' do
- render
-
- expect(rendered).to have_link('Integrations', href: group_settings_integrations_path(group))
- end
-
- it 'has a link to the group Projects settings page' do
- render
-
- expect(rendered).to have_link('Projects', href: projects_group_path(group))
- end
-
- it 'has a link to the Repository settings page' do
- render
-
- expect(rendered).to have_link('Repository', href: group_settings_repository_path(group))
- end
-
- it 'has a link to the CI/CD settings page' do
- render
-
- expect(rendered).to have_link('CI/CD', href: group_settings_ci_cd_path(group))
- end
-
- it 'has a link to the Applications settings page' do
- render
-
- expect(rendered).to have_link('Applications', href: group_settings_applications_path(group))
- end
-
- it 'has a link to the Package and registry settings page' do
- render
-
- expect(rendered).to have_link('Packages and registries', href: group_settings_packages_and_registries_path(group))
- end
- end
-end
diff --git a/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
deleted file mode 100644
index f5a0a7a935c..00000000000
--- a/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'layouts/nav/sidebar/_profile' do
- let(:user) { create(:user) }
-
- before do
- allow(view).to receive(:current_user).and_return(user)
- end
-
- it_behaves_like 'has nav sidebar'
- it_behaves_like 'sidebar includes snowplow attributes', 'render', 'user_side_navigation', 'user_side_navigation'
-
- it 'has a link to access tokens' do
- render
-
- expect(rendered).to have_link(_('Access Tokens'), href: profile_personal_access_tokens_path)
- end
-
- context 'when personal access tokens are disabled' do
- it 'does not have a link to access tokens' do
- allow(::Gitlab::CurrentSettings).to receive_messages(personal_access_tokens_disabled?: true)
-
- render
-
- expect(rendered).not_to have_link(_('Access Tokens'), href: profile_personal_access_tokens_path)
- end
- end
-end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
deleted file mode 100644
index 34debcab5f7..00000000000
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ /dev/null
@@ -1,958 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'layouts/nav/sidebar/_project', feature_category: :navigation do
- let_it_be_with_reload(:project) { create(:project, :repository) }
-
- let(:user) { project.first_owner }
- let(:current_ref) { 'master' }
-
- before do
- assign(:project, project)
- assign(:repository, project.repository)
-
- allow(view).to receive(:current_ref).and_return(current_ref)
- allow(view).to receive(:can?).and_return(true)
- allow(view).to receive(:current_user).and_return(user)
- end
-
- it_behaves_like 'has nav sidebar'
-
- describe 'Project context' do
- it 'has a link to the project path' do
- render
-
- expect(rendered).to have_link(project.name, href: project_path(project), class: 'shortcuts-project')
- expect(rendered).to have_selector("[aria-label=\"#{project.name}\"]")
- end
- end
-
- describe 'Project information' do
- it 'has a link to the project activity path' do
- render
-
- expect(rendered).to have_link('Project information', href: activity_project_path(project), class: %w[shortcuts-project-information])
- expect(rendered).to have_selector('[aria-label="Project information"]')
- end
-
- describe 'Activity' do
- it 'has a link to the project activity path' do
- render
-
- expect(rendered).to have_link('Activity', href: activity_project_path(project), class: 'shortcuts-project-activity')
- end
- end
-
- describe 'Labels' do
- let(:page) { Nokogiri::HTML.parse(rendered) }
-
- it 'has a link to the labels path' do
- render
-
- expect(page.at_css('.shortcuts-project-information').parent.css('[aria-label="Labels"]')).not_to be_empty
- expect(rendered).to have_link('Labels', href: project_labels_path(project))
- end
- end
-
- describe 'Members' do
- let(:page) { Nokogiri::HTML.parse(rendered) }
-
- it 'has a link to the members page' do
- render
-
- expect(page.at_css('.shortcuts-project-information').parent.css('[aria-label="Members"]')).not_to be_empty
- expect(rendered).to have_link('Members', href: project_project_members_path(project))
- end
- end
- end
-
- describe 'Repository' do
- it 'has a link to the project tree path' do
- render
-
- expect(rendered).to have_link('Repository', href: project_tree_path(project, current_ref), class: 'shortcuts-tree')
- end
-
- describe 'Files' do
- it 'has a link to the project tree path' do
- render
-
- expect(rendered).to have_link('Files', href: project_tree_path(project, current_ref))
- end
- end
-
- describe 'Commits' do
- it 'has a link to the fully qualified project commits path' do
- render
-
- expect(rendered).to have_link('Commits', href: project_commits_path(project, current_ref, ref_type: 'heads'), id: 'js-onboarding-commits-link')
- end
- end
-
- describe 'Branches' do
- it 'has a link to the project branches path' do
- render
-
- expect(rendered).to have_link('Branches', href: project_branches_path(project), id: 'js-onboarding-branches-link')
- end
- end
-
- describe 'Tags' do
- it 'has a link to the project tags path' do
- render
-
- expect(rendered).to have_link('Tags', href: project_tags_path(project))
- end
- end
-
- describe 'Contributor statistics' do
- it 'has a link to the project contributors path' do
- render
-
- expect(rendered).to have_link('Contributor statistics', href: project_graph_path(project, current_ref, ref_type: 'heads'))
- end
- end
-
- describe 'Graph' do
- it 'has a link to the project graph path' do
- render
-
- expect(rendered).to have_link('Graph', href: project_network_path(project, current_ref))
- end
- end
-
- describe 'Compare revisions' do
- it 'has a link to the project compare path' do
- render
-
- expect(rendered).to have_link('Compare revisions', href: project_compare_index_path(project, from: project.repository.root_ref, to: current_ref))
- end
- end
- end
-
- describe 'Issues' do
- it 'has a link to the issue list path' do
- render
-
- expect(rendered).to have_link('Issues', href: project_issues_path(project))
- end
-
- it 'shows pill with the number of open issues' do
- render
-
- expect(rendered).to have_css('span.badge.badge-pill.issue_counter')
- end
-
- describe 'Issue List' do
- it 'has a link to the issue list path' do
- render
-
- expect(rendered).to have_link('List', href: project_issues_path(project))
- end
- end
-
- describe 'Issue Boards' do
- it 'has a link to the issue boards path' do
- render
-
- expect(rendered).to have_link('Boards', href: project_boards_path(project))
- end
- end
-
- describe 'Service Desk' do
- it 'has a link to the service desk path' do
- render
-
- expect(rendered).to have_link('Service Desk', href: service_desk_project_issues_path(project))
- end
- end
-
- describe 'Milestones' do
- it 'has a link to the milestones path' do
- render
-
- expect(rendered).to have_link('Milestones', href: project_milestones_path(project))
- end
- end
- end
-
- describe 'External Issue Tracker' do
- let_it_be_with_refind(:project) { create(:project, has_external_issue_tracker: true) }
-
- context 'with custom external issue tracker' do
- let(:external_issue_tracker_url) { 'http://test.com' }
-
- let!(:external_issue_tracker) do
- create(:custom_issue_tracker_integration, active: external_issue_tracker_active, project: project, project_url: external_issue_tracker_url)
- end
-
- context 'when external issue tracker is configured and active' do
- let(:external_issue_tracker_active) { true }
-
- it 'has a link to the external issue tracker' do
- render
-
- expect(rendered).to have_link(external_issue_tracker.title, href: external_issue_tracker_url)
- end
- end
-
- context 'when external issue tracker is not configured and active' do
- let(:external_issue_tracker_active) { false }
-
- it 'does not have a link to the external issue tracker' do
- render
-
- expect(rendered).not_to have_link(external_issue_tracker.title)
- end
- end
- end
-
- context 'with Jira issue tracker' do
- let_it_be(:jira) { create(:jira_integration, project: project, issues_enabled: false) }
-
- it 'has a link to the Jira issue tracker' do
- render
-
- expect(rendered).to have_link('Jira', href: project.external_issue_tracker.issue_tracker_path)
- end
- end
- end
-
- describe 'Merge Requests' do
- it 'has a link to the merge request list path' do
- render
-
- expect(rendered).to have_link('Merge requests', href: project_merge_requests_path(project), class: 'shortcuts-merge_requests')
- end
-
- it 'shows pill with the number of merge requests' do
- render
-
- expect(rendered).to have_css('span.badge.badge-pill.merge_counter.js-merge-counter')
- end
- end
-
- describe 'CI/CD' do
- it 'has a link to pipelines page' do
- render
-
- expect(rendered).to have_link('CI/CD', href: project_pipelines_path(project))
- end
-
- describe 'Artifacts' do
- it 'has a link to the artifacts page' do
- render
-
- expect(rendered).to have_link('Artifacts', href: project_artifacts_path(project))
- end
- end
-
- describe 'Jobs' do
- it 'has a link to the jobs page' do
- render
-
- expect(rendered).to have_link('Jobs', href: project_jobs_path(project))
- end
- end
-
- describe 'Pipeline Schedules' do
- it 'has a link to the pipeline schedules page' do
- render
-
- expect(rendered).to have_link('Schedules', href: pipeline_schedules_path(project))
- end
- end
-
- describe 'Pipelines' do
- it 'has a link to the pipelines page' do
- render
-
- expect(rendered).to have_link('Pipelines', href: project_pipelines_path(project))
- end
- end
-
- describe 'Pipeline Editor' do
- context 'with a current_ref' do
- it 'has a link to the pipeline editor' do
- render
-
- expect(rendered).to have_link('Editor', href: project_ci_pipeline_editor_path(project, params: { branch_name: current_ref }))
- end
- end
-
- context 'with the default_branch' do
- it 'has a link to the pipeline editor' do
- render
-
- expect(rendered).to have_link('Editor', href: project_ci_pipeline_editor_path(project, params: { branch_name: project.default_branch }))
- end
- end
-
- context 'when user cannot access pipeline editor' do
- it 'does not has a link to the pipeline editor' do
- allow(view).to receive(:can_view_pipeline_editor?).and_return(false)
-
- render
-
- expect(rendered).not_to have_link('Editor', href: project_ci_pipeline_editor_path(project))
- end
- end
- end
- end
-
- describe 'Security and Compliance' do
- describe 'when user does not have permissions' do
- before do
- allow(view).to receive(:current_user).and_return(nil)
- end
-
- it 'top level navigation link is not visible' do
- render
-
- expect(rendered).not_to have_link('Security and Compliance')
- end
- end
-
- context 'when user has permissions' do
- before do
- allow(view).to receive(:current_user).and_return(user)
-
- render
- end
-
- it 'top level navigation link is visible' do
- expect(rendered).to have_link('Security and Compliance')
- end
-
- it 'security configuration link is visible' do
- expect(rendered).to have_link('Security configuration', href: project_security_configuration_path(project))
- end
- end
- end
-
- describe 'Deployments' do
- let(:page) { Nokogiri::HTML.parse(rendered) }
-
- describe 'Feature flags' do
- it 'has a link to the feature flags page' do
- render
-
- expect(page.at_css('.shortcuts-deployments').parent.css('[aria-label="Feature flags"]')).not_to be_empty
- expect(rendered).to have_link('Feature flags', href: project_feature_flags_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the feature flags page' do
- render
-
- expect(rendered).not_to have_link('Feature flags')
- end
- end
- end
-
- describe 'Environments' do
- it 'has a link to the environments page' do
- render
-
- expect(page.at_css('.shortcuts-deployments').parent.css('[aria-label="Environments"]')).not_to be_empty
- expect(rendered).to have_link('Environments', href: project_environments_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the environments page' do
- render
-
- expect(rendered).not_to have_link('Environments')
- end
- end
- end
-
- describe 'Releases' do
- it 'has a link to the project releases path' do
- render
-
- expect(rendered).to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-deployments-releases')
- end
- end
- end
-
- describe 'Monitor' do
- it 'top level navigation link is visible for user with permissions' do
- render
-
- expect(rendered).to have_link('Monitor')
- end
-
- describe 'Error Tracking' do
- it 'has a link to the error tracking page' do
- render
-
- expect(rendered).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the error tracking page' do
- render
-
- expect(rendered).not_to have_link('Error Tracking')
- end
- end
- end
-
- describe 'Alert Management' do
- it 'has a link to the alert management page' do
- render
-
- expect(rendered).to have_link('Alerts', href: project_alert_management_index_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the alert management page' do
- render
-
- expect(rendered).not_to have_link('Alerts')
- end
- end
- end
-
- describe 'Incidents' do
- it 'has a link to the incidents page' do
- render
-
- expect(rendered).to have_link('Incidents', href: project_incidents_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the incidents page' do
- render
-
- expect(rendered).not_to have_link('Incidents')
- end
- end
- end
- end
-
- describe 'Infrastructure' do
- describe 'Terraform' do
- it 'has a link to the terraform page' do
- render
-
- expect(rendered).to have_link('Terraform states', href: project_terraform_index_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the terraform page' do
- render
-
- expect(rendered).not_to have_link('Terraform states')
- end
- end
- end
-
- describe 'Kubernetes clusters' do
- it 'has a link to the kubernetes page' do
- render
-
- expect(rendered).to have_link('Kubernetes clusters', href: project_clusters_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the kubernetes page' do
- render
-
- expect(rendered).not_to have_link('Kubernetes clusters')
- end
- end
- end
-
- describe 'Google Cloud' do
- it 'has a link to the google cloud page' do
- render
- expect(rendered).to have_link('Google Cloud', href: project_google_cloud_configuration_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the google cloud page' do
- render
-
- expect(rendered).not_to have_link('Google Cloud')
- end
- end
- end
- end
-
- describe 'Packages and Registries' do
- let(:registry_enabled) { true }
- let(:packages_enabled) { true }
-
- before do
- stub_container_registry_config(enabled: registry_enabled)
- stub_config(packages: { enabled: packages_enabled })
- end
-
- it 'top level navigation link is visible and points to package registry page' do
- render
-
- expect(rendered).to have_link('Packages and registries', href: project_packages_path(project))
- end
-
- describe 'Packages Registry' do
- it 'shows link to package registry page' do
- render
-
- expect(rendered).to have_link('Package Registry', href: project_packages_path(project))
- end
-
- context 'when packages config setting is not enabled' do
- let(:packages_enabled) { false }
-
- it 'does not show link to package registry page' do
- render
-
- expect(rendered).not_to have_link('Package Registry', href: project_packages_path(project))
- end
- end
- end
-
- describe 'Container Registry' do
- it 'shows link to container registry page' do
- render
-
- expect(rendered).to have_link('Container Registry', href: project_container_registry_index_path(project))
- end
-
- context 'when container config setting is not enabled' do
- let(:registry_enabled) { false }
-
- it 'does not show link to package registry page' do
- render
-
- expect(rendered).not_to have_link('Container Registry', href: project_container_registry_index_path(project))
- end
- end
- end
-
- describe 'Terraform modules' do
- it 'shows link to terraform modules page' do
- render
-
- expect(rendered).to have_link('Terraform modules', href: project_infrastructure_registry_index_path(project))
- end
-
- context 'when package registry config is disabled' do
- it 'does not show link to package registry page' do
- stub_config(packages: { enabled: false })
-
- render
-
- expect(rendered).not_to have_link('Terraform modules', href: project_infrastructure_registry_index_path(project))
- end
- end
- end
- end
-
- describe 'Analytics' do
- it 'top level navigation link is visible points to the value stream page' do
- render
-
- expect(rendered).to have_link('Analytics', href: project_cycle_analytics_path(project))
- end
-
- describe 'CI/CD' do
- it 'has a link to the CI/CD analytics page' do
- render
-
- expect(rendered).to have_link('CI/CD', href: charts_project_pipelines_path(project))
- end
-
- context 'when user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the CI/CD analytics page' do
- render
-
- expect(rendered).not_to have_link('CI/CD', href: charts_project_pipelines_path(project))
- end
- end
- end
-
- describe 'Repository' do
- it 'has a link to the repository analytics page' do
- render
-
- expect(rendered).to have_link('Repository', href: charts_project_graph_path(project, 'master'))
- end
-
- context 'when user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the repository analytics page' do
- render
-
- expect(rendered).not_to have_link('Repository', href: charts_project_graph_path(project, 'master'))
- end
- end
- end
-
- describe 'Value stream' do
- it 'has a link to the value stream page' do
- render
-
- expect(rendered).to have_link('Value stream', href: project_cycle_analytics_path(project))
- end
-
- context 'when user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the value stream page' do
- render
-
- expect(rendered).not_to have_link('Value stream', href: project_cycle_analytics_path(project))
- end
- end
- end
- end
-
- describe 'Confluence' do
- let!(:service) { create(:confluence_integration, project: project, active: active) }
-
- before do
- render
- end
-
- context 'when the Confluence integration is active' do
- let(:active) { true }
-
- it 'shows the Confluence link' do
- expect(rendered).to have_link('Confluence', href: project_wikis_confluence_path(project))
- end
-
- it 'does not show the GitLab wiki link' do
- expect(rendered).not_to have_link('Wiki')
- end
- end
-
- context 'when it is disabled' do
- let(:active) { false }
-
- it 'does not show the Confluence link' do
- expect(rendered).not_to have_link('Confluence')
- end
-
- it 'shows the GitLab wiki link' do
- expect(rendered).to have_link('Wiki', href: wiki_path(project.wiki))
- end
- end
- end
-
- describe 'Wiki' do
- describe 'when wiki is enabled' do
- it 'shows the wiki tab with the wiki internal link' do
- render
-
- expect(rendered).to have_link('Wiki', href: wiki_path(project.wiki))
- end
- end
-
- describe 'when wiki is disabled' do
- let(:user) { nil }
-
- it 'does not show the wiki link' do
- render
-
- expect(rendered).not_to have_link('Wiki')
- end
- end
- end
-
- describe 'External Wiki' do
- let(:properties) { { 'external_wiki_url' => 'https://gitlab.com' } }
- let(:service_status) { true }
-
- before do
- project.create_external_wiki_integration(active: service_status, properties: properties)
- project.reload
- end
-
- context 'when it is active' do
- it 'shows the external wiki tab with the external wiki service link' do
- render
-
- expect(rendered).to have_link('External wiki', href: properties['external_wiki_url'])
- end
- end
-
- context 'when it is disabled' do
- let(:service_status) { false }
-
- it 'does not show the external wiki link' do
- render
-
- expect(rendered).not_to have_link('External wiki')
- end
- end
- end
-
- describe 'Snippets' do
- before do
- render
- end
-
- context 'when user can access snippets' do
- it 'shows Snippets link' do
- expect(rendered).to have_link('Snippets', href: project_snippets_path(project))
- end
- end
-
- context 'when user cannot access snippets' do
- let(:user) { nil }
-
- it 'does not show Snippets link' do
- expect(rendered).not_to have_link('Snippets')
- end
- end
- end
-
- describe 'Settings' do
- describe 'General' do
- it 'has a link to the General settings' do
- render
-
- expect(rendered).to have_link('General', href: edit_project_path(project))
- end
- end
-
- describe 'Integrations' do
- it 'has a link to the Integrations settings' do
- render
-
- expect(rendered).to have_link('Integrations', href: project_settings_integrations_path(project))
- end
- end
-
- describe 'WebHooks' do
- it 'has a link to the WebHooks settings' do
- render
-
- expect(rendered).to have_link('Webhooks', href: project_hooks_path(project))
- end
- end
-
- describe 'Access Tokens' do
- context 'self-managed instance' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'has a link to the Access Tokens settings' do
- render
-
- expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
- end
- end
-
- context 'gitlab.com', :with_license do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'has a link to the Access Tokens settings' do
- render
-
- expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
- end
- end
- end
-
- describe 'Repository' do
- it 'has a link to the Repository settings' do
- render
-
- expect(rendered).to have_link('Repository', href: project_settings_repository_path(project))
- end
- end
-
- describe 'CI/CD' do
- context 'when project is archived' do
- before do
- project.update!(archived: true)
- end
-
- it 'does not have a link to the CI/CD settings' do
- render
-
- expect(rendered).not_to have_link('CI/CD', href: project_settings_ci_cd_path(project))
- end
- end
-
- context 'when project is not archived' do
- it 'has a link to the CI/CD settings' do
- render
-
- expect(rendered).to have_link('CI/CD', href: project_settings_ci_cd_path(project))
- end
- end
- end
-
- describe 'Monitor' do
- context 'when project is archived' do
- before do
- project.update!(archived: true)
- end
-
- it 'does not have a link to the Monitor settings' do
- render
-
- expect(rendered).not_to have_link('Monitor', href: project_settings_operations_path(project))
- end
- end
-
- context 'when project is not archived active' do
- it 'has a link to the Monitor settings' do
- render
-
- expect(rendered).to have_link('Monitor', href: project_settings_operations_path(project))
- end
- end
- end
-
- describe 'Pages' do
- before do
- stub_config(pages: { enabled: pages_enabled })
- end
-
- context 'when pages are enabled' do
- let(:pages_enabled) { true }
-
- it 'has a link to the Pages settings' do
- render
-
- expect(rendered).to have_link('Pages', href: project_pages_path(project))
- end
- end
-
- context 'when pages are not enabled' do
- let(:pages_enabled) { false }
-
- it 'does not have a link to the Pages settings' do
- render
-
- expect(rendered).not_to have_link('Pages', href: project_pages_path(project))
- end
- end
- end
-
- describe 'Packages and registries' do
- let(:packages_enabled) { false }
-
- before do
- stub_container_registry_config(enabled: registry_enabled)
- stub_config(packages: { enabled: packages_enabled })
- end
-
- context 'when registry is enabled' do
- let(:registry_enabled) { true }
-
- it 'has a link to the Package and registry settings' do
- render
-
- expect(rendered).to have_link('Packages and registries', href: project_settings_packages_and_registries_path(project))
- end
- end
-
- context 'when registry is not enabled' do
- let(:registry_enabled) { false }
-
- it 'does not have a link to the Package and registry settings' do
- render
-
- expect(rendered).not_to have_link('Packages and registries', href: project_settings_packages_and_registries_path(project))
- end
- end
-
- context 'when packages config is enabled' do
- let(:registry_enabled) { false }
- let(:packages_enabled) { true }
-
- it 'has a link to the Package and registry settings' do
- render
-
- expect(rendered).to have_link('Packages and registries', href: project_settings_packages_and_registries_path(project))
- end
- end
- end
-
- describe 'Usage Quotas' do
- it 'has a link to Usage Quotas' do
- render
-
- expect(rendered).to have_link('Usage Quotas', href: project_usage_quotas_path(project))
- end
- end
- end
-
- describe 'Hidden menus' do
- it 'has a link to the Activity page' do
- render
-
- expect(rendered).to have_link('Activity', href: activity_project_path(project), class: 'shortcuts-project-activity', visible: false)
- end
-
- it 'has a link to the Graph page' do
- render
-
- expect(rendered).to have_link('Graph', href: project_network_path(project, current_ref), class: 'shortcuts-network', visible: false)
- end
-
- it 'has a link to the New Issue page' do
- render
-
- expect(rendered).to have_link('Create a new issue', href: new_project_issue_path(project), class: 'shortcuts-new-issue', visible: false)
- end
-
- it 'has a link to the Jobs page' do
- render
-
- expect(rendered).to have_link('Jobs', href: project_jobs_path(project), class: 'shortcuts-builds', visible: false)
- end
-
- it 'has a link to the Commits page' do
- render
-
- expect(rendered).to have_link('Commits', href: project_commits_path(project), class: 'shortcuts-commits', visible: false)
- end
-
- it 'has a link to the Issue Boards page' do
- render
-
- expect(rendered).to have_link('Issue Boards', href: project_boards_path(project), class: 'shortcuts-issue-boards', visible: false)
- end
- end
-
- it_behaves_like 'sidebar includes snowplow attributes', 'render', 'projects_side_navigation', 'projects_side_navigation'
-
- describe 'Collapsed menu items' do
- it 'does not render the collapsed top menu as a link' do
- render
-
- expect(rendered).not_to have_selector('.sidebar-sub-level-items > li.fly-out-top-item > a')
- end
- end
-end
diff --git a/spec/views/profiles/keys/_form.html.haml_spec.rb b/spec/views/profiles/keys/_form.html.haml_spec.rb
index dd8af14100a..f427804d005 100644
--- a/spec/views/profiles/keys/_form.html.haml_spec.rb
+++ b/spec/views/profiles/keys/_form.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'profiles/keys/_form.html.haml' do
+RSpec.describe 'profiles/keys/_form.html.haml', feature_category: :system_access do
include SshKeysHelper
let_it_be(:key) { Key.new }
@@ -44,7 +44,7 @@ RSpec.describe 'profiles/keys/_form.html.haml' do
end
it 'has the validation warning', :aggregate_failures do
- expect(rendered).to have_text("Oops, are you sure? Publicly visible private SSH keys can compromise your system.")
+ expect(rendered).to have_text("Are you sure? Publicly visible private SSH keys can compromise your system.")
expect(rendered).to have_button('Yes, add it')
end
diff --git a/spec/views/projects/_files.html.haml_spec.rb b/spec/views/projects/_files.html.haml_spec.rb
index 96c6c2bdfab..870d436ca88 100644
--- a/spec/views/projects/_files.html.haml_spec.rb
+++ b/spec/views/projects/_files.html.haml_spec.rb
@@ -35,6 +35,8 @@ RSpec.describe 'projects/_files', feature_category: :groups_and_projects do
before do
allow(view).to receive(:current_user).and_return(user)
allow(user).to receive(:project_shortcut_buttons).and_return(true)
+
+ stub_feature_flags(project_overview_reorg: false)
end
it 'renders buttons' do
@@ -45,6 +47,10 @@ RSpec.describe 'projects/_files', feature_category: :groups_and_projects do
end
context 'when rendered in the project overview page and there is no current user' do
+ before do
+ stub_feature_flags(project_overview_reorg: false)
+ end
+
it 'renders buttons' do
render(template, is_project_overview: true)
diff --git a/spec/views/projects/_home_panel.html.haml_spec.rb b/spec/views/projects/_home_panel.html.haml_spec.rb
index e5081df4c22..0282e149b25 100644
--- a/spec/views/projects/_home_panel.html.haml_spec.rb
+++ b/spec/views/projects/_home_panel.html.haml_spec.rb
@@ -100,6 +100,8 @@ RSpec.describe 'projects/_home_panel' do
allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:can?).with(user, :read_project, project).and_return(false)
allow(project).to receive(:license_anchor_data).and_return(false)
+
+ stub_feature_flags(project_overview_reorg: false)
end
context 'has no badges' do
@@ -146,38 +148,6 @@ RSpec.describe 'projects/_home_panel' do
end
end
- context 'project id' do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
-
- before do
- assign(:project, project)
-
- allow(view).to receive(:current_user).and_return(user)
- allow(project).to receive(:license_anchor_data).and_return(false)
- end
-
- context 'user can read project' do
- it 'is shown' do
- allow(view).to receive(:can?).with(user, :read_project, project).and_return(true)
-
- render
-
- expect(rendered).to have_content("Project ID: #{project.id}")
- end
- end
-
- context 'user cannot read project' do
- it 'is not shown' do
- allow(view).to receive(:can?).with(user, :read_project, project).and_return(false)
-
- render
-
- expect(rendered).not_to have_content("Project ID: #{project.id}")
- end
- end
- end
-
context 'forks' do
let(:source_project) { create(:project, :repository) }
let(:project) { fork_project(source_project) }
diff --git a/spec/views/projects/edit.html.haml_spec.rb b/spec/views/projects/edit.html.haml_spec.rb
index 8c1a8cf21d0..127dbf18a0a 100644
--- a/spec/views/projects/edit.html.haml_spec.rb
+++ b/spec/views/projects/edit.html.haml_spec.rb
@@ -111,4 +111,30 @@ RSpec.describe 'projects/edit' do
expect(rendered).to have_content(_('GitLab Pages has moved'))
end
end
+
+ describe 'notifications on renaming the project path' do
+ context 'when the GitlabAPI is supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+ end
+
+ it 'displays the warning regarding the container registry' do
+ render
+
+ expect(rendered).to have_content('new uploads to the container registry are blocked')
+ end
+ end
+
+ context 'when the GitlabAPI is not supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false)
+ end
+
+ it 'does not display the warning regarding the container registry' do
+ render
+
+ expect(rendered).not_to have_content('new uploads to the container registry are blocked')
+ end
+ end
+ end
end
diff --git a/spec/views/projects/pages/_pages_settings.html.haml_spec.rb b/spec/views/projects/pages/_pages_settings.html.haml_spec.rb
index e790305da5d..ba276bc6070 100644
--- a/spec/views/projects/pages/_pages_settings.html.haml_spec.rb
+++ b/spec/views/projects/pages/_pages_settings.html.haml_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe 'projects/pages/_pages_settings', feature_category: :pages do
render
- expect(rendered).to have_content('Use multiple versions')
+ expect(rendered).to have_content('Use multiple deployments')
end
end
end
diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb
index 0ac5efa2e6d..ad6a3a69cc7 100644
--- a/spec/views/projects/tags/index.html.haml_spec.rb
+++ b/spec/views/projects/tags/index.html.haml_spec.rb
@@ -49,34 +49,6 @@ RSpec.describe 'projects/tags/index.html.haml' do
end
end
- context 'when the most recent build for a tag has artifacts' do
- let!(:build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
-
- it 'renders the Artifacts section in the download list' do
- render
- expect(rendered).to have_selector('li', text: 'Artifacts')
- end
-
- it 'renders artifact download links' do
- render
- expect(rendered).to have_link(href: latest_succeeded_project_artifacts_path(project, "#{pipeline.ref}/download", job: 'test'))
- end
- end
-
- context 'when the most recent build for a tag has expired artifacts' do
- let!(:build) { create(:ci_build, :success, :expired, :artifacts, pipeline: pipeline) }
-
- it 'does not render the Artifacts section in the download list' do
- render
- expect(rendered).not_to have_selector('li', text: 'Artifacts')
- end
-
- it 'does not render artifact download links' do
- render
- expect(rendered).not_to have_link(href: latest_succeeded_project_artifacts_path(project, "#{pipeline.ref}/download", job: 'test'))
- end
- end
-
context 'build stats' do
let(:tag) { 'v1.0.0' }
let(:page) { Capybara::Node::Simple.new(rendered) }
@@ -92,13 +64,12 @@ RSpec.describe 'projects/tags/index.html.haml' do
render
expect(page.find('.tags .content-list li', text: tag)).to have_css '[data-testid="status_success_borderless-icon"]'
- expect(page.all('.tags .content-list li')).to all(have_css('svg.s16'))
end
it 'shows no build status or placeholder when no pipelines present' do
render
- expect(page.all('.tags .content-list li')).not_to have_css 'svg.s16'
+ expect(page.find('.tags .content-list li', text: tag)).not_to have_css '[data-testid="status_success_borderless-icon"]'
end
it 'shows no build status or placeholder when pipelines are private' do
@@ -107,7 +78,7 @@ RSpec.describe 'projects/tags/index.html.haml' do
render
- expect(page.all('.tags .content-list li')).not_to have_css 'svg.s16'
+ expect(page.find('.tags .content-list li', text: tag)).not_to have_css '[data-testid="status_success_borderless-icon"]'
end
end
diff --git a/spec/views/shared/nav/_sidebar.html.haml_spec.rb b/spec/views/shared/nav/_sidebar.html.haml_spec.rb
deleted file mode 100644
index 0eb945f5624..00000000000
--- a/spec/views/shared/nav/_sidebar.html.haml_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'shared/nav/_sidebar.html.haml' do
- let_it_be(:project) { create(:project) }
-
- let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
- let(:sidebar) { Sidebars::Projects::Panel.new(context) }
-
- before do
- assign(:project, project)
- assign(:sidebar, sidebar)
-
- allow(sidebar).to receive(:renderable_menus).and_return([])
- end
-
- context 'when sidebar has a scope menu' do
- it 'renders the scope menu' do
- render
-
- expect(rendered).to render_template('shared/nav/_scope_menu')
- end
- end
-
- context 'when sidebar does not have a scope menu' do
- let(:scope_menu_view_path) { 'shared/nav/' }
- let(:scope_menu_view_name) { 'scope_menu.html.haml' }
- let(:scope_menu_partial) { "#{scope_menu_view_path}_#{scope_menu_view_name}" }
- let(:content) { 'Custom test content' }
-
- context 'when sidebar has a custom scope menu partial defined' do
- it 'renders the custom partial' do
- allow(view).to receive(:scope_menu).and_return(nil)
- stub_template(scope_menu_partial => content)
-
- render
-
- expect(rendered).to have_text(content)
- end
- end
- end
-end
diff --git a/spec/views/shared/runners/_runner_details.html.haml_spec.rb b/spec/views/shared/runners/_runner_details.html.haml_spec.rb
index a597c719d87..0612d157ff4 100644
--- a/spec/views/shared/runners/_runner_details.html.haml_spec.rb
+++ b/spec/views/shared/runners/_runner_details.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'shared/runners/_runner_details.html.haml', feature_category: :runner_fleet do
+RSpec.describe 'shared/runners/_runner_details.html.haml', feature_category: :fleet_visibility do
include PageLayoutHelper
let_it_be(:runner) do
diff --git a/spec/views/shared/snippets/_snippet.html.haml_spec.rb b/spec/views/shared/snippets/_snippet.html.haml_spec.rb
index 712021ec1e1..71ef97b0448 100644
--- a/spec/views/shared/snippets/_snippet.html.haml_spec.rb
+++ b/spec/views/shared/snippets/_snippet.html.haml_spec.rb
@@ -49,4 +49,37 @@ RSpec.describe 'shared/snippets/_snippet.html.haml' do
expect(rendered).not_to have_selector('span.file_count')
end
end
+
+ context 'spam icon and tooltip', feature_category: :insider_threat do
+ context 'when the author of the snippet is not banned' do
+ before do
+ render 'shared/snippets/snippet', snippet: snippet
+ end
+
+ it 'does not render spam icon' do
+ expect(rendered).not_to have_css('[data-testid="spam-icon"]')
+ end
+
+ it 'does not render tooltip' do
+ expect(rendered).not_to have_selector("span.has-tooltip")
+ end
+ end
+
+ context 'when the author of the snippet is banned' do
+ let_it_be(:banned_user) { create(:user, :banned) }
+ let_it_be(:snippet) { create(:snippet, author: banned_user) }
+
+ before do
+ render 'shared/snippets/snippet', snippet: snippet
+ end
+
+ it 'renders spam icon' do
+ expect(rendered).to have_css('[data-testid="spam-icon"]')
+ end
+
+ it 'renders tooltip' do
+ expect(rendered).to have_selector("span.has-tooltip[title='This snippet is hidden because its author has been banned']")
+ end
+ end
+ end
end
diff --git a/spec/views/profiles/audit_log.html.haml_spec.rb b/spec/views/user_settings/user_settings/authentication_log.html.haml_spec.rb
index d5f6a2d64e7..4188bdc587f 100644
--- a/spec/views/profiles/audit_log.html.haml_spec.rb
+++ b/spec/views/user_settings/user_settings/authentication_log.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'profiles/audit_log' do
+RSpec.describe 'user_settings/user_settings/authentication_log', feature_category: :system_access do
let(:user) { create(:user) }
before do
diff --git a/spec/workers/abuse/trust_score_worker_spec.rb b/spec/workers/abuse/trust_score_worker_spec.rb
new file mode 100644
index 00000000000..adc582ada94
--- /dev/null
+++ b/spec/workers/abuse/trust_score_worker_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Abuse::TrustScoreWorker, :clean_gitlab_redis_shared_state, feature_category: :instance_resiliency do
+ let(:worker) { described_class.new }
+ let_it_be(:user) { create(:user) }
+
+ subject(:perform) { worker.perform(user.id, :telesign, 0.85, 'foo') }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [user.id, :telesign, 0.5] }
+ end
+
+ context "when the user does not exist" do
+ let(:log_payload) { { 'message' => 'User not found.', 'user_id' => user.id } }
+
+ before do
+ allow(User).to receive(:find_by_id).with(user.id).and_return(nil)
+ end
+
+ it 'logs an error' do
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(log_payload))
+
+ expect { perform }.not_to raise_exception
+ end
+
+ it 'does not attempt to create the trust score' do
+ expect(Abuse::TrustScore).not_to receive(:create!)
+
+ perform
+ end
+ end
+
+ context "when the user exists" do
+ it 'creates an abuse trust score with the correct data' do
+ expect { perform }.to change { Abuse::TrustScore.count }.from(0).to(1)
+ expect(Abuse::TrustScore.last.attributes).to include({
+ user_id: user.id,
+ source: "telesign",
+ score: 0.85,
+ correlation_id_value: 'foo'
+ }.stringify_keys)
+ end
+ end
+end
diff --git a/spec/workers/background_migration/ci_database_worker_spec.rb b/spec/workers/background_migration/ci_database_worker_spec.rb
index 952c9ebfce8..7819bc695a4 100644
--- a/spec/workers/background_migration/ci_database_worker_spec.rb
+++ b/spec/workers/background_migration/ci_database_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_cluster_shared_state,
+RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state,
feature_category: :database do
before do
skip_if_shared_database(:ci)
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index 76509b4b227..be1f5027e44 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_cluster_shared_state,
+RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state,
feature_category: :database do
it_behaves_like 'it runs background migration jobs', 'main'
end
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index 690555aa08f..325b31c85db 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -49,10 +49,6 @@ RSpec.describe BulkImports::EntityWorker, feature_category: :importers do
end
end
- it 'has the option to reschedule once if deduplicated' do
- expect(described_class.get_deduplication_options).to include({ if_deduplicated: :reschedule_once })
- end
-
context 'when pipeline workers from a stage are running' do
before do
pipeline_tracker.enqueue!
@@ -77,6 +73,8 @@ RSpec.describe BulkImports::EntityWorker, feature_category: :importers do
it 'enqueues the pipeline workers from the next stage and re-enqueues itself' do
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger).to receive(:info).with(hash_including('message' => 'Stage starting', 'entity_stage' => 1))
end
@@ -92,6 +90,26 @@ RSpec.describe BulkImports::EntityWorker, feature_category: :importers do
worker.perform(entity.id)
end
+
+ context 'when exclusive lease cannot be obtained' do
+ it 'does not start next stage and re-enqueue worker' do
+ expect_next_instance_of(Gitlab::ExclusiveLease) do |lease|
+ expect(lease).to receive(:try_obtain).and_return(false)
+ end
+
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:info).with(
+ hash_including(
+ 'message' => 'Cannot obtain an exclusive lease. There must be another instance already in execution.'
+ )
+ )
+ end
+
+ expect(described_class).to receive(:perform_in)
+
+ worker.perform(entity.id)
+ end
+ end
end
context 'when there are no next stage to run' do
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index e9d0b6b24b2..2cc6348bb27 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -72,17 +72,14 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
entity.update!(source_xid: nil)
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger).to receive(:error).with(
a_hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
'exception.backtrace' => anything,
'exception.class' => 'NoMethodError',
'exception.message' => /^undefined method `model_id' for nil:NilClass/,
- 'message' => 'Failed to fetch source entity id',
- 'source_version' => entity.bulk_import.source_version_info.to_s
+ 'message' => 'Failed to fetch source entity id'
)
).twice
end
@@ -148,7 +145,9 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
entity = create(:bulk_import_entity, bulk_import: bulk_import)
error = 'Exhausted error!'
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger)
.to receive(:error)
.with(hash_including('message' => "Request to export #{entity.source_type} failed"))
diff --git a/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb b/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
index 59ae4205c0f..2dd5b23b3d2 100644
--- a/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
@@ -33,6 +33,8 @@ RSpec.describe BulkImports::FinishBatchedPipelineWorker, feature_category: :impo
)
end
+ let!(:batch_1) { create(:bulk_import_batch_tracker, :finished, tracker: pipeline_tracker) }
+
subject(:worker) { described_class.new }
describe '#perform' do
@@ -45,27 +47,30 @@ RSpec.describe BulkImports::FinishBatchedPipelineWorker, feature_category: :impo
end
end
- context 'when import is in progress' do
- it 'marks the tracker as finished' do
- expect_next_instance_of(BulkImports::Logger) do |logger|
- expect(logger).to receive(:info).with(
- a_hash_including('message' => 'Tracker finished')
- )
- end
+ it 'marks the tracker as finished' do
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_tracker).with(pipeline_tracker).and_call_original
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
- expect { subject.perform(pipeline_tracker.id) }
- .to change { pipeline_tracker.reload.finished? }
- .from(false).to(true)
+ expect(logger).to receive(:info).with(
+ a_hash_including('message' => 'Tracker finished')
+ )
end
- it "calls the pipeline's `#on_finish`" do
- expect_next_instance_of(pipeline_class) do |pipeline|
- expect(pipeline).to receive(:on_finish)
- end
+ expect { subject.perform(pipeline_tracker.id) }
+ .to change { pipeline_tracker.reload.finished? }
+ .from(false).to(true)
+ end
- subject.perform(pipeline_tracker.id)
+ it "calls the pipeline's `#on_finish`" do
+ expect_next_instance_of(pipeline_class) do |pipeline|
+ expect(pipeline).to receive(:on_finish)
end
+ subject.perform(pipeline_tracker.id)
+ end
+
+ context 'when import is in progress' do
it 're-enqueues for any started batches' do
create(:bulk_import_batch_tracker, :started, tracker: pipeline_tracker)
@@ -88,14 +93,17 @@ RSpec.describe BulkImports::FinishBatchedPipelineWorker, feature_category: :impo
end
context 'when pipeline tracker is stale' do
- let(:pipeline_tracker) { create(:bulk_import_tracker, :started, :batched, :stale, entity: entity) }
+ before do
+ batch_1.update!(updated_at: 5.hours.ago)
+ end
it 'fails pipeline tracker and its batches' do
- create(:bulk_import_batch_tracker, :finished, tracker: pipeline_tracker)
-
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_tracker).with(pipeline_tracker).and_call_original
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger).to receive(:error).with(
- a_hash_including('message' => 'Tracker stale. Failing batches and tracker')
+ a_hash_including('message' => 'Batch stale. Failing batches and tracker')
)
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index d99b3e9de73..368c7537641 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -16,12 +16,16 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
def self.file_extraction_pipeline?
false
end
+
+ def self.abort_on_failure?
+ false
+ end
end
end
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
- let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+ let_it_be_with_reload(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
let(:pipeline_tracker) do
create(
@@ -44,7 +48,7 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- include_examples 'an idempotent worker' do
+ it_behaves_like 'an idempotent worker' do
let(:job_args) { [pipeline_tracker.id, pipeline_tracker.stage, entity.id] }
it 'runs the pipeline and sets tracker to finished' do
@@ -61,17 +65,9 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
it 'runs the given pipeline successfully' do
expect_next_instance_of(BulkImports::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- hash_including(
- 'pipeline_class' => 'FakePipeline',
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path
- )
- )
+ expect(logger).to receive(:with_tracker).with(pipeline_tracker).and_call_original
+ expect(logger).to receive(:with_entity).with(pipeline_tracker.entity).and_call_original
+ expect(logger).to receive(:info)
end
allow(worker).to receive(:jid).and_return('jid')
@@ -98,22 +94,9 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
job = { 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id] }
expect_next_instance_of(BulkImports::Logger) do |logger|
- expect(logger)
- .to receive(:error)
- .with(
- hash_including(
- 'pipeline_class' => 'FakePipeline',
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'class' => 'BulkImports::PipelineWorker',
- 'exception.message' => 'Error!',
- 'message' => 'Pipeline failed',
- 'source_version' => entity.bulk_import.source_version_info.to_s,
- 'importer' => 'gitlab_migration'
- )
- )
+ expect(logger).to receive(:with_tracker).with(pipeline_tracker).and_call_original
+ expect(logger).to receive(:with_entity).with(pipeline_tracker.entity).and_call_original
+ expect(logger).to receive(:error)
end
expect(Gitlab::ErrorTracking)
@@ -121,13 +104,13 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
.with(
instance_of(StandardError),
hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'pipeline_class' => pipeline_tracker.pipeline_name,
- 'importer' => 'gitlab_migration',
- 'source_version' => entity.bulk_import.source_version_info.to_s
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import.id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ pipeline_class: pipeline_tracker.pipeline_name,
+ importer: 'gitlab_migration',
+ source_version: entity.bulk_import.source_version_info.to_s
)
)
@@ -156,6 +139,21 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.status_name).to eq(:failed)
expect(pipeline_tracker.jid).to eq('jid')
+ expect(entity.reload.status_name).to eq(:created)
+ end
+
+ context 'when pipeline has abort_on_failure' do
+ before do
+ allow(pipeline_class).to receive(:abort_on_failure?).and_return(true)
+ end
+
+ it 'marks entity as failed' do
+ job = { 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id] }
+
+ described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new('Error!'))
+
+ expect(entity.reload.status_name).to eq(:failed)
+ end
end
end
@@ -266,6 +264,10 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
describe '#perform' do
context 'when entity is failed' do
+ before do
+ entity.update!(status: -1)
+ end
+
it 'marks tracker as skipped and logs the skip' do
pipeline_tracker = create(
:bulk_import_tracker,
@@ -274,23 +276,12 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
status_event: 'enqueue'
)
- entity.update!(status: -1)
-
expect_next_instance_of(BulkImports::Logger) do |logger|
allow(logger).to receive(:info)
expect(logger)
.to receive(:info)
- .with(
- hash_including(
- 'pipeline_class' => 'FakePipeline',
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'message' => 'Skipping pipeline due to failed entity'
- )
- )
+ .with(hash_including(message: 'Skipping pipeline due to failed entity'))
end
worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
@@ -323,23 +314,15 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- it 'reenqueues the worker' do
+ it 're_enqueues the worker' do
expect_any_instance_of(BulkImports::Tracker) do |tracker|
expect(tracker).to receive(:retry).and_call_original
end
expect_next_instance_of(BulkImports::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- hash_including(
- 'pipeline_class' => 'FakePipeline',
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path
- )
- )
+ expect(logger).to receive(:with_tracker).and_call_original
+ expect(logger).to receive(:with_entity).and_call_original
+ expect(logger).to receive(:info)
end
expect(described_class)
@@ -495,8 +478,8 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- context 'when export is batched' do
- let(:batches_count) { 2 }
+ context 'when export is batched', :aggregate_failures do
+ let(:batches_count) { 3 }
before do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
@@ -506,10 +489,30 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
allow(status).to receive(:empty?).and_return(false)
allow(status).to receive(:failed?).and_return(false)
end
+ allow(worker).to receive(:log_extra_metadata_on_done).and_call_original
end
it 'enqueues pipeline batches' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).exactly(3).times
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:batched, true)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+
+ it 'enqueues only missing pipelines batches' do
+ create(:bulk_import_batch_tracker, tracker: pipeline_tracker, batch_number: 2)
expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
@@ -517,7 +520,8 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.status_name).to eq(:started)
expect(pipeline_tracker.batched).to eq(true)
- expect(pipeline_tracker.batches.count).to eq(batches_count)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
end
context 'when batches count is less than 1' do
@@ -531,6 +535,127 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.reload.status_name).to eq(:finished)
end
end
+
+ context 'when pipeline batch enqueuing should be limited' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ allow(::Gitlab::CurrentSettings).to receive(:bulk_import_concurrent_pipeline_batch_limit).and_return(2)
+ end
+
+ it 'only enqueues limited batches and reenqueues itself' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, false)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2)
+ expect(described_class.jobs).to contain_exactly(
+ hash_including(
+ 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id],
+ 'scheduled_at' => be_within(1).of(10.seconds.from_now.to_i)
+ )
+ )
+ end
+
+ context 'when there is a batch in progress' do
+ where(:status) { BulkImports::BatchTracker::IN_PROGRESS_STATES }
+
+ with_them do
+ before do
+ create(:bulk_import_batch_tracker, status, batch_number: 1, tracker: pipeline_tracker)
+ end
+
+ it 'counts the in progress batch against the limit' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).once
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [2])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, false)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2)
+ expect(described_class.jobs).to contain_exactly(
+ hash_including(
+ 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id],
+ 'scheduled_at' => be_within(1).of(10.seconds.from_now.to_i)
+ )
+ )
+ end
+ end
+ end
+
+ context 'when there is a batch that has finished' do
+ where(:status) do
+ all_statuses = BulkImports::BatchTracker.state_machines[:status].states.map(&:name)
+ all_statuses - BulkImports::BatchTracker::IN_PROGRESS_STATES
+ end
+
+ with_them do
+ before do
+ create(:bulk_import_batch_tracker, status, batch_number: 1, tracker: pipeline_tracker)
+ end
+
+ it 'does not count the finished batch against the limit' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(bulk_import_limit_concurrent_batches: false)
+ end
+
+ it 'does not limit batches' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).exactly(3).times
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+
+ it 'still enqueues only missing pipelines batches' do
+ create(:bulk_import_batch_tracker, tracker: pipeline_tracker, batch_number: 2)
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+ end
+ end
end
end
end
diff --git a/spec/workers/bulk_imports/stuck_import_worker_spec.rb b/spec/workers/bulk_imports/stuck_import_worker_spec.rb
index eadf3864190..09fd1e1b524 100644
--- a/spec/workers/bulk_imports/stuck_import_worker_spec.rb
+++ b/spec/workers/bulk_imports/stuck_import_worker_spec.rb
@@ -5,10 +5,21 @@ require 'spec_helper'
RSpec.describe BulkImports::StuckImportWorker, feature_category: :importers do
let_it_be(:created_bulk_import) { create(:bulk_import, :created) }
let_it_be(:started_bulk_import) { create(:bulk_import, :started) }
- let_it_be(:stale_created_bulk_import) { create(:bulk_import, :created, created_at: 3.days.ago) }
- let_it_be(:stale_started_bulk_import) { create(:bulk_import, :started, created_at: 3.days.ago) }
- let_it_be(:stale_created_bulk_import_entity) { create(:bulk_import_entity, :created, created_at: 3.days.ago) }
- let_it_be(:stale_started_bulk_import_entity) { create(:bulk_import_entity, :started, created_at: 3.days.ago) }
+ let_it_be(:stale_created_bulk_import) do
+ create(:bulk_import, :created, updated_at: 3.days.ago)
+ end
+
+ let_it_be(:stale_started_bulk_import) do
+ create(:bulk_import, :started, updated_at: 3.days.ago)
+ end
+
+ let_it_be(:stale_created_bulk_import_entity) do
+ create(:bulk_import_entity, :created, updated_at: 3.days.ago)
+ end
+
+ let_it_be(:stale_started_bulk_import_entity) do
+ create(:bulk_import_entity, :started, updated_at: 3.days.ago)
+ end
let_it_be(:started_bulk_import_tracker) do
create(:bulk_import_tracker, :started, entity: stale_started_bulk_import_entity)
@@ -37,16 +48,12 @@ RSpec.describe BulkImports::StuckImportWorker, feature_category: :importers do
it 'updates the status of bulk import entities to timeout' do
expect_next_instance_of(BulkImports::Logger) do |logger|
allow(logger).to receive(:error)
- expect(logger).to receive(:error).with(
- message: 'BulkImports::Entity stale',
- bulk_import_entity_id: stale_created_bulk_import_entity.id,
- bulk_import_id: stale_created_bulk_import_entity.bulk_import_id
- )
- expect(logger).to receive(:error).with(
- message: 'BulkImports::Entity stale',
- bulk_import_entity_id: stale_started_bulk_import_entity.id,
- bulk_import_id: stale_started_bulk_import_entity.bulk_import_id
- )
+
+ expect(logger).to receive(:with_entity).with(stale_created_bulk_import_entity).and_call_original
+ expect(logger).to receive(:error).with(message: 'BulkImports::Entity stale')
+
+ expect(logger).to receive(:with_entity).with(stale_started_bulk_import_entity).and_call_original
+ expect(logger).to receive(:error).with(message: 'BulkImports::Entity stale')
end
expect { subject }.to change { stale_created_bulk_import_entity.reload.status_name }.from(:created).to(:timeout)
@@ -61,5 +68,29 @@ RSpec.describe BulkImports::StuckImportWorker, feature_category: :importers do
expect { subject }.to not_change { created_bulk_import.reload.status }
.and not_change { started_bulk_import.reload.status }
end
+
+ context 'when bulk import has been updated recently', :clean_gitlab_redis_shared_state do
+ before do
+ stale_created_bulk_import.update!(updated_at: 2.minutes.ago)
+ stale_started_bulk_import.update!(updated_at: 2.minutes.ago)
+ end
+
+ it 'does not update the status of the import' do
+ expect { subject }.to not_change { stale_created_bulk_import.reload.status_name }
+ .and not_change { stale_started_bulk_import.reload.status_name }
+ end
+ end
+
+ context 'when bulk import entity has been updated recently', :clean_gitlab_redis_shared_state do
+ before do
+ stale_created_bulk_import_entity.update!(updated_at: 2.minutes.ago)
+ stale_started_bulk_import_entity.update!(updated_at: 2.minutes.ago)
+ end
+
+ it 'does not update the status of the entity' do
+ expect { subject }.to not_change { stale_created_bulk_import_entity.reload.status_name }
+ .and not_change { stale_started_bulk_import_entity.reload.status_name }
+ end
+ end
end
end
diff --git a/spec/workers/bulk_imports/transform_references_worker_spec.rb b/spec/workers/bulk_imports/transform_references_worker_spec.rb
new file mode 100644
index 00000000000..6295ecb47d6
--- /dev/null
+++ b/spec/workers/bulk_imports/transform_references_worker_spec.rb
@@ -0,0 +1,257 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::TransformReferencesWorker, feature_category: :importers do
+ let_it_be(:project) do
+ project = create(:project)
+ project.add_owner(user)
+ project
+ end
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:bulk_import) { create(:bulk_import) }
+
+ let_it_be(:entity) do
+ create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import,
+ source_full_path: 'source/full/path')
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
+
+ let_it_be_with_refind(:issue) do
+ create(:issue,
+ project: project,
+ description: 'https://my.gitlab.com/source/full/path/-/issues/1')
+ end
+
+ let_it_be(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!')
+ end
+
+ let_it_be(:issue_note) do
+ create(:note,
+ noteable: issue,
+ project: project,
+ note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.')
+ end
+
+ let_it_be(:merge_request_note) do
+ create(:note,
+ noteable: merge_request,
+ project: project,
+ note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username')
+ end
+
+ let_it_be(:system_note) do
+ create(:note,
+ project: project,
+ system: true,
+ noteable: issue,
+ note: "mentioned in merge request !#{merge_request.iid} created by @old_username",
+ note_html: 'note html'
+ )
+ end
+
+ let(:expected_url) do
+ expected_url = URI('')
+ expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
+ expected_url.host = ::Gitlab.config.gitlab.host
+ expected_url.port = ::Gitlab.config.gitlab.port
+ expected_url.path = "/#{project.full_path}"
+ expected_url
+ end
+
+ subject { described_class.new.perform([object.id], object.class.to_s, tracker.id) }
+
+ before do
+ allow(Gitlab::Cache::Import::Caching)
+ .to receive(:values_from_hash)
+ .and_return({
+ 'old_username' => 'new_username',
+ 'older_username' => 'newer_username',
+ 'source_username' => 'destination_username',
+ 'bob' => 'alice-gdk',
+ 'alice' => 'bob-gdk',
+ 'manuelgrabowski' => 'manuelgrabowski-admin',
+ 'manuelgrabowski-admin' => 'manuelgrabowski',
+ 'boaty-mc-boatface' => 'boatymcboatface',
+ 'boatymcboatface' => 'boaty-mc-boatface'
+ })
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [[issue.id], 'Issue', tracker.id] }
+ end
+
+ it 'transforms and saves multiple objects' do
+ old_note = merge_request_note.note
+ merge_request_note_2 = create(:note, noteable: merge_request, project: project, note: old_note)
+
+ described_class.new.perform([merge_request_note.id, merge_request_note_2.id], 'Note', tracker.id)
+
+ expect(merge_request_note.reload.note).not_to eq(old_note)
+ expect(merge_request_note_2.reload.note).not_to eq(old_note)
+ end
+
+ shared_examples 'transforms and saves references' do
+ it 'transforms references and saves the object' do
+ expect_any_instance_of(object.class) do |object|
+ expect(object).to receive(:save!)
+ end
+
+ expect { subject }.not_to change { object.updated_at }
+
+ expect(body).to eq(expected_body)
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(BulkImports::UsersMapper).to receive(:new).and_raise(StandardError)
+ end
+
+ it 'tracks the error and creates an import failure' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(anything, hash_including(bulk_import_id: bulk_import.id))
+
+ expect(BulkImports::Failure).to receive(:create)
+ .with(hash_including(bulk_import_entity_id: entity.id, pipeline_class: 'ReferencesPipeline'))
+
+ subject
+ end
+ end
+ end
+
+ context 'for issue description' do
+ let(:object) { issue }
+ let(:body) { object.reload.description }
+ let(:expected_body) { "http://localhost:80/#{object.namespace.full_path}/-/issues/1" }
+
+ include_examples 'transforms and saves references'
+
+ shared_examples 'returns object unchanged' do
+ it 'returns object unchanged' do
+ issue.update!(description: description)
+
+ subject
+
+ expect(issue.reload.description).to eq(description)
+ end
+
+ it 'does not save the object' do
+ expect_any_instance_of(object.class) do |object|
+ expect(object).to receive(:save!)
+ end
+
+ subject
+ end
+ end
+
+ context 'when object does not have reference or username' do
+ let(:description) { 'foo' }
+
+ include_examples 'returns object unchanged'
+ end
+
+ context 'when there are no matched urls or usernames' do
+ let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
+
+ include_examples 'returns object unchanged'
+ end
+
+ context 'when url path does not start with source full path' do
+ let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
+
+ include_examples 'returns object unchanged'
+ end
+
+ context 'when host does not match and url path starts with source full path' do
+ let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
+
+ include_examples 'returns object unchanged'
+ end
+
+ context 'when url does not match at all' do
+ let(:description) { 'https://website.example/foo/bar' }
+
+ include_examples 'returns object unchanged'
+ end
+ end
+
+ context 'for merge request description' do
+ let(:object) { merge_request }
+ let(:body) { object.reload.description }
+ let(:expected_body) do
+ "#{expected_url}/-/merge_requests/#{merge_request.iid} @destination_username? @alice-gdk, @bob-gdk!"
+ end
+
+ include_examples 'transforms and saves references'
+ end
+
+ context 'for issue notes' do
+ let(:object) { issue_note }
+ let(:body) { object.reload.note }
+ let(:expected_body) { "#{expected_url}/-/issues/#{issue.iid} @newer_username, not_a@username, and @new_username." }
+
+ include_examples 'transforms and saves references'
+ end
+
+ context 'for merge request notes' do
+ let(:object) { merge_request_note }
+ let(:body) { object.reload.note }
+ let(:expected_body) { "#{expected_url}/-/merge_requests/#{merge_request.iid} @same_username" }
+
+ include_examples 'transforms and saves references'
+ end
+
+ context 'for system notes' do
+ let(:object) { system_note }
+ let(:body) { object.reload.note }
+ let(:expected_body) { "mentioned in merge request !#{merge_request.iid} created by @new_username" }
+
+ include_examples 'transforms and saves references'
+
+ context 'when the note includes a username' do
+ let_it_be(:object) do
+ create(:note,
+ project: project,
+ system: true,
+ noteable: issue,
+ note: 'mentioned in merge request created by @source_username.',
+ note_html: 'empty'
+ )
+ end
+
+ let(:body) { object.reload.note }
+ let(:expected_body) { 'mentioned in merge request created by @destination_username.' }
+
+ include_examples 'transforms and saves references'
+ end
+ end
+
+ context 'when old and new usernames are interchanged' do
+ # e.g
+ # |------------------------|-------------------------|
+ # | old_username | new_username |
+ # |------------------------|-------------------------|
+ # | @manuelgrabowski-admin | @manuelgrabowski |
+ # | @manuelgrabowski | @manuelgrabowski-admin |
+ # |------------------------|-------------------------|
+
+ let_it_be(:object) do
+ create(:note,
+ project: project,
+ noteable: merge_request,
+ note: '@manuelgrabowski-admin, @boaty-mc-boatface'
+ )
+ end
+
+ let(:body) { object.reload.note }
+ let(:expected_body) { '@manuelgrabowski, @boatymcboatface' }
+
+ include_examples 'transforms and saves references'
+ end
+end
diff --git a/spec/workers/ci/catalog/resources/process_sync_events_worker_spec.rb b/spec/workers/ci/catalog/resources/process_sync_events_worker_spec.rb
new file mode 100644
index 00000000000..036cc54e9ba
--- /dev/null
+++ b/spec/workers/ci/catalog/resources/process_sync_events_worker_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Catalog::Resources::ProcessSyncEventsWorker, feature_category: :pipeline_composition do
+ subject(:worker) { described_class.new }
+
+ include_examples 'an idempotent worker'
+
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ end
+
+ it 'has the option to reschedule once if deduplicated and a TTL of 1 minute' do
+ expect(described_class.get_deduplication_options).to include({ if_deduplicated: :reschedule_once, ttl: 1.minute })
+ end
+
+ describe '#perform' do
+ let_it_be(:project) { create(:project, name: 'Old Name') }
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+
+ before_all do
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource, status: :processed)
+ create_list(:ci_catalog_resource_sync_event, 2, catalog_resource: resource)
+ # PG trigger adds an event for this update
+ project.update!(name: 'New Name', description: 'Test', visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ end
+
+ subject(:perform) { worker.perform }
+
+ it 'consumes all sync events' do
+ expect { perform }.to change { Ci::Catalog::Resources::SyncEvent.status_pending.count }
+ .from(3).to(0)
+ end
+
+ it 'syncs the denormalized columns of catalog resource with the project' do
+ perform
+
+ expect(resource.reload.name).to eq(project.name)
+ expect(resource.reload.description).to eq(project.description)
+ expect(resource.reload.visibility_level).to eq(project.visibility_level)
+ end
+
+ it 'logs the service result', :aggregate_failures do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:estimated_total_events, 3)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:consumable_events, 3)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:processed_events, 3)
+
+ perform
+ end
+ end
+end
diff --git a/spec/workers/ci/low_urgency_cancel_redundant_pipelines_worker_spec.rb b/spec/workers/ci/low_urgency_cancel_redundant_pipelines_worker_spec.rb
new file mode 100644
index 00000000000..da09a28b384
--- /dev/null
+++ b/spec/workers/ci/low_urgency_cancel_redundant_pipelines_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::LowUrgencyCancelRedundantPipelinesWorker, feature_category: :continuous_integration do
+ it 'is labeled as low urgency' do
+ expect(described_class.get_urgency).to eq(:low)
+ end
+end
diff --git a/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
index b594f661a9a..a7624fdfe01 100644
--- a/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe Ci::PipelineArtifacts::CoverageReportWorker, feature_category: :code_testing do
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ end
+
describe '#perform' do
let(:pipeline_id) { pipeline.id }
diff --git a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
index e3e7047db56..e61d2e5450a 100644
--- a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
+++ b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
@@ -19,13 +19,13 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupWorker, featur
let(:resource_group) { create(:ci_resource_group) }
let(:resource_group_id) { resource_group.id }
- include_examples 'an idempotent worker' do
+ it_behaves_like 'an idempotent worker' do
let(:job_args) { [resource_group_id] }
end
context 'when resource group exists' do
it 'executes AssignResourceFromResourceGroupService' do
- expect_next_instances_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, 2, false, resource_group.project, nil) do |service|
+ expect_next_instance_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, resource_group.project, nil) do |service|
expect(service).to receive(:execute).with(resource_group)
end
diff --git a/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
index 30b451f2112..64e2e8cd037 100644
--- a/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
+++ b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateWorker, feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateWorker, feature_category: :fleet_visibility do
subject(:worker) { described_class.new }
describe '#perform' do
diff --git a/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
index 34b1cb33e6b..7157a3e7beb 100644
--- a/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
+++ b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::ReconcileExistingRunnerVersionsCronWorker, feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::ReconcileExistingRunnerVersionsCronWorker, feature_category: :fleet_visibility do
subject(:worker) { described_class.new }
describe '#perform' do
diff --git a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
index 79d1fadfd2b..4c5ea621191 100644
--- a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
+++ b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::StaleMachinesCleanupCronWorker, feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::StaleMachinesCleanupCronWorker, feature_category: :fleet_visibility do
let(:worker) { described_class.new }
describe '#perform', :freeze_time do
diff --git a/spec/workers/click_house/events_sync_worker_spec.rb b/spec/workers/click_house/events_sync_worker_spec.rb
index 01267db36a7..9662f26115a 100644
--- a/spec/workers/click_house/events_sync_worker_spec.rb
+++ b/spec/workers/click_house/events_sync_worker_spec.rb
@@ -5,6 +5,12 @@ require 'spec_helper'
RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_management do
let(:worker) { described_class.new }
+ specify do
+ expect(worker.class.click_house_worker_attrs).to match(
+ a_hash_including(migration_lock_ttl: ClickHouse::MigrationSupport::ExclusiveLock::DEFAULT_CLICKHOUSE_WORKER_TTL)
+ )
+ end
+
it_behaves_like 'an idempotent worker' do
context 'when the event_sync_worker_for_click_house feature flag is on', :click_house do
before do
@@ -63,11 +69,32 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
end
it 'inserts all records' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 4, reached_end_of_table: true })
+
worker.perform
events = ClickHouse::Client.select('SELECT * FROM events', :main)
expect(events.size).to eq(4)
end
+
+ context 'when new records are inserted while processing' do
+ it 'does not process new records created during the iteration' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 4,
+ reached_end_of_table: true })
+
+ # Simulating the case when there is an insert during the iteration
+ call_count = 0
+ allow(worker).to receive(:next_batch).and_wrap_original do |method|
+ call_count += 1
+ create(:event) if call_count == 3
+ method.call
+ end
+
+ worker.perform
+ end
+ end
end
context 'when time limit is reached' do
@@ -96,6 +123,9 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
end
it 'syncs records after the cursor' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 3, reached_end_of_table: true })
+
worker.perform
events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
@@ -121,7 +151,7 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
context 'when clickhouse is not configured' do
before do
- allow(ClickHouse::Client.configuration).to receive(:databases).and_return({})
+ allow(ClickHouse::Client).to receive(:database_configured?).and_return(false)
end
it 'skips execution' do
@@ -135,7 +165,7 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
context 'when exclusive lease error happens' do
it 'skips execution' do
stub_feature_flags(event_sync_worker_for_click_house: true)
- allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+ allow(ClickHouse::Client).to receive(:database_configured?).with(:main).and_return(true)
expect(worker).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :skipped })
diff --git a/spec/workers/concerns/click_house_worker_spec.rb b/spec/workers/concerns/click_house_worker_spec.rb
new file mode 100644
index 00000000000..cb8bf9c7578
--- /dev/null
+++ b/spec/workers/concerns/click_house_worker_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouseWorker, feature_category: :database do
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
+ include ClickHouseWorker
+
+ def perform
+ AnotherWorker.perform_async('identifier')
+ end
+ end
+ end
+
+ let(:another_worker) do
+ Class.new do
+ def self.name
+ 'AnotherWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ before do
+ stub_const('DummyWorker', worker)
+ stub_const('AnotherWorker', another_worker)
+ end
+
+ describe '.register_click_house_worker?' do
+ subject(:register_click_house_worker?) { worker.register_click_house_worker? }
+
+ context 'when click_house_migration_lock is set' do
+ before do
+ worker.click_house_migration_lock(1.minute)
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when click_house_migration_lock is not set' do
+ it { is_expected.to be(true) }
+ end
+
+ context 'when worker does not include module' do
+ it { expect(another_worker).not_to respond_to(:register_click_house_worker?) }
+ end
+ end
+
+ describe '.click_house_worker_attrs' do
+ subject(:click_house_worker_attrs) { worker.click_house_migration_lock(ttl) }
+
+ let(:ttl) { 1.minute }
+
+ it { expect { click_house_worker_attrs }.not_to raise_error }
+ it { is_expected.to match(a_hash_including(migration_lock_ttl: 60.seconds)) }
+
+ context 'with invalid ttl' do
+ let(:ttl) { {} }
+
+ it 'raises exception' do
+ expect { click_house_worker_attrs }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ it 'registers ClickHouse worker' do
+ expect(worker.register_click_house_worker?).to be_truthy
+ expect(another_worker).not_to respond_to(:register_click_house_worker?)
+ end
+
+ it 'sets default TTL for worker registration' do
+ expect(worker.click_house_worker_attrs).to match(
+ a_hash_including(migration_lock_ttl: ClickHouse::MigrationSupport::ExclusiveLock::DEFAULT_CLICKHOUSE_WORKER_TTL)
+ )
+ end
+
+ it 'registers worker to pause on ClickHouse migrations' do
+ expect(worker.get_pause_control).to eq(:click_house_migration)
+ expect(another_worker.get_pause_control).to be_nil
+ end
+end
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index b2bc502d156..bba855f5095 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -67,10 +67,8 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
describe '#import', :clean_gitlab_redis_cache do
before do
- expect(worker)
- .to receive(:importer_class)
- .at_least(:once)
- .and_return(importer_class)
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ expect(worker).to receive(:importer_class).at_least(:once).and_return(importer_class)
end
it 'imports the object' do
@@ -203,25 +201,22 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
expect(project.import_failures.last.exception_message).to eq('some error')
end
- context 'without github_identifiers defined' do
+ context 'when a NoMethod error is raised' do
let(:stubbed_representation) { representation_class.instance_eval { undef_method :github_identifiers } }
- it 'logs error when representation does not have a github_id' do
- expect(importer_class).not_to receive(:new)
-
+ it 'logs the error but does not re-raise it, so the worker does not retry' do
expect(Gitlab::Import::ImportFailureService)
.to receive(:track)
.with(
project_id: project.id,
exception: a_kind_of(NoMethodError),
error_source: 'klass_name',
- fail_import: true,
+ fail_import: false,
external_identifiers: { object_type: 'dummy' }
)
.and_call_original
- expect { worker.import(project, client, { 'number' => 10 }) }
- .to raise_error(NoMethodError, /^undefined method `github_identifiers/)
+ worker.import(project, client, { 'number' => 10 })
end
end
@@ -239,7 +234,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.and_raise(exception)
end
- it 'logs an error' do
+ it 'logs the error but does not re-raise it, so the worker does not retry' do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
index fa782967441..37e686f9f92 100644
--- a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -15,6 +15,10 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
end.new
end
+ it 'has a Sidekiq retry of 6' do
+ expect(worker.class.sidekiq_options['retry']).to eq(6)
+ end
+
describe '#perform' do
it 'returns if no project could be found' do
expect(worker).not_to receive(:try_import)
@@ -138,6 +142,10 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
end
describe '#try_import' do
+ before do
+ allow(worker).to receive(:jid).and_return('jid')
+ end
+
it 'imports the project' do
client = double(:client)
@@ -145,7 +153,7 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
.to receive(:import)
.with(client, project)
- expect(project.import_state).to receive(:refresh_jid_expiration)
+ expect(Gitlab::GithubImport::RefreshImportJidWorker).to receive(:perform_in_the_future).with(project.id, 'jid')
worker.try_import(client, project)
end
@@ -153,7 +161,7 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
it 'reschedules the worker if RateLimitError was raised' do
client = double(:client, rate_limit_resets_in: 10)
- expect(project.import_state).to receive(:refresh_jid_expiration)
+ expect(Gitlab::GithubImport::RefreshImportJidWorker).to receive(:perform_in_the_future).with(project.id, 'jid')
expect(worker)
.to receive(:import)
@@ -186,7 +194,7 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
end
end
- describe '.resumes_work_when_interrupted!' do
+ describe '.sidekiq_options!' do
subject(:sidekiq_options) { worker.class.sidekiq_options }
it 'does not set the `max_retries_after_interruption` if not called' do
@@ -199,16 +207,8 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
is_expected.to include('max_retries_after_interruption' => 20)
end
- context 'when the flag is disabled' do
- before do
- stub_feature_flags(github_importer_raise_max_interruptions: false)
- end
-
- it 'does not set `max_retries_after_interruption`' do
- worker.class.resumes_work_when_interrupted!
-
- is_expected.not_to have_key('max_retries_after_interruption')
- end
+ it 'sets the status_expiration' do
+ is_expected.to include('status_expiration' => Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
end
end
end
diff --git a/spec/workers/concerns/worker_attributes_spec.rb b/spec/workers/concerns/worker_attributes_spec.rb
index 767a55162fb..1c9d9a5a1ad 100644
--- a/spec/workers/concerns/worker_attributes_spec.rb
+++ b/spec/workers/concerns/worker_attributes_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe WorkerAttributes, feature_category: :shared do
describe '.data_consistency' do
context 'with invalid data_consistency' do
- it 'raise exception' do
+ it 'raises exception' do
expect { worker.data_consistency(:invalid) }
.to raise_error('Invalid data consistency: invalid')
end
diff --git a/spec/workers/delete_user_worker_spec.rb b/spec/workers/delete_user_worker_spec.rb
index 8a99f69c079..13a7390f592 100644
--- a/spec/workers/delete_user_worker_spec.rb
+++ b/spec/workers/delete_user_worker_spec.rb
@@ -30,28 +30,91 @@ RSpec.describe DeleteUserWorker, feature_category: :user_management do
end
end
- context 'when user is banned' do
+ context 'when user deleted their own account' do
subject(:perform) { described_class.new.perform(current_user.id, user.id) }
before do
- user.ban
+ # user is blocked as part of User#delete_async
+ user.block
+ # custom attribute is created as part of User#delete_async
+ UserCustomAttribute.set_deleted_own_account_at(user)
end
- it_behaves_like 'does nothing'
+ shared_examples 'proceeds with deletion' do
+ it "proceeds with deletion" do
+ expect_next_instance_of(Users::DestroyService) do |service|
+ expect(service).to receive(:execute).with(user, {})
+ end
+
+ perform
+ end
+ end
+
+ it_behaves_like 'proceeds with deletion'
context 'when delay_delete_own_user feature flag is disabled' do
before do
stub_feature_flags(delay_delete_own_user: false)
end
- it "proceeds with deletion" do
- expect_next_instance_of(Users::DestroyService) do |service|
- expect(service).to receive(:execute).with(user, {})
- end
+ it_behaves_like 'proceeds with deletion'
+ end
+
+ shared_examples 'logs' do |reason|
+ it 'logs' do
+ expect(Gitlab::AppLogger).to receive(:info).with({
+ message: 'Skipped own account deletion.',
+ reason: reason,
+ user_id: user.id,
+ username: user.username
+ })
perform
end
end
+
+ shared_examples 'updates the user\'s custom attributes' do
+ it 'destroys the user\'s DELETED_OWN_ACCOUNT_AT custom attribute' do
+ key = UserCustomAttribute::DELETED_OWN_ACCOUNT_AT
+ expect { perform }.to change { user.custom_attributes.by_key(key).count }.from(1).to(0)
+ end
+
+ context 'when custom attribute is not present' do
+ before do
+ UserCustomAttribute.delete_all
+ end
+
+ it 'does nothing' do
+ expect { perform }.not_to raise_error
+ end
+ end
+
+ it 'creates a SKIPPED_ACCOUNT_DELETION_AT custom attribute for the user' do
+ key = UserCustomAttribute::SKIPPED_ACCOUNT_DELETION_AT
+ expect { perform }.to change { user.custom_attributes.by_key(key).count }.from(0).to(1)
+ end
+ end
+
+ context 'when user is banned' do
+ before do
+ user.activate
+ user.ban
+ end
+
+ it_behaves_like 'does nothing'
+ it_behaves_like 'logs', 'User has been banned.'
+ it_behaves_like 'updates the user\'s custom attributes'
+ end
+
+ context 'when user is not blocked (e.g. result of user reinstatement request)' do
+ before do
+ user.activate
+ end
+
+ it_behaves_like 'does nothing'
+ it_behaves_like 'logs', 'User has been unblocked.'
+ it_behaves_like 'updates the user\'s custom attributes'
+ end
end
context 'when user to delete does not exist' do
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 4c2cff434a7..c60e8d37c2e 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
file_worker_queues = Gitlab::SidekiqConfig.worker_queues.to_set
worker_queues = Gitlab::SidekiqConfig.workers.map(&:generated_queue_name).to_set
- worker_queues << ActionMailer::MailDeliveryJob.new.queue_name
+ worker_queues << ActionMailer::MailDeliveryJob.new('Notify').queue_name
worker_queues << 'default'
missing_from_file = worker_queues - file_worker_queues
@@ -120,6 +120,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
{
'AdjournedProjectDeletionWorker' => 3,
'AdminEmailsWorker' => 3,
+ 'Ai::SyncServiceTokenWorker' => 3,
'Analytics::CodeReviewMetricsWorker' => 3,
'Analytics::DevopsAdoption::CreateSnapshotWorker' => 3,
'Analytics::UsageTrends::CounterJobWorker' => 3,
@@ -140,9 +141,10 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'BulkImportWorker' => 3,
'BulkImports::ExportRequestWorker' => 5,
'BulkImports::EntityWorker' => 3,
- 'BulkImports::PipelineWorker' => 3,
- 'BulkImports::PipelineBatchWorker' => 3,
- 'BulkImports::FinishProjectImportWorker' => 5,
+ 'BulkImports::PipelineWorker' => 6,
+ 'BulkImports::PipelineBatchWorker' => 6,
+ 'BulkImports::FinishProjectImportWorker' => 3,
+ 'BulkImports::TransformReferencesWorker' => 3,
'Chaos::CpuSpinWorker' => 3,
'Chaos::DbSpinWorker' => 3,
'Chaos::KillWorker' => false,
@@ -233,30 +235,19 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'ExternalServiceReactiveCachingWorker' => 3,
'FileHookWorker' => false,
'FlushCounterIncrementsWorker' => 3,
- 'Geo::Batch::ProjectRegistrySchedulerWorker' => 3,
- 'Geo::Batch::ProjectRegistryWorker' => 3,
'Geo::ContainerRepositorySyncWorker' => 1,
'Geo::DestroyWorker' => 3,
'Geo::EventWorker' => 3,
'Geo::FileRemovalWorker' => 3,
- 'Geo::ProjectSyncWorker' => 1,
- 'Geo::RenameRepositoryWorker' => 3,
- 'Geo::RepositoryCleanupWorker' => 3,
- 'Geo::RepositoryShardSyncWorker' => false,
- 'Geo::RepositoryVerification::Primary::ShardWorker' => false,
- 'Geo::RepositoryVerification::Primary::SingleWorker' => false,
- 'Geo::RepositoryVerification::Secondary::SingleWorker' => false,
'Geo::ReverificationBatchWorker' => 0,
'Geo::BulkMarkPendingBatchWorker' => 0,
'Geo::BulkMarkVerificationPendingBatchWorker' => 0,
- 'Geo::Scheduler::Primary::SchedulerWorker' => false,
'Geo::Scheduler::SchedulerWorker' => false,
'Geo::Scheduler::Secondary::SchedulerWorker' => false,
'Geo::VerificationBatchWorker' => 0,
'Geo::VerificationStateBackfillWorker' => false,
'Geo::VerificationTimeoutWorker' => false,
'Geo::VerificationWorker' => 3,
- 'GeoRepositoryDestroyWorker' => 3,
'Gitlab::BitbucketImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketImport::Stage::FinishImportWorker' => 3,
'Gitlab::BitbucketImport::Stage::ImportIssuesWorker' => 3,
@@ -271,7 +262,8 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::BitbucketServerImport::Stage::ImportNotesWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::ImportRepositoryWorker' => 3,
- 'Gitlab::GithubImport::AdvanceStageWorker' => 3,
+ 'Gitlab::BitbucketServerImport::Stage::ImportUsersWorker' => 3,
+ 'Gitlab::GithubImport::AdvanceStageWorker' => 6,
'Gitlab::GithubImport::Attachments::ImportReleaseWorker' => 5,
'Gitlab::GithubImport::Attachments::ImportNoteWorker' => 5,
'Gitlab::GithubImport::Attachments::ImportIssueWorker' => 5,
@@ -288,20 +280,20 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::GithubImport::PullRequests::ImportMergedByWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
'Gitlab::GithubImport::RefreshImportJidWorker' => 5,
- 'Gitlab::GithubImport::Stage::FinishImportWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportIssueEventsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportLfsObjectsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportAttachmentsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportNotesWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportCollaboratorsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewRequestsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportPullRequestsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportRepositoryWorker' => 5,
+ 'Gitlab::GithubImport::Stage::FinishImportWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportIssueEventsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportLfsObjectsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportAttachmentsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportNotesWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportCollaboratorsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewRequestsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportRepositoryWorker' => 6,
'Gitlab::GithubGistsImport::ImportGistWorker' => 5,
'Gitlab::GithubGistsImport::StartImportWorker' => 5,
'Gitlab::GithubGistsImport::FinishImportWorker' => 5,
@@ -391,6 +383,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Packages::MarkPackageFilesForDestructionWorker' => 3,
'Packages::Maven::Metadata::SyncWorker' => 3,
'Packages::Npm::CleanupStaleMetadataCacheWorker' => 0,
+ 'Packages::Nuget::CleanupStaleSymbolsWorker' => 0,
'Packages::Nuget::ExtractionWorker' => 3,
'Packages::Rubygems::ExtractionWorker' => 3,
'PagesDomainSslRenewalWorker' => 3,
@@ -482,8 +475,9 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'ComplianceManagement::MergeRequests::ComplianceViolationsWorker' => 3,
'Zoekt::IndexerWorker' => 2,
'Issuable::RelatedLinksCreateWorker' => 3,
- 'BulkImports::RelationBatchExportWorker' => 3,
- 'BulkImports::RelationExportWorker' => 3
+ 'BulkImports::RelationBatchExportWorker' => 6,
+ 'BulkImports::RelationExportWorker' => 6,
+ 'Ci::Runners::ExportUsageCsvWorker' => 3
}.merge(extra_retry_exceptions)
end
diff --git a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
index 673988a3275..6fcf1ac8822 100644
--- a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, :clean_gitlab_redis_
it 'schedules the next stage' do
expect(import_state)
- .to receive(:refresh_jid_expiration)
+ .to receive(:refresh_jid_expiration).twice
expect(Gitlab::BitbucketImport::Stage::FinishImportWorker)
.to receive(:perform_async)
diff --git a/spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb
index 7ea23041e79..1531b30089c 100644
--- a/spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb
@@ -18,12 +18,25 @@ RSpec.describe Gitlab::BitbucketServerImport::Stage::ImportRepositoryWorker, fea
end
it 'schedules the next stage' do
- expect(Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker).to receive(:perform_async)
+ expect(Gitlab::BitbucketServerImport::Stage::ImportUsersWorker).to receive(:perform_async)
.with(project.id)
worker.perform(project.id)
end
+ context 'when the bitbucket_server_convert_mentions_to_users flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
+ end
+
+ it 'skips the user import and schedules the next stage' do
+ expect(Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker).to receive(:perform_async)
+ .with(project.id)
+
+ worker.perform(project.id)
+ end
+ end
+
it 'logs stage start and finish' do
expect(Gitlab::BitbucketServerImport::Logger)
.to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
diff --git a/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb b/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb
new file mode 100644
index 00000000000..d4cd1b82349
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::Stage::ImportUsersWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ let(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketServerImport::StageMethods
+
+ describe '#perform' do
+ context 'when the import succeeds' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketServerImport::Importers::UsersImporter) do |importer|
+ allow(importer).to receive(:execute)
+ end
+ end
+
+ it 'schedules the next stage' do
+ expect(Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker).to receive(:perform_async)
+ .with(project.id)
+
+ worker.perform(project.id)
+ end
+
+ it 'logs stage start and finish' do
+ expect(Gitlab::BitbucketServerImport::Logger)
+ .to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
+ expect(Gitlab::BitbucketServerImport::Logger)
+ .to receive(:info).with(hash_including(message: 'stage finished', project_id: project.id))
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when project does not exists' do
+ it 'does not call importer' do
+ expect(Gitlab::BitbucketServerImport::Importers::UsersImporter).not_to receive(:new)
+
+ worker.perform(-1)
+ end
+ end
+
+ context 'when project import state is not `started`' do
+ it 'does not call importer' do
+ project = create(:project, :import_canceled)
+
+ expect(Gitlab::BitbucketServerImport::Importers::UsersImporter).not_to receive(:new)
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when the importer fails' do
+ it 'does not schedule the next stage and raises error' do
+ exception = StandardError.new('Error')
+
+ allow_next_instance_of(Gitlab::BitbucketServerImport::Importers::UsersImporter) do |importer|
+ allow(importer).to receive(:execute).and_raise(exception)
+ end
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track).with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: false
+ ).and_call_original
+
+ expect { worker.perform(project.id) }
+ .to change { Gitlab::BitbucketServerImport::Stage::ImportUsersWorker.jobs.size }.by(0)
+ .and raise_error(exception)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
index 60c117a2a90..dcf016c550b 100644
--- a/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
@@ -4,4 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::AdvanceStageWorker, feature_category: :importers do
it_behaves_like Gitlab::Import::AdvanceStage, factory: :import_state
+
+ it 'has a Sidekiq retry of 6' do
+ expect(described_class.sidekiq_options['retry']).to eq(6)
+ end
end
diff --git a/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb b/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb
index abba6cd7734..5d0cb05c8d5 100644
--- a/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::GithubImport::RefreshImportJidWorker, feature_category: :
it 'schedules a job in the future' do
expect(described_class)
.to receive(:perform_in)
- .with(1.minute.to_i, 10, '123')
+ .with(5.minutes.to_i, 10, '123')
described_class.perform_in_the_future(10, '123')
end
@@ -33,15 +33,20 @@ RSpec.describe Gitlab::GithubImport::RefreshImportJidWorker, feature_category: :
allow(worker)
.to receive(:find_import_state)
.with(project.id)
- .and_return(project)
+ .and_return(import_state)
expect(Gitlab::SidekiqStatus)
.to receive(:running?)
.with('123')
.and_return(true)
- expect(project)
- .to receive(:refresh_jid_expiration)
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:expire)
+ .with('123', Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
+
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:set)
+ .with(import_state.jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
expect(worker.class)
.to receive(:perform_in_the_future)
@@ -63,8 +68,11 @@ RSpec.describe Gitlab::GithubImport::RefreshImportJidWorker, feature_category: :
.with('123')
.and_return(false)
- expect(project)
- .not_to receive(:refresh_jid_expiration)
+ expect(Gitlab::SidekiqStatus)
+ .not_to receive(:expire)
+
+ expect(Gitlab::SidekiqStatus)
+ .not_to receive(:set)
worker.perform(project.id, '123')
end
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index f4a306eeb0c..020f7539bf4 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -10,16 +10,6 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker, feature_cate
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
- before do
- expect(Gitlab::GithubImport::RefreshImportJidWorker)
- .to receive(:perform_in_the_future)
- .with(project.id, '123')
-
- expect(worker)
- .to receive(:jid)
- .and_return('123')
- end
-
context 'when the import succeeds' do
context 'with issues' do
it 'schedules the importing of the base data' do
diff --git a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
index 6dfab44b228..3dc3971385e 100644
--- a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
@@ -22,15 +22,15 @@ RSpec.describe Gitlab::JiraImport::ImportIssueWorker, feature_category: :importe
describe '#perform', :clean_gitlab_redis_cache do
let(:assignee_ids) { [user.id] }
let(:issue_attrs) do
- build(:issue, project_id: project.id, namespace_id: project.project_namespace_id, title: 'jira issue')
- .as_json.merge(
- 'label_ids' => [jira_issue_label_1.id, jira_issue_label_2.id], 'assignee_ids' => assignee_ids
- ).except('issue_type')
+ build(:issue, project_id: project.id, namespace_id: project.project_namespace_id, title: 'jira issue').as_json
+ .merge('label_ids' => [jira_issue_label_1.id, jira_issue_label_2.id], 'assignee_ids' => assignee_ids)
+ .except('issue_type')
.compact
end
context 'when any exception raised while inserting to DB' do
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(subject).to receive(:insert_and_return_id).and_raise(StandardError)
expect(Gitlab::JobWaiter).to receive(:notify)
diff --git a/spec/workers/integrations/slack_event_worker_spec.rb b/spec/workers/integrations/slack_event_worker_spec.rb
index 6e8c73f1506..7a0a17569b2 100644
--- a/spec/workers/integrations/slack_event_worker_spec.rb
+++ b/spec/workers/integrations/slack_event_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::SlackEventWorker, :clean_gitlab_redis_cluster_shared_state,
+RSpec.describe Integrations::SlackEventWorker, :clean_gitlab_redis_shared_state,
feature_category: :integrations do
describe '.event?' do
subject { described_class.event?(event) }
diff --git a/spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb b/spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb
index 7341a0dcc5b..c49b4339f7b 100644
--- a/spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb
+++ b/spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb
@@ -12,6 +12,10 @@ RSpec.describe MergeRequests::SetReviewerReviewedWorker, feature_category: :sour
it_behaves_like 'subscribes to event' do
let(:event) { approved_event }
+
+ before do
+ stub_feature_flags(mr_request_changes: false)
+ end
end
it 'calls MergeRequests::UpdateReviewerStateService' do
diff --git a/spec/workers/packages/cleanup_package_registry_worker_spec.rb b/spec/workers/packages/cleanup_package_registry_worker_spec.rb
index f2787a92fbf..0d2f9629327 100644
--- a/spec/workers/packages/cleanup_package_registry_worker_spec.rb
+++ b/spec/workers/packages/cleanup_package_registry_worker_spec.rb
@@ -80,6 +80,28 @@ RSpec.describe Packages::CleanupPackageRegistryWorker, feature_category: :packag
end
end
+ context 'with nuget symbols pending destruction' do
+ let_it_be(:nuget_symbol) { create(:nuget_symbol, :stale) }
+
+ include_examples 'an idempotent worker' do
+ it 'queues the cleanup job' do
+ expect(Packages::Nuget::CleanupStaleSymbolsWorker).to receive(:perform_with_capacity)
+
+ perform
+ end
+ end
+ end
+
+ context 'with no nuget symbols pending destruction' do
+ include_examples 'an idempotent worker' do
+ it 'does not queue the cleanup job' do
+ expect(Packages::Nuget::CleanupStaleSymbolsWorker).not_to receive(:perform_with_capacity)
+
+ perform
+ end
+ end
+ end
+
describe 'counts logging' do
let_it_be(:processing_package_file) { create(:package_file, status: :processing) }
diff --git a/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb b/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb
index 360cc4223b4..a061d97ddf5 100644
--- a/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb
+++ b/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb
@@ -58,13 +58,5 @@ RSpec.describe Packages::Npm::CreateMetadataCacheWorker, type: :worker, feature_
it_behaves_like 'does not trigger service to create npm metadata cache'
end
-
- context 'when npm_metadata_cache flag is disabled' do
- before do
- stub_feature_flags(npm_metadata_cache: false)
- end
-
- it_behaves_like 'does not trigger service to create npm metadata cache'
- end
end
end
diff --git a/spec/workers/packages/nuget/cleanup_stale_symbols_worker_spec.rb b/spec/workers/packages/nuget/cleanup_stale_symbols_worker_spec.rb
new file mode 100644
index 00000000000..41afe64a808
--- /dev/null
+++ b/spec/workers/packages/nuget/cleanup_stale_symbols_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::CleanupStaleSymbolsWorker, type: :worker, feature_category: :package_registry do
+ let(:worker) { described_class.new }
+
+ describe '#perform_work' do
+ subject(:perform_work) { worker.perform_work }
+
+ context 'with no work to do' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with work to do' do
+ let_it_be(:symbol_1) { create(:nuget_symbol) }
+ let_it_be(:symbol_2) { create(:nuget_symbol, :stale) }
+
+ it 'deletes the stale symbol', :aggregate_failures do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:nuget_symbol_id, symbol_2.id)
+ expect(Packages::Nuget::Symbol).to receive(:next_pending_destruction).with(order_by: nil).and_call_original
+ expect { perform_work }.to change { Packages::Nuget::Symbol.count }.by(-1)
+ expect { symbol_2.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with a stale symbol' do
+ let_it_be(:symbol) { create(:nuget_symbol, :stale) }
+
+ context 'with an error during deletion' do
+ before do
+ allow_next_found_instance_of(Packages::Nuget::Symbol) do |instance|
+ allow(instance).to receive(:destroy!).and_raise(StandardError)
+ end
+ end
+
+ it 'handles the error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(StandardError), class: described_class.name
+ )
+
+ expect { perform_work }.to change { Packages::Nuget::Symbol.error.count }.by(1)
+ expect(symbol.reload).to be_error
+ end
+ end
+
+ context 'when trying to destroy a destroyed record' do
+ before do
+ allow_next_found_instance_of(Packages::Nuget::Symbol) do |instance|
+ destroy_method = instance.method(:destroy!)
+
+ allow(instance).to receive(:destroy!) do
+ destroy_method.call
+
+ raise StandardError
+ end
+ end
+ end
+
+ it 'handles the error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception)
+ .with(instance_of(StandardError), class: described_class.name)
+ expect { perform_work }.not_to change { Packages::Nuget::Symbol.count }
+ expect(symbol.reload).to be_error
+ end
+ end
+ end
+ end
+
+ describe '#max_running_jobs' do
+ let(:capacity) { described_class::MAX_CAPACITY }
+
+ subject { worker.max_running_jobs }
+
+ it { is_expected.to eq(capacity) }
+ end
+end
diff --git a/spec/workers/pages/deactivate_mr_deployments_worker_spec.rb b/spec/workers/pages/deactivate_mr_deployments_worker_spec.rb
new file mode 100644
index 00000000000..c060118a062
--- /dev/null
+++ b/spec/workers/pages/deactivate_mr_deployments_worker_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::DeactivateMrDeploymentsWorker, feature_category: :pages do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:merge_request) { create(:merge_request) }
+ let(:pipeline_1) { create(:ci_pipeline, merge_request: merge_request) }
+ let(:pipeline_2) { create(:ci_pipeline, merge_request: merge_request) }
+
+ context 'when MR does not have a Pages Build' do
+ it 'does not raise an error' do
+ expect { worker.perform(merge_request) }.not_to raise_error
+ end
+ end
+
+ context 'when MR does have a Pages Build' do
+ let(:build_1) { create(:ci_build, pipeline: pipeline_1) }
+ let(:build_2) { create(:ci_build, pipeline: pipeline_2) }
+
+ context 'with a path_prefix' do
+ it 'deactivates the deployment', :freeze_time do
+ pages_deployment_1 = create(:pages_deployment, path_prefix: '/foo', ci_build: build_1)
+ pages_deployment_2 = create(:pages_deployment, path_prefix: '/bar', ci_build: build_1)
+
+ expect { worker.perform(merge_request.id) }
+ .to change { pages_deployment_1.reload.deleted_at }.from(nil).to(Time.now.utc)
+ .and change { pages_deployment_2.reload.deleted_at }.from(nil).to(Time.now.utc)
+ end
+ end
+
+ context 'without a path_prefix' do
+ it 'does not deactivate the deployment' do
+ pages_deployment_1 = create(:pages_deployment, path_prefix: '', ci_build: build_1)
+
+ expect { worker.perform(merge_request) }
+ .to not_change { pages_deployment_1.reload.deleted_at }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index 48138034c33..5648c5bc4c5 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -14,13 +14,15 @@ RSpec.describe PipelineScheduleWorker, :sidekiq_inline, feature_category: :conti
create(:ci_pipeline_schedule, :nightly, project: project, owner: user)
end
+ let(:next_run_at) { pipeline_schedule.next_run_at }
+
before do
stub_application_setting(auto_devops_enabled: false)
stub_ci_pipeline_to_return_yaml_file
end
around do |example|
- travel_to(pipeline_schedule.next_run_at + 1.hour) do
+ travel_to(next_run_at + 1.hour) do
example.run
end
end
@@ -142,4 +144,52 @@ RSpec.describe PipelineScheduleWorker, :sidekiq_inline, feature_category: :conti
expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
end
end
+
+ context 'with scheduling delay' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ let!(:other_pipeline_schedule) do
+ create(:ci_pipeline_schedule, :every_minute, project: project, owner: user)
+ end
+
+ let(:next_run_at) do
+ [pipeline_schedule, other_pipeline_schedule].maximum(:next_run_at)
+ end
+
+ it 'calls bulk_perform_in with the arguments and delay' do
+ expect(RunPipelineScheduleWorker)
+ .to receive(:bulk_perform_in)
+ .with(1.second, [[pipeline_schedule.id, user.id, { scheduling: true }]])
+ .and_call_original
+
+ expect(RunPipelineScheduleWorker)
+ .to receive(:bulk_perform_in)
+ .with(7.seconds, [[other_pipeline_schedule.id, user.id, { scheduling: true }]])
+ .and_call_original
+
+ subject
+ end
+
+ context 'with run_pipeline_schedule_worker_with_delay disabled' do
+ before do
+ stub_feature_flags(run_pipeline_schedule_worker_with_delay: false)
+ end
+
+ it 'calls bulk_perform_async with the arguments and delay' do
+ expect(RunPipelineScheduleWorker)
+ .to receive(:bulk_perform_async)
+ .with([[pipeline_schedule.id, user.id, { scheduling: true }]])
+ .and_call_original
+
+ expect(RunPipelineScheduleWorker)
+ .to receive(:bulk_perform_async)
+ .with([[other_pipeline_schedule.id, user.id, { scheduling: true }]])
+ .and_call_original
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 02221285ad3..956e29ec7f4 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -3,180 +3,186 @@
require 'spec_helper'
RSpec.describe ProcessCommitWorker, feature_category: :source_code_management do
- let(:worker) { described_class.new }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
let(:project) { create(:project, :public, :repository) }
let(:issue) { create(:issue, project: project, author: user) }
let(:commit) { project.commit }
+ let(:worker) { described_class.new }
+
it "is deduplicated" do
expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
expect(described_class.get_deduplication_options).to include(feature_flag: :deduplicate_process_commit_worker)
end
describe '#perform' do
- it 'does not process the commit when the project does not exist' do
- expect(worker).not_to receive(:close_issues)
+ subject(:perform) { worker.perform(project_id, user_id, commit.to_hash, default) }
- worker.perform(-1, user.id, commit.to_hash)
- end
-
- it 'does not process the commit when the user does not exist' do
- expect(worker).not_to receive(:close_issues)
+ let(:project_id) { project.id }
+ let(:user_id) { user.id }
- worker.perform(project.id, -1, commit.to_hash)
+ before do
+ allow(Commit).to receive(:build_from_sidekiq_hash).and_return(commit)
end
- include_examples 'an idempotent worker' do
- subject do
- perform_multiple([project.id, user.id, commit.to_hash], worker: worker)
- end
-
- it 'processes the commit message' do
- expect(worker).to receive(:process_commit_message)
- .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES)
- .and_call_original
+ context 'when pushing to the default branch' do
+ let(:default) { true }
- subject
- end
+ context 'when project does not exist' do
+ let(:project_id) { -1 }
- it 'updates the issue metrics' do
- expect(worker).to receive(:update_issue_metrics)
- .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES)
- .and_call_original
+ it 'does not close related issues' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(0)
- subject
+ perform
+ end
end
- end
- end
- describe '#process_commit_message' do
- context 'when pushing to the default branch' do
- before do
- allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
- end
+ context 'when user does not exist' do
+ let(:user_id) { -1 }
- it 'closes issues that should be closed per the commit message' do
- expect(worker).to receive(:close_issues).with(project, user, user, commit, [issue])
+ it 'does not close related issues' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- worker.process_commit_message(project, commit, user, user, true)
+ perform
+ end
end
- it 'creates cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [issue])
-
- worker.process_commit_message(project, commit, user, user, true)
- end
- end
+ include_examples 'an idempotent worker' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ issue.metrics.update!(first_mentioned_in_commit_at: nil)
+ end
- context 'when pushing to a non-default branch' do
- it 'does not close any issues' do
- allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ subject do
+ perform_multiple([project.id, user.id, commit.to_hash], worker: worker)
+ end
- expect(worker).not_to receive(:close_issues)
+ it 'closes related issues' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(1)
- worker.process_commit_message(project, commit, user, user, false)
+ subject
+ end
end
- it 'does not create cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [])
+ context 'when commit is not a merge request merge commit' do
+ context 'when commit has issue reference' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ end
+
+ it 'closes issues that should be closed per the commit message' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(1)
+ end
+
+ it 'creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(user, [issue])
+
+ perform
+ end
+
+ describe 'issue metrics', :clean_gitlab_redis_cache do
+ context 'when issue has no first_mentioned_in_commit_at set' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: nil)
+ end
+
+ it 'updates issue metrics' do
+ expect { perform }.to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ .to(commit.committed_date)
+ end
+ end
+
+ context 'when issue has first_mentioned_in_commit_at earlier than given committed_date' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date - 1.day)
+ end
+
+ it "doesn't update issue metrics" do
+ expect { perform }.not_to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ end
+ end
+
+ context 'when issue has first_mentioned_in_commit_at later than given committed_date' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date + 1.day)
+ end
+
+ it 'updates issue metrics' do
+ expect { perform }.to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ .to(commit.committed_date)
+ end
+ end
+ end
+ end
- worker.process_commit_message(project, commit, user, user, false)
- end
- end
+ context 'when commit has no issue references' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Lorem Ipsum")
+ end
- context 'when commit is a merge request merge commit to the default branch' do
- let(:merge_request) do
- create(
- :merge_request,
- description: "Closes #{issue.to_reference}",
- source_branch: 'feature-merged',
- target_branch: 'master',
- source_project: project
- )
+ describe 'issue metrics' do
+ it "doesn't execute any queries with false conditions" do
+ expect { perform }.not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
+ end
+ end
+ end
end
- let(:commit) do
- project.repository.create_branch('feature-merged', 'feature')
- project.repository.after_create_branch
+ context 'when commit is a merge request merge commit' do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ description: "Closes #{issue.to_reference}",
+ source_branch: 'feature-merged',
+ target_branch: 'master',
+ source_project: project
+ )
+ end
- MergeRequests::MergeService
- .new(project: project, current_user: merge_request.author, params: { sha: merge_request.diff_head_sha })
- .execute(merge_request)
+ let(:commit) do
+ project.repository.create_branch('feature-merged', 'feature')
+ project.repository.after_create_branch
- merge_request.reload.merge_commit
- end
+ MergeRequests::MergeService
+ .new(project: project, current_user: merge_request.author, params: { sha: merge_request.diff_head_sha })
+ .execute(merge_request)
- it 'does not close any issues from the commit message' do
- expect(worker).not_to receive(:close_issues)
+ merge_request.reload.merge_commit
+ end
- worker.process_commit_message(project, commit, user, user, true)
- end
+ it 'does not close any issues from the commit message' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- it 'still creates cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [])
+ perform
+ end
- worker.process_commit_message(project, commit, user, user, true)
- end
- end
- end
+ it 'still creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(commit.author, [])
- describe '#close_issues' do
- it 'creates Issue::CloseWorker jobs' do
- expect do
- worker.close_issues(project, user, user, commit, [issue])
- end.to change { Issues::CloseWorker.jobs.size }.by(1)
+ perform
+ end
+ end
end
- end
- describe '#update_issue_metrics', :clean_gitlab_redis_cache do
- context 'when commit has issue reference' do
- subject(:update_metrics_and_reload) do
- -> {
- worker.update_issue_metrics(commit, user)
- issue.metrics.reload
- }
- end
+ context 'when pushing to a non-default branch' do
+ let(:default) { false }
before do
allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
end
- context 'when issue has no first_mentioned_in_commit_at set' do
- it 'updates issue metrics' do
- expect { update_metrics_and_reload.call }
- .to change { issue.metrics.first_mentioned_in_commit_at }.to(commit.committed_date)
- end
- end
-
- context 'when issue has first_mentioned_in_commit_at earlier than given committed_date' do
- before do
- issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date - 1.day)
- end
-
- it "doesn't update issue metrics" do
- expect { update_metrics_and_reload.call }.not_to change { issue.metrics.first_mentioned_in_commit_at }
- end
- end
-
- context 'when issue has first_mentioned_in_commit_at later than given committed_date' do
- before do
- issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date + 1.day)
- end
+ it 'does not close any issues from the commit message' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- it "doesn't update issue metrics" do
- expect { update_metrics_and_reload.call }
- .to change { issue.metrics.first_mentioned_in_commit_at }.to(commit.committed_date)
- end
+ perform
end
- end
- context 'when commit has no issue references' do
- it "doesn't execute any queries with false conditions" do
- allow(commit).to receive(:safe_message).and_return("Lorem Ipsum")
+ it 'still creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(user, [])
- expect { worker.update_issue_metrics(commit, user) }
- .not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
+ perform
end
end
end
diff --git a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
index 68af5e61e3b..226ecaa89c5 100644
--- a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
+++ b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker, feature_category: :
expect(redis).to receive(:hset).with(
'inactive_projects_deletion_warning_email_notified',
"project:#{inactive_large_project.id}",
- Date.current
+ Date.current.to_s
)
end
expect(::Projects::InactiveProjectsDeletionNotificationWorker).to receive(:perform_async).with(
diff --git a/spec/workers/projects/update_repository_storage_worker_spec.rb b/spec/workers/projects/update_repository_storage_worker_spec.rb
index 91445c2bbf6..44c2dc41b2b 100644
--- a/spec/workers/projects/update_repository_storage_worker_spec.rb
+++ b/spec/workers/projects/update_repository_storage_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Projects::UpdateRepositoryStorageWorker, feature_category: :sourc
it_behaves_like 'an update storage move worker' do
let_it_be_with_refind(:container) { create(:project, :repository) }
- let_it_be(:repository_storage_move) { create(:project_repository_storage_move) }
+ let_it_be_with_reload(:repository_storage_move) { create(:project_repository_storage_move) }
let(:service_klass) { Projects::UpdateRepositoryStorageService }
let(:repository_storage_move_klass) { Projects::RepositoryStorageMove }